code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from django.conf.urls import url
from django.contrib import admin
try:
    # Django < 1.10: url patterns had to be wrapped in patterns() and the
    # admin URLs mounted through include().
    from django.conf.urls import patterns
    from django.conf.urls import include
    urlpatterns = patterns(
        '',
        url(r'^admin/', include(admin.site.urls)),
    )
except ImportError:
    # Django >= 1.10 removed patterns(); admin.site.urls is passed to
    # url() directly and urlpatterns is a plain list.
    urlpatterns = [
        url(r'^admin/', admin.site.urls),
    ]
|
[
"django.conf.urls.include",
"django.conf.urls.url"
] |
[((240, 264), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (247, 264), False, 'from django.conf.urls import include\n'), ((321, 352), 'django.conf.urls.url', 'url', (['"""^admin/"""', 'admin.site.urls'], {}), "('^admin/', admin.site.urls)\n", (324, 352), False, 'from django.conf.urls import url\n')]
|
from __future__ import print_function
import FWCore.ParameterSet.Config as cms
#
# Top-level CMSSW process running the beam spot DIP server over DQM.
process = cms.Process("BeamSpotDipServer")
process.load("DQMServices.Core.DQM_cfg")
# message logger: INFO threshold, capped at 1000 messages per category
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.cerr = cms.untracked.PSet(
    threshold = cms.untracked.string('INFO'),
    default = cms.untracked.PSet(
        limit = cms.untracked.int32(1000)
    ),
    BeamSpotDipServer = cms.untracked.PSet(
        limit = cms.untracked.int32(1000)
    )
)
# source: a local test input file
process.source = cms.Source("PoolSource",
    fileNames=cms.untracked.vstring(
        'file:/tmp/sikler/b.root' # lxplus7101
    )
)
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(100)
)
# beamspot from database (online legacy record, refreshed every lumi)
process.load("CondCore.CondDB.CondDB_cfi")
process.load("DQM.Integration.config.FrontierCondition_GT_cfi")
process.GlobalTag.toGet = cms.VPSet(
    cms.PSet(
        record = cms.string("BeamSpotOnlineLegacyObjectsRcd"),
        refreshTime = cms.uint64(1)
    ),
)
# module under test: the DIP server itself, in verbose test mode
process.load("DQM.BeamMonitor.BeamSpotDipServer_cff")
process.beamSpotDipServer.verbose = True
process.beamSpotDipServer.testing = True
# NOTE(review): relative NFS paths depend on the job's working directory --
# confirm they resolve at the deployment site.
process.beamSpotDipServer.readFromNFS = True
process.beamSpotDipServer.sourceFile = "../../../../../BeamFitResults.txt"
process.beamSpotDipServer.sourceFile1 = "../../../../../TkStatus.txt"
# process customizations shared by the online DQM setup
from DQM.Integration.config.online_customizations_cfi import *
process = customise(process)
# path
process.p = cms.Path( process.beamSpotDipServer )
|
[
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.uint64",
"FWCore.ParameterSet.Config.untracked.vstring",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.Path"
] |
[((92, 124), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""BeamSpotDipServer"""'], {}), "('BeamSpotDipServer')\n", (103, 124), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1484, 1519), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.beamSpotDipServer'], {}), '(process.beamSpotDipServer)\n', (1492, 1519), True, 'import FWCore.ParameterSet.Config as cms\n'), ((304, 332), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""INFO"""'], {}), "('INFO')\n", (324, 332), True, 'import FWCore.ParameterSet.Config as cms\n'), ((574, 622), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""file:/tmp/sikler/b.root"""'], {}), "('file:/tmp/sikler/b.root')\n", (595, 622), True, 'import FWCore.ParameterSet.Config as cms\n'), ((697, 721), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(100)'], {}), '(100)\n', (716, 721), True, 'import FWCore.ParameterSet.Config as cms\n'), ((922, 966), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""BeamSpotOnlineLegacyObjectsRcd"""'], {}), "('BeamSpotOnlineLegacyObjectsRcd')\n", (932, 966), True, 'import FWCore.ParameterSet.Config as cms\n'), ((986, 999), 'FWCore.ParameterSet.Config.uint64', 'cms.uint64', (['(1)'], {}), '(1)\n', (996, 999), True, 'import FWCore.ParameterSet.Config as cms\n'), ((383, 408), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1000)'], {}), '(1000)\n', (402, 408), True, 'import FWCore.ParameterSet.Config as cms\n'), ((476, 501), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1000)'], {}), '(1000)\n', (495, 501), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
from flask_rest_jsonapi import ResourceDetail, ResourceList, \
ResourceRelationship
from app.api.bootstrap import api
from app.api.schema.custom_system_roles import CustomSystemRoleSchema
from app.models import db
from app.models.custom_system_role import CustomSysRole
from app.models.panel_permission import PanelPermission
from app.api.helpers.db import safe_query
class CustomSystemRoleList(ResourceList):
    """
    List and create Custom System Role
    """
    def query(self, view_kwargs):
        """
        Build the base query for custom system roles.

        When ``panel_id`` is present in the view kwargs, restrict the
        result to roles attached to that panel permission.
        :param view_kwargs: keyword arguments from the resolved URL
        :return: SQLAlchemy query over CustomSysRole
        """
        role_query = self.session.query(CustomSysRole)
        panel_id = view_kwargs.get('panel_id')
        if panel_id:
            panel = safe_query(self, PanelPermission, 'id', panel_id, 'panel_id')
            role_query = CustomSysRole.query.filter(
                CustomSysRole.panel_permissions.any(id=panel.id))
        return role_query

    decorators = (api.has_permission('is_admin', methods="POST"),)
    schema = CustomSystemRoleSchema
    data_layer = {'session': db.session,
                  'model': CustomSysRole,
                  'methods': {'query': query}}
class CustomSystemRoleDetail(ResourceDetail):
    """
    Custom System Role detail by id
    """
    def before_get_object(self, view_kwargs):
        """
        Resolve the object id before a GET.

        When the URL carries a ``role_id``, look up the matching panel
        permission and rewrite ``view_kwargs['id']`` to its role id.
        :param view_kwargs: keyword arguments from the resolved URL
        """
        role_id = view_kwargs.get('role_id')
        if role_id is None:
            return
        panel_perm = safe_query(self, PanelPermission, 'id', role_id, 'role_id')
        # Both branches of the original if/else assigned panel_perm.role_id
        # (or None, which equals it); a single assignment is equivalent.
        view_kwargs['id'] = panel_perm.role_id

    decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
    schema = CustomSystemRoleSchema
    data_layer = {'session': db.session,
                  'model': CustomSysRole,
                  'methods': {
                      'before_get_object': before_get_object
                  }}
class CustomSystemRoleRelationship(ResourceRelationship):
    """
    Custom System Role Relationship
    """
    schema = CustomSystemRoleSchema
    decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
    data_layer = {
        'session': db.session,
        'model': CustomSysRole,
    }
|
[
"app.api.helpers.db.safe_query",
"app.api.bootstrap.api.has_permission",
"app.models.custom_system_role.CustomSysRole.panel_permissions.any"
] |
[((951, 997), 'app.api.bootstrap.api.has_permission', 'api.has_permission', (['"""is_admin"""'], {'methods': '"""POST"""'}), "('is_admin', methods='POST')\n", (969, 997), False, 'from app.api.bootstrap import api\n'), ((1754, 1808), 'app.api.bootstrap.api.has_permission', 'api.has_permission', (['"""is_admin"""'], {'methods': '"""PATCH,DELETE"""'}), "('is_admin', methods='PATCH,DELETE')\n", (1772, 1808), False, 'from app.api.bootstrap import api\n'), ((2173, 2227), 'app.api.bootstrap.api.has_permission', 'api.has_permission', (['"""is_admin"""'], {'methods': '"""PATCH,DELETE"""'}), "('is_admin', methods='PATCH,DELETE')\n", (2191, 2227), False, 'from app.api.bootstrap import api\n'), ((734, 810), 'app.api.helpers.db.safe_query', 'safe_query', (['self', 'PanelPermission', '"""id"""', "view_kwargs['panel_id']", '"""panel_id"""'], {}), "(self, PanelPermission, 'id', view_kwargs['panel_id'], 'panel_id')\n", (744, 810), False, 'from app.api.helpers.db import safe_query\n'), ((1499, 1573), 'app.api.helpers.db.safe_query', 'safe_query', (['self', 'PanelPermission', '"""id"""', "view_kwargs['role_id']", '"""role_id"""'], {}), "(self, PanelPermission, 'id', view_kwargs['role_id'], 'role_id')\n", (1509, 1573), False, 'from app.api.helpers.db import safe_query\n'), ((859, 907), 'app.models.custom_system_role.CustomSysRole.panel_permissions.any', 'CustomSysRole.panel_permissions.any', ([], {'id': 'panel.id'}), '(id=panel.id)\n', (894, 907), False, 'from app.models.custom_system_role import CustomSysRole\n')]
|
from . import arguments
from . import options
from .. import util
import click
import os
@click.command('find')
@options.all()
@options.null()
@options.recursive()
@options.tree()
@arguments.tag()
@arguments.path()
def find_command(all, null, recursive, tree, tag, path):
    '''Find files by tag.
    \b
    TAG tag to find
    PATH path to search (default .)
    \b
    Examples:
      - tag find -r my-tag path/to/files/
      - tag find my-tag
    '''
    matches = []

    def collect(file):
        # Keep only files carrying the requested tag.
        if tag in file.tags:
            matches.append(file)

    util.find_files(path, recursive, all, collect)
    names = sorted(f.original for f in matches)
    output = tree_output(path, names) if tree else names
    # Null-delimited output also suppresses the trailing newline.
    delimiter = '\0' if null else '\n'
    click.echo(delimiter.join(output), nl = not null)
def tree_output(path, files):
    """Render a sorted list of file paths as a textual directory tree.

    :param path: root directory the search was run against
    :param files: sorted list of file paths, each located under *path*
    :return: list of display lines (root name plus box-drawing rows)
    """
    head, tail = os.path.split(path)
    root = {
        'children': [],
        'dir': True,
        'name': path,
        'path': head if tail == '' else path,
    }
    index = 0
    node = root
    # Fix: start in the "done" state for an empty file list; the old code
    # set done=False unconditionally and raised IndexError on files[0].
    done = index == len(files)
    while not done:
        head, tail = os.path.split(files[index])
        if head == node['path']:
            # The file sits directly in the current directory node.
            node['children'].append({
                'dir': False,
                'name': tail,
            })
            index += 1
            done = index == len(files)
            continue
        if head.startswith(node['path']):
            # Descend: create one directory node per missing path level
            # between the current node and the file's directory, top-down.
            # Fix: the old loop compared `head` against the path of the node
            # it had just created (never equal), so it manufactured spurious
            # ancestor nodes down to '' with inverted nesting and could loop
            # forever on files more than one level below the current node.
            missing = []
            probe = head
            while probe != node['path'] and probe:
                missing.append(probe)
                probe, _ = os.path.split(probe)
            for dir_path in reversed(missing):
                child = {
                    'children': [],
                    'dir': True,
                    'name': os.path.basename(dir_path),
                    'path': dir_path,
                    'parent': node,
                }
                node['children'].append(child)
                node = child
        else:
            # Ascend until the current node is an ancestor of the file again.
            node = node['parent']
    return node_output(root, '', [root['name']])


def node_output(node, prefix, output):
    """Append box-drawing rows for *node*'s subtree to *output*; return it.

    :param node: directory node dict with a 'children' list
    :param prefix: string prepended to every row emitted at this depth
    :param output: accumulator list, mutated in place and returned
    """
    children = node['children']
    for index, child in enumerate(children):
        if index + 1 == len(children):
            # Last entry: corner connector, blank continuation below it.
            child_prefix = f'{prefix}    '
            output.append(f"{prefix}└── {child['name']}")
        else:
            child_prefix = f'{prefix}│   '
            output.append(f"{prefix}├── {child['name']}")
        if child['dir']:
            node_output(child, child_prefix, output)
    return output
|
[
"os.path.split",
"click.command"
] |
[((91, 112), 'click.command', 'click.command', (['"""find"""'], {}), "('find')\n", (104, 112), False, 'import click\n'), ((893, 912), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (906, 912), False, 'import os\n'), ((1135, 1162), 'os.path.split', 'os.path.split', (['files[index]'], {}), '(files[index])\n', (1148, 1162), False, 'import os\n'), ((1564, 1583), 'os.path.split', 'os.path.split', (['head'], {}), '(head)\n', (1577, 1583), False, 'import os\n')]
|
import pathlib
import os.path
import logging
# Module-level logger for this test module.
logger = logging.getLogger(__name__)
# Absolute directory containing this file; the jl fixtures live beside it.
curr_path = pathlib.Path(__file__).parent.absolute()
def jl_test_file_path(filename):
    """Return the path of *filename* inside this module's jl_render folder."""
    render_dir = os.path.join(curr_path, "jl_render")
    return os.path.join(render_dir, filename)
def click_undo(self):
    """Click the dash undo control once it is present and labeled "undo"."""
    selector = "._dash-undo-redo span:first-child div:last-child"
    button = self.wait_for_element_by_css_selector(selector)
    self.wait_for_text_to_equal(selector, "undo")
    button.click()
def click_redo(self):
    """Click the dash redo control once it is labeled "redo" and present."""
    selector = "._dash-undo-redo span:last-child div:last-child"
    self.wait_for_text_to_equal(selector, "redo")
    button = self.wait_for_element_by_css_selector(selector)
    button.click()
def test_jltr001r_undo_redo(dashjl):
    """Exercise the dash undo/redo toolbar against the jl fixture app."""
    fp = jl_test_file_path("jltr001r_undo_redo.jl")
    dashjl.start_server(fp)
    # Wait for the input component to render before typing into it.
    dashjl.wait_for_element_by_css_selector(
        "#a", timeout=4
    )
    input1 = dashjl.find_element("#a")
    input1.send_keys("xyz")
    dashjl.wait_for_text_to_equal(
        "#b", "xyz", timeout=2
    )
    # Each undo removes one typed character from the mirrored output ...
    click_undo(dashjl)
    dashjl.wait_for_text_to_equal(
        "#b", "xy", timeout=2
    )
    click_undo(dashjl)
    dashjl.wait_for_text_to_equal(
        "#b", "x", timeout=2
    )
    # ... and redo restores it.
    click_redo(dashjl)
    dashjl.wait_for_text_to_equal(
        "#b", "xy", timeout=2
    )
    dashjl.percy_snapshot(name="undo-redo")
    # Undo twice from "xy" back to the empty initial value.
    click_undo(dashjl)
    click_undo(dashjl)
    dashjl.wait_for_text_to_equal(
        "#b", "", timeout=2
    )
|
[
"pathlib.Path",
"logging.getLogger"
] |
[((54, 81), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (71, 81), False, 'import logging\n'), ((95, 117), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (107, 117), False, 'import pathlib\n')]
|
import datetime as dt
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from rest_framework_simplejwt.tokens import RefreshToken
from django.db.models import Avg
from content_api.models import Category, Comment, Genre, Review, Title
from users.models import User, UserCode
class UserSerializer(serializers.ModelSerializer):
    """Serializer exposing the public profile fields of a User."""

    class Meta:
        model = User
        fields = ('first_name', 'last_name', 'username', 'bio', 'role',
                  'email')
class UserCodeSerializer(serializers.ModelSerializer):
    """Serializer exposing only the email of a UserCode entry."""

    class Meta:
        model = UserCode
        fields = ('email',)
class TokenObtainPairSerializer(serializers.Serializer):
    """Issue a JWT pair for a user identified by an e-mailed verification code."""

    def validate(self, data):
        """
        Reject outdated verification codes, then attach fresh tokens.

        :param data: incoming serializer data; token strings are added to it
        :return: data extended with 'refresh' and 'access' entries
        :raises serializers.ValidationError: if the UserCode row for the
            request's email is older than 720 minutes (12 hours)
        """
        email = self.context['request'].data.get('email')
        # NOTE(review): UserCode.objects.get / User.objects.get raise
        # DoesNotExist for an unknown email -- presumably guarded upstream;
        # confirm against the view that calls this serializer.
        if dt.datetime.now(dt.timezone.utc) - UserCode.objects.get(
                email=email).created >= dt.timedelta(minutes=720):
            raise serializers.ValidationError(
                "Your verification code is outdated.")
        new_user = User.objects.get(email=email)
        refresh = self.get_token(new_user)
        data['refresh'] = str(refresh)
        data['access'] = str(refresh.access_token)
        return data

    @classmethod
    def get_token(cls, user):
        """Return a RefreshToken bound to *user*."""
        return RefreshToken.for_user(user)
class GenreSerializer(serializers.ModelSerializer):
    """Genre serializer enforcing a unique slug of at most 30 characters."""

    slug = serializers.SlugField(
        validators=[UniqueValidator(queryset=Genre.objects.all())],
        max_length=30,
    )

    class Meta:
        model = Genre
        fields = ['name', 'slug']
class CategorySerializer(serializers.ModelSerializer):
    """Category serializer enforcing a unique slug of at most 30 characters."""

    slug = serializers.SlugField(
        validators=[UniqueValidator(queryset=Category.objects.all())],
        max_length=30,
    )

    class Meta:
        model = Category
        fields = ['name', 'slug']
class TitleListSerializer(serializers.ModelSerializer):
    """Read serializer for Title: nested category/genre plus average rating."""

    category = CategorySerializer()
    genre = GenreSerializer(many=True)
    rating = serializers.SerializerMethodField()

    class Meta:
        model = Title
        fields = ['id', 'name', 'year', 'rating',
                  'description', 'genre', 'category']

    def get_rating(self, obj):
        """Return the average review score aggregated over obj's reviews."""
        aggregated = obj.reviews.all().aggregate(Avg('score'))
        return aggregated['score__avg']
class TitleWriteSerializer(serializers.ModelSerializer):
    """Write serializer for Title; genre and category are addressed by slug."""

    genre = serializers.SlugRelatedField(
        slug_field='slug',
        queryset=Genre.objects.all(),
        many=True,
    )
    category = serializers.SlugRelatedField(
        slug_field='slug',
        queryset=Category.objects.all(),
    )

    class Meta:
        model = Title
        fields = ['id', 'name', 'year', 'description', 'genre', 'category']
class ReveiwSerializer(serializers.ModelSerializer):
    """Review serializer; author is rendered as a read-only username."""
    # NOTE(review): class name keeps the historical "Reveiw" spelling --
    # renaming it would break existing importers.

    author = serializers.SlugRelatedField(
        read_only=True,
        many=False,
        slug_field='username',
    )

    class Meta:
        model = Review
        fields = ['id', 'text', 'author', 'score', 'pub_date']
class CommentSerializer(serializers.ModelSerializer):
    """Comment serializer; author is rendered as a read-only username."""

    author = serializers.SlugRelatedField(
        read_only=True,
        slug_field='username',
    )

    class Meta:
        model = Comment
        fields = ['id', 'text', 'author', 'pub_date']
|
[
"users.models.UserCode.objects.get",
"users.models.User.objects.get",
"rest_framework.serializers.SerializerMethodField",
"content_api.models.Genre.objects.all",
"rest_framework_simplejwt.tokens.RefreshToken.for_user",
"datetime.timedelta",
"rest_framework.serializers.SlugRelatedField",
"django.db.models.Avg",
"content_api.models.Category.objects.all",
"datetime.datetime.now",
"rest_framework.serializers.ValidationError"
] |
[((1979, 2014), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2012, 2014), False, 'from rest_framework import serializers\n'), ((2758, 2837), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'slug_field': '"""username"""', 'read_only': '(True)', 'many': '(False)'}), "(slug_field='username', read_only=True, many=False)\n", (2786, 2837), False, 'from rest_framework import serializers\n'), ((3033, 3100), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'slug_field': '"""username"""', 'read_only': '(True)'}), "(slug_field='username', read_only=True)\n", (3061, 3100), False, 'from rest_framework import serializers\n'), ((1036, 1065), 'users.models.User.objects.get', 'User.objects.get', ([], {'email': 'email'}), '(email=email)\n', (1052, 1065), False, 'from users.models import User, UserCode\n'), ((1282, 1309), 'rest_framework_simplejwt.tokens.RefreshToken.for_user', 'RefreshToken.for_user', (['user'], {}), '(user)\n', (1303, 1309), False, 'from rest_framework_simplejwt.tokens import RefreshToken\n'), ((888, 913), 'datetime.timedelta', 'dt.timedelta', ([], {'minutes': '(720)'}), '(minutes=720)\n', (900, 913), True, 'import datetime as dt\n'), ((933, 999), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Your verification code is outdated."""'], {}), "('Your verification code is outdated.')\n", (960, 999), False, 'from rest_framework import serializers\n'), ((2235, 2247), 'django.db.models.Avg', 'Avg', (['"""score"""'], {}), "('score')\n", (2238, 2247), False, 'from django.db.models import Avg\n'), ((2403, 2422), 'content_api.models.Genre.objects.all', 'Genre.objects.all', ([], {}), '()\n', (2420, 2422), False, 'from content_api.models import Category, Comment, Genre, Review, Title\n'), ((2524, 2546), 'content_api.models.Category.objects.all', 'Category.objects.all', ([], {}), '()\n', (2544, 2546), False, 
'from content_api.models import Category, Comment, Genre, Review, Title\n'), ((791, 823), 'datetime.datetime.now', 'dt.datetime.now', (['dt.timezone.utc'], {}), '(dt.timezone.utc)\n', (806, 823), True, 'import datetime as dt\n'), ((826, 859), 'users.models.UserCode.objects.get', 'UserCode.objects.get', ([], {'email': 'email'}), '(email=email)\n', (846, 859), False, 'from users.models import User, UserCode\n'), ((1466, 1485), 'content_api.models.Genre.objects.all', 'Genre.objects.all', ([], {}), '()\n', (1483, 1485), False, 'from content_api.models import Category, Comment, Genre, Review, Title\n'), ((1726, 1748), 'content_api.models.Category.objects.all', 'Category.objects.all', ([], {}), '()\n', (1746, 1748), False, 'from content_api.models import Category, Comment, Genre, Review, Title\n')]
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# Option group gathering all nova notification settings; registered on the
# config object by register_opts() below.
notifications_group = cfg.OptGroup(
    name='notifications',
    title='Notifications options',
    help="""
Most of the actions in Nova which manipulate the system state generate
notifications which are posted to the messaging component (e.g. RabbitMQ) and
can be consumed by any service outside the OpenStack. More technical details
at https://docs.openstack.org/nova/latest/reference/notifications.html
""")
ALL_OPTS = [
cfg.StrOpt(
'notify_on_state_change',
choices=(None, 'vm_state', 'vm_and_task_state'),
deprecated_group='DEFAULT',
help="""
If set, send compute.instance.update notifications on
instance state changes.
Please refer to
https://docs.openstack.org/nova/latest/reference/notifications.html for
additional information on notifications.
Possible values:
* None - no notifications
* "vm_state" - notifications are sent with VM state transition information in
the ``old_state`` and ``state`` fields. The ``old_task_state`` and
``new_task_state`` fields will be set to the current task_state of the
instance.
* "vm_and_task_state" - notifications are sent with VM and task state
transition information.
"""),
cfg.StrOpt(
'default_level',
default='INFO',
choices=('DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'),
deprecated_group='DEFAULT',
deprecated_name='default_notification_level',
help="Default notification level for outgoing notifications."),
cfg.StrOpt(
'default_publisher_id',
default='$host',
deprecated_group='DEFAULT',
deprecated_for_removal=True,
deprecated_since='17.0.0',
deprecated_reason="""
This option is only used when ``monkey_patch=True`` and
``monkey_patch_modules`` is configured to specify the legacy notify_decorator.
Since the monkey_patch and monkey_patch_modules options are deprecated, this
option is also deprecated.
""",
help="""
Default publisher_id for outgoing notifications. If you consider routing
notifications using different publisher, change this value accordingly.
Possible values:
* Defaults to the current hostname of this host, but it can be any valid
oslo.messaging publisher_id
Related options:
* host - Hostname, FQDN or IP address of this host.
"""),
cfg.StrOpt(
'notification_format',
choices=['unversioned', 'versioned', 'both'],
default='both',
deprecated_group='DEFAULT',
help="""
Specifies which notification format shall be used by nova.
The default value is fine for most deployments and rarely needs to be changed.
This value can be set to 'versioned' once the infrastructure moves closer to
consuming the newer format of notifications. After this occurs, this option
will be removed.
Note that notifications can be completely disabled by setting ``driver=noop``
in the ``[oslo_messaging_notifications]`` group.
Possible values:
* unversioned: Only the legacy unversioned notifications are emitted.
* versioned: Only the new versioned notifications are emitted.
* both: Both the legacy unversioned and the new versioned notifications are
emitted. (Default)
The list of versioned notifications is visible in
https://docs.openstack.org/nova/latest/reference/notifications.html
"""),
cfg.ListOpt(
'versioned_notifications_topics',
default=['versioned_notifications'],
help="""
Specifies the topics for the versioned notifications issued by nova.
The default value is fine for most deployments and rarely needs to be changed.
However, if you have a third-party service that consumes versioned
notifications, it might be worth getting a topic for that service.
Nova will send a message containing a versioned notification payload to each
topic queue in this list.
The list of versioned notifications is visible in
https://docs.openstack.org/nova/latest/reference/notifications.html
"""),
cfg.BoolOpt(
'bdms_in_notifications',
default=False,
help="""
If enabled, include block device information in the versioned notification
payload. Sending block device information is disabled by default as providing
that information can incur some overhead on the system since the information
may need to be loaded from the database.
""")
]
def register_opts(conf):
    """Register the notifications option group and its options on *conf*."""
    group = notifications_group
    conf.register_group(group)
    conf.register_opts(ALL_OPTS, group=group)
def list_opts():
    """Return this module's options keyed by their option group."""
    exported = {notifications_group: ALL_OPTS}
    return exported
|
[
"oslo_config.cfg.StrOpt",
"oslo_config.cfg.BoolOpt",
"oslo_config.cfg.OptGroup",
"oslo_config.cfg.ListOpt"
] |
[((724, 1110), 'oslo_config.cfg.OptGroup', 'cfg.OptGroup', ([], {'name': '"""notifications"""', 'title': '"""Notifications options"""', 'help': '"""\nMost of the actions in Nova which manipulate the system state generate\nnotifications which are posted to the messaging component (e.g. RabbitMQ) and\ncan be consumed by any service outside the OpenStack. More technical details\nat https://docs.openstack.org/nova/latest/reference/notifications.html\n"""'}), '(name=\'notifications\', title=\'Notifications options\', help=\n """\nMost of the actions in Nova which manipulate the system state generate\nnotifications which are posted to the messaging component (e.g. RabbitMQ) and\ncan be consumed by any service outside the OpenStack. More technical details\nat https://docs.openstack.org/nova/latest/reference/notifications.html\n"""\n )\n', (736, 1110), False, 'from oslo_config import cfg\n'), ((1132, 1854), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""notify_on_state_change"""'], {'choices': "(None, 'vm_state', 'vm_and_task_state')", 'deprecated_group': '"""DEFAULT"""', 'help': '"""\nIf set, send compute.instance.update notifications on\ninstance state changes.\n\nPlease refer to\nhttps://docs.openstack.org/nova/latest/reference/notifications.html for\nadditional information on notifications.\n\nPossible values:\n\n* None - no notifications\n* "vm_state" - notifications are sent with VM state transition information in\n the ``old_state`` and ``state`` fields. 
The ``old_task_state`` and\n ``new_task_state`` fields will be set to the current task_state of the\n instance.\n* "vm_and_task_state" - notifications are sent with VM and task state\n transition information.\n"""'}), '(\'notify_on_state_change\', choices=(None, \'vm_state\',\n \'vm_and_task_state\'), deprecated_group=\'DEFAULT\', help=\n """\nIf set, send compute.instance.update notifications on\ninstance state changes.\n\nPlease refer to\nhttps://docs.openstack.org/nova/latest/reference/notifications.html for\nadditional information on notifications.\n\nPossible values:\n\n* None - no notifications\n* "vm_state" - notifications are sent with VM state transition information in\n the ``old_state`` and ``state`` fields. The ``old_task_state`` and\n ``new_task_state`` fields will be set to the current task_state of the\n instance.\n* "vm_and_task_state" - notifications are sent with VM and task state\n transition information.\n"""\n )\n', (1142, 1854), False, 'from oslo_config import cfg\n'), ((1880, 2129), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""default_level"""'], {'default': '"""INFO"""', 'choices': "('DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL')", 'deprecated_group': '"""DEFAULT"""', 'deprecated_name': '"""default_notification_level"""', 'help': '"""Default notification level for outgoing notifications."""'}), "('default_level', default='INFO', choices=('DEBUG', 'INFO',\n 'WARN', 'ERROR', 'CRITICAL'), deprecated_group='DEFAULT',\n deprecated_name='default_notification_level', help=\n 'Default notification level for outgoing notifications.')\n", (1890, 2129), False, 'from oslo_config import cfg\n'), ((2172, 2955), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""default_publisher_id"""'], {'default': '"""$host"""', 'deprecated_group': '"""DEFAULT"""', 'deprecated_for_removal': '(True)', 'deprecated_since': '"""17.0.0"""', 'deprecated_reason': '"""\nThis option is only used when ``monkey_patch=True`` and\n``monkey_patch_modules`` is configured to specify the 
legacy notify_decorator.\nSince the monkey_patch and monkey_patch_modules options are deprecated, this\noption is also deprecated.\n"""', 'help': '"""\nDefault publisher_id for outgoing notifications. If you consider routing\nnotifications using different publisher, change this value accordingly.\n\nPossible values:\n\n* Defaults to the current hostname of this host, but it can be any valid\n oslo.messaging publisher_id\n\nRelated options:\n\n* host - Hostname, FQDN or IP address of this host.\n"""'}), '(\'default_publisher_id\', default=\'$host\', deprecated_group=\n \'DEFAULT\', deprecated_for_removal=True, deprecated_since=\'17.0.0\',\n deprecated_reason=\n """\nThis option is only used when ``monkey_patch=True`` and\n``monkey_patch_modules`` is configured to specify the legacy notify_decorator.\nSince the monkey_patch and monkey_patch_modules options are deprecated, this\noption is also deprecated.\n"""\n , help=\n """\nDefault publisher_id for outgoing notifications. If you consider routing\nnotifications using different publisher, change this value accordingly.\n\nPossible values:\n\n* Defaults to the current hostname of this host, but it can be any valid\n oslo.messaging publisher_id\n\nRelated options:\n\n* host - Hostname, FQDN or IP address of this host.\n"""\n )\n', (2182, 2955), False, 'from oslo_config import cfg\n'), ((2989, 3944), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""notification_format"""'], {'choices': "['unversioned', 'versioned', 'both']", 'default': '"""both"""', 'deprecated_group': '"""DEFAULT"""', 'help': '"""\nSpecifies which notification format shall be used by nova.\n\nThe default value is fine for most deployments and rarely needs to be changed.\nThis value can be set to \'versioned\' once the infrastructure moves closer to\nconsuming the newer format of notifications. 
After this occurs, this option\nwill be removed.\n\nNote that notifications can be completely disabled by setting ``driver=noop``\nin the ``[oslo_messaging_notifications]`` group.\n\nPossible values:\n* unversioned: Only the legacy unversioned notifications are emitted.\n* versioned: Only the new versioned notifications are emitted.\n* both: Both the legacy unversioned and the new versioned notifications are\n emitted. (Default)\n\nThe list of versioned notifications is visible in\nhttps://docs.openstack.org/nova/latest/reference/notifications.html\n"""'}), '(\'notification_format\', choices=[\'unversioned\', \'versioned\',\n \'both\'], default=\'both\', deprecated_group=\'DEFAULT\', help=\n """\nSpecifies which notification format shall be used by nova.\n\nThe default value is fine for most deployments and rarely needs to be changed.\nThis value can be set to \'versioned\' once the infrastructure moves closer to\nconsuming the newer format of notifications. After this occurs, this option\nwill be removed.\n\nNote that notifications can be completely disabled by setting ``driver=noop``\nin the ``[oslo_messaging_notifications]`` group.\n\nPossible values:\n* unversioned: Only the legacy unversioned notifications are emitted.\n* versioned: Only the new versioned notifications are emitted.\n* both: Both the legacy unversioned and the new versioned notifications are\n emitted. 
(Default)\n\nThe list of versioned notifications is visible in\nhttps://docs.openstack.org/nova/latest/reference/notifications.html\n"""\n )\n', (2999, 3944), False, 'from oslo_config import cfg\n'), ((3977, 4593), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""versioned_notifications_topics"""'], {'default': "['versioned_notifications']", 'help': '"""\nSpecifies the topics for the versioned notifications issued by nova.\n\nThe default value is fine for most deployments and rarely needs to be changed.\nHowever, if you have a third-party service that consumes versioned\nnotifications, it might be worth getting a topic for that service.\nNova will send a message containing a versioned notification payload to each\ntopic queue in this list.\n\nThe list of versioned notifications is visible in\nhttps://docs.openstack.org/nova/latest/reference/notifications.html\n"""'}), '(\'versioned_notifications_topics\', default=[\n \'versioned_notifications\'], help=\n """\nSpecifies the topics for the versioned notifications issued by nova.\n\nThe default value is fine for most deployments and rarely needs to be changed.\nHowever, if you have a third-party service that consumes versioned\nnotifications, it might be worth getting a topic for that service.\nNova will send a message containing a versioned notification payload to each\ntopic queue in this list.\n\nThe list of versioned notifications is visible in\nhttps://docs.openstack.org/nova/latest/reference/notifications.html\n"""\n )\n', (3988, 4593), False, 'from oslo_config import cfg\n'), ((4609, 4955), 'oslo_config.cfg.BoolOpt', 'cfg.BoolOpt', (['"""bdms_in_notifications"""'], {'default': '(False)', 'help': '"""\nIf enabled, include block device information in the versioned notification\npayload. 
Sending block device information is disabled by default as providing\nthat information can incur some overhead on the system since the information\nmay need to be loaded from the database.\n"""'}), '(\'bdms_in_notifications\', default=False, help=\n """\nIf enabled, include block device information in the versioned notification\npayload. Sending block device information is disabled by default as providing\nthat information can incur some overhead on the system since the information\nmay need to be loaded from the database.\n"""\n )\n', (4620, 4955), False, 'from oslo_config import cfg\n')]
|
from cnabera import core
class TestCore:
    """Smoke tests for the cnabera core entry point."""

    def test_main(self):
        # main() is expected to echo exactly this message.
        assert core.main() == 'print from application'
|
[
"cnabera.core.main"
] |
[((131, 142), 'cnabera.core.main', 'core.main', ([], {}), '()\n', (140, 142), False, 'from cnabera import core\n')]
|
from testing_config import BaseTestConfig
from application.models import User
import json
from application.utils import auth
class TestAPI(BaseTestConfig):
some_user = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"username": "test_user1"
}
def test_get_spa_from_index(self):
result = self.app.get("/")
self.assertIn('<html>', result.data.decode("utf-8"))
def test_create_new_user(self):
self.assertIsNone(User.query.filter_by(
email=self.some_user["email"]
).first())
res = self.app.post(
"/api/create_user",
data=json.dumps(self.some_user),
content_type='application/json'
)
self.assertEqual(res.status_code, 200)
self.assertTrue(json.loads(res.data.decode("utf-8"))["token"])
self.assertEqual(User.query.filter_by(email=self.some_user["email"]).first().email, self.some_user["email"])
res2 = self.app.post(
"/api/create_user",
data=json.dumps(self.some_user),
content_type='application/json'
)
self.assertEqual(res2.status_code, 409)
def test_get_token_and_verify_token(self):
res = self.app.post(
"/api/get_token",
data=json.dumps(self.default_user),
content_type='application/json'
)
token = json.loads(res.data.decode("utf-8"))["token"]
self.assertTrue(auth.verify_token(token))
self.assertEqual(res.status_code, 200)
res2 = self.app.post(
"/api/is_token_valid",
data=json.dumps({"token": token}),
content_type='application/json'
)
self.assertTrue(json.loads(res2.data.decode("utf-8")), ["token_is_valid"])
res3 = self.app.post(
"/api/is_token_valid",
data=json.dumps({"token": token + "something-else"}),
content_type='application/json'
)
self.assertEqual(res3.status_code, 403)
res4 = self.app.post(
"/api/get_token",
data=json.dumps(self.some_user),
content_type='application/json'
)
self.assertEqual(res4.status_code, 403)
def test_protected_route(self):
headers = {
'Authorization': self.token,
}
bad_headers = {
'Authorization': self.token + "bad",
}
response = self.app.get('/api/user', headers=headers)
self.assertEqual(response.status_code, 200)
response2 = self.app.get('/api/user')
self.assertEqual(response2.status_code, 401)
response3 = self.app.get('/api/user', headers=bad_headers)
self.assertEqual(response3.status_code, 401)
def test_get_user_with_email_and_password(self):
self.assertTrue(
User.get_user_with_email_and_password(
self.default_user["email"],
self.default_user["password"])
)
|
[
"application.utils.auth.verify_token",
"application.models.User.query.filter_by",
"application.models.User.get_user_with_email_and_password",
"json.dumps"
] |
[((1501, 1525), 'application.utils.auth.verify_token', 'auth.verify_token', (['token'], {}), '(token)\n', (1518, 1525), False, 'from application.utils import auth\n'), ((2934, 3035), 'application.models.User.get_user_with_email_and_password', 'User.get_user_with_email_and_password', (["self.default_user['email']", "self.default_user['password']"], {}), "(self.default_user['email'], self.\n default_user['password'])\n", (2971, 3035), False, 'from application.models import User\n'), ((650, 676), 'json.dumps', 'json.dumps', (['self.some_user'], {}), '(self.some_user)\n', (660, 676), False, 'import json\n'), ((1059, 1085), 'json.dumps', 'json.dumps', (['self.some_user'], {}), '(self.some_user)\n', (1069, 1085), False, 'import json\n'), ((1325, 1354), 'json.dumps', 'json.dumps', (['self.default_user'], {}), '(self.default_user)\n', (1335, 1354), False, 'import json\n'), ((1665, 1693), 'json.dumps', 'json.dumps', (["{'token': token}"], {}), "({'token': token})\n", (1675, 1693), False, 'import json\n'), ((1928, 1975), 'json.dumps', 'json.dumps', (["{'token': token + 'something-else'}"], {}), "({'token': token + 'something-else'})\n", (1938, 1975), False, 'import json\n'), ((2170, 2196), 'json.dumps', 'json.dumps', (['self.some_user'], {}), '(self.some_user)\n', (2180, 2196), False, 'import json\n'), ((476, 527), 'application.models.User.query.filter_by', 'User.query.filter_by', ([], {'email': "self.some_user['email']"}), "(email=self.some_user['email'])\n", (496, 527), False, 'from application.models import User\n'), ((879, 930), 'application.models.User.query.filter_by', 'User.query.filter_by', ([], {'email': "self.some_user['email']"}), "(email=self.some_user['email'])\n", (899, 930), False, 'from application.models import User\n')]
|
from i18n.strings import LazyI18nString
from pytest import mark
def _query_pages(client, conference_code):
return client.query(
"""query Pages($code: String!) {
pages(code: $code) {
id
title
slug
content
image
}
}""",
variables={"code": conference_code},
)
def _get_image_url(request, image):
if not image:
return None
return request.build_absolute_uri(image.url)
@mark.django_db
def test_query_pages(rf, graphql_client, user_factory, page_factory):
page_factory(published=False, conference__code="pycon11")
page_factory(published=True, conference__code="pycon10")
page = page_factory(published=True, conference__code="pycon11")
request = rf.get("/")
resp = _query_pages(graphql_client, conference_code="pycon11")
assert not resp.get("errors")
assert len(resp["data"]["pages"]) == 1
assert {
"id": str(page.id),
"title": str(page.title),
"slug": str(page.slug),
"content": str(page.content),
"image": _get_image_url(request, page.image),
} == resp["data"]["pages"][0]
@mark.django_db
def test_query_single_page(rf, graphql_client, user_factory, page_factory):
request = rf.get("/")
page = page_factory(
slug=LazyI18nString({"en": "demo"}),
published=True,
image=None,
conference__code="pycon11",
)
resp = graphql_client.query(
"""query {
page(code: "pycon11", slug: "demo") {
id
title
slug
content
image
}
} """
)
assert not resp.get("errors")
assert {
"id": str(page.id),
"title": str(page.title),
"slug": str(page.slug),
"content": str(page.content),
"image": _get_image_url(request, page.image),
} == resp["data"]["page"]
resp = graphql_client.query(
"""query {
page(slug: "demo", code: "pyconb") {
id
}
} """
)
assert resp["data"]["page"] is None
@mark.django_db
def test_passing_language(graphql_client, page_factory):
page_factory(
title=LazyI18nString({"en": "this is a test", "it": "questa è una prova"}),
slug=LazyI18nString({"en": "slug", "it": "lumaca"}),
content=LazyI18nString({"en": "content", "it": "contenuto"}),
published=True,
image=None,
conference__code="pycon11",
)
resp = graphql_client.query(
"""query {
page(code: "pycon11", slug: "slug") {
title(language: "it")
slug(language: "it")
}
} """
)
assert not resp.get("errors")
assert resp["data"]["page"] == {"title": "questa è una prova", "slug": "lumaca"}
@mark.django_db
def test_defaults_on_browser_language(graphql_client, page_factory):
page_factory(
title=LazyI18nString({"en": "this is a test", "it": "questa è una prova"}),
slug=LazyI18nString({"en": "slug", "it": "lumaca"}),
content=LazyI18nString({"en": "content", "it": "contenuto"}),
published=True,
image=None,
conference__code="pycon11",
)
headers = {"HTTP_ACCEPT_LANGUAGE": "it;q=0.8,de;q=0.7,la;q=0.6"}
resp = graphql_client.query(
"""query {
page(code: "pycon11", slug: "slug") {
title
slug
}
} """,
headers=headers,
)
assert not resp.get("errors")
assert resp["data"]["page"] == {"title": "questa è una prova", "slug": "lumaca"}
|
[
"i18n.strings.LazyI18nString"
] |
[((1366, 1396), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'demo'}"], {}), "({'en': 'demo'})\n", (1380, 1396), False, 'from i18n.strings import LazyI18nString\n'), ((2296, 2364), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'this is a test', 'it': 'questa è una prova'}"], {}), "({'en': 'this is a test', 'it': 'questa è una prova'})\n", (2310, 2364), False, 'from i18n.strings import LazyI18nString\n'), ((2379, 2425), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'slug', 'it': 'lumaca'}"], {}), "({'en': 'slug', 'it': 'lumaca'})\n", (2393, 2425), False, 'from i18n.strings import LazyI18nString\n'), ((2443, 2495), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'content', 'it': 'contenuto'}"], {}), "({'en': 'content', 'it': 'contenuto'})\n", (2457, 2495), False, 'from i18n.strings import LazyI18nString\n'), ((3034, 3102), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'this is a test', 'it': 'questa è una prova'}"], {}), "({'en': 'this is a test', 'it': 'questa è una prova'})\n", (3048, 3102), False, 'from i18n.strings import LazyI18nString\n'), ((3117, 3163), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'slug', 'it': 'lumaca'}"], {}), "({'en': 'slug', 'it': 'lumaca'})\n", (3131, 3163), False, 'from i18n.strings import LazyI18nString\n'), ((3181, 3233), 'i18n.strings.LazyI18nString', 'LazyI18nString', (["{'en': 'content', 'it': 'contenuto'}"], {}), "({'en': 'content', 'it': 'contenuto'})\n", (3195, 3233), False, 'from i18n.strings import LazyI18nString\n')]
|
# Standard modules
import datetime
# External modules
# chronicle modules
from chronicle import Responder
from chronicle.responder import ResponderKeyError
class TextScribe(Responder):
def __init__(self, file_name_txt):
self.file_name = file_name_txt
self.messengers = {}
def register(self, messenger, core_object, core_object_state):
time_now = datetime.datetime.utcnow()
self.messengers[messenger] = core_object
self.write_registration(messenger, core_object, core_object_state, time_now)
def notify(self, messenger, notification):
time_now = datetime.datetime.utcnow()
self.write_notification(messenger, notification, time_now)
def write_registration(self, messenger, core_object, core_object_state, registration_time):
state = ','.join(['{0}={1}'.format(member, core_object_state[member]) for member in core_object_state])
log_string = '---\nREGISTRATION:\nMessenger: {0}\nObject: {1}\nState: {2}\nRegistration time: {3}\n'.format(messenger, core_object, state, registration_time)
with open(self.file_name, 'ab') as txt:
txt.write(log_string)
def write_notification(self, messenger, notification, notification_time):
try:
core_object = self.messengers[messenger]
except KeyError:
raise ResponderKeyError('Received notification from unregistered messenger.')
attribute_name = notification[0]
return_value = notification[1]
args = notification[2]
kwargs = notification[3]
access = 'MEMBER'
if args is not None:
access = 'METHOD, ARGS: {0}, KEYWORD ARGS: {1}'.format(args, kwargs)
attribute_string = 'Attribute: {0}\nAccess: {1}\nReturn: {2}'.format(attribute_name, access, return_value)
log_string = '---\nMessenger: {0}\nObject: {1}\n{2}\nTime of notification: {3}\n'.format(messenger, core_object, attribute_string, notification_time)
with open(self.file_name, 'ab') as txt:
txt.write(log_string)
|
[
"chronicle.responder.ResponderKeyError",
"datetime.datetime.utcnow"
] |
[((387, 413), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (411, 413), False, 'import datetime\n'), ((619, 645), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (643, 645), False, 'import datetime\n'), ((1366, 1437), 'chronicle.responder.ResponderKeyError', 'ResponderKeyError', (['"""Received notification from unregistered messenger."""'], {}), "('Received notification from unregistered messenger.')\n", (1383, 1437), False, 'from chronicle.responder import ResponderKeyError\n')]
|
import torch
import torch.nn as nn
import math
import torch.nn.functional as F
class Previewing_aware_Attention(nn.Module):
dim_in: int
dim_k: int
dim_v: int
def __init__(self, opt, dim_in_q, dim_in, dim_k, dim_v, dropout=0.2):
super(Previewing_aware_Attention, self).__init__()
self.dim_in_q = dim_in_q
self.dim_in = dim_in
self.dim_k = dim_k
self.dim_v = dim_v
self.linear_q = nn.Linear(self.dim_in_q, self.dim_k, bias=False)
self.linear_k = nn.Linear(self.dim_in, self.dim_k, bias=False)
self.linear_v = nn.Linear(self.dim_in, self.dim_v, bias=False)
self.fc_v = nn.Linear(self.dim_v, self.dim_in)
self.layer_norm = nn.LayerNorm(dim_v, eps=1e-6)
self._norm_fact = 1 / math.sqrt(self.dim_k)
self.dropout = nn.Dropout(dropout)
self.pooling = opt.pooling
def forward(self, mask, x, y):
# x: batch, n, dim_in
batch, n, dim_in = x.shape
assert dim_in == self.dim_in
if self.pooling == 'mean':
residual = F.avg_pool1d(x.permute(0, 2, 1), x.size(1)).squeeze(2)
else:
residual = F.max_pool1d(x.permute(0, 2, 1), x.size(1)).squeeze(2)
q = self.linear_q(y)
k = self.linear_k(x)
v = self.linear_v(x)
attention_mask = mask.unsqueeze(1)
attention_mask = (1.0 - attention_mask) * -10000.0 # padding的token置为-10000,exp(-1w)=0
dist = torch.bmm(q, k.transpose(1, 2)) * self._norm_fact
attention_scores = dist + attention_mask
attention_probs = torch.softmax(attention_scores, dim=-1)
attention_probs = self.dropout(attention_probs)
att = torch.bmm(attention_probs, v).squeeze(1)
att = self.layer_norm(self.fc_v(att) + residual)
return att
class PositionwiseFeedForward(nn.Module):
''' A two-feed-forward-layer module '''
def __init__(self, d_in, d_hid, dropout=0.1):
super().__init__()
self.w_1 = nn.Linear(d_in, d_hid) # position-wise
self.w_2 = nn.Linear(d_hid, d_in) # position-wise
self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
x = self.w_2(F.relu(self.w_1(x)))
x = self.dropout(x)
x += residual
del residual
x = self.layer_norm(x)
return x
class qkv_layer(nn.Module):
def __init__(self, opt, q_input_dim, kv_input_dim, qkv_out_dim):
super(qkv_layer, self).__init__()
self.qk_dim = 512
self.qkv = Previewing_aware_Attention(opt, q_input_dim, kv_input_dim, self.qk_dim, qkv_out_dim)
self.ffn = PositionwiseFeedForward(qkv_out_dim, qkv_out_dim * 2)
def forward(self, mask, kv_data, q_data):
qkv_out = self.qkv(mask, kv_data, q_data)
qkv_out = self.ffn(qkv_out)
return qkv_out
|
[
"torch.nn.Dropout",
"torch.bmm",
"math.sqrt",
"torch.softmax",
"torch.nn.LayerNorm",
"torch.nn.Linear"
] |
[((446, 494), 'torch.nn.Linear', 'nn.Linear', (['self.dim_in_q', 'self.dim_k'], {'bias': '(False)'}), '(self.dim_in_q, self.dim_k, bias=False)\n', (455, 494), True, 'import torch.nn as nn\n'), ((519, 565), 'torch.nn.Linear', 'nn.Linear', (['self.dim_in', 'self.dim_k'], {'bias': '(False)'}), '(self.dim_in, self.dim_k, bias=False)\n', (528, 565), True, 'import torch.nn as nn\n'), ((590, 636), 'torch.nn.Linear', 'nn.Linear', (['self.dim_in', 'self.dim_v'], {'bias': '(False)'}), '(self.dim_in, self.dim_v, bias=False)\n', (599, 636), True, 'import torch.nn as nn\n'), ((657, 691), 'torch.nn.Linear', 'nn.Linear', (['self.dim_v', 'self.dim_in'], {}), '(self.dim_v, self.dim_in)\n', (666, 691), True, 'import torch.nn as nn\n'), ((718, 748), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['dim_v'], {'eps': '(1e-06)'}), '(dim_v, eps=1e-06)\n', (730, 748), True, 'import torch.nn as nn\n'), ((823, 842), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (833, 842), True, 'import torch.nn as nn\n'), ((1591, 1630), 'torch.softmax', 'torch.softmax', (['attention_scores'], {'dim': '(-1)'}), '(attention_scores, dim=-1)\n', (1604, 1630), False, 'import torch\n'), ((2005, 2027), 'torch.nn.Linear', 'nn.Linear', (['d_in', 'd_hid'], {}), '(d_in, d_hid)\n', (2014, 2027), True, 'import torch.nn as nn\n'), ((2063, 2085), 'torch.nn.Linear', 'nn.Linear', (['d_hid', 'd_in'], {}), '(d_hid, d_in)\n', (2072, 2085), True, 'import torch.nn as nn\n'), ((2128, 2157), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['d_in'], {'eps': '(1e-06)'}), '(d_in, eps=1e-06)\n', (2140, 2157), True, 'import torch.nn as nn\n'), ((2180, 2199), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (2190, 2199), True, 'import torch.nn as nn\n'), ((778, 799), 'math.sqrt', 'math.sqrt', (['self.dim_k'], {}), '(self.dim_k)\n', (787, 799), False, 'import math\n'), ((1701, 1730), 'torch.bmm', 'torch.bmm', (['attention_probs', 'v'], {}), '(attention_probs, v)\n', (1710, 1730), False, 'import torch\n')]
|
import logging
LOG = logging.getLogger(__name__)
class ConsoleTask(object):
def __repr__(self):
return "Console @ %x" % (id(self),)
def run(self, bridge):
sock = bridge.stdin()
sock.write("{%shell begin %}")
while True:
sock.write(u"> ")
line = sock.readline()
if not line:
break
sock.write(line + "\r\n")
if line == 'exit':
break
sock.write("{%shell end %}")
LOG.info("Console Finished")
|
[
"logging.getLogger"
] |
[((22, 49), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (39, 49), False, 'import logging\n')]
|
# Copyright (c) 2020
# Author: xiaoweixiang
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import logging
import os
import subprocess
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import (
Configuration,
get_configuration_files,
kinds,
)
from pip._internal.exceptions import PipError
from pip._internal.utils.misc import get_prog, write_output
logger = logging.getLogger(__name__)
class ConfigurationCommand(Command):
"""Manage local and global configuration.
Subcommands:
list: List the active configuration (or from the file specified)
edit: Edit the configuration file in an editor
get: Get the value associated with name
set: Set the name=value
unset: Unset the value associated with name
If none of --user, --global and --site are passed, a virtual
environment configuration file is used if one is active and the file
exists. Otherwise, all modifications happen on the to the user file by
default.
"""
ignore_require_venv = True
usage = """
%prog [<file-option>] list
%prog [<file-option>] [--editor <editor-path>] edit
%prog [<file-option>] get name
%prog [<file-option>] set name value
%prog [<file-option>] unset name
"""
def __init__(self, *args, **kwargs):
super(ConfigurationCommand, self).__init__(*args, **kwargs)
self.configuration = None
self.cmd_opts.add_option(
'--editor',
dest='editor',
action='store',
default=None,
help=(
'Editor to use to edit the file. Uses VISUAL or EDITOR '
'environment variables if not provided.'
)
)
self.cmd_opts.add_option(
'--global',
dest='global_file',
action='store_true',
default=False,
help='Use the system-wide configuration file only'
)
self.cmd_opts.add_option(
'--user',
dest='user_file',
action='store_true',
default=False,
help='Use the user configuration file only'
)
self.cmd_opts.add_option(
'--site',
dest='site_file',
action='store_true',
default=False,
help='Use the current environment configuration file only'
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
handlers = {
"list": self.list_values,
"edit": self.open_in_editor,
"get": self.get_name,
"set": self.set_name_value,
"unset": self.unset_name
}
# Determine action
if not args or args[0] not in handlers:
logger.error("Need an action ({}) to perform.".format(
", ".join(sorted(handlers)))
)
return ERROR
action = args[0]
# Determine which configuration files are to be loaded
# Depends on whether the command is modifying.
try:
load_only = self._determine_file(
options, need_value=(action in ["get", "set", "unset", "edit"])
)
except PipError as e:
logger.error(e.args[0])
return ERROR
# Load a new configuration
self.configuration = Configuration(
isolated=options.isolated_mode, load_only=load_only
)
self.configuration.load()
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
return SUCCESS
def _determine_file(self, options, need_value):
file_options = [key for key, value in (
(kinds.USER, options.user_file),
(kinds.GLOBAL, options.global_file),
(kinds.SITE, options.site_file),
) if value]
if not file_options:
if not need_value:
return None
# Default to user, unless there's a site file.
elif any(
os.path.exists(site_config_file)
for site_config_file in get_configuration_files()[kinds.SITE]
):
return kinds.SITE
else:
return kinds.USER
elif len(file_options) == 1:
return file_options[0]
raise PipError(
"Need exactly one file to operate upon "
"(--user, --site, --global) to perform."
)
def list_values(self, options, args):
self._get_n_args(args, "list", n=0)
for key, value in sorted(self.configuration.items()):
write_output("%s=%r", key, value)
def get_name(self, options, args):
key = self._get_n_args(args, "get [name]", n=1)
value = self.configuration.get_value(key)
write_output("%s", value)
def set_name_value(self, options, args):
key, value = self._get_n_args(args, "set [name] [value]", n=2)
self.configuration.set_value(key, value)
self._save_configuration()
def unset_name(self, options, args):
key = self._get_n_args(args, "unset [name]", n=1)
self.configuration.unset_value(key)
self._save_configuration()
def open_in_editor(self, options, args):
editor = self._determine_editor(options)
fname = self.configuration.get_file_to_edit()
if fname is None:
raise PipError("Could not determine appropriate file.")
try:
subprocess.check_call([editor, fname])
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}"
.format(e.returncode)
)
def _get_n_args(self, args, example, n):
"""Helper to make sure the command got the right number of arguments
"""
if len(args) != n:
msg = (
'Got unexpected number of arguments, expected {}. '
'(example: "{} config {}")'
).format(n, get_prog(), example)
raise PipError(msg)
if n == 1:
return args[0]
else:
return args
def _save_configuration(self):
# We successfully ran a modifying command. Need to save the
# configuration.
try:
self.configuration.save()
except Exception:
logger.error(
"Unable to save configuration. Please report this as a bug.",
exc_info=1
)
raise PipError("Internal Error.")
def _determine_editor(self, options):
if options.editor is not None:
return options.editor
elif "VISUAL" in os.environ:
return os.environ["VISUAL"]
elif "EDITOR" in os.environ:
return os.environ["EDITOR"]
else:
raise PipError("Could not determine editor to use.")
|
[
"pip._internal.exceptions.PipError",
"pip._internal.utils.misc.write_output",
"pip._internal.configuration.get_configuration_files",
"os.path.exists",
"logging.getLogger",
"pip._internal.configuration.Configuration",
"pip._internal.utils.misc.get_prog",
"subprocess.check_call"
] |
[((527, 554), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (544, 554), False, 'import logging\n'), ((3573, 3639), 'pip._internal.configuration.Configuration', 'Configuration', ([], {'isolated': 'options.isolated_mode', 'load_only': 'load_only'}), '(isolated=options.isolated_mode, load_only=load_only)\n', (3586, 3639), False, 'from pip._internal.configuration import Configuration, get_configuration_files, kinds\n'), ((4685, 4783), 'pip._internal.exceptions.PipError', 'PipError', (['"""Need exactly one file to operate upon (--user, --site, --global) to perform."""'], {}), "(\n 'Need exactly one file to operate upon (--user, --site, --global) to perform.'\n )\n", (4693, 4783), False, 'from pip._internal.exceptions import PipError\n'), ((5162, 5187), 'pip._internal.utils.misc.write_output', 'write_output', (['"""%s"""', 'value'], {}), "('%s', value)\n", (5174, 5187), False, 'from pip._internal.utils.misc import get_prog, write_output\n'), ((4973, 5006), 'pip._internal.utils.misc.write_output', 'write_output', (['"""%s=%r"""', 'key', 'value'], {}), "('%s=%r', key, value)\n", (4985, 5006), False, 'from pip._internal.utils.misc import get_prog, write_output\n'), ((5764, 5813), 'pip._internal.exceptions.PipError', 'PipError', (['"""Could not determine appropriate file."""'], {}), "('Could not determine appropriate file.')\n", (5772, 5813), False, 'from pip._internal.exceptions import PipError\n'), ((5840, 5878), 'subprocess.check_call', 'subprocess.check_call', (['[editor, fname]'], {}), '([editor, fname])\n', (5861, 5878), False, 'import subprocess\n'), ((6428, 6441), 'pip._internal.exceptions.PipError', 'PipError', (['msg'], {}), '(msg)\n', (6436, 6441), False, 'from pip._internal.exceptions import PipError\n'), ((6389, 6399), 'pip._internal.utils.misc.get_prog', 'get_prog', ([], {}), '()\n', (6397, 6399), False, 'from pip._internal.utils.misc import get_prog, write_output\n'), ((6896, 6923), 'pip._internal.exceptions.PipError', 
'PipError', (['"""Internal Error."""'], {}), "('Internal Error.')\n", (6904, 6923), False, 'from pip._internal.exceptions import PipError\n'), ((7226, 7272), 'pip._internal.exceptions.PipError', 'PipError', (['"""Could not determine editor to use."""'], {}), "('Could not determine editor to use.')\n", (7234, 7272), False, 'from pip._internal.exceptions import PipError\n'), ((4386, 4418), 'os.path.exists', 'os.path.exists', (['site_config_file'], {}), '(site_config_file)\n', (4400, 4418), False, 'import os\n'), ((4459, 4484), 'pip._internal.configuration.get_configuration_files', 'get_configuration_files', ([], {}), '()\n', (4482, 4484), False, 'from pip._internal.configuration import Configuration, get_configuration_files, kinds\n')]
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Procedurally generated Swimmer domain."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from dm_control import mujoco
from dm_control.rl import control
from . import base
from . import common
from dm_control.suite.utils import randomizers
from dm_control.utils import containers
from dm_control.utils import rewards
from lxml import etree
import numpy as np
from six.moves import range
_DEFAULT_TIME_LIMIT = 30
_CONTROL_TIMESTEP = 0.03 # (Seconds)
SUITE = containers.TaggedTasks()
def get_model_and_assets(n_joints):
"""Returns a tuple containing the model XML string and a dict of assets.
Args:
n_joints: An integer specifying the number of joints in the swimmer.
Returns:
A tuple `(model_xml_string, assets)`, where `assets` is a dict consisting of
`{filename: contents_string}` pairs.
"""
return _make_model(n_joints), common.ASSETS
@SUITE.add("benchmarking")
def swimmer6(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns a 6-link swimmer."""
return _make_swimmer(
6, time_limit, random=random, environment_kwargs=environment_kwargs
)
@SUITE.add("benchmarking")
def swimmer15(time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None):
"""Returns a 15-link swimmer."""
return _make_swimmer(
15, time_limit, random=random, environment_kwargs=environment_kwargs
)
def swimmer(
n_links=3, time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None
):
"""Returns a swimmer with n links."""
return _make_swimmer(
n_links, time_limit, random=random, environment_kwargs=environment_kwargs
)
def _make_swimmer(
n_joints, time_limit=_DEFAULT_TIME_LIMIT, random=None, environment_kwargs=None
):
"""Returns a swimmer control environment."""
model_string, assets = get_model_and_assets(n_joints)
physics = Physics.from_xml_string(model_string, assets=assets)
task = Swimmer(random=random)
environment_kwargs = environment_kwargs or {}
return control.Environment(
physics,
task,
time_limit=time_limit,
control_timestep=_CONTROL_TIMESTEP,
**environment_kwargs
)
def _make_model(n_bodies):
"""Generates an xml string defining a swimmer with `n_bodies` bodies."""
if n_bodies < 3:
raise ValueError("At least 3 bodies required. Received {}".format(n_bodies))
mjcf = etree.fromstring(common.read_model("swimmer.xml"))
head_body = mjcf.find("./worldbody/body")
actuator = etree.SubElement(mjcf, "actuator")
sensor = etree.SubElement(mjcf, "sensor")
parent = head_body
for body_index in range(n_bodies - 1):
site_name = "site_{}".format(body_index)
child = _make_body(body_index=body_index)
child.append(etree.Element("site", name=site_name))
joint_name = "joint_{}".format(body_index)
joint_limit = 360.0 / n_bodies
joint_range = "{} {}".format(-joint_limit, joint_limit)
child.append(etree.Element("joint", {"name": joint_name, "range": joint_range}))
motor_name = "motor_{}".format(body_index)
actuator.append(etree.Element("motor", name=motor_name, joint=joint_name))
velocimeter_name = "velocimeter_{}".format(body_index)
sensor.append(
etree.Element("velocimeter", name=velocimeter_name, site=site_name)
)
gyro_name = "gyro_{}".format(body_index)
sensor.append(etree.Element("gyro", name=gyro_name, site=site_name))
parent.append(child)
parent = child
# Move tracking cameras further away from the swimmer according to its length.
cameras = mjcf.findall("./worldbody/body/camera")
scale = n_bodies / 6.0
for cam in cameras:
if cam.get("mode") == "trackcom":
old_pos = cam.get("pos").split(" ")
new_pos = " ".join([str(float(dim) * scale) for dim in old_pos])
cam.set("pos", new_pos)
return etree.tostring(mjcf, pretty_print=True)
def _make_body(body_index):
"""Generates an xml string defining a single physical body."""
body_name = "segment_{}".format(body_index)
visual_name = "visual_{}".format(body_index)
inertial_name = "inertial_{}".format(body_index)
body = etree.Element("body", name=body_name)
body.set("pos", "0 .1 0")
etree.SubElement(body, "geom", {"class": "visual", "name": visual_name})
etree.SubElement(body, "geom", {"class": "inertial", "name": inertial_name})
return body
class Physics(mujoco.Physics):
"""Physics simulation with additional features for the swimmer domain."""
def nose_to_target(self):
"""Returns a vector from nose to target in local coordinate of the head."""
nose_to_target = (
self.named.data.geom_xpos["target"] - self.named.data.geom_xpos["nose"]
)
head_orientation = self.named.data.xmat["head"].reshape(3, 3)
return nose_to_target.dot(head_orientation)[:2]
def nose_to_target_dist(self):
"""Returns the distance from the nose to the target."""
return np.linalg.norm(self.nose_to_target())
def body_velocities(self):
"""Returns local body velocities: x,y linear, z rotational."""
xvel_local = self.data.sensordata[12:].reshape((-1, 6))
vx_vy_wz = [0, 1, 5] # Indices for linear x,y vels and rotational z vel.
return xvel_local[:, vx_vy_wz].ravel()
def joints(self):
"""Returns all internal joint angles (excluding root joints)."""
return self.data.qpos[3:].copy()
class Swimmer(base.Task):
    """A swimmer `Task` rewarded for bringing its nose to the target."""

    def __init__(self, random=None):
        """Initializes an instance of `Swimmer`.

        Args:
          random: Optional, either a `numpy.random.RandomState` instance, an
            integer seed for creating a new `RandomState`, or None to select
            a seed automatically (default).
        """
        super().__init__(random=random)

    def initialize_episode(self, physics):
        """Sets the state of the environment at the start of each episode.

        Randomizes the joint configuration, then drops the target (and the
        light attached to it) at a random planar position; 20% of episodes
        sample the target from a tighter box around the origin.

        Args:
          physics: An instance of `Physics`.
        """
        randomizers.randomize_limited_and_rotational_joints(physics, self.random)
        # Draw the RNG in the same order as before: coin flip, then position.
        is_close = self.random.rand() < 0.2
        target_box = 0.3 if is_close else 2
        xpos, ypos = self.random.uniform(-target_box, target_box, size=2)
        model = physics.named.model
        model.geom_pos["target", "x"] = xpos
        model.geom_pos["target", "y"] = ypos
        model.light_pos["target_light", "x"] = xpos
        model.light_pos["target_light", "y"] = ypos
        super().initialize_episode(physics)

    def get_observation(self, physics):
        """Returns an observation of joint angles, body velocities and target."""
        return collections.OrderedDict([
            ("joints", physics.joints()),
            ("to_target", physics.nose_to_target()),
            ("body_velocities", physics.body_velocities()),
        ])

    def get_reward(self, physics):
        """Returns a smooth reward based on the nose-to-target distance."""
        target_size = physics.named.model.geom_size["target", 0]
        return rewards.tolerance(
            physics.nose_to_target_dist(),
            bounds=(0, target_size),
            margin=5 * target_size,
            sigmoid="long_tail",
        )
|
[
"dm_control.rl.control.Environment",
"dm_control.utils.containers.TaggedTasks",
"six.moves.range",
"lxml.etree.Element",
"dm_control.suite.utils.randomizers.randomize_limited_and_rotational_joints",
"lxml.etree.SubElement",
"collections.OrderedDict",
"lxml.etree.tostring"
] |
[((1213, 1237), 'dm_control.utils.containers.TaggedTasks', 'containers.TaggedTasks', ([], {}), '()\n', (1235, 1237), False, 'from dm_control.utils import containers\n'), ((2786, 2906), 'dm_control.rl.control.Environment', 'control.Environment', (['physics', 'task'], {'time_limit': 'time_limit', 'control_timestep': '_CONTROL_TIMESTEP'}), '(physics, task, time_limit=time_limit, control_timestep=\n _CONTROL_TIMESTEP, **environment_kwargs)\n', (2805, 2906), False, 'from dm_control.rl import control\n'), ((3283, 3317), 'lxml.etree.SubElement', 'etree.SubElement', (['mjcf', '"""actuator"""'], {}), "(mjcf, 'actuator')\n", (3299, 3317), False, 'from lxml import etree\n'), ((3331, 3363), 'lxml.etree.SubElement', 'etree.SubElement', (['mjcf', '"""sensor"""'], {}), "(mjcf, 'sensor')\n", (3347, 3363), False, 'from lxml import etree\n'), ((3410, 3429), 'six.moves.range', 'range', (['(n_bodies - 1)'], {}), '(n_bodies - 1)\n', (3415, 3429), False, 'from six.moves import range\n'), ((4725, 4764), 'lxml.etree.tostring', 'etree.tostring', (['mjcf'], {'pretty_print': '(True)'}), '(mjcf, pretty_print=True)\n', (4739, 4764), False, 'from lxml import etree\n'), ((5023, 5060), 'lxml.etree.Element', 'etree.Element', (['"""body"""'], {'name': 'body_name'}), "('body', name=body_name)\n", (5036, 5060), False, 'from lxml import etree\n'), ((5095, 5167), 'lxml.etree.SubElement', 'etree.SubElement', (['body', '"""geom"""', "{'class': 'visual', 'name': visual_name}"], {}), "(body, 'geom', {'class': 'visual', 'name': visual_name})\n", (5111, 5167), False, 'from lxml import etree\n'), ((5172, 5248), 'lxml.etree.SubElement', 'etree.SubElement', (['body', '"""geom"""', "{'class': 'inertial', 'name': inertial_name}"], {}), "(body, 'geom', {'class': 'inertial', 'name': inertial_name})\n", (5188, 5248), False, 'from lxml import etree\n'), ((7145, 7218), 'dm_control.suite.utils.randomizers.randomize_limited_and_rotational_joints', 'randomizers.randomize_limited_and_rotational_joints', (['physics', 
'self.random'], {}), '(physics, self.random)\n', (7196, 7218), False, 'from dm_control.suite.utils import randomizers\n'), ((7902, 7927), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (7925, 7927), False, 'import collections\n'), ((3551, 3588), 'lxml.etree.Element', 'etree.Element', (['"""site"""'], {'name': 'site_name'}), "('site', name=site_name)\n", (3564, 3588), False, 'from lxml import etree\n'), ((3765, 3831), 'lxml.etree.Element', 'etree.Element', (['"""joint"""', "{'name': joint_name, 'range': joint_range}"], {}), "('joint', {'name': joint_name, 'range': joint_range})\n", (3778, 3831), False, 'from lxml import etree\n'), ((3908, 3965), 'lxml.etree.Element', 'etree.Element', (['"""motor"""'], {'name': 'motor_name', 'joint': 'joint_name'}), "('motor', name=motor_name, joint=joint_name)\n", (3921, 3965), False, 'from lxml import etree\n'), ((4065, 4132), 'lxml.etree.Element', 'etree.Element', (['"""velocimeter"""'], {'name': 'velocimeter_name', 'site': 'site_name'}), "('velocimeter', name=velocimeter_name, site=site_name)\n", (4078, 4132), False, 'from lxml import etree\n'), ((4214, 4267), 'lxml.etree.Element', 'etree.Element', (['"""gyro"""'], {'name': 'gyro_name', 'site': 'site_name'}), "('gyro', name=gyro_name, site=site_name)\n", (4227, 4267), False, 'from lxml import etree\n')]
|
# 建立文件夹为相对路径,相对当前所在路径
# shutil 最好也使用相对路径
import glob
import shutil
from pathlib import Path
if __name__ == '__main__':
    cities = ["aachen", "bochum", "bremen", "cologne", "darmstadt",
              "dusseldorf", "erfurt", "hamburg", "hanover", "jena", "krefeld",
              "monchengladbach", "strasbourg", "stuttgart", "tubingen", "ulm",
              "weimar", "zurich", "frankfurt", "lindau", "munster"]
    exp = "0311_alpha1_weights1_cityscapes_synthia_resize512crop256_bs4"
    root = "../Cityscapes/leftImg8bit"
    origin = "."

    # Pre-create the split roots; per-city folders are created on demand.
    Path(root + "/train").mkdir(parents=True, exist_ok=True)
    Path(root + "/val").mkdir(parents=True, exist_ok=True)

    for city_index, city in enumerate(cities):
        # The first 18 cities form the training split, the rest validation.
        split = "train" if city_index < 18 else "val"
        dest = root + "/" + split + "/" + city
        for candidate in glob.glob(origin + "/*.png"):
            # NOTE(review): skips files whose third path character is a digit
            # (origin is "." so this is the first character of the filename);
            # presumably filters already-processed files -- confirm intent.
            if candidate[2] in "0123456789":
                continue
            Path(dest).mkdir(exist_ok=True)
            if city in candidate:
                shutil.move(candidate, dest + "/")
                print(candidate, "to", city)
|
[
"pathlib.Path",
"shutil.move",
"glob.glob"
] |
[((563, 584), 'pathlib.Path', 'Path', (["(root + '/train')"], {}), "(root + '/train')\n", (567, 584), False, 'from pathlib import Path\n'), ((625, 644), 'pathlib.Path', 'Path', (["(root + '/val')"], {}), "(root + '/val')\n", (629, 644), False, 'from pathlib import Path\n'), ((755, 783), 'glob.glob', 'glob.glob', (["(origin + '/*.png')"], {}), "(origin + '/*.png')\n", (764, 783), False, 'import glob\n'), ((1027, 1075), 'shutil.move', 'shutil.move', (['file', "(root + '/train/' + city + '/')"], {}), "(file, root + '/train/' + city + '/')\n", (1038, 1075), False, 'import shutil\n'), ((1261, 1307), 'shutil.move', 'shutil.move', (['file', "(root + '/val/' + city + '/')"], {}), "(file, root + '/val/' + city + '/')\n", (1272, 1307), False, 'import shutil\n'), ((921, 950), 'pathlib.Path', 'Path', (["(root + '/train/' + city)"], {}), "(root + '/train/' + city)\n", (925, 950), False, 'from pathlib import Path\n'), ((1157, 1184), 'pathlib.Path', 'Path', (["(root + '/val/' + city)"], {}), "(root + '/val/' + city)\n", (1161, 1184), False, 'from pathlib import Path\n')]
|
# Copyright (c) 2019 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed
# under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import json
from model import ConfigSetSerialiser as CSS, ConfigFactory
from model.base import ConfigElementNotFoundError
def simple_cfg_dict():
    """Builds a minimal raw configuration dict for the serialiser tests.

    Returns a mapping with three top-level sections:
      - 'cfg_types': type declarations for 'a_type' and 'cfg_set'
      - 'cfg_set':   three configuration sets referencing 'a_type' values
      - 'a_type':    concrete value definitions (one deliberately unused)
    """
    def _model(type_name, element_type):
        # One type-declaration entry in the 'cfg_types' section.
        return {'model': {'cfg_type_name': type_name, 'type': element_type}}

    cfg_types = {
        'a_type': _model('a_type', 'NamedModelElement'),
        'cfg_set': _model('cfg_set', 'ConfigurationSet'),
    }
    # Configuration sets: two simple ones plus one referencing two values
    # of the same type with an explicit default.
    cfg_sets = {
        'first_set': {'a_type': 'first_value_of_a'},
        'second_set': {'a_type': 'second_value_of_a'},
        'set_with_two_of_a_kind': {
            'a_type': {
                'config_names': ['first_value_of_a', 'second_value_of_a'],
                'default': 'second_value_of_a',
            }
        },
    }
    # Value definitions ('ignored_value_of_a' is referenced by no set).
    values = {
        'first_value_of_a': {'some_value': 123},
        'second_value_of_a': {'some_value': 42},
        'ignored_value_of_a': {'some_value': 'xxx'},
    }
    return {'cfg_types': cfg_types, 'cfg_set': cfg_sets, 'a_type': values}
class ConfigSetSerialiserTest(unittest.TestCase):
    """Tests round-tripping config sets through `ConfigSetSerialiser` (JSON)."""

    def setUp(self):
        # Build a factory over the fixture dict and resolve the three sets
        # the test cases below exercise.
        self.factory = ConfigFactory.from_dict(simple_cfg_dict())
        self.first_cfg_set = self.factory.cfg_set('first_set')
        self.second_cfg_set = self.factory.cfg_set('second_set')
        self.set_with_two_of_a_kind = self.factory.cfg_set('set_with_two_of_a_kind')

    def exercise(self, cfg_sets):
        """Serialises `cfg_sets` to JSON and returns the parsed dict."""
        examinee = CSS(cfg_sets=cfg_sets, cfg_factory=self.factory)
        return json.loads(examinee.serialise(output_format='json'))

    def deserialise(self, raw_dict):
        """Rebuilds a `ConfigFactory` from a serialised raw dict."""
        return ConfigFactory.from_dict(raw_dict)

    def test_serialise_empty_set(self):
        # No sets -> empty serialisation result.
        self.assertEqual(self.exercise({}), {})

    def test_serialise_one_set(self):
        result = self.exercise({self.first_cfg_set})
        # parse result again using cfg-factory so we do not have to make assumptions
        # about serialisation format
        deserialised = self.deserialise(result)
        first_cfg_set = deserialised.cfg_set('first_set')
        self.assertEqual(first_cfg_set.raw, self.first_cfg_set.raw)
        with self.assertRaises(ValueError):
            # second_set must not have been included
            deserialised.cfg_set('second_set')
        with self.assertRaises(ConfigElementNotFoundError):
            # only explicitly referenced values must be included
            deserialised._cfg_element('a_type', 'ignored_value_of_a')

    def test_serialise_two_sets(self):
        result = self.exercise({self.first_cfg_set, self.second_cfg_set})
        deserialised = self.deserialise(result)
        first_cfg_set = deserialised.cfg_set('first_set')
        second_cfg_set = deserialised.cfg_set('second_set')
        # Both sets survive the round trip unchanged.
        self.assertEqual(first_cfg_set.raw, self.first_cfg_set.raw)
        self.assertEqual(second_cfg_set.raw, self.second_cfg_set.raw)
        with self.assertRaises(ConfigElementNotFoundError):
            # only explicitly referenced values must be included
            deserialised._cfg_element('a_type', 'ignored_value_of_a')

    def test_serialise_set_with_two_of_a_kind(self):
        result = self.exercise({self.set_with_two_of_a_kind})
        deserialised = self.deserialise(result)
        two_of_a_kind_set = deserialised.cfg_set('set_with_two_of_a_kind')
        # test that the configured default value is returned
        second_value = two_of_a_kind_set._cfg_element('a_type')
        self.assertEqual(second_value.raw['some_value'], 42)
        # ensure first_value is also contained in serialisation result (returned from factory)
        first_value = deserialised._cfg_element('a_type', 'first_value_of_a')
        self.assertEqual(first_value.raw['some_value'], 123)
        # ensure the same value is also returned from the cfg_set
        first_value_from_cfg_set = two_of_a_kind_set._cfg_element('a_type', 'first_value_of_a')
        self.assertEqual(first_value.raw, first_value_from_cfg_set.raw)
|
[
"model.ConfigFactory.from_dict",
"model.ConfigSetSerialiser"
] |
[((2272, 2320), 'model.ConfigSetSerialiser', 'CSS', ([], {'cfg_sets': 'cfg_sets', 'cfg_factory': 'self.factory'}), '(cfg_sets=cfg_sets, cfg_factory=self.factory)\n', (2275, 2320), True, 'from model import ConfigSetSerialiser as CSS, ConfigFactory\n'), ((2442, 2475), 'model.ConfigFactory.from_dict', 'ConfigFactory.from_dict', (['raw_dict'], {}), '(raw_dict)\n', (2465, 2475), False, 'from model import ConfigSetSerialiser as CSS, ConfigFactory\n')]
|
import tensorflow as tf
import skimage.transform
import numpy as np
def conv2d(x, W, b, strides=1):
    """2-D convolution with bias add and ReLU activation ('SAME' padding).

    Args:
      x: Input tensor in NHWC layout.
      W: Convolution filter tensor.
      b: Bias vector, one entry per output channel.
      strides: Spatial stride applied to both height and width.
    """
    conv = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
    return tf.nn.relu(tf.nn.bias_add(conv, b))
def maxpool2d(x, k=2):
    """Max-pools `x` with a k x k window and matching stride ('SAME' padding)."""
    window = [1, k, k, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def upsampling(x, k=2):
    """Resizes `x` to k times its spatial size (the reverse of pooling)."""
    shape = x.get_shape()
    new_height = shape[1] * k
    new_width = shape[2] * k
    return tf.image.resize_images(x, size=[new_height, new_width])
def batch_generator(raw_image_data, batch_size):
    """Yields batches of randomly rotated 28x28 images with one-hot angle labels.

    Each image is rotated by a multiple of 90 degrees chosen uniformly at
    random; the label is the one-hot encoding of the chosen angle.

    Args:
      raw_image_data: Iterable of flat 784-element images.
      batch_size: Number of images per yielded batch.

    Yields:
      Tuples (x, y) where x has shape (batch_size, 28, 28, 1) and y has
      shape (batch_size, 4). A trailing partial batch is dropped so every
      batch has a fixed size (matching the original behaviour of discarding
      the remainder).
    """
    angls = [0, 90, 180, 270]
    x = []
    y = []
    for img in raw_image_data:
        angle = np.random.choice(angls)
        ohe_vector = np.zeros(4)
        ohe_vector[angls.index(angle)] = 1
        y.append(ohe_vector)
        transformed_img = skimage.transform.rotate(img.reshape((28, 28)), angle)
        x.append(transformed_img)
        # Fix: emit exactly `batch_size` items per batch. The previous
        # index-modulo test yielded only after appending item i == batch_size,
        # so the first batch contained batch_size + 1 images.
        if len(x) == batch_size:
            x_out = np.stack(x).reshape((-1, 28, 28, 1))
            y_out = np.stack(y)
            x = []
            y = []
            yield x_out, y_out
# Fashion-MNIST class index -> human-readable label.
# NOTE(review): unrelated to the rotation labels produced by batch_generator
# above -- confirm which task this module actually serves.
label_dict = {
    0: 'T-shirt/top',
    1: 'Trouser',
    2: 'Pullover',
    3: 'Dress',
    4: 'Coat',
    5: 'Sandal',
    6: 'Shirt',
    7: 'Sneaker',
    8: 'Bag',
    9: 'Ankle boot',
}
|
[
"numpy.stack",
"tensorflow.image.resize_images",
"tensorflow.nn.relu",
"numpy.zeros",
"tensorflow.nn.max_pool",
"tensorflow.nn.conv2d",
"numpy.random.choice",
"tensorflow.nn.bias_add"
] |
[((162, 230), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'W'], {'strides': '[1, strides, strides, 1]', 'padding': '"""SAME"""'}), "(x, W, strides=[1, strides, strides, 1], padding='SAME')\n", (174, 230), True, 'import tensorflow as tf\n'), ((239, 259), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['x', 'b'], {}), '(x, b)\n', (253, 259), True, 'import tensorflow as tf\n'), ((271, 284), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (281, 284), True, 'import tensorflow as tf\n'), ((350, 425), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['x'], {'ksize': '[1, k, k, 1]', 'strides': '[1, k, k, 1]', 'padding': '"""SAME"""'}), "(x, ksize=[1, k, k, 1], strides=[1, k, k, 1], padding='SAME')\n", (364, 425), True, 'import tensorflow as tf\n'), ((603, 667), 'tensorflow.image.resize_images', 'tf.image.resize_images', (['x'], {'size': '[t_shape[1] * k, t_shape[2] * k]'}), '(x, size=[t_shape[1] * k, t_shape[2] * k])\n', (625, 667), True, 'import tensorflow as tf\n'), ((943, 966), 'numpy.random.choice', 'np.random.choice', (['angls'], {}), '(angls)\n', (959, 966), True, 'import numpy as np\n'), ((988, 999), 'numpy.zeros', 'np.zeros', (['(4)'], {}), '(4)\n', (996, 999), True, 'import numpy as np\n'), ((1262, 1273), 'numpy.stack', 'np.stack', (['x'], {}), '(x)\n', (1270, 1273), True, 'import numpy as np\n'), ((1349, 1360), 'numpy.stack', 'np.stack', (['y'], {}), '(y)\n', (1357, 1360), True, 'import numpy as np\n')]
|
# WSGI start script for gunicorn in docker
# usage: gunicorn -c gunicorn_conf.py main:app --threads 2 -b 0.0.0.0:80
from app import create_app
# Module-level WSGI application object; gunicorn resolves it through the
# "main:app" module:attribute reference given in the usage comment above.
app = create_app()
|
[
"app.create_app"
] |
[((151, 163), 'app.create_app', 'create_app', ([], {}), '()\n', (161, 163), False, 'from app import create_app\n')]
|
from __future__ import annotations

from collections import deque
from typing import Any, Callable, Container, Deque, Dict, Iterable, Iterator, List, NoReturn, Optional, Reversible, Sequence, Set, Sized, TYPE_CHECKING, Tuple, Type, Generic, Union

if TYPE_CHECKING:
    from .lookup import Lookup
    from .grouping import Grouping
    from .ordered_enumerable import OrderedEnumerable
    from .cached_enumerable import CachedEnumerable
    from .more import MoreEnumerable

from .types_linq_error import InvalidOperationError, IndexOutOfRangeError
from .util import ComposeSet
from .more_typing import (
    TCollection,
    TDefault,
    TInner,
    TKey,
    TResult,
    TSource_co,
    TValue,
)
# do not use this value!!!
_signal: Any = object()
class Enumerable(Sequence[TSource_co], Generic[TSource_co]):
_iter_factory: Callable[[], Iterable[TSource_co]]
def __init__(self,
    it: Union[Iterable[TSource_co], Callable[[], Iterable[TSource_co]]]
):
    """Wraps either a concrete iterable or a zero-argument iterable factory."""
    if isinstance(it, Iterable):
        # Bind the iterable as a lambda default so each enumeration returns
        # the same underlying object.
        self._iter_factory = lambda source=it: source
    else:
        # `it` is a () -> Iterable factory; call it on every enumeration.
        self._iter_factory = lambda factory=it: factory()
def _get_iterable(self) -> Iterable[TSource_co]:
return self._iter_factory()
# 'fallback = F' -> calls dunder methods if available
# -> otherwise calls own implementation
# 'fallback = T'-> calls own implementation
def _contains_impl(self, value: object, fallback: bool) -> bool:
iterable = self._get_iterable()
if not fallback and isinstance(iterable, Container):
return value in iterable # type: ignore
for elem in iterable:
if elem == value:
return True
return False
def __contains__(self, value: object) -> bool:
return self._contains_impl(value, fallback=False)
def _every(self, step: int) -> Enumerable[TSource_co]:
return self.where2(lambda _, i: i % step == 0)
def _getitem_impl(self,
    index: Union[int, slice],
    fallback: bool,
) -> Union[TSource_co, Enumerable[TSource_co]]:
    """Indexes or slices the sequence.

    An int index returns the element at that position; a slice returns a
    lazy Enumerable over the selected elements. When `fallback` is False
    and the underlying iterable is already a Sequence, its native indexing
    is used instead of a linear scan.

    Raises:
        IndexOutOfRangeError: if an int index is past the end of the
            sequence (translated from the underlying IndexError).
    """
    iterable = self._get_iterable()
    if isinstance(index, int):
        # Sequence is an abstract base class without @runtime_checkable
        if not fallback and isinstance(iterable, Sequence):
            # an appropriate implementation should raise IndexError, or IndexOutOfRangeError
            try:
                return iterable[index]
            except IndexError as e:
                raise IndexOutOfRangeError from e
        # Plain iterable: O(index) scan, consuming `index` elements first.
        iterator = iter(iterable)
        try:
            for _ in range(index):
                next(iterator)
            return next(iterator)
        except StopIteration:
            raise IndexOutOfRangeError('Not enough elements in the sequence')
    else:  # isinstance(index, slice)
        if not fallback and isinstance(iterable, Sequence):
            try:
                res = iterable[index]
            except IndexError as e:
                raise IndexOutOfRangeError(e)
            return res if isinstance(res, Enumerable) else Enumerable(res)
        # we do not enumerate all values if the begin and the end only involve
        # nonnegative indices since in which case the sliced part can be obtained
        # without reversing.
        # otherwise have to enumerate all with using list's slice operator.
        # (don't enumerate right away in this function, of course)
        def inner(s: slice = index):
            en = iterable if isinstance(iterable, Enumerable) else Enumerable(iterable)
            start_is_none = s.start is None
            stop_is_none = s.stop is None
            step = s.step if s.step is not None else 1
            # Negative bounds or an open end require knowing the total
            # length, so fall back to materialising and list-slicing.
            if (start_is_none and stop_is_none) or (not start_is_none and s.start < 0) \
                or (not stop_is_none and s.stop < 0) or (stop_is_none):
                yield from en.to_list()[s]
                return
            elif start_is_none:
                if step > 0:
                    # [:stop:step] -> take a prefix, thin it by `step`.
                    yield from en.take(s.stop)._every(step)
                else:
                    # Negative step: reverse the dropped-prefix remainder.
                    yield from en.skip(s.stop + 1).reverse()._every(-step)
                return
            elif s.start <= s.stop and step > 0:
                yield from en.skip(s.start).take(s.stop - s.start)._every(step)
                return
            elif step <= 0:
                yield from en.skip(s.stop + 1).take(s.start - s.stop) \
                    .reverse()._every(-step)
        return Enumerable(inner)
def __getitem__(self, # type: ignore[override]
index: Union[int, slice],
) -> Union[TSource_co, Enumerable[TSource_co]]:
return self._getitem_impl(index, fallback=False)
def __iter__(self) -> Iterator[TSource_co]:
return iter(self._get_iterable())
def _len_impl(self, fallback: bool) -> int:
    """Returns the element count, using `len()` on Sized iterables unless
    `fallback` forces a manual enumeration."""
    iterable = self._get_iterable()
    if not fallback and isinstance(iterable, Sized):
        return len(iterable)
    # Otherwise enumerate the whole sequence and count.
    return sum(1 for _ in iterable)
def __len__(self) -> int:
return self._len_impl(fallback=False)
def _reversed_impl(self, fallback: bool) -> Iterator[TSource_co]:
iterable = self._get_iterable()
# Sequence is an abstract base class without @runtime_checkable
if not fallback and isinstance(iterable, (Sequence, Reversible)):
return reversed(iterable)
return reversed([elem for elem in iterable])
def __reversed__(self) -> Iterator[TSource_co]:
return self._reversed_impl(fallback=False)
@staticmethod
def _raise_empty_sequence() -> NoReturn:
raise InvalidOperationError('Sequence is empty')
def aggregate(self, *args) -> Any:
    """Folds the sequence with an accumulator function.

    Overloads:
      (seed, func, result_selector) -> result_selector(fold)
      (seed, func)                  -> fold starting from seed
      (func,)                       -> fold seeded by the first element;
                                       raises on an empty sequence.
    """
    if len(args) == 3:
        accumulated, func, result_selector = args
        for item in self:
            accumulated = func(accumulated, item)
        return result_selector(accumulated)
    if len(args) == 2:
        accumulated, func = args
        for item in self:
            accumulated = func(accumulated, item)
        return accumulated
    # len(args) == 1: seed with the first element.
    func = args[0]
    iterator = iter(self)
    try:
        accumulated = next(iterator)
    except StopIteration:
        self._raise_empty_sequence()
    for item in iterator:
        accumulated = func(accumulated, item)
    return accumulated
def all(self, predicate: Callable[[TSource_co], bool]) -> bool:
    """Returns True if every element satisfies `predicate` (True when empty).

    Enumeration stops at the first failing element.
    """
    sentinel = object()
    first_failure = next(
        (elem for elem in self if not predicate(elem)),
        sentinel,
    )
    return first_failure is sentinel
def any(self, *args: Callable[[TSource_co], bool]) -> bool:
    """Returns True if the sequence is non-empty (no args), or if at least
    one element satisfies the supplied predicate (one arg).

    Enumeration stops at the first match.
    """
    sentinel = object()
    if len(args) == 0:
        # Non-empty check: probe for a single element.
        return next(iter(self), sentinel) is not sentinel
    predicate = args[0]
    match = next((elem for elem in self if predicate(elem)), sentinel)
    return match is not sentinel
def append(self, element: TSource_co) -> Enumerable[TSource_co]: # type: ignore
# this method does not mutate the current container
def inner():
yield from self
yield element
return Enumerable(inner)
def as_cached(self, *, cache_capacity: Optional[int] = None) -> CachedEnumerable[TSource_co]:
from .cached_enumerable import CachedEnumerable
return CachedEnumerable(self, cache_capacity)
def as_more(self) -> MoreEnumerable[TSource_co]:
from .more import MoreEnumerable
return MoreEnumerable(self)
def _average_helper(self, selector, when_empty):
count = 0
iterator = iter(self)
try:
sum_ = selector(next(iterator))
count += 1
except StopIteration:
return when_empty()
for elem in iterator:
sum_ += selector(elem)
count += 1
return sum_ / count
def average(self, *args: Callable[[TSource_co], Any]) -> Any:
if len(args) == 0:
selector = lambda x: x
else: # len(args) == 1
selector = args[0]
return self._average_helper(selector, self._raise_empty_sequence)
def average2(self, *args):
if len(args) == 1:
selector, default = lambda x: x, args[0]
else: # len(args) == 2
selector, default = args
return self._average_helper(selector, lambda: default)
def cast(self, _: Type[TResult]) -> Enumerable[TResult]:
return self # type: ignore
def concat(self, second: Iterable[TSource_co]) -> Enumerable[TSource_co]:
def inner():
yield from self
yield from second
return Enumerable(inner)
def contains(self, value: object, *args: Callable[..., bool]):
if len(args) == 0:
return self._contains_impl(value, fallback=True)
else: # len(args) == 1
comparer = args[0]
for elem in self:
if comparer(elem, value):
return True
return False
def count(self, *args: Callable[[TSource_co], bool]) -> int:
if len(args) == 0:
return self._len_impl(fallback=True)
else: # len(args) == 1
predicate = args[0]
count = 0
for elem in self:
if predicate(elem):
count += 1
return count
def default_if_empty(self,
default: TDefault,
) -> Union[Enumerable[TSource_co], Enumerable[TDefault]]:
def inner():
iterator = iter(self)
try:
yield next(iterator)
except StopIteration:
yield default
return
yield from iterator
return Enumerable(inner) # type: ignore
def distinct(self) -> Enumerable[TSource_co]:
return self.except1(())
def element_at(self, index: int, *args: TDefault) -> Union[TSource_co, TDefault]:
if len(args) == 0:
return self._getitem_impl(index, fallback=True) # type: ignore
else: # len(args) == 1
default = args[0]
try:
return self._getitem_impl(index, fallback=True) # type: ignore
except IndexOutOfRangeError:
return default
@staticmethod
def empty() -> Enumerable[TSource_co]:
return Enumerable(())
def except1(self, second: Iterable[TSource_co]) -> Enumerable[TSource_co]:
def inner():
s = ComposeSet(second)
for elem in self:
if elem in s:
continue
s.add(elem)
yield elem
return Enumerable(inner)
@staticmethod
def _raise_no_such_element() -> NoReturn:
raise InvalidOperationError('No element satisfying condition')
def first(self, *args: Callable[[TSource_co], bool]) -> TSource_co:
if len(args) == 0:
try:
return self.element_at(0) # type: ignore
except IndexOutOfRangeError as e:
raise InvalidOperationError(e)
else: # len(args) == 1
predicate = args[0]
for elem in self:
if predicate(elem):
return elem
self._raise_no_such_element()
def first2(self, *args):
if len(args) == 1:
default = args[0]
try:
return self.element_at(0) # type: ignore
except IndexOutOfRangeError:
return default
else: # len(args) == 2
predicate, default = args
for elem in self:
if predicate(elem):
return elem
return default
def group_by(self,
key_selector: Callable[[TSource_co], TKey],
value_selector: Callable[[TSource_co], TValue],
*args: Callable[[TKey, Enumerable[TValue]], TResult],
) -> Union[Enumerable[TResult], Enumerable[Grouping[TKey, TValue]]]:
from .lookup import Lookup
if len(args) == 1:
result_selector = args[0]
inner = lambda: Lookup(self, key_selector, value_selector) \
.apply_result_selector(result_selector) # type: ignore
else: # len(args) == 0:
inner = lambda: Lookup(self, key_selector, value_selector)
return Enumerable(inner)
def group_by2(self,
key_selector: Callable[[TSource_co], TKey],
*args: Callable[[TKey, Enumerable[TSource_co]], TResult],
) -> Union[Enumerable[TResult], Enumerable[Grouping[TKey, TSource_co]]]:
from .lookup import Lookup
if len(args) == 1:
result_selector = args[0]
inner = lambda: Lookup(self, key_selector, lambda x: x) \
.apply_result_selector(result_selector) # type: ignore
else: # len(args) == 0:
inner = lambda: Lookup(self, key_selector, lambda x: x)
return Enumerable(inner)
def group_join(self,
inner: Iterable[TInner],
outer_key_selector: Callable[[TSource_co], TKey],
inner_key_selector: Callable[[TInner], TKey],
result_selector: Callable[[TSource_co, Enumerable[TInner]], TResult],
) -> Enumerable[TResult]:
from .lookup import Lookup
def inner_gen():
lookup = Lookup(inner, inner_key_selector, lambda x: x)
for outer_item in self:
group = lookup[outer_key_selector(outer_item)]
yield result_selector(outer_item, group) # type: ignore
return Enumerable(inner_gen)
def intersect(self, second: Iterable[TSource_co]) -> Enumerable[TSource_co]:
def inner():
s = ComposeSet(second)
for elem in self:
if elem not in s:
continue
s.remove(elem)
yield elem
return Enumerable(inner)
def join(self,
inner: Iterable[TInner],
outer_key_selector: Callable[[TSource_co], TKey],
inner_key_selector: Callable[[TInner], TKey],
result_selector: Callable[[TSource_co, TInner], TResult],
) -> Enumerable[TResult]:
from .lookup import Lookup
def inner_gen():
lookup = Lookup(inner, inner_key_selector, lambda x: x)
for outer_item in self:
for inner_item in lookup[outer_key_selector(outer_item)]:
yield result_selector(outer_item, inner_item)
return Enumerable(inner_gen)
def last(self, *args: Callable[[TSource_co], bool]) -> TSource_co:
ret: Any = _signal
if len(args) == 0:
for elem in self:
ret = elem
if ret is _signal:
self._raise_empty_sequence()
else: # len(args) == 1
predicate = args[0]
for elem in self:
if predicate(elem):
ret = elem
if ret is _signal:
self._raise_no_such_element()
return ret
def last2(self, *args):
if len(args) == 1:
default = args[0]
for elem in self:
default = elem
else: # len(args) == 2
predicate, default = args
for elem in self:
if predicate(elem):
default = elem
return default
def _minmax_helper(self, result_selector, op, when_empty) -> Any:
iterator = iter(self)
try:
curr = result_selector(next(iterator))
except StopIteration:
return when_empty()
for elem in iterator:
mapped = result_selector(elem)
curr = mapped if op(curr, mapped) else curr
return curr
def max(self, *args: Callable[[TSource_co], Any]) -> Any:
if len(args) == 0:
result_selector: Any = lambda x: x
else: # len(args) == 1
result_selector = args[0]
return self._minmax_helper(
result_selector,
lambda l, r: l < r,
self._raise_empty_sequence,
)
def max2(self, *args) -> Any:
if len(args) == 1:
result_selector, default = lambda x: x, args[0]
else: # len(args) == 2
result_selector, default = args
return self._minmax_helper(
result_selector,
lambda l, r: l < r,
lambda: default,
)
def min(self, *args: Callable[[TSource_co], Any]) -> Any:
if len(args) == 0:
result_selector: Any = lambda x: x
else: # len(args) == 1
result_selector = args[0]
return self._minmax_helper(
result_selector,
lambda l, r: r < l,
self._raise_empty_sequence,
)
def min2(self, *args) -> Any:
if len(args) == 1:
result_selector, default = lambda x: x, args[0]
else: # len(args) == 2
result_selector, default = args
return self._minmax_helper(
result_selector,
lambda l, r: r < l,
lambda: default,
)
def of_type(self, t_result: Type[TResult]) -> Enumerable[TResult]:
return self.where(lambda e: isinstance(e, t_result)).cast(t_result)
def order_by(self,
key_selector: Callable[[TSource_co], TKey],
*args: Callable[[TKey, TKey], int],
) -> OrderedEnumerable[TSource_co, TKey]:
from .ordered_enumerable import OrderedEnumerable
if len(args) == 1:
comparer = args[0]
else: # len(args) == 2:
comparer = None
return OrderedEnumerable(
self._get_iterable,
None,
key_selector,
comparer,
False,
)
def order_by_descending(self,
key_selector: Callable[[TSource_co], TKey],
*args: Callable[[TKey, TKey], int],
) -> OrderedEnumerable[TSource_co, TKey]:
from .ordered_enumerable import OrderedEnumerable
if len(args) == 1:
comparer = args[0]
else: # len(args) == 2:
comparer = None
return OrderedEnumerable(
self._get_iterable,
None,
key_selector,
comparer,
True,
)
def prepend(self, element: TSource_co) -> Enumerable[TSource_co]: # type: ignore
# see self.append()
def inner():
yield element
yield from self
return Enumerable(inner)
@staticmethod
def _raise_count_negative() -> NoReturn:
raise InvalidOperationError('count must be nonnegative')
@staticmethod
def range(start: int, count: Optional[int]) -> Enumerable[int]:
if count is not None:
if count < 0:
Enumerable._raise_count_negative()
def inner(curr=start, cnt=count): # type: ignore[misc]
while cnt > 0:
yield curr
curr += 1
cnt -= 1
else:
def inner(curr=start): # type: ignore[misc]
while True:
yield curr
curr += 1
return Enumerable(inner)
@staticmethod
def repeat(value: TResult, count: Optional[int] = None) -> Enumerable[TResult]:
if count is not None:
if count < 0:
Enumerable._raise_count_negative()
def inner(val=value, cnt=count): # type: ignore[misc]
while cnt > 0:
yield val
cnt -= 1
else:
def inner(val=value): # type: ignore[misc]
while True:
yield val
return Enumerable(inner)
def reverse(self) -> Enumerable[TSource_co]:
return Enumerable(lambda: self._reversed_impl(fallback=True))
def select(self, selector: Callable[[TSource_co], TResult]) -> Enumerable[TResult]:
def inner():
for elem in self:
yield selector(elem)
return Enumerable(inner)
def select2(self, selector: Callable[[TSource_co, int], TResult]) -> Enumerable[TResult]:
def inner():
for i, elem in enumerate(self):
yield selector(elem, i)
return Enumerable(inner)
def select_many(self,
collection_selector: Callable[[TSource_co], Iterable[TCollection]],
*args: Callable[[TSource_co, TCollection], TResult],
) -> Union[Enumerable[TCollection], Enumerable[TResult]]:
if len(args) == 0:
result_selector: Any = lambda _, x: x
else: # len(args) == 1
result_selector = args[0]
def inner():
for elem in self:
for sub in collection_selector(elem):
yield result_selector(elem, sub)
return Enumerable(inner)
def select_many2(self,
collection_selector: Callable[[TSource_co, int], Iterable[TCollection]],
*args: Callable[[TSource_co, TCollection], TResult],
) -> Union[Enumerable[TCollection], Enumerable[TResult]]:
if len(args) == 0:
result_selector: Any = lambda _, x: x
else: # len(args) == 1
result_selector = args[0]
def inner():
for i, elem in enumerate(self):
for sub in collection_selector(elem, i):
yield result_selector(elem, sub)
return Enumerable(inner)
def sequence_equal(self,
    second: Iterable[TSource_co],
    *args: Callable[..., bool],
) -> bool:
    """Returns True if both sequences contain equal elements in the same
    order and have the same length.

    An optional comparer taking (lhs, rhs) may be supplied; defaults to
    `==`. Enumeration stops at the first mismatch.
    """
    comparer = args[0] if args else (lambda x, y: x == y)
    mine, theirs = iter(self), iter(second)
    exhausted = object()
    while True:
        lhs = next(mine, exhausted)
        rhs = next(theirs, exhausted)
        if lhs is exhausted or rhs is exhausted:
            # Equal only when both iterators ran out simultaneously.
            return lhs is rhs
        if not comparer(lhs, rhs):
            return False
def _find_single(self, res):
for elem in self:
if res is not _signal:
raise InvalidOperationError('Sequence does not contain exactly one element')
res = elem
return res
def _find_single_with_predicate(self, res, predicate):
for elem in self:
if predicate(elem):
if res is not _signal:
raise InvalidOperationError(
'There are multiple elements that satisfy condition: '
f'{res} vs. {elem}'
)
res = elem
return res
def single(self, *args: Callable[[TSource_co], bool]) -> TSource_co:
res: Any = _signal
if len(args) == 0:
res = self._find_single(res)
if res is _signal:
self._raise_empty_sequence()
else: # len(args) == 1
predicate = args[0]
res = self._find_single_with_predicate(res, predicate)
if res is _signal:
self._raise_no_such_element()
return res
def single2(self, *args):
res: Any = _signal
if len(args) == 1:
default = args[0]
res = self._find_single(res)
else: # len(args) == 2
predicate, default = args
res = self._find_single_with_predicate(res, predicate)
if res is _signal:
return default
return res
def skip(self, count: int) -> Enumerable[TSource_co]:
def inner():
iterator = iter(self)
try:
for _ in range(count):
next(iterator)
except StopIteration:
return
yield from iterator
return Enumerable(inner)
def skip_last(self, count: int) -> Enumerable[TSource_co]:
    """Yields every element except the last `count` ones.

    Enumeration is lazy and single-pass: a deque buffers `count` elements
    so each element is emitted only once `count` later elements have been
    observed.

    Args:
        count: Number of trailing elements to drop; values <= 0 drop none.
    """
    if count <= 0:
        # Nothing to drop; skip(0) still yields a lazy view of self.
        return self.skip(0)
    def inner():
        iterator = iter(self)
        # Fix: use collections.deque directly. Instantiating typing.Deque
        # is deprecated -- typing aliases exist for annotations only.
        q = deque()
        for elem in iterator:
            if len(q) == count:
                # Buffer full: emit the oldest element for each new one.
                while True:
                    yield q.pop()
                    q.appendleft(elem)
                    try:
                        elem = next(iterator)
                    except StopIteration:
                        break
            else:
                q.appendleft(elem)
    return Enumerable(inner)
def skip_while(self, predicate: Callable[[TSource_co], bool]) -> Enumerable[TSource_co]:
def inner():
iterator = iter(self)
for elem in iterator:
if not predicate(elem):
yield elem
break
yield from iterator
return Enumerable(inner)
def skip_while2(self, predicate: Callable[[TSource_co, int], bool]) -> Enumerable[TSource_co]:
def inner():
iterator = iter(self)
for i, elem in enumerate(iterator):
if not predicate(elem, i):
yield elem
break
yield from iterator
return Enumerable(inner)
def _sum_helper(self, selector, when_empty):
iterator = iter(self)
try:
sum_ = selector(next(iterator))
except StopIteration:
return when_empty()
for elem in iterator:
sum_ += selector(elem)
return sum_
def sum(self, *args) -> Any:
if len(args) == 0:
selector: Any = lambda x: x
else: # len(args) == 1
selector = args[0]
return self._sum_helper(selector, lambda: 0)
def sum2(self, *args) -> Any:
if len(args) == 1:
selector, default = lambda x: x, args[0]
else: # len(args) == 2
selector, default = args
return self._sum_helper(selector, lambda: default)
def take(self, count: int) -> Enumerable[TSource_co]:
def inner():
iterator = iter(self)
try:
for _ in range(count):
yield next(iterator)
except StopIteration:
return
return Enumerable(inner)
    def take_last(self, count: int) -> Enumerable[TSource_co]:
        """Yield only the last ``count`` elements (all of them if fewer exist)."""
        if count <= 0:
            # A non-positive count selects nothing.
            return self.empty()
        def inner():
            iterator = iter(self)
            try:
                # Seed the buffer with the first element; an empty source
                # yields nothing.
                q = Deque((next(iterator),))
            except StopIteration:
                return
            for elem in iterator:
                if len(q) == count:
                    # Buffer full: keep discarding the oldest element while
                    # appending newer ones until the source runs dry.
                    while True:
                        q.popleft()
                        q.append(elem)
                        try:
                            elem = next(iterator)
                        except StopIteration:
                            break
                else:
                    q.append(elem)
            # Whatever remains buffered is exactly the tail of the sequence.
            yield from q
        return Enumerable(inner)
def take_while(self, predicate: Callable[[TSource_co], bool]) -> Enumerable[TSource_co]:
def inner():
for elem in self:
if not predicate(elem):
break
yield elem
return Enumerable(inner)
def take_while2(self, predicate: Callable[[TSource_co, int], bool]) -> Enumerable[TSource_co]:
def inner():
for i, elem in enumerate(self):
if not predicate(elem, i):
break
yield elem
return Enumerable(inner)
def to_dict(self,
key_selector: Callable[[TSource_co], TKey],
*args: Callable[[TSource_co], TValue],
) -> Union[Dict[TKey, TValue], Dict[TKey, TSource_co]]:
if len(args) == 0:
value_selector: Any = lambda x: x
else: # len(args) == 1
value_selector = args[0]
return {key_selector(e): value_selector(e) for e in self}
def to_set(self) -> Set[TSource_co]:
return {e for e in self}
def to_list(self) -> List[TSource_co]:
return [e for e in self]
def to_lookup(self,
key_selector: Callable[[TSource_co], TKey],
*args: Callable[[TSource_co], TValue],
) -> Union[Lookup[TKey, TValue], Lookup[TKey, TSource_co]]:
from .lookup import Lookup
if len(args) == 0:
value_selector: Any = lambda x: x
else: # len(args) == 1
value_selector = args[0]
res = Lookup(self, key_selector, value_selector)
return res
def union(self, second: Iterable[TSource_co]) -> Enumerable[TSource_co]:
def inner():
# TODO: optimise chained .union() call to reuse s
s = ComposeSet()
for elem in self.concat(second):
if elem in s:
continue
s.add(elem)
yield elem
return Enumerable(inner)
def where(self, predicate: Callable[[TSource_co], bool]) -> Enumerable[TSource_co]:
def inner():
for elem in self:
if predicate(elem):
yield elem
return Enumerable(inner)
def where2(self, predicate: Callable[[TSource_co, int], bool]) -> Enumerable[TSource_co]:
def inner():
for i, elem in enumerate(self):
if predicate(elem, i):
yield elem
return Enumerable(inner)
def zip(self, *iters: Iterable[Any]) -> Enumerable[Any]:
def inner():
yield from zip(self, *iters)
return Enumerable(inner)
def zip2(self, *iters_and_result_selector: Any) -> Enumerable[Any]:
iters = iters_and_result_selector[:-1]
result_selector = iters_and_result_selector[-1]
def inner():
for tup in zip(self, *iters):
yield result_selector(*tup)
return Enumerable(inner)
def elements_in(self, *args) -> Enumerable[TSource_co]:
if len(args) == 1:
index = args[0]
return self._getitem_impl(index, fallback=True) # type: ignore
elif len(args) == 2:
start, stop = args
return self.elements_in(start, stop, 1)
else: # len(args) == 3
return self.elements_in(slice(*args))
def to_tuple(self) -> Tuple[TSource_co, ...]:
return tuple(e for e in self)
|
[
"typing.Deque"
] |
[((24344, 24351), 'typing.Deque', 'Deque', ([], {}), '()\n', (24349, 24351), False, 'from typing import Any, Callable, Container, Deque, Dict, Iterable, Iterator, List, NoReturn, Optional, Reversible, Sequence, Set, Sized, TYPE_CHECKING, Tuple, Type, Generic, Union\n')]
|
"""Example code for the nodes in the example pipeline. This code is meant
just for illustrating basic Kedro features.
Delete this when you start working on your own Kedro project.
"""
# pylint: disable=invalid-name
import logging
from typing import Any, Dict
import numpy as np
import pandas as pd
def train_model(
    train_x: pd.DataFrame, train_y: pd.DataFrame, parameters: Dict[str, Any]
) -> np.ndarray:
    """Node for training a simple multi-class logistic regression model.

    One binary logistic model is fitted per class column of ``train_y`` using
    plain gradient descent. The number of training iterations and the learning
    rate are taken from conf/project/parameters.yml; all data and parameters
    are provided at execution time.
    """
    num_iter = parameters["example_num_train_iter"]
    lr = parameters["example_learning_rate"]
    features = train_x.to_numpy()
    targets = train_y.to_numpy()
    # Prepend a bias column of ones to the feature matrix.
    features = np.concatenate((np.ones((features.shape[0], 1)), features), axis=1)
    weights = []
    for class_idx in range(targets.shape[1]):
        theta = np.zeros(features.shape[1])
        y = targets[:, class_idx]
        for _ in range(num_iter):
            h = _sigmoid(np.dot(features, theta))
            gradient = np.dot(features.T, (h - y)) / y.size
            theta -= lr * gradient
        weights.append(theta)
    # One weight row per class; transpose to (n_features, n_classes).
    return np.vstack(weights).transpose()
def predict(model: np.ndarray, test_x: pd.DataFrame) -> np.ndarray:
    """Node for making predictions given a pre-trained model and a test set."""
    features = test_x.to_numpy()
    # Prepend a bias column of ones, matching the training layout.
    features = np.concatenate((np.ones((features.shape[0], 1)), features), axis=1)
    # "Probabilities" per class for every sample.
    probabilities = _sigmoid(np.dot(features, model))
    # Index of the most probable class per sample.
    return np.argmax(probabilities, axis=1)
def report_accuracy(predictions: np.ndarray, test_y: pd.DataFrame) -> None:
    """Node for reporting the accuracy of the predictions performed by the
    previous node. It has no outputs other than a log line.
    """
    # True class index per sample.
    target = np.argmax(test_y.to_numpy(), axis=1)
    # Fraction of samples whose predicted index matches the target.
    accuracy = np.sum(predictions == target) / target.shape[0]
    logger = logging.getLogger(__name__)
    logger.info("Model accuracy on test set: %0.2f%%", accuracy * 100)
def _sigmoid(z):
"""A helper sigmoid function used by the training and the scoring nodes."""
return 1 / (1 + np.exp(-z))
|
[
"numpy.sum",
"numpy.concatenate",
"numpy.argmax",
"numpy.zeros",
"numpy.ones",
"numpy.vstack",
"numpy.exp",
"numpy.dot",
"logging.getLogger"
] |
[((910, 934), 'numpy.ones', 'np.ones', (['(X.shape[0], 1)'], {}), '((X.shape[0], 1))\n', (917, 934), True, 'import numpy as np\n'), ((943, 976), 'numpy.concatenate', 'np.concatenate', (['(bias, X)'], {'axis': '(1)'}), '((bias, X), axis=1)\n', (957, 976), True, 'import numpy as np\n'), ((1740, 1764), 'numpy.ones', 'np.ones', (['(X.shape[0], 1)'], {}), '((X.shape[0], 1))\n', (1747, 1764), True, 'import numpy as np\n'), ((1773, 1806), 'numpy.concatenate', 'np.concatenate', (['(bias, X)'], {'axis': '(1)'}), '((bias, X), axis=1)\n', (1787, 1806), True, 'import numpy as np\n'), ((1978, 2003), 'numpy.argmax', 'np.argmax', (['result'], {'axis': '(1)'}), '(result, axis=1)\n', (1987, 2003), True, 'import numpy as np\n'), ((2468, 2495), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2485, 2495), False, 'import logging\n'), ((1114, 1134), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (1122, 1134), True, 'import numpy as np\n'), ((1875, 1891), 'numpy.dot', 'np.dot', (['X', 'model'], {}), '(X, model)\n', (1881, 1891), True, 'import numpy as np\n'), ((2374, 2403), 'numpy.sum', 'np.sum', (['(predictions == target)'], {}), '(predictions == target)\n', (2380, 2403), True, 'import numpy as np\n'), ((1205, 1221), 'numpy.dot', 'np.dot', (['X', 'theta'], {}), '(X, theta)\n', (1211, 1221), True, 'import numpy as np\n'), ((1490, 1508), 'numpy.vstack', 'np.vstack', (['weights'], {}), '(weights)\n', (1499, 1508), True, 'import numpy as np\n'), ((2683, 2693), 'numpy.exp', 'np.exp', (['(-z)'], {}), '(-z)\n', (2689, 2693), True, 'import numpy as np\n'), ((1273, 1291), 'numpy.dot', 'np.dot', (['X.T', '(h - y)'], {}), '(X.T, h - y)\n', (1279, 1291), True, 'import numpy as np\n')]
|
from django.contrib.admin import ModelAdmin, site
from django.utils.translation import gettext_lazy as _
from respa_o365.models import OutlookCalendarLink
class OutlookCalendarLinkAdmin(ModelAdmin):
    """Admin configuration for OutlookCalendarLink rows (mostly read-only)."""
    list_display = ('resource', 'user')
    # NOTE(review): if 'resource'/'user' are relations, admin search needs
    # field lookups such as 'resource__name' — confirm against the model.
    search_fields = ('resource', 'user')
    fields = ('resource', 'user', 'reservation_calendar_id', 'availability_calendar_id')
    # Everything except 'user' is read-only in the change form.
    readonly_fields = ('resource', 'reservation_calendar_id', 'availability_calendar_id')
    def get_form(self, request, obj=None, **kwargs): # pragma: no cover
        # NOTE(review): pass-through override; behaves exactly like the parent.
        form = super(OutlookCalendarLinkAdmin, self).get_form(request, obj, **kwargs)
        return form
    class Meta:
        # NOTE(review): ModelAdmin defines no Meta contract — Django ignores
        # these verbose_name settings; they belong on the model's Meta.
        verbose_name = _("O365 Calendar Link")
        verbose_name_plural = _("O365 Calendar Links")
site.register(OutlookCalendarLink, OutlookCalendarLinkAdmin)
|
[
"django.contrib.admin.site.register",
"django.utils.translation.gettext_lazy"
] |
[((768, 828), 'django.contrib.admin.site.register', 'site.register', (['OutlookCalendarLink', 'OutlookCalendarLinkAdmin'], {}), '(OutlookCalendarLink, OutlookCalendarLinkAdmin)\n', (781, 828), False, 'from django.contrib.admin import ModelAdmin, site\n'), ((688, 711), 'django.utils.translation.gettext_lazy', '_', (['"""O365 Calendar Link"""'], {}), "('O365 Calendar Link')\n", (689, 711), True, 'from django.utils.translation import gettext_lazy as _\n'), ((742, 766), 'django.utils.translation.gettext_lazy', '_', (['"""O365 Calendar Links"""'], {}), "('O365 Calendar Links')\n", (743, 766), True, 'from django.utils.translation import gettext_lazy as _\n')]
|
''' Home based pages: home, resume and blog (future implementation) '''
from django.shortcuts import render
from django.views.generic import TemplateView
import helpers.import_common_class.paragraph_helpers as para_helper
from common_classes.paragraphs_for_display_cat import ParagraphsForDisplayCat
def home(request):
    """Render the static home page."""
    return render(request, 'home/home.html')
class ResumeView(TemplateView):
    ''' This will be a basic category page (like the exercise page or blogs) '''

    # Declare the template up front instead of assigning self.template_name
    # inside get_context_data: mutating a class-level setting per request is
    # an anti-pattern, and the declarative attribute resolves identically.
    template_name = 'category.html'

    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super().get_context_data(**kwargs)
        return self._add_to_context(context)

    def _add_to_context(self, context):
        # The slug selects which category's paragraphs are loaded.
        context['slug'] = 'resume'
        return para_helper.paragraph_view_input(context, False, ParagraphsForDisplayCat)
|
[
"django.shortcuts.render",
"helpers.import_common_class.paragraph_helpers.paragraph_view_input"
] |
[((332, 365), 'django.shortcuts.render', 'render', (['request', '"""home/home.html"""'], {}), "(request, 'home/home.html')\n", (338, 365), False, 'from django.shortcuts import render\n'), ((848, 921), 'helpers.import_common_class.paragraph_helpers.paragraph_view_input', 'para_helper.paragraph_view_input', (['context', '(False)', 'ParagraphsForDisplayCat'], {}), '(context, False, ParagraphsForDisplayCat)\n', (880, 921), True, 'import helpers.import_common_class.paragraph_helpers as para_helper\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# Docker-Services-CLI is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Module tests."""
import os
import pytest
from docker_services_cli.env import _is_version, _load_or_set_env
def test_is_version():
    """_is_version accepts dotted release strings and rejects service labels."""
    for candidate in ("10", "10.1", "10.1.2", "10.1.2a3"):
        assert _is_version(candidate)
    assert not _is_version("SERVICE_10_LATEST")
def test_load_or_set_env_default():
    """Tests the loading of a given default value."""
    _load_or_set_env("TEST_VERSION_DEFAULT", "1.0.0")
    try:
        assert os.environ.get("TEST_VERSION_DEFAULT") == "1.0.0"
    finally:
        # Clean up even if the assertion fails, so later tests see a pristine env.
        del os.environ['TEST_VERSION_DEFAULT']
def test_load_or_set_env_from_value():
    """Tests the loading of a set value."""
    os.environ["TEST_VERSION_DEFAULT"] = "2.0.0"
    try:
        _load_or_set_env("TEST_VERSION_DEFAULT", "1.0.0")
        # A pre-existing value must win over the supplied default.
        assert os.environ.get("TEST_VERSION_DEFAULT") == "2.0.0"
    finally:
        # Clean up even if the assertion fails, so later tests see a pristine env.
        del os.environ['TEST_VERSION_DEFAULT']
def test_load_or_set_env_from_string():
    """Tests the loading of a service default value from string."""
    os.environ["TEST_SERVICE_VERSION_DEFAULT"] = "1.0.0"
    os.environ["TEST_VERSION_DEFAULT"] = "TEST_SERVICE_VERSION_DEFAULT"
    try:
        _load_or_set_env("TEST_VERSION_DEFAULT", "2.0.0")
        # The indirection through the referenced variable must be resolved.
        assert os.environ.get("TEST_VERSION_DEFAULT") == "1.0.0"
    finally:
        # Clean up even if the assertion fails, so later tests see a pristine env.
        del os.environ['TEST_SERVICE_VERSION_DEFAULT']
        del os.environ['TEST_VERSION_DEFAULT']
def test_setversion_not_set():
    """Tests the loading when it results in a system exit."""
    os.environ["TEST_VERSION_DEFAULT"] = "TEST_NOT_EXISTING"
    try:
        with pytest.raises(SystemExit) as ex:
            _load_or_set_env("TEST_VERSION_DEFAULT", "2.0.0")
        assert ex.value.code == 1
    finally:
        # Clean up even if the assertion fails, so later tests see a pristine env.
        del os.environ['TEST_VERSION_DEFAULT']
|
[
"os.environ.get",
"pytest.raises",
"docker_services_cli.env._is_version",
"docker_services_cli.env._load_or_set_env"
] |
[((361, 378), 'docker_services_cli.env._is_version', '_is_version', (['"""10"""'], {}), "('10')\n", (372, 378), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((390, 409), 'docker_services_cli.env._is_version', '_is_version', (['"""10.1"""'], {}), "('10.1')\n", (401, 409), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((421, 442), 'docker_services_cli.env._is_version', '_is_version', (['"""10.1.2"""'], {}), "('10.1.2')\n", (432, 442), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((454, 477), 'docker_services_cli.env._is_version', '_is_version', (['"""10.1.2a3"""'], {}), "('10.1.2a3')\n", (465, 477), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((622, 671), 'docker_services_cli.env._load_or_set_env', '_load_or_set_env', (['"""TEST_VERSION_DEFAULT"""', '"""1.0.0"""'], {}), "('TEST_VERSION_DEFAULT', '1.0.0')\n", (638, 671), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((916, 965), 'docker_services_cli.env._load_or_set_env', '_load_or_set_env', (['"""TEST_VERSION_DEFAULT"""', '"""1.0.0"""'], {}), "('TEST_VERSION_DEFAULT', '1.0.0')\n", (932, 965), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((1314, 1363), 'docker_services_cli.env._load_or_set_env', '_load_or_set_env', (['"""TEST_VERSION_DEFAULT"""', '"""2.0.0"""'], {}), "('TEST_VERSION_DEFAULT', '2.0.0')\n", (1330, 1363), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((493, 525), 'docker_services_cli.env._is_version', '_is_version', (['"""SERVICE_10_LATEST"""'], {}), "('SERVICE_10_LATEST')\n", (504, 525), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n'), ((684, 722), 'os.environ.get', 'os.environ.get', (['"""TEST_VERSION_DEFAULT"""'], {}), "('TEST_VERSION_DEFAULT')\n", (698, 722), False, 'import os\n'), ((978, 1016), 'os.environ.get', 'os.environ.get', 
(['"""TEST_VERSION_DEFAULT"""'], {}), "('TEST_VERSION_DEFAULT')\n", (992, 1016), False, 'import os\n'), ((1376, 1414), 'os.environ.get', 'os.environ.get', (['"""TEST_VERSION_DEFAULT"""'], {}), "('TEST_VERSION_DEFAULT')\n", (1390, 1414), False, 'import os\n'), ((1686, 1711), 'pytest.raises', 'pytest.raises', (['SystemExit'], {}), '(SystemExit)\n', (1699, 1711), False, 'import pytest\n'), ((1727, 1776), 'docker_services_cli.env._load_or_set_env', '_load_or_set_env', (['"""TEST_VERSION_DEFAULT"""', '"""2.0.0"""'], {}), "('TEST_VERSION_DEFAULT', '2.0.0')\n", (1743, 1776), False, 'from docker_services_cli.env import _is_version, _load_or_set_env\n')]
|
from __future__ import division, absolute_import, print_function
import unittest
import numpy.testing as testing
import numpy as np
import healpy as hp
import healsparse
class UpdateValuesTestCase(unittest.TestCase):
    """Unit tests for HealSparseMap.update_values_pix / update_values_pos."""
    def test_update_values_inorder(self):
        """
        Test doing update_values, in coarse pixel order.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        nfine_per_cov = 2**sparse_map._cov_map.bit_shift
        # First batch targets coverage pixel 10.
        test_pix = np.arange(nfine_per_cov) + nfine_per_cov * 10
        test_values = np.zeros(nfine_per_cov)
        sparse_map.update_values_pix(test_pix, test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(valid_pixels, test_pix)
        # Second batch targets a later coverage pixel (16 > 10), i.e. in order.
        test_pix2 = np.arange(nfine_per_cov) + nfine_per_cov * 16
        test_values2 = np.zeros(nfine_per_cov) + 100
        sparse_map.update_values_pix(test_pix2, test_values2)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix2), test_values2)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(np.sort(valid_pixels), np.sort(np.concatenate((test_pix, test_pix2))))
    def test_update_values_outoforder(self):
        """
        Test doing updateValues, out of order.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        nfine_per_cov = 2**sparse_map._cov_map.bit_shift
        # First batch targets coverage pixel 16 ...
        test_pix = np.arange(nfine_per_cov) + nfine_per_cov * 16
        test_values = np.zeros(nfine_per_cov)
        sparse_map.update_values_pix(test_pix, test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(valid_pixels, test_pix)
        # ... then the second batch targets an *earlier* coverage pixel (10).
        test_pix2 = np.arange(nfine_per_cov) + nfine_per_cov * 10
        test_values2 = np.zeros(nfine_per_cov) + 100
        sparse_map.update_values_pix(test_pix2, test_values2)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix2), test_values2)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(np.sort(valid_pixels), np.sort(np.concatenate((test_pix, test_pix2))))
    def test_update_values_nonunique(self):
        """
        Test doing update_values with non-unique pixels.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        # Pixel 0 appears twice; both APIs must reject duplicated pixels.
        pixels = np.array([0, 1, 5, 10, 0])
        self.assertRaises(ValueError, sparse_map.update_values_pix, pixels, 0.0)
        self.assertRaises(ValueError, sparse_map.__setitem__, pixels, 0.0)
    def test_update_values_or(self):
        """
        Test doing update_values with or operation.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.int32
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, sentinel=0)
        # Check with new unique pixels
        pixels = np.arange(4)
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels], values)
        # Check with pre-existing unique pixels
        values2 = np.array([2**1, 2**2, 2**3, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values2, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   values | values2)
        # Check with new non-unique pixels
        pixels = np.array([100, 101, 102, 100])
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   np.array([2**0 | 2**4, 2**1, 2**2, 2**0 | 2**4]))
        # Check with pre-existing non-unique pixels
        values = np.array([2**1, 2**2, 2**3, 2**5], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   np.array([2**0 | 2**4 | 2**1 | 2**5,
                                             2**1 | 2**2,
                                             2**2 | 2**3,
                                             2**0 | 2**4 | 2**1 | 2**5]))
    def test_update_values_and(self):
        """
        Test doing update_values with and operation.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.int32
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, sentinel=0)
        # Check with new unique pixels
        pixels = np.arange(4)
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values*0)
        # Check with pre-existing unique pixels
        sparse_map[pixels] = values
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values)
        # Check with new non-unique pixels
        pixels = np.array([100, 101, 102, 100])
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values*0)
        # Check with pre-existing non-unique pixels
        sparse_map[100] = 2**0 | 2**4
        sparse_map[101] = 2**1
        sparse_map[102] = 2**2
        sparse_map.update_values_pix(pixels, values, operation='and')
        # The first and last will be 0 because we get anded sequentially.
        testing.assert_array_equal(sparse_map[pixels],
                                   [0, 2**1, 2**2, 0])
    def test_update_values_pos(self):
        """
        Test doing update_values with positions (unique and non-unique).
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        pixels = np.array([0, 1, 5, 10, 20])
        ra, dec = hp.pix2ang(nside_map, pixels, lonlat=True, nest=True)
        sparse_map.update_values_pos(ra, dec, 0.0)
        testing.assert_array_almost_equal(sparse_map[pixels], 0.0)
        # Test non-unique raise
        pixels = np.array([0, 1, 5, 10, 0])
        ra, dec = hp.pix2ang(nside_map, pixels, lonlat=True, nest=True)
        self.assertRaises(ValueError, sparse_map.update_values_pos, ra, dec, 0.0)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main",
"healpy.pix2ang",
"numpy.testing.assert_array_almost_equal",
"numpy.testing.assert_array_equal",
"numpy.zeros",
"numpy.sort",
"numpy.array",
"numpy.arange",
"numpy.testing.assert_equal",
"healsparse.HealSparseMap.make_empty",
"numpy.concatenate"
] |
[((7166, 7181), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7179, 7181), False, 'import unittest\n'), ((443, 512), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {}), '(nside_coverage, nside_map, dtype)\n', (478, 512), False, 'import healsparse\n'), ((659, 682), 'numpy.zeros', 'np.zeros', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (667, 682), True, 'import numpy as np\n'), ((886, 930), 'numpy.testing.assert_equal', 'testing.assert_equal', (['valid_pixels', 'test_pix'], {}), '(valid_pixels, test_pix)\n', (906, 930), True, 'import numpy.testing as testing\n'), ((1654, 1723), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {}), '(nside_coverage, nside_map, dtype)\n', (1689, 1723), False, 'import healsparse\n'), ((1870, 1893), 'numpy.zeros', 'np.zeros', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (1878, 1893), True, 'import numpy as np\n'), ((2097, 2141), 'numpy.testing.assert_equal', 'testing.assert_equal', (['valid_pixels', 'test_pix'], {}), '(valid_pixels, test_pix)\n', (2117, 2141), True, 'import numpy.testing as testing\n'), ((2873, 2942), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {}), '(nside_coverage, nside_map, dtype)\n', (2908, 2942), False, 'import healsparse\n'), ((2961, 2987), 'numpy.array', 'np.array', (['[0, 1, 5, 10, 0]'], {}), '([0, 1, 5, 10, 0])\n', (2969, 2987), True, 'import numpy as np\n'), ((3357, 3442), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {'sentinel': '(0)'}), '(nside_coverage, nside_map, dtype,\n sentinel=0)\n', (3392, 3442), False, 'import healsparse\n'), ((3496, 3508), 'numpy.arange', 'np.arange', (['(4)'], {}), '(4)\n', (3505, 3508), True, 'import numpy as np\n'), ((3526, 3581), 'numpy.array', 'np.array', (['[2 ** 0, 2 ** 1, 
2 ** 2, 2 ** 4]'], {'dtype': 'dtype'}), '([2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4], dtype=dtype)\n', (3534, 3581), True, 'import numpy as np\n'), ((3652, 3706), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['sparse_map[pixels]', 'values'], {}), '(sparse_map[pixels], values)\n', (3678, 3706), True, 'import numpy.testing as testing\n'), ((3774, 3829), 'numpy.array', 'np.array', (['[2 ** 1, 2 ** 2, 2 ** 3, 2 ** 4]'], {'dtype': 'dtype'}), '([2 ** 1, 2 ** 2, 2 ** 3, 2 ** 4], dtype=dtype)\n', (3782, 3829), True, 'import numpy as np\n'), ((3901, 3965), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['sparse_map[pixels]', '(values | values2)'], {}), '(sparse_map[pixels], values | values2)\n', (3927, 3965), True, 'import numpy.testing as testing\n'), ((4062, 4092), 'numpy.array', 'np.array', (['[100, 101, 102, 100]'], {}), '([100, 101, 102, 100])\n', (4070, 4092), True, 'import numpy as np\n'), ((4110, 4165), 'numpy.array', 'np.array', (['[2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4]'], {'dtype': 'dtype'}), '([2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4], dtype=dtype)\n', (4118, 4165), True, 'import numpy as np\n'), ((4438, 4493), 'numpy.array', 'np.array', (['[2 ** 1, 2 ** 2, 2 ** 3, 2 ** 5]'], {'dtype': 'dtype'}), '([2 ** 1, 2 ** 2, 2 ** 3, 2 ** 5], dtype=dtype)\n', (4446, 4493), True, 'import numpy as np\n'), ((5087, 5172), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {'sentinel': '(0)'}), '(nside_coverage, nside_map, dtype,\n sentinel=0)\n', (5122, 5172), False, 'import healsparse\n'), ((5226, 5238), 'numpy.arange', 'np.arange', (['(4)'], {}), '(4)\n', (5235, 5238), True, 'import numpy as np\n'), ((5256, 5311), 'numpy.array', 'np.array', (['[2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4]'], {'dtype': 'dtype'}), '([2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4], dtype=dtype)\n', (5264, 5311), True, 'import numpy as np\n'), ((5383, 5441), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', 
(['sparse_map[pixels]', '(values * 0)'], {}), '(sparse_map[pixels], values * 0)\n', (5409, 5441), True, 'import numpy.testing as testing\n'), ((5603, 5657), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['sparse_map[pixels]', 'values'], {}), '(sparse_map[pixels], values)\n', (5629, 5657), True, 'import numpy.testing as testing\n'), ((5719, 5749), 'numpy.array', 'np.array', (['[100, 101, 102, 100]'], {}), '([100, 101, 102, 100])\n', (5727, 5749), True, 'import numpy as np\n'), ((5767, 5822), 'numpy.array', 'np.array', (['[2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4]'], {'dtype': 'dtype'}), '([2 ** 0, 2 ** 1, 2 ** 2, 2 ** 4], dtype=dtype)\n', (5775, 5822), True, 'import numpy as np\n'), ((5894, 5952), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['sparse_map[pixels]', '(values * 0)'], {}), '(sparse_map[pixels], values * 0)\n', (5920, 5952), True, 'import numpy.testing as testing\n'), ((6257, 6327), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['sparse_map[pixels]', '[0, 2 ** 1, 2 ** 2, 0]'], {}), '(sparse_map[pixels], [0, 2 ** 1, 2 ** 2, 0])\n', (6283, 6327), True, 'import numpy.testing as testing\n'), ((6595, 6664), 'healsparse.HealSparseMap.make_empty', 'healsparse.HealSparseMap.make_empty', (['nside_coverage', 'nside_map', 'dtype'], {}), '(nside_coverage, nside_map, dtype)\n', (6630, 6664), False, 'import healsparse\n'), ((6683, 6710), 'numpy.array', 'np.array', (['[0, 1, 5, 10, 20]'], {}), '([0, 1, 5, 10, 20])\n', (6691, 6710), True, 'import numpy as np\n'), ((6729, 6782), 'healpy.pix2ang', 'hp.pix2ang', (['nside_map', 'pixels'], {'lonlat': '(True)', 'nest': '(True)'}), '(nside_map, pixels, lonlat=True, nest=True)\n', (6739, 6782), True, 'import healpy as hp\n'), ((6843, 6901), 'numpy.testing.assert_array_almost_equal', 'testing.assert_array_almost_equal', (['sparse_map[pixels]', '(0.0)'], {}), '(sparse_map[pixels], 0.0)\n', (6876, 6901), True, 'import numpy.testing as testing\n'), ((6952, 6978), 'numpy.array', 
'np.array', (['[0, 1, 5, 10, 0]'], {}), '([0, 1, 5, 10, 0])\n', (6960, 6978), True, 'import numpy as np\n'), ((6997, 7050), 'healpy.pix2ang', 'hp.pix2ang', (['nside_map', 'pixels'], {'lonlat': '(True)', 'nest': '(True)'}), '(nside_map, pixels, lonlat=True, nest=True)\n', (7007, 7050), True, 'import healpy as hp\n'), ((591, 615), 'numpy.arange', 'np.arange', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (600, 615), True, 'import numpy as np\n'), ((952, 976), 'numpy.arange', 'np.arange', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (961, 976), True, 'import numpy as np\n'), ((1021, 1044), 'numpy.zeros', 'np.zeros', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (1029, 1044), True, 'import numpy as np\n'), ((1365, 1386), 'numpy.sort', 'np.sort', (['valid_pixels'], {}), '(valid_pixels)\n', (1372, 1386), True, 'import numpy as np\n'), ((1802, 1826), 'numpy.arange', 'np.arange', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (1811, 1826), True, 'import numpy as np\n'), ((2163, 2187), 'numpy.arange', 'np.arange', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (2172, 2187), True, 'import numpy as np\n'), ((2232, 2255), 'numpy.zeros', 'np.zeros', (['nfine_per_cov'], {}), '(nfine_per_cov)\n', (2240, 2255), True, 'import numpy as np\n'), ((2576, 2597), 'numpy.sort', 'np.sort', (['valid_pixels'], {}), '(valid_pixels)\n', (2583, 2597), True, 'import numpy as np\n'), ((4318, 4378), 'numpy.array', 'np.array', (['[2 ** 0 | 2 ** 4, 2 ** 1, 2 ** 2, 2 ** 0 | 2 ** 4]'], {}), '([2 ** 0 | 2 ** 4, 2 ** 1, 2 ** 2, 2 ** 0 | 2 ** 4])\n', (4326, 4378), True, 'import numpy as np\n'), ((4646, 4764), 'numpy.array', 'np.array', (['[2 ** 0 | 2 ** 4 | 2 ** 1 | 2 ** 5, 2 ** 1 | 2 ** 2, 2 ** 2 | 2 ** 3, 2 ** \n 0 | 2 ** 4 | 2 ** 1 | 2 ** 5]'], {}), '([2 ** 0 | 2 ** 4 | 2 ** 1 | 2 ** 5, 2 ** 1 | 2 ** 2, 2 ** 2 | 2 **\n 3, 2 ** 0 | 2 ** 4 | 2 ** 1 | 2 ** 5])\n', (4654, 4764), True, 'import numpy as np\n'), ((1396, 1433), 'numpy.concatenate', 'np.concatenate', (['(test_pix, test_pix2)'], {}), 
'((test_pix, test_pix2))\n', (1410, 1433), True, 'import numpy as np\n'), ((2607, 2644), 'numpy.concatenate', 'np.concatenate', (['(test_pix, test_pix2)'], {}), '((test_pix, test_pix2))\n', (2621, 2644), True, 'import numpy as np\n')]
|
import os
# 1100533005張庭維
class Ping:
    """Interactively ping a single host and report whether it is up."""
    @property
    def Description(self):
        # Menu label shown to the user (kept in the original language).
        return('Ping單個ip')
    def Run(self):
        import subprocess  # local import: keeps the module-level imports untouched
        hostname = input('請輸入目標:')
        # Use an argument list instead of os.system("ping " + hostname) so a
        # crafted hostname cannot inject extra shell commands.
        response = subprocess.call(["ping", hostname])
        if response == 0:
            print(hostname, 'is up!')
        else:
            print(hostname, 'is down!')
class Ping2:
    """Scan all 255 hosts of the /24 network containing a user-supplied IP."""
    @property
    def Description(self):
        return('查找/24內的主機')
    def Run(self):
        import subprocess  # local import: keeps the module-level imports untouched
        hostname = input('請輸入目標:')
        octets = hostname.split(sep=".")
        prefix = octets[0] + "." + octets[1] + "." + octets[2]
        uphost = set()
        for host_id in range(1, 256):
            testhostname = prefix + "." + str(host_id)
            # Argument list avoids shell injection; -n 1 (one echo request)
            # and -w 1 (timeout) are Windows ping flags, as in the original.
            response = subprocess.call(["ping", "-n", "1", "-w", "1", testhostname])
            if response == 0:
                uphost.add(testhostname)
        os.system("cls")  # Windows-only screen clear, kept from the original
        for host in uphost:
            print(host, end=' is up.\n')
class Ping3:
    """Scan every host address inside a user-supplied CIDR block."""
    @property
    def Description(self):
        return('以CIDR查找主機')
    def Run(self):
        import subprocess  # local import: keeps the module-level imports untouched
        hostname = input('請輸入目標(格式example:192.168.136.0/24):')
        cidr = hostname.split(sep="/")
        number = 2 ** (32-int(cidr[1]))
        # Work on the address as one 32-bit integer. The previous per-octet
        # carry logic incremented an octet and then tested `octet + 1 == 256`,
        # which skipped every .255 host and pinged .0 instead (off-by-one).
        base = 0
        for part in cidr[0].split(sep="."):
            base = base * 256 + int(part)
        uphost = set()
        for offset in range(1, number):
            addr = base + offset
            testhostname = ".".join(
                str((addr >> shift) & 0xFF) for shift in (24, 16, 8, 0)
            )
            # Argument list avoids shell injection; -n/-w are Windows ping flags.
            response = subprocess.call(["ping", "-n", "1", "-w", "1", testhostname])
            if response == 0:
                uphost.add(testhostname)
        os.system("cls")  # Windows-only screen clear, kept from the original
        for host in uphost:
            print(host, end=' is up.\n')
|
[
"os.system"
] |
[((183, 212), 'os.system', 'os.system', (["('ping ' + hostname)"], {}), "('ping ' + hostname)\n", (192, 212), False, 'import os\n'), ((646, 689), 'os.system', 'os.system', (["('ping -n 1 -w 1 ' + testhostname)"], {}), "('ping -n 1 -w 1 ' + testhostname)\n", (655, 689), False, 'import os\n'), ((773, 789), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (782, 789), False, 'import os\n'), ((1691, 1734), 'os.system', 'os.system', (["('ping -n 1 -w 1 ' + testhostname)"], {}), "('ping -n 1 -w 1 ' + testhostname)\n", (1700, 1734), False, 'import os\n'), ((1818, 1834), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (1827, 1834), False, 'import os\n')]
|
from conans import ConanFile, CMake, tools
class ScLogger(ConanFile):
    """Conan recipe for the sc_logger (self check logger) library."""

    name = "sc_logger"
    version = "1.0.4"
    description = "Self check logger"
    author = "BoykoSO <<EMAIL>>"
    settings = "os", "compiler", "arch", "build_type"
    generators = "cmake"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "build_testing": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "build_testing": False,
    }
    _source_subfolder = "source_subfolder"

    def requirements(self):
        """Pull in gtest only when the test suite is enabled."""
        if self.options.build_testing:
            self.requires("gtest/1.8.1@bincrafters/stable")

    def _configure_cmake(self):
        """Create and configure the CMake helper for this recipe."""
        cmake = CMake(self)
        cmake.definitions["BUILD_TESTING"] = self.options.build_testing
        cmake.configure(source_folder=self._source_subfolder)
        return cmake

    def source(self):
        """Clone the upstream repository and check out the release tag."""
        git = tools.Git(folder=self._source_subfolder)
        git.clone("https://github.com/tomsksoft-llc/sc-logger")
        git.checkout("v%s" % self.version)

    def build(self):
        self._configure_cmake().build()

    def package(self):
        self._configure_cmake().install()

    def package2(self):
        # NOTE(review): looks like a legacy/manual variant of package();
        # kept unchanged for compatibility with any external callers.
        self.copy("*.h", dst="include", src="include")
        self.copy("libsc_logger.a", dst="lib")
        self.copy("libsc_logger.lib", dst="lib")
        self.copy("libsc_logger.so", dst="lib")
        self.copy("libsc_logger.dll", dst="lib")

    def package_info(self):
        self.cpp_info.libs = ["sc_logger"]

    def imports(self):
        self.copy("FindFilesystem.cmake", dst="cmake/modules", src="cmake/modules")
|
[
"conans.CMake",
"conans.tools.Git"
] |
[((721, 732), 'conans.CMake', 'CMake', (['self'], {}), '(self)\n', (726, 732), False, 'from conans import ConanFile, CMake, tools\n'), ((927, 967), 'conans.tools.Git', 'tools.Git', ([], {'folder': 'self._source_subfolder'}), '(folder=self._source_subfolder)\n', (936, 967), False, 'from conans import ConanFile, CMake, tools\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-03 06:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 1.9.7): make ``Post.category`` optional.

    The FK gains ``blank=True, null=True``; deletes of a Category still
    cascade to its posts.  Avoid editing once applied to any database.
    """
    dependencies = [
        ('posts', '0004_auto_20160903_0546'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='posts.Category'),
        ),
    ]
|
[
"django.db.models.ForeignKey"
] |
[((431, 542), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""posts.Category"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='posts.Category')\n", (448, 542), False, 'from django.db import migrations, models\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class FirstBlock(nn.Module):
    """Encode class / view / transform inputs into one 1536-d feature vector.

    The input is a single tensor of width ``classes + 4 + 12``; each slice is
    pushed through its own two-layer ReLU MLP (each 512 wide) and the three
    512-d results are concatenated.
    """

    def __init__(self, classes=809):
        super(FirstBlock, self).__init__()
        self.classes = classes
        # per-stream two-layer MLPs: class one-hot, view params, transform params
        self.fc1_c = nn.Linear(classes, 512)
        self.fc2_c = nn.Linear(512, 512)
        self.fc1_v = nn.Linear(4, 512)
        self.fc2_v = nn.Linear(512, 512)
        self.fc1_t = nn.Linear(12, 512)
        self.fc2_t = nn.Linear(512, 512)

    def forward(self, x):
        cls_in, view_in, tf_in = torch.split(x, [self.classes, 4, 12], dim=1)
        cls_feat = F.relu(self.fc2_c(F.relu(self.fc1_c(cls_in))))
        view_feat = F.relu(self.fc2_v(F.relu(self.fc1_v(view_in))))
        tf_feat = F.relu(self.fc2_t(F.relu(self.fc1_t(tf_in))))
        # (batch, 512 * 3)
        return torch.cat((cls_feat, view_feat, tf_feat), dim=1)
class FC5(nn.Module):
    """Expand a 1024-d code into a 256-channel 8x8 feature map."""

    def __init__(self):
        super(FC5, self).__init__()
        self.fc5 = nn.Sequential(nn.Linear(1024, 16384), nn.ReLU(inplace=True))

    def forward(self, x):
        out = self.fc5(x)
        # reshape the flat 16384 vector into (batch, 256, 8, 8)
        return out.view(-1, 256, 8, 8)
class Chair(nn.Module):
    # paper used a dataset of 809 cleaned up classes
    def __init__(self, classes = 809):
        """Generator: class/view/transform codes -> upsampled RGB image stack.

        NOTE(review): the submodules are collected in a plain Python list, so
        they are NOT registered with nn.Module — ``.parameters()`` will not
        include them and ``.to(device)`` will not move them; an
        ``nn.ModuleList`` would be needed — TODO confirm intended.
        NOTE(review): the final upconv is followed by ReLU even though it
        produces the 3-channel color output — verify against the paper.
        """
        super(Chair, self).__init__()
        self.layers = []
        # encoder MLP: mixed inputs -> 1536 -> 1024 -> 1024 -> 256x8x8
        self.layers.append(FirstBlock(classes))
        self.layers.append(nn.Sequential(nn.Linear(1536, 1024),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.Linear(1024, 1024),nn.ReLU(inplace=True)))
        self.layers.append(FC5())
        # upsample layers
        self.layers.append(nn.Sequential(nn.ConvTranspose2d(256, 256, kernel_size=4,stride=2,padding=1),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.Conv2d(256, 256, kernel_size=3, padding=1),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.ConvTranspose2d(256, 92, kernel_size=4, stride=2, padding=1),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.Conv2d(92, 92, kernel_size=3, padding=1),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.ConvTranspose2d(92, 48, kernel_size=4, stride=2, padding=1),nn.ReLU(inplace=True)))
        self.layers.append(nn.Sequential(nn.Conv2d(48, 48, kernel_size=3, padding=1),nn.ReLU(inplace=True)))
        # upconv4 for generating the target color image
        self.layers.append(nn.Sequential(nn.ConvTranspose2d(48, 3, kernel_size=4, stride=2, padding=1),nn.ReLU(inplace=True)))
|
[
"torch.nn.ReLU",
"torch.nn.ConvTranspose2d",
"torch.split",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.Linear"
] |
[((258, 281), 'torch.nn.Linear', 'nn.Linear', (['classes', '(512)'], {}), '(classes, 512)\n', (267, 281), True, 'import torch.nn as nn\n'), ((304, 323), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (313, 323), True, 'import torch.nn as nn\n'), ((365, 382), 'torch.nn.Linear', 'nn.Linear', (['(4)', '(512)'], {}), '(4, 512)\n', (374, 382), True, 'import torch.nn as nn\n'), ((405, 424), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (414, 424), True, 'import torch.nn as nn\n'), ((471, 489), 'torch.nn.Linear', 'nn.Linear', (['(12)', '(512)'], {}), '(12, 512)\n', (480, 489), True, 'import torch.nn as nn\n'), ((512, 531), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (521, 531), True, 'import torch.nn as nn\n'), ((576, 620), 'torch.split', 'torch.split', (['x', '[self.classes, 4, 12]'], {'dim': '(1)'}), '(x, [self.classes, 4, 12], dim=1)\n', (587, 620), False, 'import torch\n'), ((932, 959), 'torch.cat', 'torch.cat', (['(c, v, t)'], {'dim': '(1)'}), '((c, v, t), dim=1)\n', (941, 959), False, 'import torch\n'), ((1101, 1123), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(16384)'], {}), '(1024, 16384)\n', (1110, 1123), True, 'import torch.nn as nn\n'), ((1124, 1145), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1131, 1145), True, 'import torch.nn as nn\n'), ((1540, 1561), 'torch.nn.Linear', 'nn.Linear', (['(1536)', '(1024)'], {}), '(1536, 1024)\n', (1549, 1561), True, 'import torch.nn as nn\n'), ((1562, 1583), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1569, 1583), True, 'import torch.nn as nn\n'), ((1628, 1649), 'torch.nn.Linear', 'nn.Linear', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (1637, 1649), True, 'import torch.nn as nn\n'), ((1650, 1671), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1657, 1671), True, 'import torch.nn as nn\n'), ((1788, 1852), 'torch.nn.ConvTranspose2d', 
'nn.ConvTranspose2d', (['(256)', '(256)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(256, 256, kernel_size=4, stride=2, padding=1)\n', (1806, 1852), True, 'import torch.nn as nn\n'), ((1851, 1872), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1858, 1872), True, 'import torch.nn as nn\n'), ((1917, 1962), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(256, 256, kernel_size=3, padding=1)\n', (1926, 1962), True, 'import torch.nn as nn\n'), ((1963, 1984), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1970, 1984), True, 'import torch.nn as nn\n'), ((2029, 2092), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(256)', '(92)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(256, 92, kernel_size=4, stride=2, padding=1)\n', (2047, 2092), True, 'import torch.nn as nn\n'), ((2093, 2114), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2100, 2114), True, 'import torch.nn as nn\n'), ((2159, 2202), 'torch.nn.Conv2d', 'nn.Conv2d', (['(92)', '(92)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(92, 92, kernel_size=3, padding=1)\n', (2168, 2202), True, 'import torch.nn as nn\n'), ((2203, 2224), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2210, 2224), True, 'import torch.nn as nn\n'), ((2269, 2331), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(92)', '(48)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(92, 48, kernel_size=4, stride=2, padding=1)\n', (2287, 2331), True, 'import torch.nn as nn\n'), ((2332, 2353), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2339, 2353), True, 'import torch.nn as nn\n'), ((2398, 2441), 'torch.nn.Conv2d', 'nn.Conv2d', (['(48)', '(48)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(48, 48, kernel_size=3, padding=1)\n', (2407, 2441), True, 'import torch.nn as nn\n'), ((2442, 2463), 
'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2449, 2463), True, 'import torch.nn as nn\n'), ((2565, 2626), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(48)', '(3)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(48, 3, kernel_size=4, stride=2, padding=1)\n', (2583, 2626), True, 'import torch.nn as nn\n'), ((2627, 2648), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2634, 2648), True, 'import torch.nn as nn\n')]
|
# -*- coding: utf-8 -*-
# Copyright (C) 2019 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Unit test suite for util url_parser."""
import unittest
import ddt
from ggrc.utils import url_parser
@ddt.ddt
class TestUrlParser(unittest.TestCase):
  """Unit tests for the url_parser utility (wrapping raw URLs in anchors)."""
  @ddt.data(
      [
          "https://www.google.com/",
          '<a href="https://www.google.com/">https://www.google.com/</a>'
      ],
      [
          "http://www.google.com/",
          '<a href="http://www.google.com/">http://www.google.com/</a>'
      ],
      [
          "http://www.google.com",
          '<a href="http://www.google.com">http://www.google.com</a>'
      ],
      [
          u"http://www.тест.com",
          u'<a href="http://www.тест.com">http://www.тест.com</a>'
      ],
  )
  @ddt.unpack
  def test_wrap_raw_url(self, test_data, expected_result):
    """Url parser should wrap urls (http or https)."""
    self.assertEqual(url_parser.parse(test_data), expected_result)
  @ddt.data('<a href="https://www.google.com/">https://www.google.com/</a>',
            '<a href="http://www.google.com/">http://www.google.com/</a>')
  def test_not_wraps_links(self, data):
    """Url parser should not change wrapped urls."""
    self.assertEqual(url_parser.parse(data), data)
  @ddt.data([
      ('test <a href="https://www.google.com/">'
       'https://www.google.com/</a> link http://www.google.com/'),
      ('test <a href="https://www.google.com/">'
       'https://www.google.com/</a> link '
       '<a href="http://www.google.com/">http://www.google.com/</a>')
  ], [
      (u'тест <a href="https://www.тест.com/">тест</a> '
       u'тест http://тест.com/'),
      (u'тест <a href="https://www.тест.com/">тест</a> '
       u'тест <a href="http://тест.com/">http://тест.com/</a>')
  ])
  @ddt.unpack
  def test_parse_mixed_urls(self, test_data, expected_result):
    """Url parser should parse a string with both
    wrapped and not wrapped urls.
    """
    self.assertEqual(url_parser.parse(test_data), expected_result)
  @ddt.data(["<a>https://www.google.com/",
             "<a>https://www.google.com/"],
            ["http://www.google.com/</a>",
             ('<a href="http://www.google.com/">'
              'http://www.google.com/</a></a>')])
  @ddt.unpack
  def test_parse_broken_tags(self, test_data, expected_result):
    """Url parser should work with invalid tags."""
    self.assertEqual(url_parser.parse(test_data), expected_result)
  @ddt.data(None, "")
  def test_parse_empty_values(self, test_data):
    """Url parser should ignore None values and empty strings."""
    self.assertEqual(url_parser.parse(test_data), test_data)
|
[
"ddt.data",
"ggrc.utils.url_parser.parse"
] |
[((344, 740), 'ddt.data', 'ddt.data', (['[\'https://www.google.com/\',\n \'<a href="https://www.google.com/">https://www.google.com/</a>\']', '[\'http://www.google.com/\',\n \'<a href="http://www.google.com/">http://www.google.com/</a>\']', '[\'http://www.google.com\',\n \'<a href="http://www.google.com">http://www.google.com</a>\']', '[u\'http://www.тест.com\',\n u\'<a href="http://www.тест.com">http://www.тест.com</a>\']'], {}), '([\'https://www.google.com/\',\n \'<a href="https://www.google.com/">https://www.google.com/</a>\'], [\n \'http://www.google.com/\',\n \'<a href="http://www.google.com/">http://www.google.com/</a>\'], [\n \'http://www.google.com\',\n \'<a href="http://www.google.com">http://www.google.com</a>\'], [\n u\'http://www.тест.com\',\n u\'<a href="http://www.тест.com">http://www.тест.com</a>\'])\n', (352, 740), False, 'import ddt\n'), ((1050, 1190), 'ddt.data', 'ddt.data', (['"""<a href="https://www.google.com/">https://www.google.com/</a>"""', '"""<a href="http://www.google.com/">http://www.google.com/</a>"""'], {}), '(\'<a href="https://www.google.com/">https://www.google.com/</a>\',\n \'<a href="http://www.google.com/">http://www.google.com/</a>\')\n', (1058, 1190), False, 'import ddt\n'), ((1347, 1800), 'ddt.data', 'ddt.data', (['[\'test <a href="https://www.google.com/">https://www.google.com/</a> link http://www.google.com/\'\n ,\n \'test <a href="https://www.google.com/">https://www.google.com/</a> link <a href="http://www.google.com/">http://www.google.com/</a>\'\n ]', '[u\'тест <a href="https://www.тест.com/">тест</a> тест http://тест.com/\',\n u\'тест <a href="https://www.тест.com/">тест</a> тест <a href="http://тест.com/">http://тест.com/</a>\'\n ]'], {}), '([\n \'test <a href="https://www.google.com/">https://www.google.com/</a> link http://www.google.com/\'\n ,\n \'test <a href="https://www.google.com/">https://www.google.com/</a> link <a href="http://www.google.com/">http://www.google.com/</a>\'\n ], [\n u\'тест <a 
href="https://www.тест.com/">тест</a> тест http://тест.com/\',\n u\'тест <a href="https://www.тест.com/">тест</a> тест <a href="http://тест.com/">http://тест.com/</a>\'\n ])\n', (1355, 1800), False, 'import ddt\n'), ((2105, 2283), 'ddt.data', 'ddt.data', (["['<a>https://www.google.com/', '<a>https://www.google.com/']", '[\'http://www.google.com/</a>\',\n \'<a href="http://www.google.com/">http://www.google.com/</a></a>\']'], {}), '([\'<a>https://www.google.com/\', \'<a>https://www.google.com/\'], [\n \'http://www.google.com/</a>\',\n \'<a href="http://www.google.com/">http://www.google.com/</a></a>\'])\n', (2113, 2283), False, 'import ddt\n'), ((2533, 2551), 'ddt.data', 'ddt.data', (['None', '""""""'], {}), "(None, '')\n", (2541, 2551), False, 'import ddt\n'), ((1000, 1027), 'ggrc.utils.url_parser.parse', 'url_parser.parse', (['test_data'], {}), '(test_data)\n', (1016, 1027), False, 'from ggrc.utils import url_parser\n'), ((1313, 1335), 'ggrc.utils.url_parser.parse', 'url_parser.parse', (['data'], {}), '(data)\n', (1329, 1335), False, 'from ggrc.utils import url_parser\n'), ((2055, 2082), 'ggrc.utils.url_parser.parse', 'url_parser.parse', (['test_data'], {}), '(test_data)\n', (2071, 2082), False, 'from ggrc.utils import url_parser\n'), ((2483, 2510), 'ggrc.utils.url_parser.parse', 'url_parser.parse', (['test_data'], {}), '(test_data)\n', (2499, 2510), False, 'from ggrc.utils import url_parser\n'), ((2687, 2714), 'ggrc.utils.url_parser.parse', 'url_parser.parse', (['test_data'], {}), '(test_data)\n', (2703, 2714), False, 'from ggrc.utils import url_parser\n')]
|
# -*- coding: utf-8 -*-
import setuptools

# shuup_setup_utils provides extra setup commands; it is optional.
try:
    import shuup_setup_utils
except ImportError:
    shuup_setup_utils = None

if __name__ == "__main__":
    # Fall back to the stock command set when the helper is not importable.
    extra_commands = shuup_setup_utils.COMMANDS if shuup_setup_utils else {}
    setuptools.setup(
        cmdclass=extra_commands,
        setup_requires=["setuptools>=34.0", "setuptools-gitver"],
        gitver=True,
    )
|
[
"setuptools.setup"
] |
[((159, 316), 'setuptools.setup', 'setuptools.setup', ([], {'cmdclass': '(shuup_setup_utils.COMMANDS if shuup_setup_utils else {})', 'setup_requires': "['setuptools>=34.0', 'setuptools-gitver']", 'gitver': '(True)'}), "(cmdclass=shuup_setup_utils.COMMANDS if shuup_setup_utils else\n {}, setup_requires=['setuptools>=34.0', 'setuptools-gitver'], gitver=True)\n", (175, 316), False, 'import setuptools\n')]
|
try:
import tkinter as tk
except ImportError:
import Tkinter as tk
from .canvases import ScrolledCanvas
from .variables import NodeVar
from collections import namedtuple
import math
# A canvas item id together with the coordinates it was created with.
Element = namedtuple("Element", ["id", "coords"])
# A bezier curve's canvas tag together with the [from, to] node names it links.
BezierElement = namedtuple("BezierElement", ["id", "nodes"])
class NodeView(ScrolledCanvas):
    """Canvas that shows draggable nodes and bezier connections between them."""

    def __init__(self, master=None, **kwargs):
        super(NodeView, self).__init__(master, **kwargs)
        # node_name -> {"variable": NodeVar, "element_ids": [Element], "bezier_ids": [BezierElement]}
        self.nodes = {}
        self.connections = []
        # first endpoint of a pending connection, or None
        self.first_node_of_connection = None
        # counter used to mint unique bezier tag names
        self._bezier_id = 0

    def create_node(self, coords, node_name, node_data=None, node_connections=None,
                    tags=None, width=1.0, state=tk.NORMAL):
        """Create a rectangle-plus-text node at *coords*.

        Binds dragging (<B1-Motion>) and connection creation (<Button-1>) on
        the node's tag and returns ``node_name`` (also used as the tag).
        """
        if node_data is None:
            node_data = {}
        if node_connections is None:
            node_connections = []
        if tags is None:
            tags = []
        tags.append(node_name)
        rectangle_bbox = list(coords) + [60, 30]
        # BUGFIX: store the text position as a list so drag updates can
        # mutate it uniformly (it was a tuple, which made item assignment
        # in the drag handler raise TypeError).
        text_pos = [coords[0] + 5, coords[1] + 5]
        rectangle_id = self.canv.create_rectangle(rectangle_bbox, tags=tags, width=width,
                                                  state=state)
        text_id = self.canv.create_text(text_pos, state=state, width=width)
        self.nodes[node_name] = {"variable": NodeVar(node_name, node_data, node_connections),
                                 "element_ids": [Element(rectangle_id, rectangle_bbox),
                                                 Element(text_id, text_pos)],
                                 "bezier_ids": []}
        self.canv.tag_bind(node_name, '<B1-Motion>', lambda evt, n=node_name: self._on_node_drag(evt, n))
        self.canv.tag_bind(node_name, '<Button-1>', lambda evt, n=node_name: self._create_connection(evt, n))
        return node_name

    def _on_node_drag(self, event, node):
        """Shift every canvas element of *node* by the drag event position.

        NOTE(review): event.x / event.y are widget coordinates, not motion
        deltas, so each motion event adds the absolute position — TODO
        confirm the intended drag behaviour.
        """
        updated = []
        for element in self.nodes[node]["element_ids"]:
            new_coords = list(element.coords)
            new_coords[0] += event.x
            new_coords[1] += event.y
            # BUGFIX: Canvas.itemconfig() has no 'coords'/'bbox' option;
            # item geometry is updated through Canvas.coords(), which works
            # for both rectangles (4 values) and text items (2 values).
            self.canv.coords(element.id, *new_coords)
            updated.append(Element(element.id, new_coords))
        self.nodes[node]["element_ids"] = updated

    def _create_connection(self, event, node):
        """First click stores a start point; second click draws the bezier
        connection and links the two node variables."""
        if self.first_node_of_connection is None:
            self.first_node_of_connection = (node, (event.x, event.y))
        else:
            cp1 = self.first_node_of_connection[1]
            cp4 = (event.x, event.y)
            # intermediate control points give the curve a horizontal S shape
            x_avg = (cp4[0] - cp1[0]) / 2
            cp2 = (x_avg, cp1[1])
            cp3 = (x_avg, cp4[1])
            bezier_id = self.create_bezier([cp1, cp2, cp3, cp4])
            node_a = self.first_node_of_connection[0]
            node_b = node
            self.nodes[node_a]["bezier_ids"].append(BezierElement(bezier_id, [node_a, node_b]))
            self.nodes[node_a]["variable"].connect(self.nodes[node]["variable"])
            self.first_node_of_connection = None

    def create_bezier(self, control_points):
        """Draw the bezier defined by *control_points* as 50 line segments
        and return the canvas tag grouping them."""
        def bernstein(idx, degree):
            # Bernstein basis polynomial B_{idx,degree}(u)
            binom = math.comb(degree, idx)
            return lambda u: binom * u ** idx * (1 - u) ** (degree - idx)

        def bezier(cpts, u):
            # BUGFIX: a bezier with n control points has degree n - 1; the
            # original passed len(cpts) as the degree, so the basis functions
            # did not sum to 1 and the curve missed its endpoints.
            degree = len(cpts) - 1
            point = [0.0, 0.0]
            for idx, cp in enumerate(cpts):
                weight = bernstein(idx, degree)(u)
                point[0] += weight * cp[0]
                point[1] += weight * cp[1]
            return point

        tag_name = f"bezier{self._bezier_id}"
        self._bezier_id += 1
        # previous sample point, starting at u = 0 (first control point)
        x_start = control_points[0][0]
        y_start = control_points[0][1]
        n = 50
        for i in range(n):
            t = i / n
            x, y = bezier(control_points, t)
            self.canv.create_line(x, y, x_start, y_start, tags=[tag_name])
            x_start = x
            y_start = y
        return tag_name
|
[
"math.comb",
"collections.namedtuple"
] |
[((202, 241), 'collections.namedtuple', 'namedtuple', (['"""Element"""', "['id', 'coords']"], {}), "('Element', ['id', 'coords'])\n", (212, 241), False, 'from collections import namedtuple\n'), ((258, 302), 'collections.namedtuple', 'namedtuple', (['"""BezierElement"""', "['id', 'nodes']"], {}), "('BezierElement', ['id', 'nodes'])\n", (268, 302), False, 'from collections import namedtuple\n'), ((3088, 3105), 'math.comb', 'math.comb', (['m', 'idx'], {}), '(m, idx)\n', (3097, 3105), False, 'import math\n')]
|
"""
Allows the player to manage his team
--
Author : DrLarck
Last update : 11/09/19 (DrLarck)
"""
# dependancies
import asyncio
from discord.ext import commands
# utils
from utility.cog.player.player import Player
from utility.command._fighter import Fighter
from utility.cog.character.getter import Character_getter
from utility.command._fighter import Fighter
# graphic
from utility.graphic.embed import Custom_embed
from utility.cog.displayer.icon import Icon_displayer
# check
from utility.command.checker.basic import Basic_checker
# command
class Cmd_fighter(commands.Cog):
    """Discord cog exposing the ``fighter`` command group: show the team,
    remove a fighter from a slot, and set fighters into slots a/b/c."""
    def __init__(self, client):
        # bot client plus shared helpers for character lookup and icon display
        self.client = client
        self.getter = Character_getter()
        self.icon = Icon_displayer()
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @commands.group()
    async def fighter(self, ctx):
        """
        The fighter command group
        """
        # display the fighter help here
    ################ FIGHTER ###################
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @fighter.command()
    async def remove(self, ctx, slot):
        """
        Allows the player to remove a character from a slot.
        """
        # init
        player = Player(ctx, self.client, ctx.message.author)
        # snapshot taken before removal so the removed character can be shown
        player_team = await player.team.get_team()
        getter = Character_getter()
        possible_slot = ["a", "b", "c"]
        # remove the slot
        if slot.lower() in possible_slot:
            await player.team.remove(slot)
            removed_character = await getter.get_from_unique(self.client, player_team[slot])
            await removed_character.init()
            await ctx.send(f"<@{player.id}> You have successfully removed {removed_character.image.icon}**{removed_character.info.name}** {removed_character.type.icon}{removed_character.rarity.icon} from the slot **{slot.upper()}**.")
        else: # unexisting slot
            await ctx.send(f"<@{player.id}> Slot **{slot.upper()}** not found.")
            return
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @fighter.command()
    async def team(self, ctx):
        """
        Displays the player's team.
        """
        # init
        player = Player(ctx, self.client, ctx.message.author)
        player_team = await player.team.get_team()
        player_team_info = await player.team.get_info()
        embed = await Custom_embed(self.client).setup_embed()
        char_a, char_b, char_c = None, None, None
        # set the info icon
        player_team_info["rarity"] = await self.icon.get_rarity_icon(player_team_info["rarity"])
        # set the player team display
        # each slot is either empty ("--") or a formatted character line
        ### a
        if(player_team["a"] == None):
            player_team["a"] = "--"
        else:
            char_a = await self.getter.get_from_unique(self.client, player_team["a"])
            char_a = f"`{player_team['a']}` | {char_a.image.icon}{char_a.info.name} {char_a.type.icon}{char_a.rarity.icon} lv.{char_a.level:,}"
        ### b
        if(player_team["b"] == None):
            player_team["b"] = "--"
        else:
            char_b = await self.getter.get_from_unique(self.client, player_team["b"])
            char_b = f"`{player_team['b']}` | {char_b.image.icon}{char_b.info.name} {char_b.type.icon}{char_b.rarity.icon} lv.{char_b.level:,}"
        ### c
        if(player_team["c"] == None):
            player_team["c"] = "--"
        else:
            char_c = await self.getter.get_from_unique(self.client, player_team["c"])
            char_c = f"`{player_team['c']}` | {char_c.image.icon}{char_c.info.name} {char_c.type.icon}{char_c.rarity.icon} lv.{char_c.level:,}"
        # set display
        display_infos = f"""
        *Average level* : {player_team_info['level']}
        *Average rarity* : {player_team_info['rarity']}
        """
        display_character = f"""
        A : **{char_a}** 👑
        B : **{char_b}**
        C : **{char_c}**
        """
        # set the embed
        embed.set_thumbnail(url = player.avatar)
        embed.add_field(
            name = f"{player.name}'s team",
            value = display_infos,
            inline = False
        )
        embed.add_field(
            name = "Fighters",
            value = display_character,
            inline = False
        )
        await ctx.send(embed = embed)
    #################### SET ####################
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @fighter.group(invoke_without_command = True)
    async def set(self, cxt):  # NOTE(review): parameter spelled "cxt" (likely "ctx" typo); harmless as it is unused
        """
        Allow the player to set a fighter
        """
        # display the set help here
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @set.command()
    async def a(self, ctx, character_id):
        """
        Set the fighter slot a
        """
        # init
        player = Player(ctx, self.client, ctx.message.author)
        fighter = Fighter(ctx, self.client, player)
        await fighter.fighter_command("a", character_id)
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @set.command()
    async def b(self, ctx, character_id):
        """
        Set the fighter slot b
        """
        # init
        player = Player(ctx, self.client, ctx.message.author)
        fighter = Fighter(ctx, self.client, player)
        await fighter.fighter_command("b", character_id)
    @commands.check(Basic_checker().is_game_ready)
    @commands.check(Basic_checker().is_registered)
    @set.command()
    async def c(self, ctx, character_id):
        """
        Set the fighter slot c
        """
        # init
        player = Player(ctx, self.client, ctx.message.author)
        fighter = Fighter(ctx, self.client, player)
        await fighter.fighter_command("c", character_id)
def setup(client):
    """discord.py extension entry point: register the fighter cog."""
    client.add_cog(Cmd_fighter(client))
|
[
"utility.command._fighter.Fighter",
"utility.cog.displayer.icon.Icon_displayer",
"utility.cog.character.getter.Character_getter",
"utility.cog.player.player.Player",
"utility.graphic.embed.Custom_embed",
"utility.command.checker.basic.Basic_checker",
"discord.ext.commands.group"
] |
[((840, 856), 'discord.ext.commands.group', 'commands.group', ([], {}), '()\n', (854, 856), False, 'from discord.ext import commands\n'), ((676, 694), 'utility.cog.character.getter.Character_getter', 'Character_getter', ([], {}), '()\n', (692, 694), False, 'from utility.cog.character.getter import Character_getter\n'), ((715, 731), 'utility.cog.displayer.icon.Icon_displayer', 'Icon_displayer', ([], {}), '()\n', (729, 731), False, 'from utility.cog.displayer.icon import Icon_displayer\n'), ((1322, 1366), 'utility.cog.player.player.Player', 'Player', (['ctx', 'self.client', 'ctx.message.author'], {}), '(ctx, self.client, ctx.message.author)\n', (1328, 1366), False, 'from utility.cog.player.player import Player\n'), ((1435, 1453), 'utility.cog.character.getter.Character_getter', 'Character_getter', ([], {}), '()\n', (1451, 1453), False, 'from utility.cog.character.getter import Character_getter\n'), ((2368, 2412), 'utility.cog.player.player.Player', 'Player', (['ctx', 'self.client', 'ctx.message.author'], {}), '(ctx, self.client, ctx.message.author)\n', (2374, 2412), False, 'from utility.cog.player.player import Player\n'), ((5113, 5157), 'utility.cog.player.player.Player', 'Player', (['ctx', 'self.client', 'ctx.message.author'], {}), '(ctx, self.client, ctx.message.author)\n', (5119, 5157), False, 'from utility.cog.player.player import Player\n'), ((5176, 5209), 'utility.command._fighter.Fighter', 'Fighter', (['ctx', 'self.client', 'player'], {}), '(ctx, self.client, player)\n', (5183, 5209), False, 'from utility.command._fighter import Fighter\n'), ((5524, 5568), 'utility.cog.player.player.Player', 'Player', (['ctx', 'self.client', 'ctx.message.author'], {}), '(ctx, self.client, ctx.message.author)\n', (5530, 5568), False, 'from utility.cog.player.player import Player\n'), ((5587, 5620), 'utility.command._fighter.Fighter', 'Fighter', (['ctx', 'self.client', 'player'], {}), '(ctx, self.client, player)\n', (5594, 5620), False, 'from utility.command._fighter import 
Fighter\n'), ((5935, 5979), 'utility.cog.player.player.Player', 'Player', (['ctx', 'self.client', 'ctx.message.author'], {}), '(ctx, self.client, ctx.message.author)\n', (5941, 5979), False, 'from utility.cog.player.player import Player\n'), ((5998, 6031), 'utility.command._fighter.Fighter', 'Fighter', (['ctx', 'self.client', 'player'], {}), '(ctx, self.client, player)\n', (6005, 6031), False, 'from utility.command._fighter import Fighter\n'), ((753, 768), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (766, 768), False, 'from utility.command.checker.basic import Basic_checker\n'), ((804, 819), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (817, 819), False, 'from utility.command.checker.basic import Basic_checker\n'), ((1060, 1075), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (1073, 1075), False, 'from utility.command.checker.basic import Basic_checker\n'), ((1111, 1126), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (1124, 1126), False, 'from utility.command.checker.basic import Basic_checker\n'), ((2139, 2154), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (2152, 2154), False, 'from utility.command.checker.basic import Basic_checker\n'), ((2190, 2205), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (2203, 2205), False, 'from utility.command.checker.basic import Basic_checker\n'), ((4596, 4611), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (4609, 4611), False, 'from utility.command.checker.basic import Basic_checker\n'), ((4647, 4662), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (4660, 4662), False, 'from utility.command.checker.basic import Basic_checker\n'), ((4882, 4897), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (4895, 4897), False, 
'from utility.command.checker.basic import Basic_checker\n'), ((4933, 4948), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (4946, 4948), False, 'from utility.command.checker.basic import Basic_checker\n'), ((5293, 5308), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (5306, 5308), False, 'from utility.command.checker.basic import Basic_checker\n'), ((5344, 5359), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (5357, 5359), False, 'from utility.command.checker.basic import Basic_checker\n'), ((5704, 5719), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (5717, 5719), False, 'from utility.command.checker.basic import Basic_checker\n'), ((5755, 5770), 'utility.command.checker.basic.Basic_checker', 'Basic_checker', ([], {}), '()\n', (5768, 5770), False, 'from utility.command.checker.basic import Basic_checker\n'), ((2542, 2567), 'utility.graphic.embed.Custom_embed', 'Custom_embed', (['self.client'], {}), '(self.client)\n', (2554, 2567), False, 'from utility.graphic.embed import Custom_embed\n')]
|
from pathlib import Path
import numpy as np
import pytest
from npe2 import DynamicPlugin
from npe2.manifest.contributions import SampleDataURI
import napari
from napari.layers._source import Source
from napari.viewer import ViewerModel
def test_sample_hook(builtins, tmp_plugin: DynamicPlugin):
    """Sample data can be registered through npe2 (function and URI
    contributions) and opened via ViewerModel.open_sample, with the layer's
    Source recording how each sample was produced."""
    viewer = ViewerModel()
    NAME = tmp_plugin.name
    KEY = 'random data'
    # opening a sample key that is not registered yet must raise KeyError
    with pytest.raises(KeyError, match=f"Plugin {NAME!r} does not provide"):
        viewer.open_sample(NAME, KEY)
    # register a function-based sample contribution under KEY
    @tmp_plugin.contribute.sample_data(key=KEY)
    def _generate_random_data(shape=(512, 512)):
        data = np.random.rand(*shape)
        return [(data, {'name': KEY})]
    # register a URI-based sample contribution pointing at the bundled logo
    LOGO = str(Path(napari.__file__).parent / 'resources' / 'logo.png')
    tmp_plugin.manifest.contributions.sample_data.append(
        SampleDataURI(uri=LOGO, key='napari logo', display_name='Napari logo')
    )
    assert len(viewer.layers) == 0
    viewer.open_sample(NAME, KEY)
    # function-based samples record no path/reader, only the sample id
    assert viewer.layers[-1].source == Source(
        path=None, reader_plugin=None, sample=(NAME, KEY)
    )
    assert len(viewer.layers) == 1
    viewer.open_sample(NAME, 'napari logo')
    # URI-based samples are read from disk, so path and reader are recorded
    assert viewer.layers[-1].source == Source(
        path=LOGO, reader_plugin='napari', sample=(NAME, 'napari logo')
    )
    # test calling with kwargs
    viewer.open_sample(NAME, KEY, shape=(256, 256))
    assert len(viewer.layers) == 3
    assert viewer.layers[-1].source == Source(sample=(NAME, KEY))
|
[
"napari.viewer.ViewerModel",
"npe2.manifest.contributions.SampleDataURI",
"pytest.raises",
"pathlib.Path",
"numpy.random.rand",
"napari.layers._source.Source"
] |
[((313, 326), 'napari.viewer.ViewerModel', 'ViewerModel', ([], {}), '()\n', (324, 326), False, 'from napari.viewer import ViewerModel\n'), ((387, 453), 'pytest.raises', 'pytest.raises', (['KeyError'], {'match': 'f"""Plugin {NAME!r} does not provide"""'}), "(KeyError, match=f'Plugin {NAME!r} does not provide')\n", (400, 453), False, 'import pytest\n'), ((606, 628), 'numpy.random.rand', 'np.random.rand', (['*shape'], {}), '(*shape)\n', (620, 628), True, 'import numpy as np\n'), ((807, 877), 'npe2.manifest.contributions.SampleDataURI', 'SampleDataURI', ([], {'uri': 'LOGO', 'key': '"""napari logo"""', 'display_name': '"""Napari logo"""'}), "(uri=LOGO, key='napari logo', display_name='Napari logo')\n", (820, 877), False, 'from npe2.manifest.contributions import SampleDataURI\n'), ((993, 1050), 'napari.layers._source.Source', 'Source', ([], {'path': 'None', 'reader_plugin': 'None', 'sample': '(NAME, KEY)'}), '(path=None, reader_plugin=None, sample=(NAME, KEY))\n', (999, 1050), False, 'from napari.layers._source import Source\n'), ((1183, 1254), 'napari.layers._source.Source', 'Source', ([], {'path': 'LOGO', 'reader_plugin': '"""napari"""', 'sample': "(NAME, 'napari logo')"}), "(path=LOGO, reader_plugin='napari', sample=(NAME, 'napari logo'))\n", (1189, 1254), False, 'from napari.layers._source import Source\n'), ((1427, 1453), 'napari.layers._source.Source', 'Source', ([], {'sample': '(NAME, KEY)'}), '(sample=(NAME, KEY))\n', (1433, 1453), False, 'from napari.layers._source import Source\n'), ((684, 705), 'pathlib.Path', 'Path', (['napari.__file__'], {}), '(napari.__file__)\n', (688, 705), False, 'from pathlib import Path\n')]
|
# Copyright (c) 2015-2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import collections
import io
import json
import sys
try:
import docker
except ImportError: # pragma: no cover
sys.exit('ERROR: Driver missing, install docker-py.')
from molecule import util
from molecule.driver import basedriver
class DockerDriver(basedriver.BaseDriver):
def __init__(self, molecule):
super(DockerDriver, self).__init__(molecule)
self._docker = docker.Client(
version='auto', **docker.utils.kwargs_from_env())
self._containers = self.molecule.config.config['docker']['containers']
self._provider = self._get_provider()
self._platform = self._get_platform()
self.image_tag = 'molecule_local/{}:{}'
if 'build_image' not in self.molecule.config.config['docker']:
self.molecule.config.config['docker']['build_image'] = True
@property
def name(self):
return 'docker'
@property
def instances(self):
created_containers = self._docker.containers(all=True)
created_container_names = [
container.get('Names')[0][1:].encode('utf-8')
for container in created_containers
]
for container in self._containers:
if container.get('name') in created_container_names:
container['created'] = True
else:
container['created'] = False
return self._containers
@property
def default_provider(self):
return self._provider
@property
def default_platform(self):
return self._platform
@property
def provider(self):
return self._provider
@property
def platform(self):
return self._platform
@platform.setter
def platform(self, val):
self._platform = val
@property
def valid_providers(self):
return [{'name': self.provider}]
@property
def valid_platforms(self):
return [{'name': self.platform}]
@property
def ssh_config_file(self):
return
@property
def ansible_connection_params(self):
return {'user': 'root', 'connection': 'docker'}
@property
def testinfra_args(self):
return {'connection': 'docker'}
@property
def serverspec_args(self):
return {}
def up(self, no_provision=True):
self.molecule.state.change_state('driver', self.name)
if self.molecule.config.config['docker']['build_image']:
self._build_ansible_compatible_image()
else:
self.image_tag = '{}:{}'
for container in self.instances:
privileged = container.get('privileged', False)
port_bindings = container.get('port_bindings', {})
volume_mounts = container.get('volume_mounts', [])
links = container.get('links', {})
cap_add = container.get('cap_add', [])
cap_drop = container.get('cap_drop', [])
command = container.get('command', '')
environment = container.get('environment')
docker_host_config = self._docker.create_host_config(
privileged=privileged,
port_bindings=port_bindings,
binds=volume_mounts,
links=links,
cap_add=cap_add,
cap_drop=cap_drop)
if (container['created'] is not True):
msg = ('Creating container {} '
'with base image {}:{}...').format(
container['name'], container['image'],
container['image_version'])
util.print_warn(msg)
container = self._docker.create_container(
image=self.image_tag.format(container['image'],
container['image_version']),
tty=True,
detach=False,
name=container['name'],
ports=port_bindings.keys(),
host_config=docker_host_config,
environment=environment,
command=command)
self._docker.start(container=container.get('Id'))
container['created'] = True
util.print_success('Container created.')
else:
self._docker.start(container['name'])
msg = 'Starting container {}...'.format(container['name'])
util.print_info(msg)
def destroy(self):
for container in self.instances:
if (container['created']):
msg = 'Stopping container {}...'.format(container['name'])
util.print_warn(msg)
self._docker.stop(container['name'], timeout=0)
self._docker.remove_container(container['name'])
msg = 'Removed container {}.'.format(container['name'])
util.print_success(msg)
container['created'] = False
def status(self):
Status = collections.namedtuple(
'Status', ['name', 'state', 'provider', 'ports'])
status_list = []
for container in self.instances:
name = container.get('name')
try:
d = self._docker.containers(filters={'name': name})[0]
state = d.get('Status')
ports = d.get('Ports')
except IndexError:
state = 'not_created'
ports = []
status_list.append(
Status(
name=name,
state=state,
provider=self.provider,
ports=ports))
return status_list
def conf(self, vm_name=None, ssh_config=False):
pass
def inventory_entry(self, instance):
template = '{} ansible_connection=docker\n'
return template.format(instance['name'])
def login_cmd(self, instance):
return 'docker exec -ti {} bash'
def login_args(self, instance):
return [instance]
def _get_platform(self):
return 'docker'
def _get_provider(self):
return 'docker'
def _build_ansible_compatible_image(self):
available_images = [
tag.encode('utf-8')
for image in self._docker.images()
for tag in image.get('RepoTags', [])
]
for container in self.instances:
if container.get('build_image'):
msg = ('Creating Ansible compatible '
'image of {}:{} ...').format(container['image'],
container['image_version'])
util.print_info(msg)
if 'registry' in container:
container['registry'] += '/'
else:
container['registry'] = ''
dockerfile = '''
FROM {container_image}:{container_version}
{container_environment}
RUN bash -c 'if [ -x "$(command -v apt-get)" ]; then apt-get update && apt-get install -y python sudo; fi'
RUN bash -c 'if [ -x "$(command -v yum)" ]; then yum makecache fast && yum update -y && yum install -y python sudo yum-plugin-ovl && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf; fi'
RUN bash -c 'if [ -x "$(command -v zypper)" ]; then zypper refresh && zypper update -y && zypper install -y python sudo; fi'
''' # noqa
if 'dockerfile' in container:
dockerfile = container['dockerfile']
f = io.open(dockerfile)
else:
environment = container.get('environment')
if environment:
environment = '\n'.join(
'ENV {} {}'.format(k, v)
for k, v in environment.iteritems())
else:
environment = ''
dockerfile = dockerfile.format(
container_image=container['registry'] + container['image'],
container_version=container['image_version'],
container_environment=environment)
f = io.BytesIO(dockerfile.encode('utf-8'))
container['image'] = container['registry'].replace(
'/', '_').replace(':', '_') + container['image']
tag_string = self.image_tag.format(container['image'],
container['image_version'])
errors = False
if tag_string not in available_images or 'dockerfile' in container:
util.print_info('Building ansible compatible image...')
previous_line = ''
for line in self._docker.build(fileobj=f, tag=tag_string):
for line_split in line.split('\n'):
if len(line_split) > 0:
line = json.loads(line_split)
if 'stream' in line:
msg = '\t{}'.format(line['stream'])
util.print_warn(msg)
if 'errorDetail' in line:
ed = line['errorDetail']['message']
msg = '\t{}'.format(ed)
util.print_warn(msg)
errors = True
if 'status' in line:
if previous_line not in line['status']:
msg = '\t{} ...'.format(line['status'])
util.print_warn(msg)
previous_line = line['status']
if errors:
msg = 'Build failed for {}.'.format(tag_string)
util.print_error(msg)
return
else:
util.print_success('Finished building {}.'.format(
tag_string))
|
[
"molecule.util.print_info",
"molecule.util.print_error",
"json.loads",
"molecule.util.print_success",
"docker.utils.kwargs_from_env",
"collections.namedtuple",
"io.open",
"sys.exit",
"molecule.util.print_warn"
] |
[((1240, 1293), 'sys.exit', 'sys.exit', (['"""ERROR: Driver missing, install docker-py."""'], {}), "('ERROR: Driver missing, install docker-py.')\n", (1248, 1293), False, 'import sys\n'), ((6128, 6200), 'collections.namedtuple', 'collections.namedtuple', (['"""Status"""', "['name', 'state', 'provider', 'ports']"], {}), "('Status', ['name', 'state', 'provider', 'ports'])\n", (6150, 6200), False, 'import collections\n'), ((1560, 1590), 'docker.utils.kwargs_from_env', 'docker.utils.kwargs_from_env', ([], {}), '()\n', (1588, 1590), False, 'import docker\n'), ((4719, 4739), 'molecule.util.print_warn', 'util.print_warn', (['msg'], {}), '(msg)\n', (4734, 4739), False, 'from molecule import util\n'), ((5361, 5401), 'molecule.util.print_success', 'util.print_success', (['"""Container created."""'], {}), "('Container created.')\n", (5379, 5401), False, 'from molecule import util\n'), ((5565, 5585), 'molecule.util.print_info', 'util.print_info', (['msg'], {}), '(msg)\n', (5580, 5585), False, 'from molecule import util\n'), ((5781, 5801), 'molecule.util.print_warn', 'util.print_warn', (['msg'], {}), '(msg)\n', (5796, 5801), False, 'from molecule import util\n'), ((6019, 6042), 'molecule.util.print_success', 'util.print_success', (['msg'], {}), '(msg)\n', (6037, 6042), False, 'from molecule import util\n'), ((7792, 7812), 'molecule.util.print_info', 'util.print_info', (['msg'], {}), '(msg)\n', (7807, 7812), False, 'from molecule import util\n'), ((8675, 8694), 'io.open', 'io.open', (['dockerfile'], {}), '(dockerfile)\n', (8682, 8694), False, 'import io\n'), ((9735, 9790), 'molecule.util.print_info', 'util.print_info', (['"""Building ansible compatible image..."""'], {}), "('Building ansible compatible image...')\n", (9750, 9790), False, 'from molecule import util\n'), ((10943, 10964), 'molecule.util.print_error', 'util.print_error', (['msg'], {}), '(msg)\n', (10959, 10964), False, 'from molecule import util\n'), ((10040, 10062), 'json.loads', 'json.loads', (['line_split'], {}), 
'(line_split)\n', (10050, 10062), False, 'import json\n'), ((10212, 10232), 'molecule.util.print_warn', 'util.print_warn', (['msg'], {}), '(msg)\n', (10227, 10232), False, 'from molecule import util\n'), ((10443, 10463), 'molecule.util.print_warn', 'util.print_warn', (['msg'], {}), '(msg)\n', (10458, 10463), False, 'from molecule import util\n'), ((10743, 10763), 'molecule.util.print_warn', 'util.print_warn', (['msg'], {}), '(msg)\n', (10758, 10763), False, 'from molecule import util\n')]
|
def example(Simulator):
import numpy as np
from csdl import Model
import csdl
class ExampleReorderMatrixSparse(Model):
def define(self):
shape2 = (5, 4)
b = np.arange(20).reshape(shape2)
mat = self.declare_variable('b', val=b)
self.register_output(
'einsum_reorder1_sparse_derivs',
csdl.einsum(
mat,
subscripts='ij->ji',
partial_format='sparse',
))
sim = Simulator(ExampleReorderMatrixSparse())
sim.run()
print('b', sim['b'].shape)
print(sim['b'])
print('einsum_reorder1_sparse_derivs', sim['einsum_reorder1_sparse_derivs'].shape)
print(sim['einsum_reorder1_sparse_derivs'])
return sim
|
[
"csdl.einsum",
"numpy.arange"
] |
[((406, 468), 'csdl.einsum', 'csdl.einsum', (['mat'], {'subscripts': '"""ij->ji"""', 'partial_format': '"""sparse"""'}), "(mat, subscripts='ij->ji', partial_format='sparse')\n", (417, 468), False, 'import csdl\n'), ((220, 233), 'numpy.arange', 'np.arange', (['(20)'], {}), '(20)\n', (229, 233), True, 'import numpy as np\n')]
|
import argparse
from dataloader import picked_train_test_data_loader
from sklearn import preprocessing
from classifier import train_best
import numpy
from bert_serving.client import BertClient
bc = BertClient()
def train_test(pickled_train_path, pickled_test_path):
train, test = picked_train_test_data_loader(pickled_train_path, pickled_test_path)
def vectorize_dataset(data):
X = []
Y = []
sentences = []
for row in data:
sentences.append(" ".join(row[3]))
Y.append(row[0])
if len(sentences)%20 == 0:
X.extend([e for e in bc.encode(sentences)])
sentences = []
if len(sentences) != 0:
X.extend([e for e in bc.encode(sentences)])
return numpy.vstack(X), Y
X_train, Y_train = vectorize_dataset(train)
X_test, Y_test = vectorize_dataset(test)
X_train = numpy.asarray(X_train)
X_test = numpy.asarray(X_test)
le = preprocessing.LabelEncoder()
le.fit(Y_train)
Y_train = le.transform(Y_train)
Y_test = le.transform(Y_test)
print ("Length of vector: %s"%X_train.shape[1])
return train_best(X_train, Y_train, X_test, Y_test)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Evaluate ELMo based sentence embedding')
parser.add_argument("pickled_training_data_path", help="pickled train path")
parser.add_argument("pickled_test_data_path", help="pickled test path")
args = parser.parse_args()
pickled_training_data_path = args.pickled_training_data_path
pickled_test_data_path = args.pickled_test_data_path
results = train_test(pickled_training_data_path, pickled_test_data_path)
results = results.split("\n")[-2]
print (results)
|
[
"argparse.ArgumentParser",
"numpy.asarray",
"dataloader.picked_train_test_data_loader",
"sklearn.preprocessing.LabelEncoder",
"classifier.train_best",
"bert_serving.client.BertClient",
"numpy.vstack"
] |
[((200, 212), 'bert_serving.client.BertClient', 'BertClient', ([], {}), '()\n', (210, 212), False, 'from bert_serving.client import BertClient\n'), ((287, 355), 'dataloader.picked_train_test_data_loader', 'picked_train_test_data_loader', (['pickled_train_path', 'pickled_test_path'], {}), '(pickled_train_path, pickled_test_path)\n', (316, 355), False, 'from dataloader import picked_train_test_data_loader\n'), ((842, 864), 'numpy.asarray', 'numpy.asarray', (['X_train'], {}), '(X_train)\n', (855, 864), False, 'import numpy\n'), ((876, 897), 'numpy.asarray', 'numpy.asarray', (['X_test'], {}), '(X_test)\n', (889, 897), False, 'import numpy\n'), ((906, 934), 'sklearn.preprocessing.LabelEncoder', 'preprocessing.LabelEncoder', ([], {}), '()\n', (932, 934), False, 'from sklearn import preprocessing\n'), ((1080, 1124), 'classifier.train_best', 'train_best', (['X_train', 'Y_train', 'X_test', 'Y_test'], {}), '(X_train, Y_train, X_test, Y_test)\n', (1090, 1124), False, 'from classifier import train_best\n'), ((1166, 1243), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Evaluate ELMo based sentence embedding"""'}), "(description='Evaluate ELMo based sentence embedding')\n", (1189, 1243), False, 'import argparse\n'), ((720, 735), 'numpy.vstack', 'numpy.vstack', (['X'], {}), '(X)\n', (732, 735), False, 'import numpy\n')]
|
#!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resolve single.
This resolver simply renders all objects into a single folder.
"""
import logging
import d1_onedrive.impl.resolver.resolver_base
import d1_onedrive.impl.resolver.resource_map
from d1_onedrive.impl import attributes
from d1_onedrive.impl import directory
from d1_onedrive.impl import onedrive_exceptions
from d1_onedrive.impl import util
log = logging.getLogger(__name__)
# log.setLevel(logging.DEBUG)
README_TXT = """All Folder
This folder contains all the items of the object_tree folder (the parent
of this folder) combined into a single folder.
"""
class Resolver(d1_onedrive.impl.resolver.resolver_base.Resolver):
def __init__(self, options, object_tree):
super().__init__(options, object_tree)
self._resource_map_resolver = d1_onedrive.impl.resolver.resource_map.Resolver(
options, object_tree
)
self._readme_txt = util.os_format(README_TXT)
def get_attributes(self, object_tree_root, path):
log.debug("get_attributes: {}".format(util.string_from_path_elements(path)))
if not path:
return attributes.Attributes(is_dir=True)
if self._is_readme_file(path):
return self._get_readme_file_attributes()
return self._resource_map_resolver.get_attributes(object_tree_root, path)
def get_directory(self, object_tree_root, path):
log.debug("get_directory: {}".format(util.string_from_path_elements(path)))
if not path:
return self._get_directory(object_tree_root, path)
return self._resource_map_resolver.get_directory(object_tree_root, path)
def read_file(self, object_tree_root, path, size, offset):
log.debug(
"read_file: {}, {}, {}".format(
util.string_from_path_elements(path), size, offset
)
)
if not path:
raise onedrive_exceptions.PathException("Invalid file")
if self._is_readme_file(path):
return self._get_readme_text(size, offset)
return self._resource_map_resolver.read_file(
object_tree_root, path, size, offset
)
# Private.
def _get_attributes(self, object_tree_root, path):
return attributes.Attributes(0, is_dir=True)
def _get_directory(self, object_tree_root, path):
d = directory.Directory()
d.append(self._get_readme_filename())
d.extend(object_tree_root["items"])
return d
|
[
"d1_onedrive.impl.onedrive_exceptions.PathException",
"d1_onedrive.impl.util.string_from_path_elements",
"d1_onedrive.impl.directory.Directory",
"d1_onedrive.impl.attributes.Attributes",
"d1_onedrive.impl.util.os_format",
"logging.getLogger"
] |
[((1178, 1205), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1195, 1205), False, 'import logging\n'), ((1707, 1733), 'd1_onedrive.impl.util.os_format', 'util.os_format', (['README_TXT'], {}), '(README_TXT)\n', (1721, 1733), False, 'from d1_onedrive.impl import util\n'), ((3028, 3065), 'd1_onedrive.impl.attributes.Attributes', 'attributes.Attributes', (['(0)'], {'is_dir': '(True)'}), '(0, is_dir=True)\n', (3049, 3065), False, 'from d1_onedrive.impl import attributes\n'), ((3133, 3154), 'd1_onedrive.impl.directory.Directory', 'directory.Directory', ([], {}), '()\n', (3152, 3154), False, 'from d1_onedrive.impl import directory\n'), ((1914, 1948), 'd1_onedrive.impl.attributes.Attributes', 'attributes.Attributes', ([], {'is_dir': '(True)'}), '(is_dir=True)\n', (1935, 1948), False, 'from d1_onedrive.impl import attributes\n'), ((2684, 2733), 'd1_onedrive.impl.onedrive_exceptions.PathException', 'onedrive_exceptions.PathException', (['"""Invalid file"""'], {}), "('Invalid file')\n", (2717, 2733), False, 'from d1_onedrive.impl import onedrive_exceptions\n'), ((1835, 1871), 'd1_onedrive.impl.util.string_from_path_elements', 'util.string_from_path_elements', (['path'], {}), '(path)\n', (1865, 1871), False, 'from d1_onedrive.impl import util\n'), ((2223, 2259), 'd1_onedrive.impl.util.string_from_path_elements', 'util.string_from_path_elements', (['path'], {}), '(path)\n', (2253, 2259), False, 'from d1_onedrive.impl import util\n'), ((2570, 2606), 'd1_onedrive.impl.util.string_from_path_elements', 'util.string_from_path_elements', (['path'], {}), '(path)\n', (2600, 2606), False, 'from d1_onedrive.impl import util\n')]
|
from django.contrib.auth import get_user_model
from django.test import SimpleTestCase, TestCase
from django.urls import reverse
class HomePageTests(SimpleTestCase):
def test_home_page_status_code(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_view_url_by_name(self):
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
def test_view_uses_correct_template(self):
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'home.html')
class SignupPageTests(TestCase):
username = 'newuser'
email = '<EMAIL>'
def test_signup_page_status_code(self):
response = self.client.get('/accounts/signup/')
self.assertEqual(response.status_code, 200)
def test_view_url_by_name(self):
response = self.client.get(reverse('signup'))
self.assertEqual(response.status_code, 200)
def test_view_uses_correct_template(self):
response = self.client.get(reverse('signup'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/signup.html')
def test_signup_form(self):
new_user = get_user_model().objects.create_user(
self.username, self.email)
self.assertEqual(get_user_model().objects.all().count(), 1)
self.assertEqual(get_user_model().objects.all()
[0].username, self.username)
self.assertEqual(get_user_model().objects.all()
[0].email, self.email)
|
[
"django.urls.reverse",
"django.contrib.auth.get_user_model"
] |
[((375, 390), 'django.urls.reverse', 'reverse', (['"""home"""'], {}), "('home')\n", (382, 390), False, 'from django.urls import reverse\n'), ((527, 542), 'django.urls.reverse', 'reverse', (['"""home"""'], {}), "('home')\n", (534, 542), False, 'from django.urls import reverse\n'), ((960, 977), 'django.urls.reverse', 'reverse', (['"""signup"""'], {}), "('signup')\n", (967, 977), False, 'from django.urls import reverse\n'), ((1114, 1131), 'django.urls.reverse', 'reverse', (['"""signup"""'], {}), "('signup')\n", (1121, 1131), False, 'from django.urls import reverse\n'), ((1307, 1323), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1321, 1323), False, 'from django.contrib.auth import get_user_model\n'), ((1409, 1425), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1423, 1425), False, 'from django.contrib.auth import get_user_model\n'), ((1477, 1493), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1491, 1493), False, 'from django.contrib.auth import get_user_model\n'), ((1587, 1603), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1601, 1603), False, 'from django.contrib.auth import get_user_model\n')]
|
from setuptools import setup, find_packages, Extension
setup(
name="pyfor",
version="0.3.6",
author="<NAME>",
author_email="<EMAIL>",
packages=["pyfor", "pyfortest"],
url="https://github.com/brycefrank/pyfor",
license="LICENSE.txt",
description="Tools for forest resource point cloud analysis.",
install_requires=["laspy", "laxpy", "python-coveralls"], # Dependencies from pip
)
|
[
"setuptools.setup"
] |
[((56, 370), 'setuptools.setup', 'setup', ([], {'name': '"""pyfor"""', 'version': '"""0.3.6"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'packages': "['pyfor', 'pyfortest']", 'url': '"""https://github.com/brycefrank/pyfor"""', 'license': '"""LICENSE.txt"""', 'description': '"""Tools for forest resource point cloud analysis."""', 'install_requires': "['laspy', 'laxpy', 'python-coveralls']"}), "(name='pyfor', version='0.3.6', author='<NAME>', author_email=\n '<EMAIL>', packages=['pyfor', 'pyfortest'], url=\n 'https://github.com/brycefrank/pyfor', license='LICENSE.txt',\n description='Tools for forest resource point cloud analysis.',\n install_requires=['laspy', 'laxpy', 'python-coveralls'])\n", (61, 370), False, 'from setuptools import setup, find_packages, Extension\n')]
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import uuid
from spyne.util import six
from spyne.model._base import SimpleModel
from spyne.model.primitive._base import re_match_with_span
from spyne.model.primitive.string import Unicode, AnyUri
UUID_PATTERN = "%(x)s{8}-%(x)s{4}-%(x)s{4}-%(x)s{4}-%(x)s{12}" % \
{'x': '[a-fA-F0-9]'}
class ImageUri(AnyUri):
"""A special kind of String that holds the uri of an image."""
def _uuid_validate_string(cls, value):
return ( SimpleModel.validate_string(cls, value)
and (value is None or (
cls.Attributes.min_len <= len(value) <= cls.Attributes.max_len
and re_match_with_span(cls.Attributes, value)
)))
def _Tuuid_validate(key):
from uuid import UUID
def _uvalid(cls, v):
try:
UUID(**{key:v})
except ValueError:
return False
return True
return _uvalid
_uuid_validate = {
None: _uuid_validate_string,
'hex': _Tuuid_validate('hex'),
'urn': _Tuuid_validate('urn'),
six.binary_type: _Tuuid_validate('bytes'),
'bytes': _Tuuid_validate('bytes'),
'bytes_le': _Tuuid_validate('bytes_le'),
'fields': _Tuuid_validate('fields'),
int: _Tuuid_validate('int'),
'int': _Tuuid_validate('int'),
}
class Uuid(Unicode(pattern=UUID_PATTERN)):
"""Unicode subclass for Universially-Unique Identifiers."""
__namespace__ = 'http://spyne.io/schema'
__type_name__ = 'uuid'
Value = uuid.UUID
class Attributes(Unicode(pattern=UUID_PATTERN).Attributes):
serialize_as = None
@staticmethod
def validate_string(cls, value):
return _uuid_validate[cls.Attributes.serialize_as](cls, value)
@staticmethod
def validate_native(cls, value):
return SimpleModel.validate_native(cls, value)
|
[
"spyne.model._base.SimpleModel.validate_native",
"spyne.model.primitive.string.Unicode",
"spyne.model._base.SimpleModel.validate_string",
"spyne.model.primitive._base.re_match_with_span",
"uuid.UUID"
] |
[((2082, 2111), 'spyne.model.primitive.string.Unicode', 'Unicode', ([], {'pattern': 'UUID_PATTERN'}), '(pattern=UUID_PATTERN)\n', (2089, 2111), False, 'from spyne.model.primitive.string import Unicode, AnyUri\n'), ((1274, 1313), 'spyne.model._base.SimpleModel.validate_string', 'SimpleModel.validate_string', (['cls', 'value'], {}), '(cls, value)\n', (1301, 1313), False, 'from spyne.model._base import SimpleModel\n'), ((2295, 2324), 'spyne.model.primitive.string.Unicode', 'Unicode', ([], {'pattern': 'UUID_PATTERN'}), '(pattern=UUID_PATTERN)\n', (2302, 2324), False, 'from spyne.model.primitive.string import Unicode, AnyUri\n'), ((2564, 2603), 'spyne.model._base.SimpleModel.validate_native', 'SimpleModel.validate_native', (['cls', 'value'], {}), '(cls, value)\n', (2591, 2603), False, 'from spyne.model._base import SimpleModel\n'), ((1596, 1612), 'uuid.UUID', 'UUID', ([], {}), '(**{key: v})\n', (1600, 1612), False, 'from uuid import UUID\n'), ((1437, 1478), 'spyne.model.primitive._base.re_match_with_span', 're_match_with_span', (['cls.Attributes', 'value'], {}), '(cls.Attributes, value)\n', (1455, 1478), False, 'from spyne.model.primitive._base import re_match_with_span\n')]
|
#!/usr/bin/env python
# Copyright 2016 Toyota Research Institute
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import rospy
import sys
import nose
from nose.tools import assert_equal
from sensor_msgs.msg import BatteryState
from task_behavior_engine.tree import NodeStatus
from task_behavior_engine.tree import Blackboard
from task_behavior_ros import batterystate
class TestChargeCompleteMonitor(object):
def setUp(self):
self.blackboard = Blackboard()
self.charge_monitor = batterystate.ChargeCompleteMonitor(
name="monitor",
topic_name="battery",
latch=True,
blackboard=self.blackboard)
def test_init(self):
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.ACTIVE)
# empty message should fail
msg = BatteryState()
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
def test_charging(self):
msg = BatteryState()
# Charging no max set, FAIL
msg.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_CHARGING
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Charging at max, report SUCCESS
msg.percentage = 0.9
self.blackboard.save("max_charge", 0.9, self.charge_monitor._id)
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
# Charging but above max, report SUCCESS
msg.percentage = 1.0
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
# Charging but below max, report ACTIVE
msg.percentage = 0.
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.ACTIVE)
def test_discharging(self):
msg = BatteryState()
# Discharging no max set, FAIL
msg.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_DISCHARGING
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Discharging at max, report FAIL
msg.percentage = 0.95
self.blackboard.save("max_charge", 0.95, self.charge_monitor._id)
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Discharging but above max, report FAIL
msg.percentage = 1.0
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Discharging but below max, report FAIL
msg.percentage = 0.
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
def test_full(self):
msg = BatteryState()
# Full but no max set, report SUCCESS
# If the battery is reporting full, regardless of charge percentage
# report SUCCESS
msg.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_FULL
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
# Full but at max, report SUCCESS.
msg.percentage = 0.95
self.blackboard.save("max_charge", 0.95, self.charge_monitor._id)
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
# Full but above max, report SUCCESS
msg.percentage = 1.0
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
# Full but below max, still report success
msg.percentage = 0.
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.SUCCESS)
def test_unknown(self):
msg = BatteryState()
# Unknown but no max set, FAIL
msg.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_UNKNOWN
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Unkown at max, report FAIL
self.blackboard.save("max_charge", 0.95, self.charge_monitor._id)
msg.percentage = 0.95
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Unkown but above max, report FAIL
msg.percentage = 1.0
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
# Unknown but below max, report FAIL
msg.percentage = 0.
self.charge_monitor.topic_sub.callback(msg)
result = self.charge_monitor.tick()
assert_equal(result.status, NodeStatus.FAIL)
class TestChargeOKMonitor(object):
    """Exercise ChargeOKMonitor across supply statuses and charge levels.

    For every power-supply status the monitor behaves the same way:
    FAIL while no ``min_charge`` is on the blackboard, FAIL at or below
    the minimum, SUCCESS strictly above it.
    """
    def setUp(self):
        self.blackboard = Blackboard()
        self.charge_monitor = batterystate.ChargeOKMonitor(
            name="monitor", topic_name="battery", latch=True, blackboard=self.blackboard)
    def _deliver_and_tick(self, msg):
        # Feed one battery message to the monitor and return the tick status.
        self.charge_monitor.topic_sub.callback(msg)
        return self.charge_monitor.tick().status
    def _run_status_scenario(self, power_supply_status):
        # Shared scenario: with no min_charge stored the monitor FAILs;
        # after min_charge=0.5 is saved, at-min and below-min FAIL while
        # above-min SUCCEEDs.
        msg = BatteryState()
        msg.power_supply_status = power_supply_status
        assert_equal(self._deliver_and_tick(msg), NodeStatus.FAIL)
        self.blackboard.save("min_charge", 0.5, self.charge_monitor._id)
        expectations = [(0.5, NodeStatus.FAIL),
                        (0.8, NodeStatus.SUCCESS),
                        (.1, NodeStatus.FAIL)]
        for percentage, expected in expectations:
            msg.percentage = percentage
            assert_equal(self._deliver_and_tick(msg), expected)
    def test_init(self):
        # Before any message arrives the monitor reports ACTIVE.
        result = self.charge_monitor.tick()
        assert_equal(result.status, NodeStatus.ACTIVE)
        # An all-defaults message must FAIL.
        assert_equal(self._deliver_and_tick(BatteryState()), NodeStatus.FAIL)
    def test_charging(self):
        self._run_status_scenario(BatteryState.POWER_SUPPLY_STATUS_CHARGING)
    def test_discharging(self):
        self._run_status_scenario(BatteryState.POWER_SUPPLY_STATUS_DISCHARGING)
    def test_full(self):
        self._run_status_scenario(BatteryState.POWER_SUPPLY_STATUS_FULL)
    def test_unknown(self):
        self._run_status_scenario(BatteryState.POWER_SUPPLY_STATUS_UNKNOWN)
if __name__ == '__main__':
    # Run the tests in this file through nose, emitting xunit-style XML.
    test_module = sys.modules[__name__].__file__
    arg_parser = argparse.ArgumentParser(description='Perform unit test.')
    arg_parser.add_argument(
        '--gtest_output', nargs='?', default='test.xml')
    parsed, _leftover = arg_parser.parse_known_args()
    xunit_arg = '--xunit-file=' + str(parsed.gtest_output.lstrip('xml:'))
    nose.run(argv=[sys.argv[0], test_module, '--with-xunit', xunit_arg])
|
[
"task_behavior_ros.batterystate.ChargeCompleteMonitor",
"task_behavior_engine.tree.Blackboard",
"argparse.ArgumentParser",
"task_behavior_ros.batterystate.ChargeOKMonitor",
"nose.tools.assert_equal",
"nose.run",
"sensor_msgs.msg.BatteryState"
] |
[((10999, 11056), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Perform unit test."""'}), "(description='Perform unit test.')\n", (11022, 11056), False, 'import argparse\n'), ((11320, 11343), 'nose.run', 'nose.run', ([], {'argv': 'noseargs'}), '(argv=noseargs)\n', (11328, 11343), False, 'import nose\n'), ((970, 982), 'task_behavior_engine.tree.Blackboard', 'Blackboard', ([], {}), '()\n', (980, 982), False, 'from task_behavior_engine.tree import Blackboard\n'), ((1013, 1129), 'task_behavior_ros.batterystate.ChargeCompleteMonitor', 'batterystate.ChargeCompleteMonitor', ([], {'name': '"""monitor"""', 'topic_name': '"""battery"""', 'latch': '(True)', 'blackboard': 'self.blackboard'}), "(name='monitor', topic_name='battery',\n latch=True, blackboard=self.blackboard)\n", (1047, 1129), False, 'from task_behavior_ros import batterystate\n'), ((1253, 1299), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.ACTIVE'], {}), '(result.status, NodeStatus.ACTIVE)\n', (1265, 1299), False, 'from nose.tools import assert_equal\n'), ((1351, 1365), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (1363, 1365), False, 'from sensor_msgs.msg import BatteryState\n'), ((1470, 1514), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (1482, 1514), False, 'from nose.tools import assert_equal\n'), ((1559, 1573), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (1571, 1573), False, 'from sensor_msgs.msg import BatteryState\n'), ((1791, 1835), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (1803, 1835), False, 'from nose.tools import assert_equal\n'), ((2085, 2132), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (2097, 2132), False, 'from nose.tools import assert_equal\n'), 
((2316, 2363), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (2328, 2363), False, 'from nose.tools import assert_equal\n'), ((2545, 2591), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.ACTIVE'], {}), '(result.status, NodeStatus.ACTIVE)\n', (2557, 2591), False, 'from nose.tools import assert_equal\n'), ((2639, 2653), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (2651, 2653), False, 'from sensor_msgs.msg import BatteryState\n'), ((2877, 2921), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (2889, 2921), False, 'from nose.tools import assert_equal\n'), ((3173, 3217), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (3185, 3217), False, 'from nose.tools import assert_equal\n'), ((3401, 3445), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (3413, 3445), False, 'from nose.tools import assert_equal\n'), ((3628, 3672), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (3640, 3672), False, 'from nose.tools import assert_equal\n'), ((3713, 3727), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (3725, 3727), False, 'from sensor_msgs.msg import BatteryState\n'), ((4052, 4099), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (4064, 4099), False, 'from nose.tools import assert_equal\n'), ((4352, 4399), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (4364, 4399), False, 'from nose.tools import assert_equal\n'), ((4579, 4626), 'nose.tools.assert_equal', 
'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (4591, 4626), False, 'from nose.tools import assert_equal\n'), ((4811, 4858), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (4823, 4858), False, 'from nose.tools import assert_equal\n'), ((4902, 4916), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (4914, 4916), False, 'from sensor_msgs.msg import BatteryState\n'), ((5136, 5180), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (5148, 5180), False, 'from nose.tools import assert_equal\n'), ((5427, 5471), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (5439, 5471), False, 'from nose.tools import assert_equal\n'), ((5650, 5694), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (5662, 5694), False, 'from nose.tools import assert_equal\n'), ((5873, 5917), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (5885, 5917), False, 'from nose.tools import assert_equal\n'), ((6003, 6015), 'task_behavior_engine.tree.Blackboard', 'Blackboard', ([], {}), '()\n', (6013, 6015), False, 'from task_behavior_engine.tree import Blackboard\n'), ((6046, 6157), 'task_behavior_ros.batterystate.ChargeOKMonitor', 'batterystate.ChargeOKMonitor', ([], {'name': '"""monitor"""', 'topic_name': '"""battery"""', 'latch': '(True)', 'blackboard': 'self.blackboard'}), "(name='monitor', topic_name='battery', latch=\n True, blackboard=self.blackboard)\n", (6074, 6157), False, 'from task_behavior_ros import batterystate\n'), ((6244, 6290), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.ACTIVE'], {}), '(result.status, 
NodeStatus.ACTIVE)\n', (6256, 6290), False, 'from nose.tools import assert_equal\n'), ((6342, 6356), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (6354, 6356), False, 'from sensor_msgs.msg import BatteryState\n'), ((6461, 6505), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (6473, 6505), False, 'from nose.tools import assert_equal\n'), ((6550, 6564), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (6562, 6564), False, 'from sensor_msgs.msg import BatteryState\n'), ((6786, 6830), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (6798, 6830), False, 'from nose.tools import assert_equal\n'), ((7077, 7121), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (7089, 7121), False, 'from nose.tools import assert_equal\n'), ((7305, 7352), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (7317, 7352), False, 'from nose.tools import assert_equal\n'), ((7532, 7576), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (7544, 7576), False, 'from nose.tools import assert_equal\n'), ((7624, 7638), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (7636, 7638), False, 'from sensor_msgs.msg import BatteryState\n'), ((7865, 7909), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (7877, 7909), False, 'from nose.tools import assert_equal\n'), ((8159, 8203), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (8171, 8203), False, 'from nose.tools import assert_equal\n'), ((8390, 8437), 
'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (8402, 8437), False, 'from nose.tools import assert_equal\n'), ((8620, 8664), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (8632, 8664), False, 'from nose.tools import assert_equal\n'), ((8705, 8719), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (8717, 8719), False, 'from sensor_msgs.msg import BatteryState\n'), ((8933, 8977), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (8945, 8977), False, 'from nose.tools import assert_equal\n'), ((9225, 9269), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (9237, 9269), False, 'from nose.tools import assert_equal\n'), ((9522, 9569), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (9534, 9569), False, 'from nose.tools import assert_equal\n'), ((9745, 9789), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (9757, 9789), False, 'from nose.tools import assert_equal\n'), ((9833, 9847), 'sensor_msgs.msg.BatteryState', 'BatteryState', ([], {}), '()\n', (9845, 9847), False, 'from sensor_msgs.msg import BatteryState\n'), ((10067, 10111), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (10079, 10111), False, 'from nose.tools import assert_equal\n'), ((10361, 10405), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (10373, 10405), False, 'from nose.tools import assert_equal\n'), ((10588, 10635), 'nose.tools.assert_equal', 'assert_equal', 
(['result.status', 'NodeStatus.SUCCESS'], {}), '(result.status, NodeStatus.SUCCESS)\n', (10600, 10635), False, 'from nose.tools import assert_equal\n'), ((10814, 10858), 'nose.tools.assert_equal', 'assert_equal', (['result.status', 'NodeStatus.FAIL'], {}), '(result.status, NodeStatus.FAIL)\n', (10826, 10858), False, 'from nose.tools import assert_equal\n')]
|
from sympy import Integral, Symbol
# Symbolic integration examples: one indefinite and two definite
# integrals of simple polynomials in x (results are discarded).
x = Symbol('x')
k = Symbol('k')
Integral(k * x, x).doit()
Integral(k * x, (x, 0, 2)).doit()
Integral(x, (x, 2, 4)).doit()
|
[
"sympy.Symbol",
"sympy.Integral"
] |
[((40, 51), 'sympy.Symbol', 'Symbol', (['"""x"""'], {}), "('x')\n", (46, 51), False, 'from sympy import Integral, Symbol\n'), ((58, 69), 'sympy.Symbol', 'Symbol', (['"""k"""'], {}), "('k')\n", (64, 69), False, 'from sympy import Integral, Symbol\n'), ((72, 90), 'sympy.Integral', 'Integral', (['(k * x)', 'x'], {}), '(k * x, x)\n', (80, 90), False, 'from sympy import Integral, Symbol\n'), ((99, 125), 'sympy.Integral', 'Integral', (['(k * x)', '(x, 0, 2)'], {}), '(k * x, (x, 0, 2))\n', (107, 125), False, 'from sympy import Integral, Symbol\n'), ((136, 158), 'sympy.Integral', 'Integral', (['x', '(x, 2, 4)'], {}), '(x, (x, 2, 4))\n', (144, 158), False, 'from sympy import Integral, Symbol\n')]
|
import pytest
from app.models import Notification, INVITE_PENDING
from tests.app.db import create_invited_org_user
@pytest.mark.parametrize('extra_args, expected_start_of_invite_url', [
    (
        {},
        'http://localhost:6012/organisation-invitation/'
    ),
    (
        {'invite_link_host': 'https://www.example.com'},
        'https://www.example.com/organisation-invitation/'
    ),
])
def test_create_invited_org_user(
    admin_request,
    sample_organisation,
    sample_user,
    mocker,
    org_invite_email_template,
    extra_args,
    expected_start_of_invite_url,
):
    """Creating an org invite returns 201, persists the invite and queues the
    invite email with a link whose host honours ``invite_link_host``."""
    mocked = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
    email_address = '<EMAIL>'
    data = dict(
        organisation=str(sample_organisation.id),
        email_address=email_address,
        invited_by=str(sample_user.id),
        **extra_args
    )
    json_resp = admin_request.post(
        'organisation_invite.invite_user_to_org',
        organisation_id=sample_organisation.id,
        _data=data,
        _expected_status=201
    )
    # The API response echoes the invite we just created.
    assert json_resp['data']['organisation'] == str(sample_organisation.id)
    assert json_resp['data']['email_address'] == email_address
    assert json_resp['data']['invited_by'] == str(sample_user.id)
    assert json_resp['data']['status'] == INVITE_PENDING
    assert json_resp['data']['id']
    # A notification row is created with the template personalisation filled in.
    notification = Notification.query.first()
    assert notification.reply_to_text == sample_user.email_address
    assert len(notification.personalisation.keys()) == 3
    assert notification.personalisation['organisation_name'] == 'sample organisation'
    assert notification.personalisation['user_name'] == 'Test User'
    # Only the URL prefix is deterministic; the token suffix must be non-empty.
    assert notification.personalisation['url'].startswith(expected_start_of_invite_url)
    assert len(notification.personalisation['url']) > len(expected_start_of_invite_url)
    # The email is queued exactly once on the internal-tasks queue.
    mocked.assert_called_once_with([(str(notification.id))], queue="notify-internal-tasks")
def test_create_invited_user_invalid_email(admin_request, sample_organisation, sample_user, mocker):
    """A malformed email address is rejected with 400 and no email is queued."""
    deliver_mock = mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
    payload = {
        'service': str(sample_organisation.id),
        'email_address': 'notanemail',
        'invited_by': str(sample_user.id),
    }
    response = admin_request.post(
        'organisation_invite.invite_user_to_org',
        organisation_id=sample_organisation.id,
        _data=payload,
        _expected_status=400
    )
    assert response['errors'][0]['message'] == 'email_address Not a valid email address'
    assert deliver_mock.call_count == 0
def test_get_all_invited_users_by_service(admin_request, sample_organisation, sample_user):
    """All pending invites for an organisation are returned by the GET endpoint."""
    # Give each invite a distinct address. The previous template string had
    # no '{}' placeholder, so ``.format(i)`` was a no-op and all five invites
    # were created with the identical email address.
    for i in range(5):
        create_invited_org_user(
            sample_organisation,
            sample_user,
            email_address='invited_user_{}@example.com'.format(i)
        )
    json_resp = admin_request.get(
        'organisation_invite.get_invited_org_users_by_organisation',
        organisation_id=sample_organisation.id
    )
    assert len(json_resp['data']) == 5
    for invite in json_resp['data']:
        assert invite['organisation'] == str(sample_organisation.id)
        assert invite['invited_by'] == str(sample_user.id)
        assert invite['id']
def test_get_invited_users_by_service_with_no_invites(admin_request, sample_organisation):
    """An organisation with no invites yields an empty data list."""
    response = admin_request.get(
        'organisation_invite.get_invited_org_users_by_organisation',
        organisation_id=sample_organisation.id
    )
    assert len(response['data']) == 0
def test_update_org_invited_user_set_status_to_cancelled(admin_request, sample_invited_org_user):
    """Posting status=cancelled updates the invite and echoes the new status."""
    response = admin_request.post(
        'organisation_invite.update_org_invite_status',
        organisation_id=sample_invited_org_user.organisation_id,
        invited_org_user_id=sample_invited_org_user.id,
        _data={'status': 'cancelled'}
    )
    assert response['data']['status'] == 'cancelled'
def test_update_org_invited_user_for_wrong_service_returns_404(admin_request, sample_invited_org_user, fake_uuid):
    """Updating an invite under an unrelated organisation id yields 404."""
    response = admin_request.post(
        'organisation_invite.update_org_invite_status',
        organisation_id=fake_uuid,
        invited_org_user_id=sample_invited_org_user.id,
        _data={'status': 'cancelled'},
        _expected_status=404
    )
    assert response['message'] == 'No result found'
def test_update_org_invited_user_for_invalid_data_returns_400(admin_request, sample_invited_org_user):
    """An unrecognised status value is rejected with exactly one schema error."""
    response = admin_request.post(
        'organisation_invite.update_org_invite_status',
        organisation_id=sample_invited_org_user.organisation_id,
        invited_org_user_id=sample_invited_org_user.id,
        _data={'status': 'garbage'},
        _expected_status=400
    )
    assert len(response['errors']) == 1
    assert response['errors'][0]['message'] == 'status garbage is not one of [pending, accepted, cancelled]'
|
[
"app.models.Notification.query.first",
"pytest.mark.parametrize"
] |
[((120, 360), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""extra_args, expected_start_of_invite_url"""', "[({}, 'http://localhost:6012/organisation-invitation/'), ({\n 'invite_link_host': 'https://www.example.com'},\n 'https://www.example.com/organisation-invitation/')]"], {}), "('extra_args, expected_start_of_invite_url', [({},\n 'http://localhost:6012/organisation-invitation/'), ({'invite_link_host':\n 'https://www.example.com'},\n 'https://www.example.com/organisation-invitation/')])\n", (143, 360), False, 'import pytest\n'), ((1386, 1412), 'app.models.Notification.query.first', 'Notification.query.first', ([], {}), '()\n', (1410, 1412), False, 'from app.models import Notification, INVITE_PENDING\n')]
|
"""Generate typescript API files from the server spec"""
import json
import re
import collections
from . import base
def generate(state: base.ParserState, meta):
    """Render the typescript API source text for the given parser state."""
    return _Generator(state, meta).run()
##
class _Generator:
    """Builds the typescript API + stub source from parsed server types.

    Type names are first emitted as ``[TMP:n]`` placeholders (so recursive
    references work) and replaced with their final names in ``run()``.
    """
    def __init__(self, state, meta):
        self.state = state
        self.meta = meta
        # cmd_name -> base.Data(cmd_name, doc, arg, ret) for every API command.
        self.commands = {}
        # namespace -> list of rendered typescript declarations.
        self.declarations = collections.defaultdict(list)
        self.stub = []
        # type name -> rendered TS name (or placeholder while in progress).
        self.done = {}
        # placeholder token -> final TS name, applied as text replacement.
        self.tmp_names = {}
        # final TS name -> source type name, used for conflict detection.
        self.object_names = {}
    def run(self):
        """Collect API commands, render everything and resolve placeholders."""
        for t in self.state.types.values():
            # export all API commands
            if isinstance(t, base.TCommand) and t.cmd_method == 'api':
                self.commands[t.cmd_name] = base.Data(
                    cmd_name=t.cmd_name,
                    doc=t.doc,
                    arg=self.make(t.arg_t),
                    ret=self.make(t.ret_t)
                )
        # # export all Props
        # if spec.abc == base.ABC.object and self.is_instance(spec, 'gws.Props'):
        #     self.make(spec.name)
        text = _indent(self.write_api()) + '\n\n' + _indent(self.write_stub())
        # Replace every [TMP:n] placeholder with the final type name.
        for tmp, name in self.tmp_names.items():
            text = text.replace(tmp, name)
        return text
    def write_api(self):
        """Render the VERSION constant, all namespaces and the Api interface."""
        api_tpl = """
            /**
             * Gws Server API.
             * Version $VERSION
             *
             */
            export const VERSION = '$VERSION';
            type _int = number;
            type _float = number;
            type _bytes = any;
            type _dict = {[k: string]: any};
            $declarations
            export interface Api {
            $actions
            }
        """
        action_tpl = """
            /// $doc
            $name (p: $arg, options?: any): Promise<$ret>;
        """
        actions = [
            self.format(action_tpl, name=cc.cmd_name, doc=cc.doc, arg=cc.arg, ret=cc.ret)
            for _, cc in sorted(self.commands.items())
        ]
        namespace_tpl = """
            export namespace $ns {
            $declarations
            }
        """
        decls = []
        # The 'core' namespace is always emitted first, then the rest sorted.
        d = self.declarations.pop('core')
        decls.append(self.format(namespace_tpl, ns='core', declarations=_nl2(d)))
        for ns, d in sorted(self.declarations.items()):
            decls.append(self.format(namespace_tpl, ns=ns, declarations=_nl2(d)))
        return self.format(api_tpl, declarations=_nl2(decls), actions=_nl2(actions))
    def write_stub(self):
        """Render the abstract BaseServer class that forwards to _call()."""
        stub_tpl = """
            export abstract class BaseServer implements Api {
                abstract _call(cmd, p, options): Promise<any>;
            $actions
            }
        """
        action_tpl = """
            $name(p: $arg, options?: any): Promise<$ret> {
                return this._call("$name", p, options);
            }
        """
        actions = [
            self.format(action_tpl, name=cc.cmd_name, doc=cc.doc, arg=cc.arg, ret=cc.ret)
            for _, cc in sorted(self.commands.items())
        ]
        return self.format(stub_tpl, actions=_nl(actions))
    # Primitive source types mapped straight to typescript spellings.
    _builtins_map = {
        'any': 'any',
        'bool': 'boolean',
        'bytes': '_bytes',
        'float': '_float',
        'int': '_int',
        'str': 'string',
    }
    def make(self, name):
        """Return the TS name for a source type, rendering it on first use."""
        if name in self._builtins_map:
            return self._builtins_map[name]
        if name in self.done:
            return self.done[name]
        t = self.state.types[name]
        # Register a placeholder first so recursive types don't loop forever.
        tmp_name = f'[TMP:%d]' % (len(self.tmp_names) + 1)
        self.done[name] = self.tmp_names[tmp_name] = tmp_name
        type_name = self.make_type(t)
        self.done[name] = self.tmp_names[tmp_name] = type_name
        return type_name
    def make_type(self, t):
        """Render one type node into its typescript form (may emit declarations)."""
        if isinstance(t, base.TDict):
            k = self.make(t.key_t)
            v = self.make(t.value_t)
            if k == 'string' and v == 'any':
                return '_dict'
            return '{[key: %s]: %s}' % (k, v)
        if isinstance(t, base.TList):
            return 'Array<%s>' % self.make(t.item_t)
        if isinstance(t, base.TSet):
            return 'Array<%s>' % self.make(t.item_t)
        if isinstance(t, base.TLiteral):
            return _pipe(_val(v) for v in t.values)
        if isinstance(t, base.TOptional):
            return _pipe([self.make(t.target_t), 'null'])
        if isinstance(t, base.TTuple):
            return '[%s]' % _comma(self.make(it) for it in t.items)
        if isinstance(t, base.TUnion):
            return _pipe(self.make(it) for it in t.items)
        if isinstance(t, base.TVariant):
            return _pipe(self.make(it) for it in t.members.values())
        if isinstance(t, base.TRecord):
            tpl = """
                /// $doc
                export interface $name$ext {
                $props
                }
            """
            ns, name, full = self.object_name_parts(t.name)
            self.declarations[ns].append(self.format(
                tpl,
                name=name,
                doc=t.doc,
                ext=' extends ' + self.make(t.supers[0]) if t.supers else '',
                props=self.make_props(t)
            ))
            return full
        if isinstance(t, base.TEnum):
            tpl = '''
                /// $doc
                export enum $name {
                $items
                }
            '''
            ns, name, full = self.object_name_parts(t.name)
            self.declarations[ns].append(self.format(
                tpl,
                name=name,
                doc=t.doc,
                items=_nl('%s = %s,' % (k, _val(v)) for k, v in sorted(t.values.items()))
            ))
            return full
        if isinstance(t, base.TAlias):
            tpl = '''
                /// $doc
                export type $name = $target;
            '''
            ns, name, full = self.object_name_parts(t.name)
            self.declarations[ns].append(self.format(
                tpl,
                name=name,
                doc=t.doc,
                target=self.make(t.target_t)
            ))
            return full
        raise base.Error(f'unhandled type {t.name!r}')
    def make_props(self, t):
        """Render the property list of a record, skipping inherited properties."""
        tpl = """
            /// $doc
            $name$opt: $type
        """
        props = []
        for name, key in t.props.items():
            property_type = self.state.types[key]
            # Only emit properties declared on this record itself.
            if property_type.owner_t == t.name:
                props.append(self.format(
                    tpl,
                    name=name,
                    doc=property_type.doc,
                    opt='?' if property_type.has_default else '',
                    type=self.make(property_type.property_t)))
        return _nl(props)
    # Prefix-stripping rules applied in order when shortening object names.
    _replace = [
        [r'^gws\.core\.(data|ext|types)\.', ''],
        [r'^gws\.base.(\w+).(action|core|types).', r'\1'],
        [r'^gws\.(base|core|lib)\.', ''],
        [r'^gws\.ext\.', ''],
        [r'^gws\.', ''],
    ]
    def object_name_parts(self, name):
        """Split a source type name into (namespace, short name, full TS name)."""
        els = [
            e for e in name.replace('_', '.').split('.')
            if e not in {'gws', 'core', 'types'}
        ]
        if len(els) == 1:
            els.insert(0, 'core')
        if len(els) == 2 and els[0] == 'data':
            els[0] = 'core'
        return '.'.join(els[:-1]), els[-1], '.'.join(els)
    def object_name(self, name):
        """Derive a CamelCase TS name for a source name, failing on collisions."""
        res = name.replace('_', '.')
        for k, v in self._replace:
            res = re.sub(k, v, res)
        res = ''.join(_ucfirst(s) for s in res.split('.'))
        if res in self.object_names and self.object_names[res] != name:
            raise base.Error(f'name conflict: {res!r} for {name!r} and {self.object_names[res]!r}')
        self.object_names[res] = name
        return res
    def format(self, template, **kwargs):
        """Substitute every ``$word`` in *template* from kwargs (plus VERSION)."""
        kwargs['VERSION'] = self.meta.version
        return re.sub(
            r'\$(\w+)',
            lambda m: kwargs[m.group(1)],
            template
        ).strip()
# Joining helpers used throughout template assembly.
_pipe = ' | '.join
_comma = ', '.join
_nl = '\n'.join
_nl2 = '\n\n'.join
def _indent(txt):
r = []
spaces = ' ' * 4
indent = 0
for ln in txt.strip().split('\n'):
ln = ln.strip()
if ln == '}':
indent -= 1
ln = (spaces * indent) + ln
if ln.endswith('{'):
indent += 1
r.append(ln)
return _nl(r)
def _val(s):
    # JSON encoding doubles as a valid typescript literal for strings/numbers.
    return json.dumps(s)
def _ucfirst(s):
return s[0].upper() + s[1:]
|
[
"collections.defaultdict",
"re.sub",
"json.dumps"
] |
[((8536, 8549), 'json.dumps', 'json.dumps', (['s'], {}), '(s)\n', (8546, 8549), False, 'import json\n'), ((384, 413), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (407, 413), False, 'import collections\n'), ((7605, 7622), 're.sub', 're.sub', (['k', 'v', 'res'], {}), '(k, v, res)\n', (7611, 7622), False, 'import re\n')]
|
import abc
from collections import deque
import hashlib
import io
import logging
import mimetypes
import os
import os.path as osp
import tempfile
from typing import Deque
from smqtk.exceptions import InvalidUriError, NoUriResolutionError, \
ReadOnlyError
from smqtk.representation import SmqtkRepresentation
from smqtk.utils.file import safe_create_dir
from smqtk.utils.plugin import Pluggable
# Shared MIME-type registry used to guess file extensions for temp files.
MIMETYPES = mimetypes.MimeTypes()
class DataElement (SmqtkRepresentation, Pluggable):
"""
Abstract interface for a byte data container.
The primary "value" of a ``DataElement`` is the byte content wrapped. Since
this can technically change due to external forces, we cannot guarantee that
an element is immutable. Thus ``DataElement`` instances are not considered
generally hashable. Specific implementations may define a ``__hash__``
method if that implementation reflects a data source that guarantees
immutability.
UUIDs should be cast-able to a string and maintain unique-ness after
conversion.
"""
    @classmethod
    def from_uri(cls, uri):
        """
        Construct a new instance based on the given URI.
        This function may not be implemented for all DataElement types.
        :param uri: URI string to resolve into an element instance
        :type uri: str
        :raises NoUriResolutionError: This element type does not implement URI
            resolution.
        :raises smqtk.exceptions.InvalidUriError: This element type could not
            resolve the provided URI string.
        :return: New element instance of our type.
        :rtype: DataElement
        """
        # Base implementation: URI resolution is opt-in for subclasses.
        raise NoUriResolutionError()
raise NoUriResolutionError()
    def __init__(self):
        super(DataElement, self).__init__()
        # Paths of temp files created by write_temp(); removed by clean_temp().
        self._temp_filepath_stack = []
    # Because we can't generally guarantee external data immutability.
    __hash__ = None  # type: ignore
    def __del__(self):
        # Best-effort removal of temp files when the element is collected.
        self.clean_temp()
    def __eq__(self, other):
        # Equality is by full byte content, so this can be expensive.
        return isinstance(other, DataElement) and \
            self.get_bytes() == other.get_bytes()
    def __ne__(self, other):
        return not (self == other)
    @abc.abstractmethod
    def __repr__(self):
        return self.__class__.__name__
    def _write_new_temp(self, d):
        """
        Actually write our bytes to a new temp file
        Always creates new file.
        :param d: directory to write temp file in or None to use system default.
        :returns: path to file written
        """
        if d:
            safe_create_dir(d)
        # Pick a file extension matching our content type, when known.
        ext = MIMETYPES.guess_extension(self.content_type() or '')
        # Exceptions because mimetypes is apparently REALLY OLD
        if ext in {'.jpe', '.jfif'}:
            ext = '.jpg'
        fd, fp = tempfile.mkstemp(
            suffix=ext or '',
            dir=d
        )
        # mkstemp opens the file; close the raw fd and re-open by path so the
        # bytes go through normal buffered I/O.
        os.close(fd)
        with open(fp, 'wb') as f:
            f.write(self.get_bytes())
        return fp
def _clear_no_exist(self):
"""
Clear paths in temp stack that don't exist on the system.
"""
no_exist_paths: Deque[str] = deque() # tmp list of paths to remove
for fp in self._temp_filepath_stack:
if not osp.isfile(fp):
no_exist_paths.append(fp)
for fp in no_exist_paths:
self._temp_filepath_stack.remove(fp)
def md5(self):
"""
Get the MD5 checksum of this element's binary content.
:return: MD5 hex checksum of the data content.
:rtype: str
"""
return hashlib.md5(self.get_bytes()).hexdigest()
def sha1(self):
"""
Get the SHA1 checksum of this element's binary content.
:return: SHA1 hex checksum of the data content.
:rtype: str
"""
return hashlib.sha1(self.get_bytes()).hexdigest()
def sha512(self):
"""
Get the SHA512 checksum of this element's binary content.
:return: SHA512 hex checksum of the data content.
:rtype: str
"""
return hashlib.sha512(self.get_bytes()).hexdigest()
    def write_temp(self, temp_dir=None):
        """
        Write this data's bytes to a temporary file on disk, returning the path
        to the written file, whose extension is guessed based on this data's
        content type.
        It is not guaranteed that the returned file path does not point to the
        original data, i.e. writing to the returned filepath may modify the
        original data.
        NOTE:
            The file path returned should not be explicitly removed by the user.
            Instead, the ``clean_temp()`` method should be called on this
            object.
        :param temp_dir: Optional directory to write temporary file in,
            otherwise we use the platform default temporary files directory.
            If this is an empty string, we count it the same as having provided
            None.
        :type temp_dir: None or str
        :return: Path to the temporary file
        :rtype: str
        """
        # Write a new temp file if there aren't any in the stack, or if none
        # of the entries' base directory is the provided temp_dir (when one is
        # provided).
        # Clear out paths that don't exist.
        self._clear_no_exist()
        if temp_dir:
            abs_temp_dir = osp.abspath(osp.expanduser(temp_dir))
            # Check if dir is the base of any path in the current stack.
            for tf in self._temp_filepath_stack:
                if osp.dirname(tf) == abs_temp_dir:
                    return tf
            # nothing in stack with given base directory, create new temp file
            self._temp_filepath_stack.append(self._write_new_temp(temp_dir))
        elif not self._temp_filepath_stack:
            # write new temp file to platform specific temp directory
            self._temp_filepath_stack.append(self._write_new_temp(None))
        # return last written temp file.
        return self._temp_filepath_stack[-1]
def clean_temp(self):
"""
Clean any temporary files created by this element. This does nothing if
no temporary files have been generated for this element yet.
"""
if len(self._temp_filepath_stack):
for fp in self._temp_filepath_stack:
if os.path.isfile(fp):
os.remove(fp)
self._temp_filepath_stack = []
def uuid(self):
"""
UUID for this data element.
This many take different forms from integers to strings to a uuid.UUID
instance. This must return a hashable data type.
By default, this ends up being the hex stringification of the SHA1 hash
of this data's bytes. Specific implementations may provide other UUIDs,
however.
:return: UUID value for this data element. This return value should be
hashable.
:rtype: collections.abc.Hashable
"""
# TODO(paul.tunison): Change to SHA512.
return self.sha1()
def to_buffered_reader(self):
"""
Wrap this element's bytes in a ``io.BufferedReader`` instance for use as
file-like object for reading.
As we use the ``get_bytes`` function, this element's bytes must safely
fit in memory for this method to be usable.
:return: New BufferedReader instance
:rtype: io.BufferedReader
"""
return io.BytesIO(self.get_bytes())
def is_read_only(self):
"""
:return: If this element can only be read from.
:rtype: bool
"""
return not self.writable()
###
# Abstract methods
#
@abc.abstractmethod
def content_type(self):
"""
:return: Standard type/subtype string for this data element, or None if
the content type is unknown.
:rtype: str or None
"""
@abc.abstractmethod
def is_empty(self):
"""
Check if this element contains no bytes.
The intend of this method is to quickly check if there is any data
behind this element, ideally without having to read all/any of the
underlying data.
:return: If this element contains 0 bytes.
:rtype: bool
"""
@abc.abstractmethod
def get_bytes(self):
"""
:return: Get the bytes for this data element.
:rtype: bytes
"""
@abc.abstractmethod
def writable(self):
"""
:return: if this instance supports setting bytes.
:rtype: bool
"""
@abc.abstractmethod
def set_bytes(self, b):
"""
Set bytes to this data element.
Not all implementations may support setting bytes (check ``writable``
method return).
This base abstract method should be called by sub-class implementations
first. We check for mutability based on ``writable()`` method return.
:param b: bytes to set.
:type b: bytes
:raises ReadOnlyError: This data element can only be read from / does
not support writing.
"""
if not self.writable():
raise ReadOnlyError("This %s element is read only." % self)
def from_uri(uri, impl_generator=DataElement.get_impls):
"""
Create a data element instance from available plugin implementations.
The first implementation that can resolve the URI is what is returned. If no
implementations can resolve the URL, an ``InvalidUriError`` is raised.
:param uri: URI to try to resolve into a DataElement instance.
:type uri: str
:param impl_generator: Function that returns a dictionary mapping
implementation type names to the class type. By default this refers to
the standard ``*.get_impls()`` function, however this can be
changed to refer to a custom set of classes if desired.
:type impl_generator: () -> collections.abc.Iterable[type[DataElement]]
:raises smqtk.exceptions.InvalidUriError: No data element implementations
could resolve the given URI.
:return: New data element instance providing access to the data pointed to
by the input URI.
:rtype: DataElement
"""
log = logging.getLogger(__name__)
log.debug("Trying to parse URI: '%s'", uri)
de_type_iter = impl_generator()
inst = None
for de_type in de_type_iter:
try:
# noinspection PyUnresolvedReferences
inst = de_type.from_uri(uri)
except NoUriResolutionError:
# Expected error signaling that DataElement implementation does not
# or cannot resolve from a URI.
pass
except InvalidUriError as ex:
log.debug("Implementation '%s' failed to parse URI: %s",
de_type.__name__, ex.reason)
if inst is not None:
break
if inst is None:
# TODO: Assume final fallback of FileElement?
# Since any string could be a file?
raise InvalidUriError(uri, "No available implementation to handle URI.")
return inst
|
[
"os.remove",
"tempfile.mkstemp",
"os.path.dirname",
"collections.deque",
"smqtk.exceptions.ReadOnlyError",
"os.path.isfile",
"os.close",
"smqtk.utils.file.safe_create_dir",
"mimetypes.MimeTypes",
"os.path.expanduser",
"smqtk.exceptions.NoUriResolutionError",
"logging.getLogger",
"smqtk.exceptions.InvalidUriError"
] |
[((413, 434), 'mimetypes.MimeTypes', 'mimetypes.MimeTypes', ([], {}), '()\n', (432, 434), False, 'import mimetypes\n'), ((10255, 10282), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (10272, 10282), False, 'import logging\n'), ((1666, 1688), 'smqtk.exceptions.NoUriResolutionError', 'NoUriResolutionError', ([], {}), '()\n', (1686, 1688), False, 'from smqtk.exceptions import InvalidUriError, NoUriResolutionError, ReadOnlyError\n'), ((2764, 2805), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': "(ext or '')", 'dir': 'd'}), "(suffix=ext or '', dir=d)\n", (2780, 2805), False, 'import tempfile\n'), ((2848, 2860), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (2856, 2860), False, 'import os\n'), ((3110, 3117), 'collections.deque', 'deque', ([], {}), '()\n', (3115, 3117), False, 'from collections import deque\n'), ((11043, 11109), 'smqtk.exceptions.InvalidUriError', 'InvalidUriError', (['uri', '"""No available implementation to handle URI."""'], {}), "(uri, 'No available implementation to handle URI.')\n", (11058, 11109), False, 'from smqtk.exceptions import InvalidUriError, NoUriResolutionError, ReadOnlyError\n'), ((2535, 2553), 'smqtk.utils.file.safe_create_dir', 'safe_create_dir', (['d'], {}), '(d)\n', (2550, 2553), False, 'from smqtk.utils.file import safe_create_dir\n'), ((9192, 9245), 'smqtk.exceptions.ReadOnlyError', 'ReadOnlyError', (["('This %s element is read only.' % self)"], {}), "('This %s element is read only.' 
% self)\n", (9205, 9245), False, 'from smqtk.exceptions import InvalidUriError, NoUriResolutionError, ReadOnlyError\n'), ((3213, 3227), 'os.path.isfile', 'osp.isfile', (['fp'], {}), '(fp)\n', (3223, 3227), True, 'import os.path as osp\n'), ((5372, 5396), 'os.path.expanduser', 'osp.expanduser', (['temp_dir'], {}), '(temp_dir)\n', (5386, 5396), True, 'import os.path as osp\n'), ((6344, 6362), 'os.path.isfile', 'os.path.isfile', (['fp'], {}), '(fp)\n', (6358, 6362), False, 'import os\n'), ((5539, 5554), 'os.path.dirname', 'osp.dirname', (['tf'], {}), '(tf)\n', (5550, 5554), True, 'import os.path as osp\n'), ((6384, 6397), 'os.remove', 'os.remove', (['fp'], {}), '(fp)\n', (6393, 6397), False, 'import os\n')]
|
import collections
import json
import os
import numpy as np
import matplotlib.pyplot as plt
from visualDet3D.evaluator.kitti.kitti_common import get_label_annos, get_label_anno
from visualDet3D.evaluator.kitti.eval import get_official_eval_result
from numba import cuda
def get_gt_annos(label_path, label_split_file):
gt_annos = []
seq_indices = {}
with open(label_split_file) as f:
training_dict = json.load(f, object_pairs_hook=collections.OrderedDict)
train_lines = []
start_idx = 0
end_idx = 0
for key in training_dict.keys():
start_idx = end_idx
for sample in training_dict[key]:
train_lines.append(sample.strip())
end_idx += 1
seq_indices[key] = [start_idx, end_idx]
for line in train_lines:
line = line.strip()
label_file = os.path.join(label_path, line.format('labels_kitti', 'txt'))
gt_annos.append(get_label_anno(label_file))
return gt_annos, seq_indices
def evaluate_synthia(label_path="/home/hins/Desktop/M3D-RPN/data/kitti/training/label_2",
result_path="/home/hins/IROS_try/pytorch-retinanet/output/validation/data",
gac_result_path="/home/hins/IROS_try/pytorch-retinanet/output/validation/data",
label_split_file="val.txt",
current_classes=[0],
gpu=0):
cuda.select_device(gpu)
dt_annos = get_label_annos(result_path)
gac_dt_annos = get_label_annos(gac_result_path)
gt_annos, seq_indices = get_gt_annos(label_path, label_split_file)
accuracy_file_name = 'accuracies_gac_deepv2d_without_gac.json'
result_texts = []
accuracies = []
gac_accuracies = []
seqs = []
accuracy_dict = {}
for current_class in current_classes:
# result_text = get_official_eval_result(gt_annos, dt_annos, current_class)
# print(result_text)
# return
for seq, indices in seq_indices.items():
start_idx, end_idx = indices
gt_anno = gt_annos[start_idx: end_idx]
dt_anno = dt_annos[start_idx: end_idx]
gac_dt_anno = gac_dt_annos[start_idx: end_idx]
assert len(gt_anno) == len(dt_anno)
if gt_anno == []:
continue
try:
result_text = get_official_eval_result(gt_anno, dt_anno, current_class)
gac_result_text = get_official_eval_result(gt_anno, gac_dt_anno, current_class)
# print('Sequence is {}.'.format(seq))
# print(result_text)
# print(gac_result_text)
except Exception as e:
# import pdb
# pdb.set_trace()
print(e)
accuracy = parse_result_text(result_text)
gac_accuracy = parse_result_text(gac_result_text)
accuracy_dict[seq] = [float(accuracy), float(gac_accuracy)]
accuracies.append(accuracy)
gac_accuracies.append(gac_accuracy)
seqs.append(seq)
json.dump(
dict(accuracy=accuracies, gac_accuracy=gac_accuracies, seqs=seqs),
open(accuracy_file_name, 'w'),
indent=4)
# plot(accuracy_dict, accuracy_file_name)
# print('My accuracy for {} sequence is {}'.format(seq, accuracy))
# print('GAC accuracy for {} sequence is {}'.format(seq, gac_accuracy))
# return result_texts
def plot(accuracy_dict, accuracy_file_name):
seqs = accuracy_dict.keys()
accuracy = [accuracy[0] for accuracy in accuracy_dict.values()]
gac_accuracy = [accuracy[1] for accuracy in accuracy_dict.values()]
x = np.arange(len(seqs)) # the label locations
width = 0.1 # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width/2, accuracy, width, label='my')
rects2 = ax.bar(x + width/2, gac_accuracy, width, label='gac')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Scores')
ax.set_title('GAC vs My accuracies')
ax.set_xticks(x)
# ax.set_xticklabels(seqs)
ax.legend()
ax.bar_label(rects1, padding=3)
ax.bar_label(rects2, padding=3)
fig.tight_layout()
# plt.show()
accuracy_file_name = accuracy_file_name.replace('json', 'png')
plt.savefig(accuracy_file_name)
def parse_result_text(result_text):
lines = result_text.splitlines()
acc_3d = lines[3].split()[1].strip()[3:-1]
return float(acc_3d)
def load_json_accuracy():
accuracies_file = json.load(open('accuracies.json'))
gac_accuracy = accuracies_file['gac_accuracy']
accuracy = accuracies_file['gac_accuracy']
seqs = accuracies_file['seqs']
plot(seqs, gac_accuracy, accuracy)
# load_json_accuracy()
label_path = '/home/shanus/datasets/SYNTHIA_ICCV2019/'
gac_result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_full_baseline/output/validation/data/'
result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_full_new_code/output/validation/data/'
label_split_file = '/home/shanus/datasets/SYNTHIA_ICCV2019/splits/testing_split_filtered.json'
# label_split_file = '/home/shanus/datasets/SYNTHIA_ICCV2019/splits/testing_10k_split.json'
# gac_result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_10k_baseline/output/validation/data/'
# result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_10k_new_code/output/validation/data/'
# label_split_file = '/home/shanus/datasets/SYNTHIA_ICCV2019/splits/testing_10k_split.json'
# gac_result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_10k_baseline_trained_on_full/output/validation/data/'
# result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_10k_my_trained_on_full/output/validation/data/'
gac_result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_full_baseline/output/validation/data/'
result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_full_new_code/output/validation/data'
result_path = '/misc/lmbraid18/shanus/3D_object_detection/synthia/Mono3D_DeepV2D/output/validation/data/'
label_split_file = '/home/shanus/datasets/SYNTHIA_ICCV2019/splits/testing_split_filtered.json'
evaluate_synthia(label_path, result_path, gac_result_path, label_split_file)
|
[
"json.load",
"visualDet3D.evaluator.kitti.eval.get_official_eval_result",
"visualDet3D.evaluator.kitti.kitti_common.get_label_annos",
"numba.cuda.select_device",
"matplotlib.pyplot.subplots",
"visualDet3D.evaluator.kitti.kitti_common.get_label_anno",
"matplotlib.pyplot.savefig"
] |
[((1450, 1473), 'numba.cuda.select_device', 'cuda.select_device', (['gpu'], {}), '(gpu)\n', (1468, 1473), False, 'from numba import cuda\n'), ((1489, 1517), 'visualDet3D.evaluator.kitti.kitti_common.get_label_annos', 'get_label_annos', (['result_path'], {}), '(result_path)\n', (1504, 1517), False, 'from visualDet3D.evaluator.kitti.kitti_common import get_label_annos, get_label_anno\n'), ((1537, 1569), 'visualDet3D.evaluator.kitti.kitti_common.get_label_annos', 'get_label_annos', (['gac_result_path'], {}), '(gac_result_path)\n', (1552, 1569), False, 'from visualDet3D.evaluator.kitti.kitti_common import get_label_annos, get_label_anno\n'), ((3821, 3835), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3833, 3835), True, 'import matplotlib.pyplot as plt\n'), ((4363, 4394), 'matplotlib.pyplot.savefig', 'plt.savefig', (['accuracy_file_name'], {}), '(accuracy_file_name)\n', (4374, 4394), True, 'import matplotlib.pyplot as plt\n'), ((422, 477), 'json.load', 'json.load', (['f'], {'object_pairs_hook': 'collections.OrderedDict'}), '(f, object_pairs_hook=collections.OrderedDict)\n', (431, 477), False, 'import json\n'), ((975, 1001), 'visualDet3D.evaluator.kitti.kitti_common.get_label_anno', 'get_label_anno', (['label_file'], {}), '(label_file)\n', (989, 1001), False, 'from visualDet3D.evaluator.kitti.kitti_common import get_label_annos, get_label_anno\n'), ((2385, 2442), 'visualDet3D.evaluator.kitti.eval.get_official_eval_result', 'get_official_eval_result', (['gt_anno', 'dt_anno', 'current_class'], {}), '(gt_anno, dt_anno, current_class)\n', (2409, 2442), False, 'from visualDet3D.evaluator.kitti.eval import get_official_eval_result\n'), ((2477, 2538), 'visualDet3D.evaluator.kitti.eval.get_official_eval_result', 'get_official_eval_result', (['gt_anno', 'gac_dt_anno', 'current_class'], {}), '(gt_anno, gac_dt_anno, current_class)\n', (2501, 2538), False, 'from visualDet3D.evaluator.kitti.eval import get_official_eval_result\n')]
|
from flask import Flask, request
from flask_cors import CORS, cross_origin
from flask_restful import Resource, Api
from json import dumps
from flask_jsonpify import jsonify
import numpy as np
import pandas as pd
import matplotlib.pylab as plt
import seaborn as sns
from matplotlib.pylab import rcParams
from datetime import datetime
app = Flask(__name__)
api = Api(app)
from sklearn.externals import joblib
CORS(app)
@app.route("/get_",methods=["POST"])
def get_():
#date_=request.form['date_']
#tons=request.form['tons']
#return json.dumps({'status':'OK'});
data = request.get_json(force=True)
date_= datetime.strptime(data['date_'], '%Y-%m-%d').toordinal()
qty=float(data["tons"])
lin_reg = joblib.load("regression_model.pkl")
dat= lin_reg.predict(np.array([[qty,date_]]))
dat=np.round(dat,2)
dat=dat.tolist()
return jsonify(dat)
##api.add_resource(Employees_Name, '/employees/<employee_id>') # Route_3
@app.route("/get1_",methods=["POST"])
def get1_():
data1=request.get_json(force=True)
date_=datetime.strptime(data1['date_'],'%Y-%m-%d').toordinal()
qty=float(data1["tons"])
lin_reg1 = joblib.load("regression_model1.pkl")
dat1= lin_reg1.predict(np.array([[date_,qty]]))
dat1=dat1.tolist()
return jsonify(dat1)
#@<EMAIL>("/get2_",methods=["POST"])
#def get2_():
# data2=request.get_json(force=True)
# date_=datetime.strptime(data2['date_'],'%Y-%m-%d').toordinal()
# qty=float(data2["tons"])
#print(date_,qty)
#lin_reg2 = joblib.load("regression_model2.pkl")
#dat2= lin_reg2.predict(np.array([[date_,qty]]))
#dat2=dat2.tolist()
#return jsonify(dat2)
if __name__ == '__main__':
app.run(port=8080)
|
[
"flask_restful.Api",
"flask_jsonpify.jsonify",
"flask_cors.CORS",
"flask.Flask",
"datetime.datetime.strptime",
"numpy.array",
"sklearn.externals.joblib.load",
"numpy.round",
"flask.request.get_json"
] |
[((340, 355), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (345, 355), False, 'from flask import Flask, request\n'), ((362, 370), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (365, 370), False, 'from flask_restful import Resource, Api\n'), ((410, 419), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (414, 419), False, 'from flask_cors import CORS, cross_origin\n'), ((588, 616), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (604, 616), False, 'from flask import Flask, request\n'), ((733, 768), 'sklearn.externals.joblib.load', 'joblib.load', (['"""regression_model.pkl"""'], {}), "('regression_model.pkl')\n", (744, 768), False, 'from sklearn.externals import joblib\n'), ((827, 843), 'numpy.round', 'np.round', (['dat', '(2)'], {}), '(dat, 2)\n', (835, 843), True, 'import numpy as np\n'), ((881, 893), 'flask_jsonpify.jsonify', 'jsonify', (['dat'], {}), '(dat)\n', (888, 893), False, 'from flask_jsonpify import jsonify\n'), ((1031, 1059), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (1047, 1059), False, 'from flask import Flask, request\n'), ((1179, 1215), 'sklearn.externals.joblib.load', 'joblib.load', (['"""regression_model1.pkl"""'], {}), "('regression_model1.pkl')\n", (1190, 1215), False, 'from sklearn.externals import joblib\n'), ((1303, 1316), 'flask_jsonpify.jsonify', 'jsonify', (['dat1'], {}), '(dat1)\n', (1310, 1316), False, 'from flask_jsonpify import jsonify\n'), ((794, 818), 'numpy.array', 'np.array', (['[[qty, date_]]'], {}), '([[qty, date_]])\n', (802, 818), True, 'import numpy as np\n'), ((1243, 1267), 'numpy.array', 'np.array', (['[[date_, qty]]'], {}), '([[date_, qty]])\n', (1251, 1267), True, 'import numpy as np\n'), ((629, 673), 'datetime.datetime.strptime', 'datetime.strptime', (["data['date_']", '"""%Y-%m-%d"""'], {}), "(data['date_'], '%Y-%m-%d')\n", (646, 673), False, 'from datetime import datetime\n'), ((1070, 1115), 
'datetime.datetime.strptime', 'datetime.strptime', (["data1['date_']", '"""%Y-%m-%d"""'], {}), "(data1['date_'], '%Y-%m-%d')\n", (1087, 1115), False, 'from datetime import datetime\n')]
|
from . import EupathExporter
from . import ReferenceGenome
import sys
import os
import subprocess
class VCFFileExport(EupathExporter.Export):
# Constants
TYPE = "VCFFile"
VERSION = "1.0"
def __init__(self, args):
EupathExporter.Export.__init__(self,
VCFFileExport.TYPE,
VCFFileExport.VERSION,
'validateVCF',
args)
self._datasetInfos = []
# open manifest file
manifestPath = "/tmp/manifest." + str(os.getpid()) + ".txt"
manifest = open(manifestPath, "w+")
for i in range(6, len(args), 2):
print(i, args[i], file=sys.stdout)
samplename = args[i+1]
#filename = samplename + "." + args[i+2]
## Note in the xml this will need the right variables passed in - e.g. sample name, file format.
self._datasetInfos.append({"name": samplename, "path": args[i]})
print(samplename, file=manifest)
self._datasetInfos.append({"name": "manifest.txt", "path": manifestPath})
# Need this?
# self._refGenome = ReferenceGenome.Genome(args[10])
# if len(args) < 10:
# raise EupathExporter.ValidationException("The tool was passed too few arguments.")
def identify_dataset_files(self):
"""
:return: A list containing the dataset files accompanied by their EuPathDB designation.
"""
return self._datasetInfos
def identify_projects(self):
return [self._refGenome.project]
|
[
"os.getpid"
] |
[((607, 618), 'os.getpid', 'os.getpid', ([], {}), '()\n', (616, 618), False, 'import os\n')]
|
""" Does equal """
import re
pattern = re.compile("(.*) == (.*)")
def fn(groups, lsv_fn):
"""Does equal function"""
field, value = groups
try:
float_value = float(value)
return lambda data: float(lsv_fn(data, field)) == float_value
except:
return lambda data: lsv_fn(data, field) == value
|
[
"re.compile"
] |
[((40, 66), 're.compile', 're.compile', (['"""(.*) == (.*)"""'], {}), "('(.*) == (.*)')\n", (50, 66), False, 'import re\n')]
|
#!/usr/bin/env python3
# (c) https://t.me/TelethonChat/37677
# This Source Code Form is subject to the terms of the GNU
# General Public License, v.3.0. If a copy of the GPL was not distributed with this
# file, You can obtain one at https://www.gnu.org/licenses/gpl-3.0.en.html.
from telethon.sync import TelegramClient
from telethon.sessions import StringSession
print("""Please go-to my.telegram.org
Login using your Telegram account
Click on API Development Tools
Create a new application, by entering the required details
For TeleBot""")
APP_ID = int(input("Enter APP ID here: "))
API_HASH = input("Enter API HASH here: ")
with TelegramClient(StringSession(), APP_ID, API_HASH) as client:
print(client.session.save())
|
[
"telethon.sessions.StringSession"
] |
[((651, 666), 'telethon.sessions.StringSession', 'StringSession', ([], {}), '()\n', (664, 666), False, 'from telethon.sessions import StringSession\n')]
|
from models.spacy_based_ir import SpacyIR
from models.bert_sts import BertSTSIR
from models.bert_nli import BertNLIIR
import os
from tqdm import tqdm
import argparse
import pickle
import numpy as np
import json
def read_data_to_score(factfile,is_fact_fact=False,datasets=None):
data = {}
base_dir = os.environ['PREPARED_DATA'] + "/hypothesis/"
if not is_fact_fact:
fnames = datasets
else:
base_dir = os.environ['PREPARED_DATA'] + "/knowledge/"
fnames = ["openbook.txt"]
facts = []
factlines = open(os.environ['PREPARED_DATA'] + "/knowledge/"+factfile,"r").readlines()
for fact in tqdm(factlines,desc="Processing Facts:"):
fact=fact.strip().replace('"',"")
facts.append(fact)
for fname in fnames:
lines = open(base_dir+fname,"r").readlines()
for index,line in tqdm(enumerate(lines),desc="Reading From "+fname+" :"):
if not is_fact_fact:
line = line.strip().split("\t")
idx = line[0]
choices = line[2:6]
assert len(choices) == 4
for index,choice in enumerate(choices):
nidx=idx+"__ch_"+str(index)
data[nidx]=choice
else:
line = line.strip().replace('"',"")
data[str(index)]=line
return {"data":data,"facts":facts}
def read_preranked_file(fname):
preranked = {}
lines = open(os.environ['PREPARED_DATA'] + "/ranked/"+fname,"r").readlines()
for line in tqdm(lines,desc="Reading Pretrained"):
line = line.strip()
row = json.loads(line)
preranked[row["id"]]=row
return preranked
#datasets = ["hyp-ques-test.tsv","hyp-ques-train.tsv","hyp-ques-val.tsv"]
datasets = ["hyp-ques-test.tsv","hyp-ques-val.tsv"]
factfile = "omcs.txt"
prerankedfiles = ["tfidf-omcs.json","scapy-omcs.json"]
for rankedfile in prerankedfiles:
for modeldir in tqdm(["/scratch/pbanerj6/stsir4_output/","/scratch/pbanerj6/stsb_output"],desc="Scoring :"+rankedfile):
print("Running :",rankedfile,modeldir)
irmodel = BertSTSIR(topk=50,output_dir=modeldir,model="pytorch_model.bin.4",eval_batch_size=1024)
data = read_data_to_score(factfile,datasets=datasets)
preranked = read_preranked_file(rankedfile)
outputname = "sts.json" if "stsb" in modeldir else "trained.json"
prefix = rankedfile.split(".")[0]
irmodel.predict(data,os.environ['PREPARED_DATA'] + "/ranked/"+prefix+"-"+outputname,"/scratch/pbanerj6/hyptestvaltokens/"+prefix+"-"+outputname+".tokens",preranked=preranked)
# irmodel = BertSTSIR(topk=50,output_dir="/scratch/pbanerj6/stsb_output",model="pytorch_model.bin.4",eval_batch_size=1024)
# data = read_data_to_score("openbook.txt",datasets=datasets)
# irmodel.predict(data,os.environ['PREPARED_DATA'] + "/ranked/sts-factfact-orig.json","/scratch/pbanerj6/hyptestvaltokens/sts.tokens")
# model_path = "/scratch/pbanerj6/qnli_orig_output/"
# model = "pytorch_model.bin.4"
# outfile = os.environ['PREPARED_DATA'] + "/ranked/qnli-openbook.json"
# irmodel = BertNLIIR(topk=50,output_dir=model_path,model=model,eval_batch_size=2048)
# data = read_data_to_score("openbook.txt",datasets=datasets)
# irmodel.predict(data,outfile,"/scratch/pbanerj6/hyptestvaltokens/sts.tokens")
|
[
"tqdm.tqdm",
"models.bert_sts.BertSTSIR",
"json.loads"
] |
[((639, 680), 'tqdm.tqdm', 'tqdm', (['factlines'], {'desc': '"""Processing Facts:"""'}), "(factlines, desc='Processing Facts:')\n", (643, 680), False, 'from tqdm import tqdm\n'), ((1550, 1588), 'tqdm.tqdm', 'tqdm', (['lines'], {'desc': '"""Reading Pretrained"""'}), "(lines, desc='Reading Pretrained')\n", (1554, 1588), False, 'from tqdm import tqdm\n'), ((1974, 2084), 'tqdm.tqdm', 'tqdm', (["['/scratch/pbanerj6/stsir4_output/', '/scratch/pbanerj6/stsb_output']"], {'desc': "('Scoring :' + rankedfile)"}), "(['/scratch/pbanerj6/stsir4_output/', '/scratch/pbanerj6/stsb_output'],\n desc='Scoring :' + rankedfile)\n", (1978, 2084), False, 'from tqdm import tqdm\n'), ((1631, 1647), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1641, 1647), False, 'import json\n'), ((2143, 2237), 'models.bert_sts.BertSTSIR', 'BertSTSIR', ([], {'topk': '(50)', 'output_dir': 'modeldir', 'model': '"""pytorch_model.bin.4"""', 'eval_batch_size': '(1024)'}), "(topk=50, output_dir=modeldir, model='pytorch_model.bin.4',\n eval_batch_size=1024)\n", (2152, 2237), False, 'from models.bert_sts import BertSTSIR\n')]
|
# coding=utf-8
# Copyright 2022 NAVER AI Labs and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch ViLT model."""
import collections.abc
import math
from dataclasses import dataclass
from typing import List, Optional, Tuple
import torch
import torch.utils.checkpoint
from packaging import version
from torch import nn
from torch.nn import CrossEntropyLoss
from ...activations import ACT2FN
from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings
from ...modeling_outputs import (
BaseModelOutput,
BaseModelOutputWithPooling,
MaskedLMOutput,
ModelOutput,
SequenceClassifierOutput,
)
from ...modeling_utils import PreTrainedModel, find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import logging
from .configuration_vilt import ViltConfig
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "ViltConfig"
_CHECKPOINT_FOR_DOC = "dandelin/vilt-b32-mlm"
VILT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"dandelin/vilt-b32-mlm",
# See all ViLT models at https://huggingface.co/models?filter=vilt
]
@dataclass
class ViltForImagesAndTextClassificationOutput(ModelOutput):
"""
Class for outputs of [`ViltForImagesAndTextClassification`].
Args:
loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided):
Classification (or regression if config.num_labels==1) loss.
logits (`torch.FloatTensor` of shape `(batch_size, config.num_labels)`):
Classification (or regression if config.num_labels==1) scores (before SoftMax).
hidden_states (`List[tuple(torch.FloatTensor)]`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
List of tuples of `torch.FloatTensor` (one for each image-text pair, each tuple containing the output of
the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
attentions (`List[tuple(torch.FloatTensor)]`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
List of tuples of `torch.FloatTensor` (one for each image-text pair, each tuple containing the attention
weights of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the
attention softmax, used to compute the weighted average in the self-attention heads.
"""
loss: Optional[torch.FloatTensor] = None
logits: torch.FloatTensor = None
hidden_states: Optional[List[Tuple[torch.FloatTensor]]] = None
attentions: Optional[List[Tuple[torch.FloatTensor]]] = None
# Copied from transformers.models.vit.modeling_vit.to_2tuple
def to_2tuple(x):
if isinstance(x, collections.abc.Iterable):
return x
return (x, x)
class ViltEmbeddings(nn.Module):
"""
Construct the text and patch embeddings.
Text embeddings are equivalent to BERT embeddings.
Patch embeddings are equivalent to ViT embeddings.
"""
def __init__(self, config):
super().__init__()
# text embeddings
self.text_embeddings = TextEmbeddings(config)
# patch embeddings
self.cls_token = nn.Parameter(torch.zeros(1, 1, config.hidden_size))
self.patch_embeddings = PatchEmbeddings(
image_size=config.image_size,
patch_size=config.patch_size,
num_channels=config.num_channels,
embed_dim=config.hidden_size,
)
num_patches = self.patch_embeddings.num_patches
self.position_embeddings = nn.Parameter(torch.zeros(1, num_patches + 1, config.hidden_size))
# modality type (text/patch) embeddings
self.token_type_embeddings = nn.Embedding(config.modality_type_vocab_size, config.hidden_size)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.config = config
def visual_embed(self, pixel_values, pixel_mask, max_image_length=200):
_, _, ph, pw = self.patch_embeddings.projection.weight.shape
x = self.patch_embeddings(pixel_values)
x_mask = pixel_mask[:, None, :, :].float()
x_mask = nn.functional.interpolate(x_mask, size=(x.shape[2], x.shape[3])).long()
x_h = x_mask[:, 0].sum(dim=1)[:, 0]
x_w = x_mask[:, 0].sum(dim=2)[:, 0]
batch_size, num_channels, height, width = x.shape
patch_dim = self.config.image_size // self.config.patch_size
spatial_pos = self.position_embeddings[:, 1:, :].transpose(1, 2).view(1, num_channels, patch_dim, patch_dim)
pos_embed = torch.cat(
[
nn.functional.pad(
nn.functional.interpolate(
spatial_pos,
size=(h, w),
mode="bilinear",
align_corners=True,
),
(0, width - w, 0, height - h),
)
for h, w in zip(x_h, x_w)
],
dim=0,
)
pos_embed = pos_embed.flatten(2).transpose(1, 2)
x = x.flatten(2).transpose(1, 2)
patch_index = torch.stack(
torch.meshgrid(torch.arange(x_mask.shape[-2]), torch.arange(x_mask.shape[-1]), indexing="ij"), dim=-1
)
patch_index = patch_index[None, None, :, :, :]
patch_index = patch_index.expand(x_mask.shape[0], x_mask.shape[1], -1, -1, -1)
patch_index = patch_index.flatten(1, 3)
x_mask = x_mask.flatten(1)
if max_image_length < 0 or max_image_length is None or not isinstance(max_image_length, int):
# suppose aug is 800 x 1333, then, maximum effective res is 800 x 1333 (if one side gets bigger, the other will be constrained and be shrinked)
# (800 // self.patch_size) * (1333 // self.patch_size) is the maximum number of patches that single image can get.
# if self.patch_size = 32, 25 * 41 = 1025
# if res is 384 x 640, 12 * 20 = 240
effective_resolution = x_h * x_w
max_image_length = effective_resolution.max()
else:
effective_resolution = x_h * x_w
max_image_length = min(effective_resolution.max(), max_image_length)
valid_idx = x_mask.nonzero(as_tuple=False)
non_valid_idx = (1 - x_mask).nonzero(as_tuple=False)
unique_rows = valid_idx[:, 0].unique()
valid_row_idx = [valid_idx[valid_idx[:, 0] == u] for u in unique_rows]
non_valid_row_idx = [non_valid_idx[non_valid_idx[:, 0] == u] for u in unique_rows]
valid_nums = [v.size(0) for v in valid_row_idx]
non_valid_nums = [v.size(0) for v in non_valid_row_idx]
pad_nums = [max_image_length - v for v in valid_nums]
select = list()
for i, (v, nv, p) in enumerate(zip(valid_nums, non_valid_nums, pad_nums)):
if p <= 0:
valid_choice = torch.multinomial(torch.ones(v).float(), max_image_length)
select.append(valid_row_idx[i][valid_choice])
else:
pad_choice = torch.multinomial(torch.ones(nv).float(), p, replacement=True)
select.append(torch.cat([valid_row_idx[i], non_valid_row_idx[i][pad_choice]], dim=0))
select = torch.cat(select, dim=0)
x = x[select[:, 0], select[:, 1]].view(batch_size, -1, num_channels)
x_mask = x_mask[select[:, 0], select[:, 1]].view(batch_size, -1)
patch_index = patch_index[select[:, 0], select[:, 1]].view(batch_size, -1, 2)
pos_embed = pos_embed[select[:, 0], select[:, 1]].view(batch_size, -1, num_channels)
cls_tokens = self.cls_token.expand(batch_size, -1, -1)
x = torch.cat((cls_tokens, x), dim=1)
pos_embed = torch.cat(
(self.position_embeddings[:, 0, :][:, None, :].expand(batch_size, -1, -1), pos_embed), dim=1
)
x = x + pos_embed
x = self.dropout(x)
x_mask = torch.cat([torch.ones(x_mask.shape[0], 1).to(x_mask), x_mask], dim=1)
return x, x_mask, (patch_index, (height, width))
    def forward(
        self,
        input_ids,
        attention_mask,
        token_type_ids,
        pixel_values,
        pixel_mask,
        inputs_embeds,
        image_embeds,
        image_token_type_idx=1,
    ):
        """
        Embed text and image inputs and concatenate them into a single multimodal sequence.

        Returns a tuple `(embeddings, masks)`: `embeddings` is the concatenation of text token embeddings followed
        by image patch embeddings (dim=1), and `masks` is the matching concatenated attention mask.
        """
        # PART 1: text embeddings
        text_embeds = self.text_embeddings(
            input_ids=input_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds
        )

        # PART 2: patch embeddings (with interpolated position encodings)
        if image_embeds is None:
            image_embeds, image_masks, patch_index = self.visual_embed(
                pixel_values, pixel_mask, max_image_length=self.config.max_image_length
            )
        else:
            # Precomputed image embeddings were supplied; derive the mask directly from the pixel mask.
            image_masks = pixel_mask.flatten(1)

        # PART 3: add modality type embeddings
        # 0 indicates text, 1 indicates image, 2 is optionally used when a second image is provided (NLVR2)
        if image_token_type_idx is None:
            image_token_type_idx = 1
        text_embeds = text_embeds + self.token_type_embeddings(
            torch.zeros_like(attention_mask, dtype=torch.long, device=text_embeds.device)
        )
        image_embeds = image_embeds + self.token_type_embeddings(
            torch.full_like(image_masks, image_token_type_idx, dtype=torch.long, device=text_embeds.device)
        )

        # PART 4: concatenate
        embeddings = torch.cat([text_embeds, image_embeds], dim=1)
        masks = torch.cat([attention_mask, image_masks], dim=1)

        return embeddings, masks
class TextEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
        self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
        # `persistent=False` for register_buffer only exists on torch > 1.6.0; on older
        # versions the all-zeros token_type_ids buffer is simply not registered.
        if version.parse(torch.__version__) > version.parse("1.6.0"):
            self.register_buffer(
                "token_type_ids",
                torch.zeros(self.position_ids.size(), dtype=torch.long),
                persistent=False,
            )

    def forward(self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None):
        # Derive the (batch, seq_len) shape either from the token ids or from precomputed embeddings.
        if input_ids is not None:
            input_shape = input_ids.size()
        else:
            input_shape = inputs_embeds.size()[:-1]

        seq_length = input_shape[1]

        if position_ids is None:
            position_ids = self.position_ids[:, :seq_length]

        # Setting the token_type_ids to the registered buffer in constructor where it is all zeros, which usually occurs
        # when its auto-generated, registered buffer helps users when tracing the model without passing token_type_ids, solves
        # issue #5664
        if token_type_ids is None:
            if hasattr(self, "token_type_ids"):
                buffered_token_type_ids = self.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)

        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        # Sum word + token-type (+ absolute position) embeddings, then normalize and apply dropout.
        embeddings = inputs_embeds + token_type_embeddings
        if self.position_embedding_type == "absolute":
            position_embeddings = self.position_embeddings(position_ids)
            embeddings += position_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
# Based on timm implementation, which can be found here:
# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py
class PatchEmbeddings(nn.Module):
    """
    Image to Patch Embedding: maps an image to a grid of patch embeddings using a strided convolution.
    """

    def __init__(self, image_size=224, patch_size=16, num_channels=3, embed_dim=768):
        super().__init__()
        # Normalize scalar sizes into (height, width) pairs.
        size_hw = to_2tuple(image_size)
        patch_hw = to_2tuple(patch_size)
        self.image_size = size_hw
        self.patch_size = patch_hw
        # Number of non-overlapping patches tiling the full image.
        self.num_patches = (size_hw[1] // patch_hw[1]) * (size_hw[0] // patch_hw[0])
        # One kernel application per patch produces an embed_dim-dimensional embedding.
        self.projection = nn.Conv2d(num_channels, embed_dim, kernel_size=patch_hw, stride=patch_hw)

    def forward(self, pixel_values):
        # (batch, channels, H, W) -> (batch, embed_dim, H // patch, W // patch)
        return self.projection(pixel_values)
class ViltSelfAttention(nn.Module):
    """Multi-head self-attention (BERT/ViT style) over the joint text+image token sequence."""

    def __init__(self, config):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            # Bug fix: the placeholder was `{config.hidden_size,}` — the stray comma made the
            # f-string format a one-element tuple, printing e.g. "(768,)" instead of "768".
            raise ValueError(
                f"The hidden size {config.hidden_size} is not a multiple of the number of attention "
                f"heads {config.num_attention_heads}."
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)

    def transpose_for_scores(self, x):
        # (batch, seq, all_head_size) -> (batch, num_heads, seq, head_size)
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(*new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(self, hidden_states, attention_mask=None, head_mask=None, output_attentions=False):
        mixed_query_layer = self.query(hidden_states)

        key_layer = self.transpose_for_scores(self.key(hidden_states))
        value_layer = self.transpose_for_scores(self.value(hidden_states))
        query_layer = self.transpose_for_scores(mixed_query_layer)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Apply the attention mask (precomputed for all layers in the model's forward() function).
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.Softmax(dim=-1)(attention_scores)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)

        # (batch, heads, seq, head_size) -> (batch, seq, all_head_size)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(*new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs
# Copied from transformers.models.vit.modeling_vit.ViTSelfOutput with ViT->Vilt
class ViltSelfOutput(nn.Module):
    """
    Projects the attention context back to ``hidden_size`` and applies dropout. The residual connection is defined
    in ViltLayer instead of here (as is the case with other models), due to the layernorm applied before each block.
    """

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        # `input_tensor` is accepted for interface parity with other models but is intentionally unused here.
        projected = self.dense(hidden_states)
        return self.dropout(projected)
class ViltAttention(nn.Module):
    """Self-attention followed by its output projection, with support for pruning attention heads."""

    def __init__(self, config):
        super().__init__()
        self.attention = ViltSelfAttention(config)
        self.output = ViltSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        if not heads:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads
        )

        # Drop the pruned rows/columns from each projection matrix.
        self.attention.query = prune_linear_layer(self.attention.query, index)
        self.attention.key = prune_linear_layer(self.attention.key, index)
        self.attention.value = prune_linear_layer(self.attention.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Keep the bookkeeping consistent with the reduced head count.
        remaining_heads = self.attention.num_attention_heads - len(heads)
        self.attention.num_attention_heads = remaining_heads
        self.attention.all_head_size = self.attention.attention_head_size * remaining_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(self, hidden_states, attention_mask=None, head_mask=None, output_attentions=False):
        self_outputs = self.attention(hidden_states, attention_mask, head_mask, output_attentions)
        attention_output = self.output(self_outputs[0], hidden_states)
        # Re-attach the attention probabilities when the caller requested them.
        return (attention_output,) + self_outputs[1:]
# Copied from transformers.models.vit.modeling_vit.ViTIntermediate with ViT->Vilt
class ViltIntermediate(nn.Module):
    """First half of the feed-forward block: expand to ``intermediate_size`` and apply the activation."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # config.hidden_act may name an activation (looked up in ACT2FN) or already be a callable.
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states):
        return self.intermediate_act_fn(self.dense(hidden_states))
# Copied from transformers.models.vit.modeling_vit.ViTOutput with ViT->Vilt
class ViltOutput(nn.Module):
    """Second half of the feed-forward block: project back to ``hidden_size``, apply dropout, add the residual."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        projected = self.dropout(self.dense(hidden_states))
        # Residual connection with the block input.
        return projected + input_tensor
class ViltLayer(nn.Module):
    """This corresponds to the Block class in the timm implementation."""

    def __init__(self, config):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = ViltAttention(config)
        self.intermediate = ViltIntermediate(config)
        self.output = ViltOutput(config)
        self.layernorm_before = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.layernorm_after = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states, attention_mask=None, head_mask=None, output_attentions=False):
        # Pre-norm design: layernorm is applied before self-attention in ViLT.
        normed_states = self.layernorm_before(hidden_states)
        attn_outputs = self.attention(
            normed_states,
            attention_mask,
            head_mask,
            output_attentions=output_attentions,
        )

        # First residual connection.
        hidden_states = attn_outputs[0] + hidden_states

        # Feed-forward sub-block, again with layernorm applied first.
        ffn_input = self.layernorm_after(hidden_states)
        layer_output = self.output(self.intermediate(ffn_input), hidden_states)

        # Propagate self-attention weights if they were requested.
        return (layer_output,) + attn_outputs[1:]
class ViltEncoder(nn.Module):
    """Stack of `ViltLayer` transformer blocks, with optional gradient checkpointing during training."""

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([ViltLayer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        output_attentions=False,
        output_hidden_states=False,
        return_dict=True,
    ):
        # Accumulators for per-layer outputs; only allocated when the caller asks for them.
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            if self.gradient_checkpointing and self.training:
                # Recompute activations in the backward pass to trade compute for memory.
                # The closure binds `output_attentions` since checkpoint() only forwards tensors.
                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs, output_attentions)

                    return custom_forward

                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module),
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                )
            else:
                layer_outputs = layer_module(hidden_states, attention_mask, layer_head_mask, output_attentions)

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        # Include the final hidden state in the collected tuple.
        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )
class ViltPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = ViltConfig
    base_model_prefix = "vilt"
    supports_gradient_checkpointing = True

    def _init_weights(self, module):
        """Initialize the weights"""
        std = self.config.initializer_range
        if isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, (nn.Linear, nn.Conv2d)):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()

    def _set_gradient_checkpointing(self, module, value=False):
        # Checkpointing is toggled on the encoder, which owns the transformer layers.
        if isinstance(module, ViltEncoder):
            module.gradient_checkpointing = value
VILT_START_DOCSTRING = r"""
This model is a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`_ subclass. Use
it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
behavior.
Parameters:
config ([`ViltConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
VILT_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `({0})`):
Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`BertTokenizer`]. See
[`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input
IDs?](../glossary#input-ids)
attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
1]`:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
[What are token type IDs?](../glossary#token-type-ids)
pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):
Pixel values. Pixel values can be obtained using [`ViltFeatureExtractor`]. See
[`ViltFeatureExtractor.__call__`] for details.
pixel_mask (`torch.LongTensor` of shape `(batch_size, height, width)`, *optional*):
Mask to avoid performing attention on padding pixel values. Mask values selected in `[0, 1]`:
- 1 for pixels that are real (i.e. **not masked**),
- 0 for pixels that are padding (i.e. **masked**).
`What are attention masks? <../glossary.html#attention-mask>`__
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
model's internal embedding lookup matrix.
image_embeds (`torch.FloatTensor` of shape `(batch_size, num_patches, hidden_size)`, *optional*):
Optionally, instead of passing `pixel_values`, you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert `pixel_values` into patch embeddings.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
VILT_IMAGES_AND_TEXT_CLASSIFICATION_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `({0})`):
Indices of input sequence tokens in the vocabulary. Indices can be obtained using [`BertTokenizer`]. See
[`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input
IDs?](../glossary#input-ids)
attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
1]`:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
[What are token type IDs?](../glossary#token-type-ids)
pixel_values (`torch.FloatTensor` of shape `(batch_size, num_images, num_channels, height, width)`):
Pixel values. Pixel values can be obtained using [`ViltFeatureExtractor`]. See
[`ViltFeatureExtractor.__call__`] for details.
pixel_mask (`torch.LongTensor` of shape `(batch_size, num_images, height, width)`, *optional*):
Mask to avoid performing attention on padding pixel values. Mask values selected in `[0, 1]`:
- 1 for pixels that are real (i.e. **not masked**),
- 0 for pixels that are padding (i.e. **masked**).
`What are attention masks? <../glossary.html#attention-mask>`__
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
model's internal embedding lookup matrix.
image_embeds (`torch.FloatTensor` of shape `(batch_size, num_patches, hidden_size)`, *optional*):
Optionally, instead of passing `pixel_values`, you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert `pixel_values` into patch embeddings.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare ViLT Model transformer outputting raw hidden-states without any specific head on top.",
    VILT_START_DOCSTRING,
)
class ViltModel(ViltPreTrainedModel):
    """Bare ViLT encoder: multimodal embeddings -> transformer encoder -> (sequence output, pooled output)."""

    def __init__(self, config, add_pooling_layer=True):
        super().__init__(config)
        self.config = config

        self.embeddings = ViltEmbeddings(config)
        self.encoder = ViltEncoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        # The pooler can be disabled for heads that only consume token-level hidden states.
        self.pooler = ViltPooler(config) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.text_embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.text_embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(VILT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        pixel_values=None,
        pixel_mask=None,
        head_mask=None,
        inputs_embeds=None,
        image_embeds=None,
        image_token_type_idx=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        Returns:

        Examples:

        ```python
        >>> from transformers import ViltProcessor, ViltModel
        >>> from PIL import Image
        >>> import requests

        >>> # prepare image and text
        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)
        >>> text = "hello world"

        >>> processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-mlm")
        >>> model = ViltModel.from_pretrained("dandelin/vilt-b32-mlm")

        >>> inputs = processor(image, text, return_tensors="pt")
        >>> outputs = model(**inputs)
        >>> last_hidden_states = outputs.last_hidden_state
        ```"""
        # Fall back to config defaults for output flags when the caller passes None.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # Exactly one of input_ids / inputs_embeds must be provided.
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        batch_size, seq_length = input_shape
        device = input_ids.device if input_ids is not None else inputs_embeds.device

        if attention_mask is None:
            attention_mask = torch.ones(((batch_size, seq_length)), device=device)

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")
        batch_size, num_channels, height, width = pixel_values.shape
        if pixel_mask is None:
            pixel_mask = torch.ones(((batch_size, height, width)), device=device)

        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output, attention_mask = self.embeddings(
            input_ids,
            attention_mask,
            token_type_ids,
            pixel_values,
            pixel_mask,
            inputs_embeds,
            image_embeds,
            image_token_type_idx=image_token_type_idx,
        )

        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)

        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

        if not return_dict:
            return (sequence_output, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )
class ViltPooler(nn.Module):
    """Pools the sequence by passing the first ([CLS]) token's state through a dense layer and tanh."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        # The first token's hidden state serves as a summary of the whole sequence.
        cls_state = hidden_states[:, 0]
        return self.activation(self.dense(cls_state))
@add_start_docstrings(
    """
    ViLT Model with a language modeling head on top as done during pretraining.
    """,
    VILT_START_DOCSTRING,
)
class ViltForMaskedLM(ViltPreTrainedModel):
    """ViLT encoder plus an MLM head; the loss is computed on the text tokens only."""

    def __init__(self, config):
        super().__init__(config)

        self.vilt = ViltModel(config)
        self.mlm_score = ViltMLMHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.mlm_score.decoder

    def set_output_embeddings(self, new_embeddings):
        self.mlm_score.decoder = new_embeddings

    @add_start_docstrings_to_model_forward(VILT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=MaskedLMOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        pixel_values=None,
        pixel_mask=None,
        head_mask=None,
        inputs_embeds=None,
        image_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (*torch.LongTensor* of shape *(batch_size, sequence_length)*, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in *[-100, 0, ...,
            config.vocab_size]* (see *input_ids* docstring) Tokens with indices set to *-100* are ignored (masked), the
            loss is only computed for the tokens with labels in *[0, ..., config.vocab_size]*

        Returns:

        Examples:

        ```python
        >>> from transformers import ViltProcessor, ViltForMaskedLM
        >>> import requests
        >>> from PIL import Image
        >>> import re
        >>> import torch

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)
        >>> text = "a bunch of [MASK] laying on a [MASK]."

        >>> processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-mlm")
        >>> model = ViltForMaskedLM.from_pretrained("dandelin/vilt-b32-mlm")

        >>> # prepare inputs
        >>> encoding = processor(image, text, return_tensors="pt")

        >>> # forward pass
        >>> outputs = model(**encoding)

        >>> tl = len(re.findall("\[MASK\]", text))
        >>> inferred_token = [text]

        >>> # gradually fill in the MASK tokens, one by one
        >>> with torch.no_grad():
        ...     for i in range(tl):
        ...         encoded = processor.tokenizer(inferred_token)
        ...         input_ids = torch.tensor(encoded.input_ids)
        ...         encoded = encoded["input_ids"][0][1:-1]
        ...         outputs = model(input_ids=input_ids, pixel_values=encoding.pixel_values)
        ...         mlm_logits = outputs.logits[0]  # shape (seq_len, vocab_size)
        ...         # only take into account text features (minus CLS and SEP token)
        ...         mlm_logits = mlm_logits[1 : input_ids.shape[1] - 1, :]
        ...         mlm_values, mlm_ids = mlm_logits.softmax(dim=-1).max(dim=-1)
        ...         # only take into account text
        ...         mlm_values[torch.tensor(encoded) != 103] = 0
        ...         select = mlm_values.argmax().item()
        ...         encoded[select] = mlm_ids[select].item()
        ...         inferred_token = [processor.decode(encoded)]

        >>> selected_token = ""
        >>> encoded = processor.tokenizer(inferred_token)
        >>> output = processor.decode(encoded.input_ids[0], skip_special_tokens=True)
        >>> print(output)
        a bunch of cats laying on a couch.
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.vilt(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            pixel_values=pixel_values,
            pixel_mask=pixel_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            image_embeds=image_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output, pooled_output = outputs[:2]
        # split up final hidden states into text and image features
        text_seq_len = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1]
        text_features, _ = (sequence_output[:, :text_seq_len], sequence_output[:, text_seq_len:])

        # MLM logits are computed over the text positions only.
        mlm_logits = self.mlm_score(text_features)

        masked_lm_loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()  # -100 index = padding token
            masked_lm_loss = loss_fct(mlm_logits.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (mlm_logits,) + outputs[2:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output

        return MaskedLMOutput(
            loss=masked_lm_loss,
            logits=mlm_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
class ViltPredictionHeadTransform(nn.Module):
    """Dense -> activation -> LayerNorm transform applied before the MLM decoder."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        # config.hidden_act may name an activation (looked up in ACT2FN) or already be a callable.
        if isinstance(config.hidden_act, str):
            self.transform_act_fn = ACT2FN[config.hidden_act]
        else:
            self.transform_act_fn = config.hidden_act
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states):
        return self.LayerNorm(self.transform_act_fn(self.dense(hidden_states)))
class ViltMLMHead(nn.Module):
    """Masked-language-modeling head: prediction-head transform followed by a vocab-size decoder with its own bias."""

    def __init__(self, config, weight=None):
        super().__init__()
        self.config = config
        self.transform = ViltPredictionHeadTransform(config)
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
        if weight is not None:
            self.decoder.weight = weight

        # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings`
        self.decoder.bias = self.bias

    def forward(self, x):
        return self.decoder(self.transform(x))
@add_start_docstrings(
    """
    Vilt Model transformer with a classifier head on top (a linear layer on top of the final hidden state of the [CLS]
    token) for visual question answering, e.g. for VQAv2.
    """,
    VILT_START_DOCSTRING,
)
class ViltForQuestionAnswering(ViltPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.vilt = ViltModel(config)
        # Classifier head
        # Two-layer MLP over the pooled [CLS] state, widened to 2x hidden size in between.
        self.classifier = nn.Sequential(
            nn.Linear(config.hidden_size, config.hidden_size * 2),
            nn.LayerNorm(config.hidden_size * 2),
            nn.GELU(),
            nn.Linear(config.hidden_size * 2, config.num_labels),
        )
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(VILT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        pixel_values=None,
        pixel_mask=None,
        head_mask=None,
        inputs_embeds=None,
        image_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (`torch.FloatTensor` of shape `(batch_size, num_labels)`, *optional*):
            Labels for computing the visual question answering loss. This tensor must be either a one-hot encoding of
            all answers that are applicable for a given example in the batch, or a soft encoding indicating which
            answers are applicable, where 1.0 is the highest score.
        Returns:
        Examples:
        ```python
        >>> from transformers import ViltProcessor, ViltForQuestionAnswering
        >>> import requests
        >>> from PIL import Image
        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)
        >>> text = "How many cats are there?"
        >>> processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-finetuned-vqa")
        >>> model = ViltForQuestionAnswering.from_pretrained("dandelin/vilt-b32-finetuned-vqa")
        >>> # prepare inputs
        >>> encoding = processor(image, text, return_tensors="pt")
        >>> # forward pass
        >>> outputs = model(**encoding)
        >>> logits = outputs.logits
        >>> idx = logits.argmax(-1).item()
        >>> print("Predicted answer:", model.config.id2label[idx])
        Predicted answer: 2
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Run the multimodal backbone; all text/image arguments are forwarded unchanged.
        outputs = self.vilt(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            pixel_values=pixel_values,
            pixel_mask=pixel_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            image_embeds=image_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # With return_dict=False the backbone returns a tuple whose second entry is the pooled output.
        pooler_output = outputs.pooler_output if return_dict else outputs[1]
        logits = self.classifier(pooler_output)
        loss = None
        if labels is not None:
            # Multi-label objective: per-answer binary cross-entropy, rescaled by the
            # number of answer classes (matches the reference implementation linked below).
            loss = nn.functional.binary_cross_entropy_with_logits(logits, labels) * labels.shape[1]
            # see https://github.com/jnhwkim/ban-vqa/blob/master/train.py#L19
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Vilt Model transformer with a classifier head on top (a linear layer on top of the final hidden state of the [CLS]
    token) for image-to-text or text-to-image retrieval, e.g. MSCOCO and F30K.
    """,
    VILT_START_DOCSTRING,
)
class ViltForImageAndTextRetrieval(ViltPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.vilt = ViltModel(config)
        # Classifier head
        # A single scalar matching score per (image, text) pair.
        self.rank_output = nn.Linear(config.hidden_size, 1)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(VILT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        pixel_values=None,
        pixel_mask=None,
        head_mask=None,
        inputs_embeds=None,
        image_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels are currently not supported.
        Returns:
        Examples:
        ```python
        >>> from transformers import ViltProcessor, ViltForImageAndTextRetrieval
        >>> import requests
        >>> from PIL import Image
        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)
        >>> texts = ["An image of two cats chilling on a couch", "A football player scoring a goal"]
        >>> processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-finetuned-coco")
        >>> model = ViltForImageAndTextRetrieval.from_pretrained("dandelin/vilt-b32-finetuned-coco")
        >>> # forward pass
        >>> scores = dict()
        >>> for text in texts:
        ...     # prepare inputs
        ...     encoding = processor(image, text, return_tensors="pt")
        ...     outputs = model(**encoding)
        ...     scores[text] = outputs.logits[0, :].item()
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Run the multimodal backbone; all text/image arguments are forwarded unchanged.
        outputs = self.vilt(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            pixel_values=pixel_values,
            pixel_mask=pixel_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            image_embeds=image_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # With return_dict=False the backbone returns a tuple whose second entry is the pooled output.
        pooler_output = outputs.pooler_output if return_dict else outputs[1]
        logits = self.rank_output(pooler_output)
        loss = None
        if labels is not None:
            # Supplying labels is rejected: training this head is not implemented in this class.
            raise NotImplementedError("Training is not yet supported.")
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Vilt Model transformer with a classifier head on top for natural language visual reasoning, e.g. NLVR2.
    """,
    VILT_IMAGES_AND_TEXT_CLASSIFICATION_INPUTS_DOCSTRING,
)
class ViltForImagesAndTextClassification(ViltPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.vilt = ViltModel(config)
        # Classifier head
        # The head consumes the concatenation of one pooled state per image,
        # hence the `hidden_size * num_images` input width.
        num_images = config.num_images
        self.classifier = nn.Sequential(
            nn.Linear(config.hidden_size * num_images, config.hidden_size * num_images),
            nn.LayerNorm(config.hidden_size * num_images),
            nn.GELU(),
            nn.Linear(config.hidden_size * num_images, config.num_labels),
        )
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(VILT_INPUTS_DOCSTRING)
    @replace_return_docstrings(output_type=ViltForImagesAndTextClassificationOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        pixel_values=None,
        pixel_mask=None,
        head_mask=None,
        inputs_embeds=None,
        image_embeds=None,
        labels=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Binary classification labels.
        Returns:
        Examples:
        ```python
        >>> from transformers import ViltProcessor, ViltForImagesAndTextClassification
        >>> import requests
        >>> from PIL import Image
        >>> image1 = Image.open(requests.get("https://lil.nlp.cornell.edu/nlvr/exs/ex0_0.jpg", stream=True).raw)
        >>> image2 = Image.open(requests.get("https://lil.nlp.cornell.edu/nlvr/exs/ex0_1.jpg", stream=True).raw)
        >>> text = "The left image contains twice the number of dogs as the right image."
        >>> processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-finetuned-nlvr2")
        >>> model = ViltForImagesAndTextClassification.from_pretrained("dandelin/vilt-b32-finetuned-nlvr2")
        >>> # prepare inputs
        >>> encoding = processor([image1, image2], text, return_tensors="pt")
        >>> # forward pass
        >>> outputs = model(input_ids=encoding.input_ids, pixel_values=encoding.pixel_values.unsqueeze(0))
        >>> logits = outputs.logits
        >>> idx = logits.argmax(-1).item()
        >>> print("Predicted answer:", model.config.id2label[idx])
        Predicted answer: True
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Accept both (batch, C, H, W) and (batch, num_images, C, H, W) inputs.
        if pixel_values.ndim == 4:
            # add dummy num_images dimension
            pixel_values = pixel_values.unsqueeze(1)
        num_images = pixel_values.shape[1]
        if num_images != self.config.num_images:
            raise ValueError(
                "Make sure to match the number of images in the model with the number of images in the input."
            )
        pooler_outputs = []
        hidden_states = [] if output_hidden_states else None
        attentions = [] if output_attentions else None
        # Run the backbone once per image; `image_token_type_idx=i + 1` gives each
        # image a distinct token-type embedding.
        for i in range(num_images):
            # forward every image through the model
            outputs = self.vilt(
                input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
                pixel_values=pixel_values[:, i, :, :, :],
                pixel_mask=pixel_mask[:, i, :, :] if pixel_mask is not None else None,
                head_mask=head_mask,
                inputs_embeds=inputs_embeds,
                image_embeds=image_embeds,
                image_token_type_idx=i + 1,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
                return_dict=return_dict,
            )
            # With return_dict=False the backbone returns a tuple whose second entry is the pooled output.
            pooler_output = outputs.pooler_output if return_dict else outputs[1]
            pooler_outputs.append(pooler_output)
            if output_hidden_states:
                hidden_states.append(outputs.hidden_states)
            if output_attentions:
                attentions.append(outputs.attentions)
        # Concatenate the per-image pooled states so the classifier sees all images jointly.
        pooled_output = torch.cat(pooler_outputs, dim=-1)
        logits = self.classifier(pooled_output)
        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
        if not return_dict:
            output = (logits, hidden_states, attentions)
            return ((loss,) + output) if loss is not None else output
        return ViltForImagesAndTextClassificationOutput(
            loss=loss,
            logits=logits,
            hidden_states=hidden_states,
            attentions=attentions,
        )
|
[
"torch.nn.Dropout",
"torch.nn.Embedding",
"torch.cat",
"torch.nn.Softmax",
"torch.arange",
"torch.ones",
"torch.nn.functional.binary_cross_entropy_with_logits",
"torch.nn.LayerNorm",
"torch.nn.Linear",
"torch.zeros",
"torch.matmul",
"math.sqrt",
"torch.zeros_like",
"torch.nn.Tanh",
"packaging.version.parse",
"torch.nn.Conv2d",
"torch.nn.GELU",
"torch.full_like",
"torch.nn.CrossEntropyLoss",
"torch.nn.functional.interpolate"
] |
[((4461, 4526), 'torch.nn.Embedding', 'nn.Embedding', (['config.modality_type_vocab_size', 'config.hidden_size'], {}), '(config.modality_type_vocab_size, config.hidden_size)\n', (4473, 4526), False, 'from torch import nn\n'), ((4550, 4588), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (4560, 4588), False, 'from torch import nn\n'), ((7991, 8015), 'torch.cat', 'torch.cat', (['select'], {'dim': '(0)'}), '(select, dim=0)\n', (8000, 8015), False, 'import torch\n'), ((8421, 8454), 'torch.cat', 'torch.cat', (['(cls_tokens, x)'], {'dim': '(1)'}), '((cls_tokens, x), dim=1)\n', (8430, 8454), False, 'import torch\n'), ((10184, 10229), 'torch.cat', 'torch.cat', (['[text_embeds, image_embeds]'], {'dim': '(1)'}), '([text_embeds, image_embeds], dim=1)\n', (10193, 10229), False, 'import torch\n'), ((10246, 10293), 'torch.cat', 'torch.cat', (['[attention_mask, image_masks]'], {'dim': '(1)'}), '([attention_mask, image_masks], dim=1)\n', (10255, 10293), False, 'import torch\n'), ((10536, 10625), 'torch.nn.Embedding', 'nn.Embedding', (['config.vocab_size', 'config.hidden_size'], {'padding_idx': 'config.pad_token_id'}), '(config.vocab_size, config.hidden_size, padding_idx=config.\n pad_token_id)\n', (10548, 10625), False, 'from torch import nn\n'), ((10656, 10720), 'torch.nn.Embedding', 'nn.Embedding', (['config.max_position_embeddings', 'config.hidden_size'], {}), '(config.max_position_embeddings, config.hidden_size)\n', (10668, 10720), False, 'from torch import nn\n'), ((10758, 10814), 'torch.nn.Embedding', 'nn.Embedding', (['config.type_vocab_size', 'config.hidden_size'], {}), '(config.type_vocab_size, config.hidden_size)\n', (10770, 10814), False, 'from torch import nn\n'), ((10991, 11050), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (11003, 11050), False, 'from torch import nn\n'), ((11074, 11112), 
'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (11084, 11112), False, 'from torch import nn\n'), ((13972, 14049), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_channels', 'embed_dim'], {'kernel_size': 'patch_size', 'stride': 'patch_size'}), '(num_channels, embed_dim, kernel_size=patch_size, stride=patch_size)\n', (13981, 14049), False, 'from torch import nn\n'), ((14876, 14947), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {'bias': 'config.qkv_bias'}), '(config.hidden_size, self.all_head_size, bias=config.qkv_bias)\n', (14885, 14947), False, 'from torch import nn\n'), ((14967, 15038), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {'bias': 'config.qkv_bias'}), '(config.hidden_size, self.all_head_size, bias=config.qkv_bias)\n', (14976, 15038), False, 'from torch import nn\n'), ((15060, 15131), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'self.all_head_size'], {'bias': 'config.qkv_bias'}), '(config.hidden_size, self.all_head_size, bias=config.qkv_bias)\n', (15069, 15131), False, 'from torch import nn\n'), ((15156, 15203), 'torch.nn.Dropout', 'nn.Dropout', (['config.attention_probs_dropout_prob'], {}), '(config.attention_probs_dropout_prob)\n', (15166, 15203), False, 'from torch import nn\n'), ((16729, 16771), 'torch.matmul', 'torch.matmul', (['attention_probs', 'value_layer'], {}), '(attention_probs, value_layer)\n', (16741, 16771), False, 'import torch\n'), ((17484, 17533), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (17493, 17533), False, 'from torch import nn\n'), ((17557, 17595), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (17567, 17595), False, 'from torch import nn\n'), ((19512, 19567), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.intermediate_size'], {}), 
'(config.hidden_size, config.intermediate_size)\n', (19521, 19567), False, 'from torch import nn\n'), ((20122, 20177), 'torch.nn.Linear', 'nn.Linear', (['config.intermediate_size', 'config.hidden_size'], {}), '(config.intermediate_size, config.hidden_size)\n', (20131, 20177), False, 'from torch import nn\n'), ((20201, 20239), 'torch.nn.Dropout', 'nn.Dropout', (['config.hidden_dropout_prob'], {}), '(config.hidden_dropout_prob)\n', (20211, 20239), False, 'from torch import nn\n'), ((20915, 20974), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (20927, 20974), False, 'from torch import nn\n'), ((21006, 21065), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (21018, 21065), False, 'from torch import nn\n'), ((33138, 33197), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (33150, 33197), False, 'from torch import nn\n'), ((38513, 38562), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (38522, 38562), False, 'from torch import nn\n'), ((38589, 38598), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (38596, 38598), False, 'from torch import nn\n'), ((44324, 44373), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.hidden_size'], {}), '(config.hidden_size, config.hidden_size)\n', (44333, 44373), False, 'from torch import nn\n'), ((44576, 44635), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.hidden_size'], {'eps': 'config.layer_norm_eps'}), '(config.hidden_size, eps=config.layer_norm_eps)\n', (44588, 44635), False, 'from torch import nn\n'), ((45086, 45146), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.vocab_size'], {'bias': '(False)'}), 
'(config.hidden_size, config.vocab_size, bias=False)\n', (45095, 45146), False, 'from torch import nn\n'), ((49959, 49991), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', '(1)'], {}), '(config.hidden_size, 1)\n', (49968, 49991), False, 'from torch import nn\n'), ((57491, 57524), 'torch.cat', 'torch.cat', (['pooler_outputs'], {'dim': '(-1)'}), '(pooler_outputs, dim=-1)\n', (57500, 57524), False, 'import torch\n'), ((3949, 3986), 'torch.zeros', 'torch.zeros', (['(1)', '(1)', 'config.hidden_size'], {}), '(1, 1, config.hidden_size)\n', (3960, 3986), False, 'import torch\n'), ((4323, 4374), 'torch.zeros', 'torch.zeros', (['(1)', '(num_patches + 1)', 'config.hidden_size'], {}), '(1, num_patches + 1, config.hidden_size)\n', (4334, 4374), False, 'import torch\n'), ((11423, 11455), 'packaging.version.parse', 'version.parse', (['torch.__version__'], {}), '(torch.__version__)\n', (11436, 11455), False, 'from packaging import version\n'), ((11458, 11480), 'packaging.version.parse', 'version.parse', (['"""1.6.0"""'], {}), "('1.6.0')\n", (11471, 11480), False, 'from packaging import version\n'), ((15993, 16028), 'math.sqrt', 'math.sqrt', (['self.attention_head_size'], {}), '(self.attention_head_size)\n', (16002, 16028), False, 'import math\n'), ((16322, 16340), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (16332, 16340), False, 'from torch import nn\n'), ((36168, 36219), 'torch.ones', 'torch.ones', (['(batch_size, seq_length)'], {'device': 'device'}), '((batch_size, seq_length), device=device)\n', (36178, 36219), False, 'import torch\n'), ((36447, 36501), 'torch.ones', 'torch.ones', (['(batch_size, height, width)'], {'device': 'device'}), '((batch_size, height, width), device=device)\n', (36457, 36501), False, 'import torch\n'), ((43681, 43699), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ([], {}), '()\n', (43697, 43699), False, 'from torch.nn import CrossEntropyLoss\n'), ((45180, 45210), 'torch.zeros', 'torch.zeros', 
(['config.vocab_size'], {}), '(config.vocab_size)\n', (45191, 45210), False, 'import torch\n'), ((46070, 46123), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', '(config.hidden_size * 2)'], {}), '(config.hidden_size, config.hidden_size * 2)\n', (46079, 46123), False, 'from torch import nn\n'), ((46137, 46173), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['(config.hidden_size * 2)'], {}), '(config.hidden_size * 2)\n', (46149, 46173), False, 'from torch import nn\n'), ((46187, 46196), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (46194, 46196), False, 'from torch import nn\n'), ((46210, 46262), 'torch.nn.Linear', 'nn.Linear', (['(config.hidden_size * 2)', 'config.num_labels'], {}), '(config.hidden_size * 2, config.num_labels)\n', (46219, 46262), False, 'from torch import nn\n'), ((53385, 53460), 'torch.nn.Linear', 'nn.Linear', (['(config.hidden_size * num_images)', '(config.hidden_size * num_images)'], {}), '(config.hidden_size * num_images, config.hidden_size * num_images)\n', (53394, 53460), False, 'from torch import nn\n'), ((53474, 53519), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['(config.hidden_size * num_images)'], {}), '(config.hidden_size * num_images)\n', (53486, 53519), False, 'from torch import nn\n'), ((53533, 53542), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (53540, 53542), False, 'from torch import nn\n'), ((53556, 53617), 'torch.nn.Linear', 'nn.Linear', (['(config.hidden_size * num_images)', 'config.num_labels'], {}), '(config.hidden_size * num_images, config.num_labels)\n', (53565, 53617), False, 'from torch import nn\n'), ((57648, 57666), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ([], {}), '()\n', (57664, 57666), False, 'from torch.nn import CrossEntropyLoss\n'), ((4881, 4945), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['x_mask'], {'size': '(x.shape[2], x.shape[3])'}), '(x_mask, size=(x.shape[2], x.shape[3]))\n', (4906, 4945), False, 'from torch import nn\n'), ((5911, 5941), 'torch.arange', 'torch.arange', 
(['x_mask.shape[-2]'], {}), '(x_mask.shape[-2])\n', (5923, 5941), False, 'import torch\n'), ((5943, 5973), 'torch.arange', 'torch.arange', (['x_mask.shape[-1]'], {}), '(x_mask.shape[-1])\n', (5955, 5973), False, 'import torch\n'), ((9860, 9937), 'torch.zeros_like', 'torch.zeros_like', (['attention_mask'], {'dtype': 'torch.long', 'device': 'text_embeds.device'}), '(attention_mask, dtype=torch.long, device=text_embeds.device)\n', (9876, 9937), False, 'import torch\n'), ((10026, 10126), 'torch.full_like', 'torch.full_like', (['image_masks', 'image_token_type_idx'], {'dtype': 'torch.long', 'device': 'text_embeds.device'}), '(image_masks, image_token_type_idx, dtype=torch.long, device\n =text_embeds.device)\n', (10041, 10126), False, 'import torch\n'), ((12705, 12780), 'torch.zeros', 'torch.zeros', (['input_shape'], {'dtype': 'torch.long', 'device': 'self.position_ids.device'}), '(input_shape, dtype=torch.long, device=self.position_ids.device)\n', (12716, 12780), False, 'import torch\n'), ((48979, 49041), 'torch.nn.functional.binary_cross_entropy_with_logits', 'nn.functional.binary_cross_entropy_with_logits', (['logits', 'labels'], {}), '(logits, labels)\n', (49025, 49041), False, 'from torch import nn\n'), ((5386, 5478), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['spatial_pos'], {'size': '(h, w)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(spatial_pos, size=(h, w), mode='bilinear',\n align_corners=True)\n", (5411, 5478), False, 'from torch import nn\n'), ((7901, 7971), 'torch.cat', 'torch.cat', (['[valid_row_idx[i], non_valid_row_idx[i][pad_choice]]'], {'dim': '(0)'}), '([valid_row_idx[i], non_valid_row_idx[i][pad_choice]], dim=0)\n', (7910, 7971), False, 'import torch\n'), ((11350, 11394), 'torch.arange', 'torch.arange', (['config.max_position_embeddings'], {}), '(config.max_position_embeddings)\n', (11362, 11394), False, 'import torch\n'), ((8684, 8714), 'torch.ones', 'torch.ones', (['x_mask.shape[0]', '(1)'], {}), 
'(x_mask.shape[0], 1)\n', (8694, 8714), False, 'import torch\n'), ((7658, 7671), 'torch.ones', 'torch.ones', (['v'], {}), '(v)\n', (7668, 7671), False, 'import torch\n'), ((7826, 7840), 'torch.ones', 'torch.ones', (['nv'], {}), '(nv)\n', (7836, 7840), False, 'import torch\n')]
|
"""
This module implements the Resource classes that translate JSON from Jira REST resources
into usable objects.
"""
import json
import logging
import re
import time
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast
from requests import Response
from requests.structures import CaseInsensitiveDict
from jira.resilientsession import ResilientSession
from jira.utils import json_loads, threaded_requests
# At type-check time `AnyLike` aliases `Any` (so subclasses may carry arbitrary
# attributes); at runtime it is a plain empty base class.
if TYPE_CHECKING:
    from jira.client import JIRA
    AnyLike = Any
else:
    class AnyLike:
        """Dummy subclass of base object class for when type checker is not running."""
        pass
# Public names exported by ``from jira.resources import *``.
__all__ = (
    "Resource",
    "Issue",
    "Comment",
    "Project",
    "Attachment",
    "Component",
    "Dashboard",
    "Filter",
    "Votes",
    "PermissionScheme",
    "Watchers",
    "Worklog",
    "IssueLink",
    "IssueLinkType",
    "IssueType",
    "Priority",
    "Version",
    "Role",
    "Resolution",
    "SecurityLevel",
    "Status",
    "User",
    "Group",
    "CustomFieldOption",
    "RemoteLink",
    "Customer",
    "ServiceDesk",
    "RequestType",
    "resource_class_map",
)
# Attach a no-op handler so the library stays silent unless the host
# application configures logging itself.
logging.getLogger("jira").addHandler(logging.NullHandler())
def get_error_list(r: "Response") -> List[str]:
    """Extract human-readable error messages from a Jira HTTP error response.

    Handles the different shapes Jira has used over the years: the
    authentication-denied header, Jira 5.1 ``message``, Jira 5.0.x
    ``errorMessages`` and Jira 6.x ``errors``.

    Args:
        r (Response): The HTTP response from the server.

    Returns:
        List[str]: All error messages found; empty for non-error responses.
    """
    error_list: List[str] = []
    if r.status_code >= 400:
        if r.status_code == 403 and "x-authentication-denied-reason" in r.headers:
            error_list = [r.headers["x-authentication-denied-reason"]]
        elif r.text:
            try:
                response: Dict[str, Any] = json_loads(r)
                if "message" in response:
                    # Jira 5.1 errors
                    error_list = [response["message"]]
                elif "errorMessages" in response and len(response["errorMessages"]) > 0:
                    # Jira 5.0.x error messages sometimes come wrapped in this array
                    # Sometimes this is present but empty
                    errorMessages = response["errorMessages"]
                    if isinstance(errorMessages, (list, tuple)):
                        error_list = list(errorMessages)
                    else:
                        error_list = [errorMessages]
                elif "errors" in response and len(response["errors"]) > 0:
                    # Jira 6.x error messages are found in this array.
                    # Materialize the dict view so the declared List[str] return holds
                    # (a dict_values view is not indexable and would surprise callers).
                    error_list = list(response["errors"].values())
                else:
                    error_list = [r.text]
            except ValueError:
                # Body was not valid JSON (e.g. an HTML error page): fall back to raw text.
                error_list = [r.text]
    return error_list
class Resource:
"""Models a URL-addressable resource in the Jira REST API.
All Resource objects provide the following:
``find()`` -- get a resource from the server and load it into the current object
(though clients should use the methods in the JIRA class instead of this method directly)
``update()`` -- changes the value of this resource on the server and returns a new resource object for it
``delete()`` -- deletes this resource from the server
``self`` -- the URL of this resource on the server
``raw`` -- dict of properties parsed out of the JSON response from the server
Subclasses will implement ``update()`` and ``delete()`` as appropriate for the specific resource.
All Resources have a resource path of the form:
* ``issue``
* ``project/{0}``
* ``issue/{0}/votes``
* ``issue/{0}/comment/{1}``
where the bracketed numerals are placeholders for ID values that are filled in from the
``ids`` parameter to ``find()``.
"""
JIRA_BASE_URL = "{server}/rest/{rest_path}/{rest_api_version}/{path}"
# A prioritized list of the keys in self.raw most likely to contain a human
# readable name or identifier, or that offer other key information.
_READABLE_IDS = (
"displayName",
"key",
"name",
"accountId",
"filename",
"value",
"scope",
"votes",
"id",
"mimeType",
"closed",
)
# A list of properties that should uniquely identify a Resource object
# Each of these properties should be hashable, usually strings
_HASH_IDS = (
"self",
"type",
"key",
"id",
"name",
)
def __init__(
self,
resource: str,
options: Dict[str, Any],
session: ResilientSession,
base_url: str = JIRA_BASE_URL,
):
"""Initializes a generic resource.
Args:
resource (str): The name of the resource.
options (Dict[str,str]): Options for the new resource
session (ResilientSession): Session used for the resource.
base_url (Optional[str]): The Base Jira url.
"""
self._resource = resource
self._options = options
self._session = session
self._base_url = base_url
# Explicitly define as None so we know when a resource has actually
# been loaded
self.raw: Optional[Dict[str, Any]] = None
def __str__(self) -> str:
"""Return the first value we find that is likely to be human readable.
Returns:
str
"""
if self.raw:
for name in self._READABLE_IDS:
if name in self.raw:
pretty_name = str(self.raw[name])
# Include any child to support nested select fields.
if hasattr(self, "child"):
pretty_name += " - " + str(self.child)
return pretty_name
# If all else fails, use repr to make sure we get something.
return repr(self)
def __repr__(self) -> str:
"""Identify the class and include any and all relevant values.
Returns:
str
"""
names: List[str] = []
if self.raw:
for name in self._READABLE_IDS:
if name in self.raw:
names.append(name + "=" + repr(self.raw[name]))
if not names:
return f"<JIRA {self.__class__.__name__} at {id(self)}>"
return f"<JIRA {self.__class__.__name__}: {', '.join(names)}>"
def __getattr__(self, item: str) -> Any:
"""Allow access of attributes via names.
Args:
item (str): Attribute Name
Raises:
AttributeError: When attribute does not exist.
Returns:
Any: Attribute value.
"""
try:
return self[item] # type: ignore
except Exception as e:
if hasattr(self, "raw") and self.raw is not None and item in self.raw:
return self.raw[item]
else:
raise AttributeError(
f"{self.__class__!r} object has no attribute {item!r} ({e})"
)
def __getstate__(self) -> Dict[str, Any]:
"""Pickling the resource."""
return vars(self)
def __setstate__(self, raw_pickled: Dict[str, Any]):
"""Unpickling of the resource"""
# https://stackoverflow.com/a/50888571/7724187
vars(self).update(raw_pickled)
def __hash__(self) -> int:
"""Hash calculation.
We try to find unique identifier like properties
to form our hash object.
Technically 'self', if present, is the unique URL to the object,
and should be sufficient to generate a unique hash.
"""
hash_list = []
for a in self._HASH_IDS:
if hasattr(self, a):
hash_list.append(getattr(self, a))
if hash_list:
return hash(tuple(hash_list))
else:
raise TypeError(f"'{self.__class__}' is not hashable")
def __eq__(self, other: Any) -> bool:
"""Default equality test.
Checks the types look about right and that the relevant
attributes that uniquely identify a resource are equal.
"""
return isinstance(other, self.__class__) and all(
[
getattr(self, a) == getattr(other, a)
for a in self._HASH_IDS
if hasattr(self, a)
]
)
def find(
self,
id: Union[Tuple[str, str], int, str],
params: Optional[Dict[str, str]] = None,
):
"""Finds a resource based on the input parameters.
Args:
id (Union[Tuple[str, str], int, str]): id
params (Optional[Dict[str, str]]): params
"""
if params is None:
params = {}
if isinstance(id, tuple):
path = self._resource.format(*id)
else:
path = self._resource.format(id)
url = self._get_url(path)
self._load(url, params=params)
def _get_url(self, path: str) -> str:
"""Gets the url for the specified path.
Args:
path (str): str
Returns:
str
"""
options = self._options.copy()
options.update({"path": path})
return self._base_url.format(**options)
def update(
self,
fields: Optional[Dict[str, Any]] = None,
async_: Optional[bool] = None,
jira: "JIRA" = None,
notify: bool = True,
**kwargs: Any,
):
"""Update this resource on the server.
Keyword arguments are marshalled into a dict before being sent. If this
resource doesn't support ``PUT``, a :py:exc:`.JIRAError` will be raised; subclasses that specialize this method
will only raise errors in case of user error.
Args:
fields (Optional[Dict[str, Any]]): Fields which should be updated for the object.
async_ (bool): If true the request will be added to the queue so it can be executed later using async_run()
jira (jira.client.JIRA): Instance of Jira Client
notify (bool): Whether or not to notify users about the update. (Default: True)
kwargs (Any): extra arguments to the PUT request.
"""
if async_ is None:
async_: bool = self._options["async"] # type: ignore # redefinition
data = {}
if fields is not None:
data.update(fields)
data.update(kwargs)
if not notify:
querystring = "?notifyUsers=false"
else:
querystring = ""
r = self._session.put(self.self + querystring, data=json.dumps(data))
if "autofix" in self._options and r.status_code == 400:
user = None
error_list = get_error_list(r)
logging.error(error_list)
if "The reporter specified is not a user." in error_list:
if "reporter" not in data["fields"]:
logging.warning(
"autofix: setting reporter to '%s' and retrying the update."
% self._options["autofix"]
)
data["fields"]["reporter"] = {"name": self._options["autofix"]}
if "Issues must be assigned." in error_list:
if "assignee" not in data["fields"]:
logging.warning(
"autofix: setting assignee to '%s' for %s and retrying the update."
% (self._options["autofix"], self.key)
)
data["fields"]["assignee"] = {"name": self._options["autofix"]}
# for some reason the above approach fails on Jira 5.2.11
# so we need to change the assignee before
if (
"Issue type is a sub-task but parent issue key or id not specified."
in error_list
):
logging.warning(
"autofix: trying to fix sub-task without parent by converting to it to bug"
)
data["fields"]["issuetype"] = {"name": "Bug"}
if (
"The summary is invalid because it contains newline characters."
in error_list
):
logging.warning("autofix: trying to fix newline in summary")
data["fields"]["summary"] = self.fields.summary.replace("/n", "")
for error in error_list:
if re.search(
r"^User '(.*)' was not found in the system\.", error, re.U
):
m = re.search(
r"^User '(.*)' was not found in the system\.", error, re.U
)
if m:
user = m.groups()[0]
else:
raise NotImplementedError()
if re.search(r"^User '(.*)' does not exist\.", error):
m = re.search(r"^User '(.*)' does not exist\.", error)
if m:
user = m.groups()[0]
else:
raise NotImplementedError()
if user and jira:
logging.warning(
"Trying to add missing orphan user '%s' in order to complete the previous failed operation."
% user
)
jira.add_user(user, "<EMAIL>", 10100, active=False)
# if 'assignee' not in data['fields']:
# logging.warning("autofix: setting assignee to '%s' and retrying the update." % self._options['autofix'])
# data['fields']['assignee'] = {'name': self._options['autofix']}
# EXPERIMENTAL --->
if async_: # FIXME: no async
if not hasattr(self._session, "_async_jobs"):
self._session._async_jobs = set() # type: ignore
self._session._async_jobs.add( # type: ignore
threaded_requests.put( # type: ignore
self.self, data=json.dumps(data)
)
)
else:
r = self._session.put(self.self, data=json.dumps(data))
time.sleep(self._options["delay_reload"])
self._load(self.self)
def delete(self, params: Optional[Dict[str, Any]] = None) -> Optional[Response]:
"""Delete this resource from the server, passing the specified query parameters.
If this resource doesn't support ``DELETE``, a :py:exc:`.JIRAError`
will be raised; subclasses that specialize this method will only raise errors
in case of user error.
Args:
params: Parameters for the delete request.
Returns:
Optional[Response]: Returns None if async
"""
if self._options["async"]:
# FIXME: mypy doesn't think this should work
if not hasattr(self._session, "_async_jobs"):
self._session._async_jobs = set() # type: ignore
self._session._async_jobs.add( # type: ignore
threaded_requests.delete(url=self.self, params=params) # type: ignore
)
return None
else:
return self._session.delete(url=self.self, params=params)
def _load(
self,
url: str,
headers=CaseInsensitiveDict(),
params: Optional[Dict[str, str]] = None,
path: Optional[str] = None,
):
"""Load a resource.
Args:
url (str): url
headers (Optional[CaseInsensitiveDict]): headers. Defaults to CaseInsensitiveDict().
params (Optional[Dict[str,str]]): params to get request. Defaults to None.
path (Optional[str]): field to get. Defaults to None.
Raises:
ValueError: If json cannot be loaded
"""
r = self._session.get(url, headers=headers, params=params)
try:
j = json_loads(r)
except ValueError as e:
logging.error(f"{e}:\n{r.text}")
raise e
if path:
j = j[path]
self._parse_raw(j)
    def _parse_raw(self, raw: Dict[str, Any]):
        """Parse a raw dictionary to create a resource.

        Args:
            raw (Dict[str, Any]): raw server payload; must be non-empty.

        Raises:
            NotImplementedError: if ``raw`` is empty/falsy.
        """
        # Store the payload first so ``self.raw`` is set even if we raise below.
        self.raw = raw
        if not raw:
            raise NotImplementedError(f"We cannot instantiate empty resources: {raw}")
        # Recursively attach the payload's keys as attributes on this instance.
        dict2resource(raw, self, self._options, self._session)
def _default_headers(self, user_headers):
# result = dict(user_headers)
# result['accept'] = 'application/json'
return CaseInsensitiveDict(
self._options["headers"].items() + user_headers.items()
)
class Attachment(Resource):
    """An issue attachment."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("attachment/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def get(self):
        """Return the file content as a string."""
        # Accept any content type — attachments can be arbitrary files.
        response = self._session.get(self.content, headers={"Accept": "*/*"})
        return response.content

    def iter_content(self, chunk_size=1024):
        """Return the file content as an iterable stream."""
        response = self._session.get(self.content, stream=True)
        return response.iter_content(chunk_size)
class Component(Resource):
    """A project component."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("component/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def delete(self, moveIssuesTo: Optional[str] = None):  # type: ignore[override]
        """Delete this component from the server.

        Args:
            moveIssuesTo: the name of the component to which to move any issues this component is applied
        """
        # Only send the parameter when a target component was supplied.
        params = {} if moveIssuesTo is None else {"moveIssuesTo": moveIssuesTo}
        super().delete(params)
class CustomFieldOption(Resource):
    """An existing option for a custom issue field."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("customFieldOption/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Dashboard(Resource):
    """A Jira dashboard."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("dashboard/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Filter(Resource):
    """An issue navigator filter."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("filter/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Issue(Resource):
    """A Jira issue."""
    class _IssueFields(AnyLike):
        # Typed view of the server's "fields" object; only attributes present
        # in the payload are actually populated at runtime.
        class _Comment:
            def __init__(self) -> None:
                self.comments: List[Comment] = []
        class _Worklog:
            def __init__(self) -> None:
                self.worklogs: List[Worklog] = []
        def __init__(self):
            self.assignee: Optional[UnknownResource] = None
            self.attachment: List[Attachment] = []
            self.comment = self._Comment()
            self.created: str
            self.description: Optional[str] = None
            self.duedate: Optional[str] = None
            self.issuelinks: List[IssueLink] = []
            self.issuetype: IssueType
            self.labels: List[str] = []
            self.priority: Priority
            self.project: Project
            self.reporter: UnknownResource
            self.resolution: Optional[Resolution] = None
            self.security: Optional[SecurityLevel] = None
            self.status: Status
            self.statuscategorychangedate: Optional[str] = None
            self.summary: str
            self.timetracking: TimeTracking
            self.versions: List[Version] = []
            self.votes: Votes
            self.watchers: Watchers
            self.worklog = self._Worklog()
    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        """Bind an Issue to the ``issue/{id}`` endpoint and parse ``raw`` if given."""
        Resource.__init__(self, "issue/{0}", options, session)
        self.fields: Issue._IssueFields
        self.id: str
        self.key: str
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
    def update(  # type: ignore[override] # incompatible supertype ignored
        self,
        fields: Dict[str, Any] = None,
        update: Dict[str, Any] = None,
        async_: bool = None,
        jira: "JIRA" = None,
        notify: bool = True,
        **fieldargs,
    ):
        """Update this issue on the server.
        Each keyword argument (other than the predefined ones) is treated as a field name and the argument's value
        is treated as the intended value for that field -- if the fields argument is used, all other keyword arguments
        will be ignored.
        Jira projects may contain many different issue types. Some issue screens have different requirements for
        fields in an issue. This information is available through the :py:meth:`.JIRA.editmeta` method. Further examples
        are available here: https://developer.atlassian.com/display/JIRADEV/JIRA+REST+API+Example+-+Edit+issues
        Args:
            fields (Dict[str,Any]): a dict containing field names and the values to use
            update (Dict[str,Any]): a dict containing update operations to apply
            notify (bool): query parameter notifyUsers. If true send the email with notification that the issue was updated
                to users that watch it. Admin or project admin permissions are required to disable the notification.
            jira (Optional[jira.client.JIRA]): JIRA instance.
            fieldargs (dict): keyword arguments will generally be merged into fields, except lists,
                which will be merged into updates
        """
        # Build the REST payload: {"fields": {...}, "update": {...}}.
        data = {}
        if fields is not None:
            fields_dict = fields
        else:
            fields_dict = {}
        data["fields"] = fields_dict
        if update is not None:
            update_dict = update
        else:
            update_dict = {}
        data["update"] = update_dict
        # sorted() gives a deterministic merge order for the extra kwargs.
        for field in sorted(fieldargs.keys()):
            value = fieldargs[field]
            # apply some heuristics to make certain changes easier
            if isinstance(value, str):
                if field == "assignee" or field == "reporter":
                    # Users must be wrapped as {"name": ...} objects.
                    fields_dict[field] = {"name": value}
                elif field == "comment":
                    # Comments become "add" operations under "update".
                    if "comment" not in update_dict:
                        update_dict["comment"] = []
                    update_dict["comment"].append({"add": {"body": value}})
                else:
                    fields_dict[field] = value
            elif isinstance(value, list):
                # Lists are merged as update operations, not plain field values.
                if field not in update_dict:
                    update_dict[field] = []
                update_dict[field].extend(value)
            else:
                fields_dict[field] = value
        super().update(async_=async_, jira=jira, notify=notify, fields=data)
    def add_field_value(self, field: str, value: str):
        """Add a value to a field that supports multiple values, without resetting the existing values.
        This should work with: labels, multiple checkbox lists, multiple select
        Args:
            field (str): The field name
            value (str): The field's value
        """
        super().update(fields={"update": {field: [{"add": value}]}})
    def delete(self, deleteSubtasks=False):
        """Delete this issue from the server.
        Args:
            deleteSubtasks (bool): if the issue has subtasks, this argument must be set to true for the call to succeed.
        """
        super().delete(params={"deleteSubtasks": deleteSubtasks})
    def permalink(self):
        """Get the URL of the issue, the browsable one not the REST one.
        Returns:
            str: URL of the issue
        """
        return f"{self._options['server']}/browse/{self.key}"
class Comment(Resource):
    """An issue comment."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issue/{0}/comment/{1}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def update(self, fields=None, async_=None, jira=None, body="", visibility=None):
        """Update a comment"""
        # Include only the fields that were actually provided (truthy).
        data: Dict[str, Any] = {}
        for key, value in (("body", body), ("visibility", visibility)):
            if value:
                data[key] = value
        super().update(data)
class RemoteLink(Resource):
    """A link to a remote application from an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issue/{0}/remotelink/{1}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def update(self, object, globalId=None, application=None, relationship=None):
        """Update a RemoteLink. 'object' is required.

        For definitions of the allowable fields for 'object' and the keyword arguments 'globalId', 'application' and
        'relationship', see https://developer.atlassian.com/display/JIRADEV/JIRA+REST+API+for+Remote+Issue+Links.

        Args:
            object: the link details to add (see the above link for details)
            globalId: unique ID for the link (see the above link for details)
            application: application information for the link (see the above link for details)
            relationship: relationship description for the link (see the above link for details)
        """
        data = {"object": object}
        # Attach only the optional attributes that were explicitly supplied.
        optional = {
            "globalId": globalId,
            "application": application,
            "relationship": relationship,
        }
        for key, value in optional.items():
            if value is not None:
                data[key] = value
        super().update(**data)
class Votes(Resource):
    """Vote information on an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issue/{0}/votes", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class PermissionScheme(Resource):
    """Permissionscheme information on an project."""
    # Annotations added to match the sibling Resource subclasses.
    def __init__(self, options: Dict[str, str], session: ResilientSession, raw: Dict[str, Any] = None):
        # The endpoint path requests user expansion (?expand=user) inline.
        Resource.__init__(
            self, "project/{0}/permissionscheme?expand=user", options, session
        )
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Watchers(Resource):
    """Watcher information on an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issue/{0}/watchers", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def delete(self, username):
        """Remove the specified user from the watchers list."""
        super().delete(params={"username": username})
class TimeTracking(Resource):
    # NOTE(review): no docstring upstream. Wraps an issue's time-tracking info.
    # The endpoint template below is identical to Worklog's
    # ("issue/{0}/worklog/{1}") — looks copy-pasted; confirm against the Jira
    # REST API before relying on self-link navigation for this resource.
    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        Resource.__init__(self, "issue/{0}/worklog/{1}", options, session)
        # Default until the server payload (if any) provides a value.
        self.remainingEstimate = None
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Worklog(Resource):
    """Worklog on an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issue/{0}/worklog/{1}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def delete(  # type: ignore[override]
        self, adjustEstimate: Optional[str] = None, newEstimate=None, increaseBy=None
    ):
        """Delete this worklog entry from its associated issue.

        Args:
            adjustEstimate: one of ``new``, ``leave``, ``manual`` or ``auto``.
                ``auto`` is the default and adjusts the estimate automatically.
                ``leave`` leaves the estimate unchanged by this deletion.
            newEstimate: combined with ``adjustEstimate=new``, set the estimate to this value
            increaseBy: combined with ``adjustEstimate=manual``, increase the remaining estimate by this amount
        """
        # Send only the query parameters that were actually supplied.
        params = {
            key: value
            for key, value in (
                ("adjustEstimate", adjustEstimate),
                ("newEstimate", newEstimate),
                ("increaseBy", increaseBy),
            )
            if value is not None
        }
        super().delete(params)
class IssueLink(Resource):
    """Link between two issues."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issueLink/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class IssueLinkType(Resource):
    """Type of link between two issues."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issueLinkType/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class IssueType(Resource):
    """Type of an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("issuetype/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Priority(Resource):
    """Priority that can be set on an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("priority/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Project(Resource):
    """A Jira project."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("project/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Role(Resource):
    """A role inside a project."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("project/{0}/role/{1}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def update(  # type: ignore[override]
        self,
        users: Union[str, List, Tuple] = None,
        groups: Union[str, List, Tuple] = None,
    ):
        """Add the specified users or groups to this project role. One of ``users`` or ``groups`` must be specified.

        Args:
            users (Optional[Union[str,List,Tuple]]): a user or users to add to the role
            groups (Optional[Union[str,List,Tuple]]): a group or groups to add to the role
        """
        # Normalize bare strings into one-element tuples.
        if isinstance(users, str):
            users = (users,)
        if isinstance(groups, str):
            groups = (groups,)
        super().update(
            id=self.id,
            categorisedActors={
                "atlassian-user-role-actor": users,
                "atlassian-group-role-actor": groups,
            },
        )

    def add_user(
        self,
        users: Union[str, List, Tuple] = None,
        groups: Union[str, List, Tuple] = None,
    ):
        """Add the specified users or groups to this project role.

        One of ``users`` or ``groups`` must be specified.

        Args:
            users (Optional[Union[str,List,Tuple]]): a user or users to add to the role
            groups (Optional[Union[str,List,Tuple]]): a group or groups to add to the role
        """
        if isinstance(users, str):
            users = (users,)
        if isinstance(groups, str):
            groups = (groups,)
        payload = {"user": users, "group": groups}
        self._session.post(self.self, data=json.dumps(payload))
class Resolution(Resource):
    """A resolution for an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("resolution/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class SecurityLevel(Resource):
    """A security level for an issue or project."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("securitylevel/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Status(Resource):
    """Status for an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("status/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class StatusCategory(Resource):
    """StatusCategory for an issue."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("statuscategory/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class User(Resource):
    """A Jira user."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
        *,
        _query_param: str = "username",
    ):
        # Handle self-hosted Jira and Jira Cloud differently: Cloud self-links
        # carry an "accountId" query parameter instead of "username".
        if raw and "accountId" in raw["self"]:
            _query_param = "accountId"
        # Literal "{0}" stays a format placeholder for the Resource machinery.
        super().__init__(f"user?{_query_param}={{0}}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Group(Resource):
    """A Jira user group."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("group?groupname={0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Version(Resource):
    """A version of a project."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("version/{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)

    def delete(self, moveFixIssuesTo=None, moveAffectedIssuesTo=None):
        """Delete this project version from the server.

        If neither of the arguments are specified, the version is removed from all
        issues it is attached to.

        Args:
            moveFixIssuesTo: in issues for which this version is a fix
                version, add this argument version to the fix version list
            moveAffectedIssuesTo: in issues for which this version is an
                affected version, add this argument version to the affected version list
        """
        # Send only the redirection targets that were supplied.
        params = {
            key: value
            for key, value in (
                ("moveFixIssuesTo", moveFixIssuesTo),
                ("moveAffectedIssuesTo", moveAffectedIssuesTo),
            )
            if value is not None
        }
        return super().delete(params)

    def update(self, **kwargs):
        """Update this project version on the server (commonly used to archive versions).

        All keyword arguments are forwarded unchanged as fields of the PUT
        payload, e.g. ``version.update(name="another_name")`` or
        ``version.update(archived=True)``. See the Atlassian REST API docs:
        https://developer.atlassian.com/cloud/jira/platform/rest/v2/api-group-project-versions/#api-rest-api-2-version-id-put
        """
        super().update(**dict(kwargs))
# GreenHopper
class GreenHopperResource(Resource):
    """A generic GreenHopper resource."""
    # Base URL template for all Agile/GreenHopper endpoints; the rest-path and
    # api-version placeholders are filled from the connection options.
    AGILE_BASE_URL = "{server}/rest/{agile_rest_path}/{agile_rest_api_version}/{path}"
    GREENHOPPER_REST_PATH = "greenhopper"
    """ Old, private API. Deprecated and will be removed from Jira on the 1st February 2016. """
    AGILE_EXPERIMENTAL_REST_PATH = "greenhopper/experimental-api"
    """ Experimental API available in Jira Agile 6.7.3 - 6.7.6, basically the same as Public API """
    AGILE_BASE_REST_PATH = "agile"
    """ Public API introduced in Jira Agile 6.7.7. """
    def __init__(
        self,
        path: str,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        """Bind an Agile resource at ``path`` under AGILE_BASE_URL and parse ``raw``."""
        # Pre-set so the backfill check below works even if parsing sets nothing.
        self.self = None
        Resource.__init__(self, path, options, session, self.AGILE_BASE_URL)
        if raw:
            self._parse_raw(raw)
            # Old GreenHopper API did not contain self - create it for backward compatibility.
            if not self.self:
                self.self = self._get_url(path.format(raw["id"]))
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class Sprint(GreenHopperResource):
    """A GreenHopper sprint."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("sprint/{0}", options, session, raw)

    def find(self, id, params=None):
        """Load this sprint, routing through the legacy endpoint when configured."""
        legacy_api = (
            self._options["agile_rest_path"]
            == GreenHopperResource.GREENHOPPER_REST_PATH
        )
        if not legacy_api:
            Resource.find(self, id, params)
        else:
            # Old, private GreenHopper API had non-standard way of loading Sprint
            url = self._get_url(f"sprint/{id}/edit/model")
            self._load(url, params=params, path="sprint")
class Board(GreenHopperResource):
    """A GreenHopper board."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        # The private GreenHopper API exposed boards as "rapid views".
        if options["agile_rest_path"] == self.GREENHOPPER_REST_PATH:
            path = "rapidview/{0}"
        else:
            path = "board/{id}"
        GreenHopperResource.__init__(self, path, options, session, raw)

    def delete(self, params=None):
        """Delete this board (only supported by the private GreenHopper API)."""
        if (
            self._options["agile_rest_path"]
            == GreenHopperResource.GREENHOPPER_REST_PATH
        ):
            Resource.delete(self, params)
            return
        raise NotImplementedError(
            "Jira Agile Public API does not support Board removal"
        )
class BoardConfiguration(GreenHopperResource):
    """Configuration for a Greenhopper board."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        # Only the public Agile API exposes board configuration.
        if options["agile_rest_path"] == self.GREENHOPPER_REST_PATH:
            raise NotImplementedError(
                "Jira private API does not support Board Configuration"
            )
        GreenHopperResource.__init__(
            self, "board/{0}/configuration", options, session, raw
        )

    def find(self, id, params=None):
        """Load the configuration for board ``id`` (public Agile API only)."""
        private_api = (
            self._options["agile_rest_path"]
            == GreenHopperResource.GREENHOPPER_REST_PATH
        )
        if private_api:
            raise NotImplementedError(
                "Jira private API does not support Board Configuration"
            )
        Resource.find(self, id, params)
# Service Desk
class Customer(Resource):
    """A Service Desk customer."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        # Service Desk resources live under a different REST base path.
        super().__init__(
            "customer", options, session, "{server}/rest/servicedeskapi/{path}"
        )
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class ServiceDesk(Resource):
    """A Service Desk."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        # Service Desk resources live under a different REST base path.
        super().__init__(
            "servicedesk/{0}", options, session, "{server}/rest/servicedeskapi/{path}"
        )
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
class RequestType(Resource):
    """A Service Desk Request Type."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        # Service Desk resources live under a different REST base path.
        super().__init__(
            "servicedesk/{0}/requesttype",
            options,
            session,
            "{server}/rest/servicedeskapi/{path}",
        )
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
# Utilities
def dict2resource(
    raw: Dict[str, Any], top=None, options=None, session=None
) -> Union["PropertyHolder", Type[Resource]]:
    """Convert a dictionary into a Jira Resource object.
    Recursively walks a dict structure, transforming the properties into attributes
    on a new ``Resource`` object of the appropriate type (if a ``self`` link is present)
    or a ``PropertyHolder`` object (if no ``self`` link is present).
    """
    # Root call: attach everything to a plain attribute holder.
    if top is None:
        top = PropertyHolder(raw)
    # Sequence types whose elements we recurse into element-by-element.
    seqs = tuple, list, set, frozenset
    for i, j in raw.items():
        if isinstance(j, dict):
            if "self" in j:
                # A "self" link identifies a typed REST resource.
                # to try and help mypy know that cls_for_resource can never be 'Resource'
                resource_class = cast(Type[Resource], cls_for_resource(j["self"]))
                resource = cast(
                    Type[Resource],
                    resource_class(  # type: ignore
                        options=options, session=session, raw=j  # type: ignore
                    ),
                )
                setattr(top, i, resource)
            elif i == "timetracking":
                # Special-cased: timetracking payloads carry no "self" link.
                setattr(top, "timetracking", TimeTracking(options, session, j))
            else:
                # Plain nested dict -> nested PropertyHolder.
                setattr(top, i, dict2resource(j, options=options, session=session))
        elif isinstance(j, seqs):
            j = cast(List[Dict[str, Any]], j)  # help mypy
            seq_list: List[Any] = []
            for seq_elem in j:
                if isinstance(seq_elem, dict):
                    if "self" in seq_elem:
                        # to try and help mypy know that cls_for_resource can never be 'Resource'
                        resource_class = cast(
                            Type[Resource], cls_for_resource(seq_elem["self"])
                        )
                        resource = cast(
                            Type[Resource],
                            resource_class(  # type: ignore
                                options=options,
                                session=session,
                                raw=seq_elem,  # type: ignore
                            ),
                        )
                        seq_list.append(resource)
                    else:
                        seq_list.append(
                            dict2resource(seq_elem, options=options, session=session)
                        )
                else:
                    # Scalar list element: keep it verbatim.
                    seq_list.append(seq_elem)
            setattr(top, i, seq_list)
        else:
            # Scalar value: copy straight onto the holder.
            setattr(top, i, j)
    return top
# URL-pattern -> Resource subclass dispatch table consulted by cls_for_resource().
resource_class_map: Dict[str, Type[Resource]] = {
    # Jira-specific resources
    r"attachment/[^/]+$": Attachment,
    r"component/[^/]+$": Component,
    r"customFieldOption/[^/]+$": CustomFieldOption,
    r"dashboard/[^/]+$": Dashboard,
    # Fixed: was r"filter/[^/]$", which matched only single-character filter
    # ids; every other entry here uses the one-or-more form.
    r"filter/[^/]+$": Filter,
    r"issue/[^/]+$": Issue,
    r"issue/[^/]+/comment/[^/]+$": Comment,
    r"issue/[^/]+/votes$": Votes,
    r"issue/[^/]+/watchers$": Watchers,
    r"issue/[^/]+/worklog/[^/]+$": Worklog,
    r"issueLink/[^/]+$": IssueLink,
    r"issueLinkType/[^/]+$": IssueLinkType,
    r"issuetype/[^/]+$": IssueType,
    r"priority/[^/]+$": Priority,
    r"project/[^/]+$": Project,
    r"project/[^/]+/role/[^/]+$": Role,
    # NOTE(review): this pattern requires at least one character directly after
    # "permissionscheme" (e.g. a "?expand=user" query) — confirm it also
    # matches plain permission-scheme self links.
    r"project/[^/]+/permissionscheme[^/]+$": PermissionScheme,
    r"resolution/[^/]+$": Resolution,
    r"securitylevel/[^/]+$": SecurityLevel,
    r"status/[^/]+$": Status,
    r"statuscategory/[^/]+$": StatusCategory,
    r"user\?(username|key|accountId).+$": User,
    r"group\?groupname.+$": Group,
    r"version/[^/]+$": Version,
    # GreenHopper specific resources
    r"sprints/[^/]+$": Sprint,
    r"views/[^/]+$": Board,
}
class UnknownResource(Resource):
    """A Resource from Jira that is not (yet) supported."""

    def __init__(
        self,
        options: Dict[str, str],
        session: ResilientSession,
        raw: Dict[str, Any] = None,
    ):
        super().__init__("unknown{0}", options, session)
        if raw:
            self._parse_raw(raw)
        self.raw: Dict[str, Any] = cast(Dict[str, Any], self.raw)
def cls_for_resource(resource_literal: str) -> Type[Resource]:
    """Return the Resource subclass whose URL pattern matches ``resource_literal``."""
    for pattern, resource_cls in resource_class_map.items():
        if re.search(pattern, resource_literal):
            return resource_cls
    # Generic Resource cannot directly be used b/c of different constructor signature
    return UnknownResource
class PropertyHolder:
    # Generic attribute container used by dict2resource() for raw dicts that
    # carry no "self" link; attributes are attached later via setattr().
    def __init__(self, raw):
        # NOTE(review): this binds a *local* name only — it has no effect on
        # the instance. Kept byte-identical to preserve current behavior.
        __bases__ = raw  # noqa
|
[
"logging.error",
"typing.cast",
"logging.warning",
"json.dumps",
"time.sleep",
"jira.utils.json_loads",
"requests.structures.CaseInsensitiveDict",
"jira.utils.threaded_requests.delete",
"logging.NullHandler",
"re.search",
"logging.getLogger"
] |
[((1187, 1208), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (1206, 1208), False, 'import logging\n'), ((1150, 1175), 'logging.getLogger', 'logging.getLogger', (['"""jira"""'], {}), "('jira')\n", (1167, 1175), False, 'import logging\n'), ((14029, 14070), 'time.sleep', 'time.sleep', (["self._options['delay_reload']"], {}), "(self._options['delay_reload'])\n", (14039, 14070), False, 'import time\n'), ((15172, 15193), 'requests.structures.CaseInsensitiveDict', 'CaseInsensitiveDict', ([], {}), '()\n', (15191, 15193), False, 'from requests.structures import CaseInsensitiveDict\n'), ((16921, 16951), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (16925, 16951), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((17680, 17710), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (17684, 17710), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((18513, 18543), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (18517, 18543), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((18896, 18926), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (18900, 18926), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((19282, 19312), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (19286, 19312), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((20992, 21022), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (20996, 21022), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((25139, 25169), 'typing.cast', 'cast', 
(['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (25143, 25169), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((25842, 25872), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (25846, 25872), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((27284, 27314), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (27288, 27314), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((27658, 27688), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (27662, 27688), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((28060, 28090), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (28064, 28090), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((28614, 28644), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (28618, 28644), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((29006, 29036), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (29010, 29036), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((30368, 30398), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (30372, 30398), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((30774, 30804), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (30778, 30804), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((31157, 
31187), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (31161, 31187), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((31558, 31588), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (31562, 31588), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((31935, 31965), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (31939, 31965), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((32327, 32357), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (32331, 32357), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((34377, 34407), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (34381, 34407), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((34792, 34822), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (34796, 34822), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((35172, 35202), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (35176, 35202), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((35576, 35606), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (35580, 35606), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((36164, 36194), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (36168, 36194), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, 
Union, cast\n'), ((36550, 36580), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (36554, 36580), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((36935, 36965), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (36939, 36965), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((39898, 39928), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (39902, 39928), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((42717, 42747), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (42721, 42747), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((43212, 43242), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (43216, 43242), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((43733, 43763), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (43737, 43763), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((47839, 47869), 'typing.cast', 'cast', (['Dict[str, Any]', 'self.raw'], {}), '(Dict[str, Any], self.raw)\n', (47843, 47869), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((47986, 48023), 're.search', 're.search', (['resource', 'resource_literal'], {}), '(resource, resource_literal)\n', (47995, 48023), False, 'import re\n'), ((10563, 10588), 'logging.error', 'logging.error', (['error_list'], {}), '(error_list)\n', (10576, 10588), False, 'import logging\n'), ((15781, 15794), 'jira.utils.json_loads', 'json_loads', (['r'], {}), '(r)\n', (15791, 15794), False, 'from 
jira.utils import json_loads, threaded_requests\n'), ((10402, 10418), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (10412, 10418), False, 'import json\n'), ((11705, 11807), 'logging.warning', 'logging.warning', (['"""autofix: trying to fix sub-task without parent by converting to it to bug"""'], {}), "(\n 'autofix: trying to fix sub-task without parent by converting to it to bug'\n )\n", (11720, 11807), False, 'import logging\n'), ((12057, 12117), 'logging.warning', 'logging.warning', (['"""autofix: trying to fix newline in summary"""'], {}), "('autofix: trying to fix newline in summary')\n", (12072, 12117), False, 'import logging\n'), ((12256, 12325), 're.search', 're.search', (['"""^User \'(.*)\' was not found in the system\\\\."""', 'error', 're.U'], {}), '("^User \'(.*)\' was not found in the system\\\\.", error, re.U)\n', (12265, 12325), False, 'import re\n'), ((12673, 12723), 're.search', 're.search', (['"""^User \'(.*)\' does not exist\\\\."""', 'error'], {}), '("^User \'(.*)\' does not exist\\\\.", error)\n', (12682, 12723), False, 'import re\n'), ((12996, 13122), 'logging.warning', 'logging.warning', (['("Trying to add missing orphan user \'%s\' in order to complete the previous failed operation."\n % user)'], {}), '(\n "Trying to add missing orphan user \'%s\' in order to complete the previous failed operation."\n % user)\n', (13011, 13122), False, 'import logging\n'), ((14915, 14969), 'jira.utils.threaded_requests.delete', 'threaded_requests.delete', ([], {'url': 'self.self', 'params': 'params'}), '(url=self.self, params=params)\n', (14939, 14969), False, 'from jira.utils import json_loads, threaded_requests\n'), ((15839, 15871), 'logging.error', 'logging.error', (['f"""{e}:\n{r.text}"""'], {}), "(f'{e}:\\n{r.text}')\n", (15852, 15871), False, 'import logging\n'), ((33996, 34012), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (34006, 34012), False, 'import json\n'), ((45125, 45154), 'typing.cast', 'cast', (['List[Dict[str, Any]]', 
'j'], {}), '(List[Dict[str, Any]], j)\n', (45129, 45154), False, 'from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast\n'), ((1542, 1555), 'jira.utils.json_loads', 'json_loads', (['r'], {}), '(r)\n', (1552, 1555), False, 'from jira.utils import json_loads, threaded_requests\n'), ((10732, 10846), 'logging.warning', 'logging.warning', (['("autofix: setting reporter to \'%s\' and retrying the update." % self.\n _options[\'autofix\'])'], {}), '(\n "autofix: setting reporter to \'%s\' and retrying the update." % self.\n _options[\'autofix\'])\n', (10747, 10846), False, 'import logging\n'), ((11122, 11255), 'logging.warning', 'logging.warning', (['("autofix: setting assignee to \'%s\' for %s and retrying the update." % (\n self._options[\'autofix\'], self.key))'], {}), '(\n "autofix: setting assignee to \'%s\' for %s and retrying the update." % (\n self._options[\'autofix\'], self.key))\n', (11137, 11255), False, 'import logging\n'), ((12389, 12458), 're.search', 're.search', (['"""^User \'(.*)\' was not found in the system\\\\."""', 'error', 're.U'], {}), '("^User \'(.*)\' was not found in the system\\\\.", error, re.U)\n', (12398, 12458), False, 'import re\n'), ((12749, 12799), 're.search', 're.search', (['"""^User \'(.*)\' does not exist\\\\."""', 'error'], {}), '("^User \'(.*)\' does not exist\\\\.", error)\n', (12758, 12799), False, 'import re\n'), ((14002, 14018), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (14012, 14018), False, 'import json\n'), ((13873, 13889), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (13883, 13889), False, 'import json\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class GetRoleDefinitionResult:
"""
A collection of values returned by getRoleDefinition.
"""
def __init__(__self__, assignable_scopes=None, description=None, id=None, name=None, permissions=None, role_definition_id=None, scope=None, type=None):
if assignable_scopes and not isinstance(assignable_scopes, list):
raise TypeError("Expected argument 'assignable_scopes' to be a list")
__self__.assignable_scopes = assignable_scopes
"""
One or more assignable scopes for this Role Definition, such as `/subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333`, `/subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333/resourceGroups/myGroup`, or `/subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333/resourceGroups/myGroup/providers/Microsoft.Compute/virtualMachines/myVM`.
"""
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
__self__.description = description
"""
the Description of the built-in Role.
"""
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
__self__.id = id
"""
The provider-assigned unique ID for this managed resource.
"""
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
__self__.name = name
if permissions and not isinstance(permissions, list):
raise TypeError("Expected argument 'permissions' to be a list")
__self__.permissions = permissions
"""
a `permissions` block as documented below.
"""
if role_definition_id and not isinstance(role_definition_id, str):
raise TypeError("Expected argument 'role_definition_id' to be a str")
__self__.role_definition_id = role_definition_id
if scope and not isinstance(scope, str):
raise TypeError("Expected argument 'scope' to be a str")
__self__.scope = scope
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
__self__.type = type
"""
the Type of the Role.
"""
class AwaitableGetRoleDefinitionResult(GetRoleDefinitionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRoleDefinitionResult(
assignable_scopes=self.assignable_scopes,
description=self.description,
id=self.id,
name=self.name,
permissions=self.permissions,
role_definition_id=self.role_definition_id,
scope=self.scope,
type=self.type)
def get_role_definition(name=None,role_definition_id=None,scope=None,opts=None):
"""
Use this data source to access information about an existing Role Definition.
:param str name: Specifies the Name of either a built-in or custom Role Definition.
:param str role_definition_id: Specifies the ID of the Role Definition as a UUID/GUID.
:param str scope: Specifies the Scope at which the Custom Role Definition exists.
"""
__args__ = dict()
__args__['name'] = name
__args__['roleDefinitionId'] = role_definition_id
__args__['scope'] = scope
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure:authorization/getRoleDefinition:getRoleDefinition', __args__, opts=opts).value
return AwaitableGetRoleDefinitionResult(
assignable_scopes=__ret__.get('assignableScopes'),
description=__ret__.get('description'),
id=__ret__.get('id'),
name=__ret__.get('name'),
permissions=__ret__.get('permissions'),
role_definition_id=__ret__.get('roleDefinitionId'),
scope=__ret__.get('scope'),
type=__ret__.get('type'))
|
[
"pulumi.runtime.invoke",
"pulumi.InvokeOptions"
] |
[((3712, 3734), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (3732, 3734), False, 'import pulumi\n'), ((3825, 3931), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure:authorization/getRoleDefinition:getRoleDefinition"""', '__args__'], {'opts': 'opts'}), "('azure:authorization/getRoleDefinition:getRoleDefinition'\n , __args__, opts=opts)\n", (3846, 3931), False, 'import pulumi\n')]
|
import logging
from io import StringIO
from unittest import TestCase
from src.utils.logger import logger
def foo():
logger.info('It works!')
class TestLoggerUtils(TestCase):
def setUp(self) -> None:
self.stream = StringIO()
self.handler = logging.StreamHandler(self.stream)
for handler in logger.handlers:
logger.removeHandler(handler)
logger.addHandler(hdlr=self.handler)
def test_logger_output(self):
expected_output = 'It works!'
foo()
actual_output = self.stream.getvalue().strip()
self.assertEqual(expected_output, actual_output)
|
[
"io.StringIO",
"src.utils.logger.logger.info",
"src.utils.logger.logger.addHandler",
"logging.StreamHandler",
"src.utils.logger.logger.removeHandler"
] |
[((123, 147), 'src.utils.logger.logger.info', 'logger.info', (['"""It works!"""'], {}), "('It works!')\n", (134, 147), False, 'from src.utils.logger import logger\n'), ((234, 244), 'io.StringIO', 'StringIO', ([], {}), '()\n', (242, 244), False, 'from io import StringIO\n'), ((268, 302), 'logging.StreamHandler', 'logging.StreamHandler', (['self.stream'], {}), '(self.stream)\n', (289, 302), False, 'import logging\n'), ((393, 429), 'src.utils.logger.logger.addHandler', 'logger.addHandler', ([], {'hdlr': 'self.handler'}), '(hdlr=self.handler)\n', (410, 429), False, 'from src.utils.logger import logger\n'), ((355, 384), 'src.utils.logger.logger.removeHandler', 'logger.removeHandler', (['handler'], {}), '(handler)\n', (375, 384), False, 'from src.utils.logger import logger\n')]
|
import subprocess
import ujson as json
import numpy as np
import sys
import os
os.environ["MKL_SERVICE_FORCE_INTEL"] = "1"
runs=10
#Top k HAN, variant2; adjust train_per in helper.py
args = [
'python3',
'train.py',
'--problem-path',
'../../../LineGraphGCN/data/yelp/',
'--problem',
'yelp',
'--lr-init',
'1e-4',
'--weight-decay',
'5e-4',
'--dropout',
'0.5',
'--prep-class',
'linear',
'--n-train-samples',
'100,100',
'--n-val-samples',
'100,100',
'--prep-len',
'128',
'--in-edge-len',
'18',
'--n-head',
'8',
'--output-dims',
'128,128,32,32',
'--n-layer',
'1',
'--tolerance',
'30',
'--train-per',
'0.4',
'--batch-size',
'64',
'--val-batch-size',
'64',
'--K',
'2599',
'--concat-node',
'--optimizer',
'adam',
'--lr-schedule',
'const',
'--mpaggr-class',
'attention',
]
print(args)
test_acc = []
test_macro = []
for seed in range(runs):
process = subprocess.Popen(args+['--seed',str(seed)],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
text = process.communicate()[1]
lines = text.decode().split('\n')
# print(lines)
correct = False
for line in lines:
if '{' not in line:
continue
print(line)
line = json.loads(line)
if 'test_metric' in line:
correct = True
test_acc.append(line['test_metric']['accuracy'])
test_macro.append(line['test_metric']['macro'])
if not correct:
print(lines)
sys.stdout.flush()
test_acc = np.asarray(test_acc)
test_macro = np.asarray(test_macro)
print('average acc for {} runs is : {}'.format(len(test_acc), np.average(test_acc)))
print('average macro for {} runs is : {}'.format(len(test_macro), np.average(test_macro)))
|
[
"numpy.average",
"numpy.asarray",
"ujson.loads",
"sys.stdout.flush"
] |
[((1609, 1629), 'numpy.asarray', 'np.asarray', (['test_acc'], {}), '(test_acc)\n', (1619, 1629), True, 'import numpy as np\n'), ((1643, 1665), 'numpy.asarray', 'np.asarray', (['test_macro'], {}), '(test_macro)\n', (1653, 1665), True, 'import numpy as np\n'), ((1579, 1597), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1595, 1597), False, 'import sys\n'), ((1335, 1351), 'ujson.loads', 'json.loads', (['line'], {}), '(line)\n', (1345, 1351), True, 'import ujson as json\n'), ((1728, 1748), 'numpy.average', 'np.average', (['test_acc'], {}), '(test_acc)\n', (1738, 1748), True, 'import numpy as np\n'), ((1817, 1839), 'numpy.average', 'np.average', (['test_macro'], {}), '(test_macro)\n', (1827, 1839), True, 'import numpy as np\n')]
|
from copy import deepcopy
import time
import uuid
import newspaper
from civic_jabber_ingest.models.article import Article
import civic_jabber_ingest.utils.database as db
from civic_jabber_ingest.utils.logging import get_logger, tqdm
from civic_jabber_ingest.utils.config import read_config
from civic_jabber_ingest.utils.scrape import get_page
LOGGER = get_logger()
_connection = None
def _connect():
global _connection
if not _connection or _connection.closed > 0:
_connection = db.connect()
def load_news(states=None):
"""Scrapes newspaper articles from all of the sources listed in the newspaper config
file and loads the results into the database.
Parameters
----------
states : list
A list of states to include in the scraping job. If blank, the function will
scrape all states
"""
_connect()
LOGGER.info("Reading newspaper configuration file ...")
valid_papers = read_config("newspaper")
if states:
states = [states] if isinstance(states, str) else states
valid_papers = [
paper for paper in valid_papers if _check_state(states, paper["states"])
]
for metadata in tqdm(valid_papers):
paper = newspaper.build(metadata["url"])
paper_data = {
"source_id": metadata["id"],
"source_name": metadata["name"],
"source_brand": paper.brand,
"source_description": paper.description,
}
for paper_article in paper.articles:
try:
paper_article.build()
except (newspaper.ArticleException, ValueError):
continue
if not paper_article.summary:
continue
article_data = deepcopy(paper_data)
article_data.update(
{
"title": paper_article.title,
"body": paper_article.text,
"summary": paper_article.summary,
"keywords": paper_article.keywords,
"images": list(paper_article.images),
"url": paper_article.url,
}
)
article = Article.from_dict(article_data)
db.insert_obj(article, table="articles", connection=_connection)
def _check_state(enabled_states, paper_states):
"""Checks to see if the newspaper object contains a state that we want to scrape.
Parameters
----------
enabled_states : list
The states that we want to scrape
paper_states : list
The states that are applicable to the newspaper
Returns
-------
valid : bool
True if the paper meets the criteria for the scraping job
"""
enabled_states = set(enabled_states)
paper_states = set(paper_states)
return bool(enabled_states.intersection(paper_states))
USNPL_URL = "https://www.usnpl.com"
def find_sources(sleep_time=1):
"""Scrapes the US Newspaper Listing websites to find URLs for local newspapers in
all 50 US states.
Parameters
----------
sleep_time : int
The amount of time to sleep in between states
Returns
-------
sources : list
A list
"""
page = get_page(USNPL_URL)
states = page.find("div", class_="row desktop").find_all("a")
sources = list()
for state in tqdm(states):
state_code = state.get("href").split("=")[-1]
sources.extend(_state_sources(state_code))
time.sleep(sleep_time)
return sources
def _state_sources(state_code):
"""Extracts the newspaper listings from the USNPL state page.
Parameters
----------
state : str
The two letter state code for the state
Returns
-------
results : list
A list of sources for the state
"""
url = f"{USNPL_URL}/search/state?state={state_code}"
state_page = get_page(url)
result_table = state_page.find("table", class_="table table-sm")
result_rows = result_table.find_all("tr")
results = list()
for row in result_rows:
if not row.find_all("td", class_="w-50"):
continue
paper_name = row.find("td", class_="w-50").text
paper_url = row.find("td", class_="w-10").find("a").get("href")
results.append(
{
"id": uuid.uuid4().hex,
"name": paper_name,
"url": paper_url,
"states": [state_code.lower()],
}
)
return results
|
[
"copy.deepcopy",
"uuid.uuid4",
"civic_jabber_ingest.utils.logging.get_logger",
"newspaper.build",
"civic_jabber_ingest.utils.scrape.get_page",
"civic_jabber_ingest.utils.logging.tqdm",
"time.sleep",
"civic_jabber_ingest.utils.config.read_config",
"civic_jabber_ingest.models.article.Article.from_dict",
"civic_jabber_ingest.utils.database.insert_obj",
"civic_jabber_ingest.utils.database.connect"
] |
[((357, 369), 'civic_jabber_ingest.utils.logging.get_logger', 'get_logger', ([], {}), '()\n', (367, 369), False, 'from civic_jabber_ingest.utils.logging import get_logger, tqdm\n'), ((948, 972), 'civic_jabber_ingest.utils.config.read_config', 'read_config', (['"""newspaper"""'], {}), "('newspaper')\n", (959, 972), False, 'from civic_jabber_ingest.utils.config import read_config\n'), ((1194, 1212), 'civic_jabber_ingest.utils.logging.tqdm', 'tqdm', (['valid_papers'], {}), '(valid_papers)\n', (1198, 1212), False, 'from civic_jabber_ingest.utils.logging import get_logger, tqdm\n'), ((3239, 3258), 'civic_jabber_ingest.utils.scrape.get_page', 'get_page', (['USNPL_URL'], {}), '(USNPL_URL)\n', (3247, 3258), False, 'from civic_jabber_ingest.utils.scrape import get_page\n'), ((3364, 3376), 'civic_jabber_ingest.utils.logging.tqdm', 'tqdm', (['states'], {}), '(states)\n', (3368, 3376), False, 'from civic_jabber_ingest.utils.logging import get_logger, tqdm\n'), ((3894, 3907), 'civic_jabber_ingest.utils.scrape.get_page', 'get_page', (['url'], {}), '(url)\n', (3902, 3907), False, 'from civic_jabber_ingest.utils.scrape import get_page\n'), ((504, 516), 'civic_jabber_ingest.utils.database.connect', 'db.connect', ([], {}), '()\n', (514, 516), True, 'import civic_jabber_ingest.utils.database as db\n'), ((1230, 1262), 'newspaper.build', 'newspaper.build', (["metadata['url']"], {}), "(metadata['url'])\n", (1245, 1262), False, 'import newspaper\n'), ((3491, 3513), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (3501, 3513), False, 'import time\n'), ((1759, 1779), 'copy.deepcopy', 'deepcopy', (['paper_data'], {}), '(paper_data)\n', (1767, 1779), False, 'from copy import deepcopy\n'), ((2197, 2228), 'civic_jabber_ingest.models.article.Article.from_dict', 'Article.from_dict', (['article_data'], {}), '(article_data)\n', (2214, 2228), False, 'from civic_jabber_ingest.models.article import Article\n'), ((2241, 2305), 'civic_jabber_ingest.utils.database.insert_obj', 
'db.insert_obj', (['article'], {'table': '"""articles"""', 'connection': '_connection'}), "(article, table='articles', connection=_connection)\n", (2254, 2305), True, 'import civic_jabber_ingest.utils.database as db\n'), ((4334, 4346), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4344, 4346), False, 'import uuid\n')]
|
# std
import logging
from datetime import datetime, timedelta
from typing import List
from threading import Thread
from time import sleep
# project
from . import HarvesterActivityConsumer, WalletAddedCoinConsumer, FinishedSignageConsumer
from .stat_accumulators.eligible_plots_stats import EligiblePlotsStats
from .stat_accumulators.wallet_added_coin_stats import WalletAddedCoinStats
from .stat_accumulators.search_time_stats import SearchTimeStats
from .stat_accumulators.signage_point_stats import SignagePointStats
from .stat_accumulators.found_proof_stats import FoundProofStats
from .stat_accumulators.number_plots_stats import NumberPlotsStats
from src.chia_log.parsers.wallet_added_coin_parser import WalletAddedCoinMessage
from src.chia_log.parsers.harvester_activity_parser import HarvesterActivityMessage
from src.chia_log.parsers.finished_signage_point_parser import FinishedSignagePointMessage
from src.notifier.notify_manager import NotifyManager
from src.notifier import Event, EventType, EventPriority, EventService
class StatsManager:
"""Manage all stat accumulators and trigger daily notification to the user
with a summary from all stats that have been collected for the past 24 hours.
"""
def __init__(self, config: dict, notify_manager: NotifyManager):
self._enable = config.get("enable", False)
self._time_of_day = config.get("time_of_day", 21)
self._frequency_hours = config.get("frequency_hours", 24)
if not self._enable:
logging.warning("Disabled stats and daily notifications")
return
logging.info("Enabled stats for daily notifications")
self._notify_manager = notify_manager
self._stat_accumulators = [
WalletAddedCoinStats(),
FoundProofStats(),
SearchTimeStats(),
NumberPlotsStats(),
EligiblePlotsStats(),
SignagePointStats(),
]
logging.info(
f"Summary notifications will be sent out every {self._frequency_hours} "
f"hours starting from {self._time_of_day} o'clock"
)
self._datetime_next_summary = datetime.now().replace(hour=self._time_of_day, minute=0, second=0, microsecond=0)
while datetime.now() > self._datetime_next_summary:
self._datetime_next_summary += timedelta(hours=self._frequency_hours)
# Start thread
self._is_running = True
self._thread = Thread(target=self._run_loop)
self._thread.start()
def consume_wallet_messages(self, objects: List[WalletAddedCoinMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, WalletAddedCoinConsumer):
for obj in objects:
stat_acc.consume(obj)
def consume_harvester_messages(self, objects: List[HarvesterActivityMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, HarvesterActivityConsumer):
for obj in objects:
stat_acc.consume(obj)
def consume_signage_point_messages(self, objects: List[FinishedSignagePointMessage]):
if not self._enable:
return
for stat_acc in self._stat_accumulators:
if isinstance(stat_acc, FinishedSignageConsumer):
for obj in objects:
stat_acc.consume(obj)
def _send_daily_notification(self):
summary = f"Hello farmer! 👋 Here's what happened in the last {self._frequency_hours} hours:\n"
for stat_acc in self._stat_accumulators:
summary += "\n" + stat_acc.get_summary()
stat_acc.reset()
self._notify_manager.process_events(
[Event(type=EventType.DAILY_STATS, priority=EventPriority.LOW, service=EventService.DAILY, message=summary)]
)
def _run_loop(self):
while self._is_running:
if datetime.now() > self._datetime_next_summary:
self._send_daily_notification()
self._datetime_next_summary += timedelta(hours=self._frequency_hours)
sleep(1)
def stop(self):
self._is_running = False
|
[
"threading.Thread",
"logging.warning",
"time.sleep",
"logging.info",
"src.notifier.Event",
"datetime.timedelta",
"datetime.datetime.now"
] |
[((1597, 1650), 'logging.info', 'logging.info', (['"""Enabled stats for daily notifications"""'], {}), "('Enabled stats for daily notifications')\n", (1609, 1650), False, 'import logging\n'), ((1949, 2092), 'logging.info', 'logging.info', (['f"""Summary notifications will be sent out every {self._frequency_hours} hours starting from {self._time_of_day} o\'clock"""'], {}), '(\n f"Summary notifications will be sent out every {self._frequency_hours} hours starting from {self._time_of_day} o\'clock"\n )\n', (1961, 2092), False, 'import logging\n'), ((2462, 2491), 'threading.Thread', 'Thread', ([], {'target': 'self._run_loop'}), '(target=self._run_loop)\n', (2468, 2491), False, 'from threading import Thread\n'), ((1511, 1568), 'logging.warning', 'logging.warning', (['"""Disabled stats and daily notifications"""'], {}), "('Disabled stats and daily notifications')\n", (1526, 1568), False, 'import logging\n'), ((2255, 2269), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2267, 2269), False, 'from datetime import datetime, timedelta\n'), ((2344, 2382), 'datetime.timedelta', 'timedelta', ([], {'hours': 'self._frequency_hours'}), '(hours=self._frequency_hours)\n', (2353, 2382), False, 'from datetime import datetime, timedelta\n'), ((4205, 4213), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4210, 4213), False, 'from time import sleep\n'), ((2159, 2173), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2171, 2173), False, 'from datetime import datetime, timedelta\n'), ((3822, 3933), 'src.notifier.Event', 'Event', ([], {'type': 'EventType.DAILY_STATS', 'priority': 'EventPriority.LOW', 'service': 'EventService.DAILY', 'message': 'summary'}), '(type=EventType.DAILY_STATS, priority=EventPriority.LOW, service=\n EventService.DAILY, message=summary)\n', (3827, 3933), False, 'from src.notifier import Event, EventType, EventPriority, EventService\n'), ((4013, 4027), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4025, 4027), False, 'from datetime 
import datetime, timedelta\n'), ((4154, 4192), 'datetime.timedelta', 'timedelta', ([], {'hours': 'self._frequency_hours'}), '(hours=self._frequency_hours)\n', (4163, 4192), False, 'from datetime import datetime, timedelta\n')]
|
"""Module provider for exoscale"""
from __future__ import absolute_import
import logging
import requests
from lexicon.providers.base import Provider as BaseProvider
LOGGER = logging.getLogger(__name__)
HOUR = 3600
NAMESERVER_DOMAINS = ['exoscale.ch']
def provider_parser(subparser):
"""Generate subparser for exoscale"""
subparser.add_argument(
"--auth-key", help="specify API key for authentication"
)
subparser.add_argument(
"--auth-secret", help="specify API secret for authentication"
)
class Provider(BaseProvider):
"""Provider class for exoscale"""
def __init__(self, config):
super(Provider, self).__init__(config)
self.api_endpoint = 'https://api.exoscale.com/dns'
def _authenticate(self):
"""An innocent call to check that the credentials are okay."""
response = self._get("/v1/domains/{0}".format(self.domain))
self.domain_id = response["domain"]["id"]
def _create_record(self, rtype, name, content):
"""Create record if doesnt already exist with same content"""
# check if record already exists
existing_records = self._list_records(rtype, name, content)
if len(existing_records) >= 1:
return True
record = {
"record_type": rtype,
"name": self._relative_name(name),
"content": content,
}
if self._get_lexicon_option("ttl"):
record["ttl"] = self._get_lexicon_option("ttl")
if self._get_lexicon_option("priority"):
record["prio"] = self._get_lexicon_option("priority")
payload = self._post(
"/v1/domains/{0}/records".format(self.domain),
{"record": record},
)
status = "id" in payload.get("record", {})
LOGGER.debug("create_record: %s", status)
return status
def _list_records(self, rtype=None, name=None, content=None):
"""List all records.
record_type, name and content are used to filter the records.
If possible it filters during the query, otherwise afterwards.
An empty list is returned if no records are found.
"""
filter_query = {}
if rtype:
filter_query["record_type"] = rtype
if name:
name = self._relative_name(name)
filter_query["name"] = name
payload = self._get(
"/v1/domains/{0}/records".format(self.domain),
query_params=filter_query,
)
records = []
for data in payload:
record = data["record"]
if content and record["content"] != content:
continue
if record["name"] == "":
rname = self.domain
else:
rname = ".".join((record["name"], self.domain))
processed_record = {
"type": record["record_type"],
"name": rname,
"ttl": record["ttl"],
"content": record["content"],
"id": record["id"],
}
if record["prio"]:
processed_record["options"] = {
"mx": {"priority": record["prio"]}
}
records.append(processed_record)
LOGGER.debug("list_records: %s", records)
return records
def _update_record(self, identifier, rtype=None, name=None, content=None):
"""Create or update a record."""
record = {}
if not identifier:
records = self._list_records(rtype, name, content)
identifiers = [r["id"] for r in records]
else:
identifiers = [identifier]
if name:
record["name"] = self._relative_name(name)
if content:
record["content"] = content
if self._get_lexicon_option('ttl'):
record["ttl"] = self._get_lexicon_option('ttl')
if self._get_lexicon_option('priority'):
record["prio"] = self._get_lexicon_option('priority')
LOGGER.debug("update_records: %s", identifiers)
for record_id in identifiers:
self._put(
"/v1/domains/{0}/records/{1}".format(
self.domain, identifier
),
record,
)
LOGGER.debug("update_record: %s", record_id)
LOGGER.debug("update_record: %s", True)
return True
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""Delete an existing record.
If the record doesn't exist, does nothing.
"""
if not identifier:
records = self._list_records(rtype, name, content)
identifiers = [record["id"] for record in records]
else:
identifiers = [identifier]
LOGGER.debug("delete_records: %s", identifiers)
for record_id in identifiers:
self._delete(
"/v1/domains/{0}/records/{1}".format(
self.domain, record_id
)
)
LOGGER.debug("delete_record: %s", record_id)
LOGGER.debug("delete_record: %s", True)
return True
def _request(self, action="GET", url="/", data=None, query_params=None):
"""Performs the request to the API"""
if data is None:
data = {}
if query_params is None:
query_params = {}
default_headers = {"Accept": "application/json"}
default_headers["X-DNS-Token"] = ":".join(
(self._get_provider_option("auth_key"),
self._get_provider_option("auth_secret"))
)
response = requests.request(
action,
self.api_endpoint + url,
params=query_params,
json=data,
headers=default_headers,
)
# if the request fails for any reason, throw an error.
response.raise_for_status()
if response.text and response.json() is None:
raise Exception("No data returned")
return response.json() if response.text else None
|
[
"requests.request",
"logging.getLogger"
] |
[((177, 204), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (194, 204), False, 'import logging\n'), ((5699, 5810), 'requests.request', 'requests.request', (['action', '(self.api_endpoint + url)'], {'params': 'query_params', 'json': 'data', 'headers': 'default_headers'}), '(action, self.api_endpoint + url, params=query_params, json\n =data, headers=default_headers)\n', (5715, 5810), False, 'import requests\n')]
|
# Test catsgo.py with clockwork config
# Make a config file for testing server: https://cats.oxfordfun.com
# Run all tests: python3 test_catsgo_clockwork.py
# Run one test: python3 test_catsgo_clockwork.py TestCatsgo.test_fetch
# Run code coverage: coverage run test_catsgo_clockwork.py
# View code coverage report: coverage report -m
# Generate code coverage html report: coverage html
import unittest
import catsgo
import json
import os
class TestCatsgo(unittest.TestCase):
def test_load_config(self):
expected_sp3url = "https://cats.oxfordfun.com"
result = catsgo.load_config("config.json")
self.assertEqual(expected_sp3url, result['sp3_url'])
def test_login(self):
result = catsgo.login()
self.assertTrue(result != 'yes')
def test_fetch(self):
result = catsgo.fetch("/data/inputs/uploads/oxforduni/sp3_test_data")
self.assertTrue("guid" in result.keys())
def test_run_clockwork(self):
fetch_result = catsgo.fetch("/data/inputs/uploads/oxforduni/sp3_test_data")
result = catsgo.run_clockwork("sp3test1-Clockwork_combined", fetch_result["guid"])
self.assertTrue("run_uuid" in result.keys())
def test_check_run(self):
fetch_result = catsgo.fetch("/data/inputs/uploads/oxforduni/sp3_test_data")
run_result = catsgo.run_clockwork("sp3test1-Clockwork_combined", fetch_result["guid"])
result = catsgo.check_run("sp3test1-Clockwork_combined", run_result["run_uuid"])
print(result)
def test_run_info(self):
fetch_result = catsgo.fetch("/data/inputs/uploads/oxforduni/sp3_test_data")
run_result = catsgo.run_clockwork("sp3test1-Clockwork_combined", fetch_result["guid"])
result = catsgo.run_info("sp3test1-Clockwork_combined", run_result["run_uuid"])
print(result)
def test_go(self):
catsgo.go("/data/inputs/uploads/oxforduni/sp3_test_data")
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"catsgo.run_info",
"catsgo.fetch",
"catsgo.run_clockwork",
"catsgo.login",
"catsgo.go",
"catsgo.check_run",
"catsgo.load_config"
] |
[((1945, 1960), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1958, 1960), False, 'import unittest\n'), ((582, 615), 'catsgo.load_config', 'catsgo.load_config', (['"""config.json"""'], {}), "('config.json')\n", (600, 615), False, 'import catsgo\n'), ((721, 735), 'catsgo.login', 'catsgo.login', ([], {}), '()\n', (733, 735), False, 'import catsgo\n'), ((821, 881), 'catsgo.fetch', 'catsgo.fetch', (['"""/data/inputs/uploads/oxforduni/sp3_test_data"""'], {}), "('/data/inputs/uploads/oxforduni/sp3_test_data')\n", (833, 881), False, 'import catsgo\n'), ((988, 1048), 'catsgo.fetch', 'catsgo.fetch', (['"""/data/inputs/uploads/oxforduni/sp3_test_data"""'], {}), "('/data/inputs/uploads/oxforduni/sp3_test_data')\n", (1000, 1048), False, 'import catsgo\n'), ((1065, 1138), 'catsgo.run_clockwork', 'catsgo.run_clockwork', (['"""sp3test1-Clockwork_combined"""', "fetch_result['guid']"], {}), "('sp3test1-Clockwork_combined', fetch_result['guid'])\n", (1085, 1138), False, 'import catsgo\n'), ((1244, 1304), 'catsgo.fetch', 'catsgo.fetch', (['"""/data/inputs/uploads/oxforduni/sp3_test_data"""'], {}), "('/data/inputs/uploads/oxforduni/sp3_test_data')\n", (1256, 1304), False, 'import catsgo\n'), ((1325, 1398), 'catsgo.run_clockwork', 'catsgo.run_clockwork', (['"""sp3test1-Clockwork_combined"""', "fetch_result['guid']"], {}), "('sp3test1-Clockwork_combined', fetch_result['guid'])\n", (1345, 1398), False, 'import catsgo\n'), ((1415, 1486), 'catsgo.check_run', 'catsgo.check_run', (['"""sp3test1-Clockwork_combined"""', "run_result['run_uuid']"], {}), "('sp3test1-Clockwork_combined', run_result['run_uuid'])\n", (1431, 1486), False, 'import catsgo\n'), ((1560, 1620), 'catsgo.fetch', 'catsgo.fetch', (['"""/data/inputs/uploads/oxforduni/sp3_test_data"""'], {}), "('/data/inputs/uploads/oxforduni/sp3_test_data')\n", (1572, 1620), False, 'import catsgo\n'), ((1641, 1714), 'catsgo.run_clockwork', 'catsgo.run_clockwork', (['"""sp3test1-Clockwork_combined"""', "fetch_result['guid']"], {}), 
"('sp3test1-Clockwork_combined', fetch_result['guid'])\n", (1661, 1714), False, 'import catsgo\n'), ((1731, 1801), 'catsgo.run_info', 'catsgo.run_info', (['"""sp3test1-Clockwork_combined"""', "run_result['run_uuid']"], {}), "('sp3test1-Clockwork_combined', run_result['run_uuid'])\n", (1746, 1801), False, 'import catsgo\n'), ((1855, 1912), 'catsgo.go', 'catsgo.go', (['"""/data/inputs/uploads/oxforduni/sp3_test_data"""'], {}), "('/data/inputs/uploads/oxforduni/sp3_test_data')\n", (1864, 1912), False, 'import catsgo\n')]
|
import math
import random
import torch
import numpy as np
from scipy.stats import beta
from openmixup.models.utils import batch_shuffle_ddp
def fftfreqnd(h, w=None, z=None):
""" Get bin values for discrete fourier transform of size (h, w, z)
:param h: Required, first dimension size
:param w: Optional, second dimension size
:param z: Optional, third dimension size
"""
fz = fx = 0
fy = np.fft.fftfreq(h)
if w is not None:
fy = np.expand_dims(fy, -1)
if w % 2 == 1:
fx = np.fft.fftfreq(w)[: w // 2 + 2]
else:
fx = np.fft.fftfreq(w)[: w // 2 + 1]
if z is not None:
fy = np.expand_dims(fy, -1)
if z % 2 == 1:
fz = np.fft.fftfreq(z)[:, None]
else:
fz = np.fft.fftfreq(z)[:, None]
return np.sqrt(fx * fx + fy * fy + fz * fz)
def get_spectrum(freqs, decay_power, ch, h, w=0, z=0):
""" Samples a fourier image with given size and frequencies decayed by decay power
:param freqs: Bin values for the discrete fourier transform
:param decay_power: Decay power for frequency decay prop 1/f**d
:param ch: Number of channels for the resulting mask
:param h: Required, first dimension size
:param w: Optional, second dimension size
:param z: Optional, third dimension size
"""
scale = np.ones(1) / (
np.maximum(freqs, np.array([1.0 / max(w, h, z)])) ** decay_power
)
param_size = [ch] + list(freqs.shape) + [2]
param = np.random.randn(*param_size)
scale = np.expand_dims(scale, -1)[None, :]
return scale * param
def make_low_freq_image(decay, shape, ch=1):
""" Sample a low frequency image from fourier space
:param decay_power: Decay power for frequency decay prop 1/f**d
:param shape: Shape of desired mask, list up to 3 dims
:param ch: Number of channels for desired mask
"""
freqs = fftfreqnd(*shape)
spectrum = get_spectrum(freqs, decay, ch, *shape) # .reshape((1, *shape[:-1], -1))
spectrum = spectrum[:, 0] + 1j * spectrum[:, 1]
mask = np.real(np.fft.irfftn(spectrum, shape))
if len(shape) == 1:
mask = mask[:1, : shape[0]]
if len(shape) == 2:
mask = mask[:1, : shape[0], : shape[1]]
if len(shape) == 3:
mask = mask[:1, : shape[0], : shape[1], : shape[2]]
mask = mask
mask = mask - mask.min()
mask = mask / mask.max()
return mask
def sample_lam(alpha, reformulate=False):
""" Sample a lambda from symmetric beta distribution with given alpha
:param alpha: Alpha value for beta distribution
:param reformulate: If True, uses the reformulation of [1].
"""
if reformulate:
lam = beta.rvs(alpha + 1, alpha)
else:
lam = beta.rvs(alpha, alpha)
return lam
def binarise_mask(mask, lam, in_shape, max_soft=0.0):
""" Binarises a given low frequency image such that it has mean lambda.
:param mask: Low frequency image, usually the result of `make_low_freq_image`
:param lam: Mean value of final mask
:param in_shape: Shape of inputs
:param max_soft: Softening value between 0 and 0.5 which smooths hard edges in the mask.
"""
idx = mask.reshape(-1).argsort()[::-1]
mask = mask.reshape(-1)
num = (
math.ceil(lam * mask.size)
if random.random() > 0.5
else math.floor(lam * mask.size)
)
eff_soft = max_soft
if max_soft > lam or max_soft > (1 - lam):
eff_soft = min(lam, 1 - lam)
soft = int(mask.size * eff_soft)
num_low = num - soft
num_high = num + soft
mask[idx[:num_high]] = 1
mask[idx[num_low:]] = 0
mask[idx[num_low:num_high]] = np.linspace(1, 0, (num_high - num_low))
mask = mask.reshape((1, *in_shape))
return mask
def sample_mask(alpha, decay_power, shape, max_soft=0.0, reformulate=False):
""" Samples a mean lambda from beta distribution parametrised by alpha,
creates a low frequency image and binarises, it based on this lambda.
:param alpha: Alpha value for beta distribution from which to sample mean of mask
:param decay_power: Decay power for frequency decay prop 1/f**d
:param shape: Shape of desired mask, list up to 3 dims
:param max_soft: Softening value between 0 and 0.5 which smooths hard edges in the mask.
:param reformulate: If True, uses the reformulation of [1].
"""
if isinstance(shape, int):
shape = (shape,)
# Choose lambda
lam = sample_lam(alpha, reformulate)
# Make mask, get mean / std
mask = make_low_freq_image(decay_power, shape)
mask = binarise_mask(mask, lam, shape, max_soft)
return lam, mask
def sample_and_apply(x, alpha, decay_power, shape, max_soft=0.0, reformulate=False):
"""
:param x: Image batch on which to apply fmix of shape [b, c, shape*]
:param alpha: Alpha value for beta distribution from which to sample mean of mask
:param decay_power: Decay power for frequency decay prop 1/f**d
:param shape: Shape of desired mask, list up to 3 dims
:param max_soft: Softening value between 0 and 0.5 which smooths hard edges in the mask.
:param reformulate: If True, uses the reformulation of [1].
:return: mixed input, permutation indices, lambda value of mix,
"""
lam, mask = sample_mask(alpha, decay_power, shape, max_soft, reformulate)
index = np.random.permutation(x.shape[0])
x1, x2 = x * mask, x[index] * (1 - mask)
return x1 + x2, index, lam
@torch.no_grad()
def fmix(img, gt_label, alpha=1.0, lam=None, dist_mode=False,
decay_power=3, size=(32,32), max_soft=0., reformulate=False, **kwargs):
r""" FMix augmentation.
"FMix: Enhancing Mixed Sample Data Augmentation (https://arxiv.org/abs/2002.12047)".
https://github.com/ecs-vlc/FMix/blob/master/fmix.py
Args:
decay_power (float): Decay power for frequency decay prop 1/f**d
alpha (float): Alpha value for beta distribution from which to
sample mean of mask.
lam (float): The given mixing ratio (fixed). If lam is None, sample a
new lam from Beta distribution.
size ([int] | [int, int] | [int, int, int]): Shape of desired mask,
list up to 3 dims.
max_soft (float): Softening value between 0 and 0.5 which smooths
hard edges in the mask.
reformulate (bool): If True, uses the reformulation of [1].
dist_mode (bool): Whether to do cross gpus index shuffling and
return the mixup shuffle index, which support supervised and
self-supervised methods.
"""
# fmix mask
lam_, mask = sample_mask(alpha, decay_power, size, max_soft, reformulate)
# convert to img dtype (fp16)
mask = torch.from_numpy(mask).cuda().type_as(img)
if lam is None:
lam = lam_
else: # lam bias is fixed, lam should be larger than lam_
if lam_ < lam:
mask = 1 - mask
lam = 1 - lam_
# normal mixup process
if not dist_mode:
indices = torch.randperm(img.size(0)).cuda()
if len(img.size()) == 4: # [N, C, H, W]
img_ = img[indices]
else:
assert img.dim() == 5 # semi-supervised img [N, 2, C, H, W]
# * notice that the rank of two groups of img is fixed
img_ = img[:, 1, ...].contiguous()
img = img[:, 0, ...].contiguous()
y_a = gt_label
y_b = gt_label[indices]
img = mask * img + (1 - mask) * img_
return img, (y_a, y_b, lam)
# dist mixup with cross gpus shuffle
else:
if len(img.size()) == 5: # self-supervised img [N, 2, C, H, W]
img_ = img[:, 1, ...].contiguous()
img = img[:, 0, ...].contiguous()
img_, idx_shuffle, idx_unshuffle = batch_shuffle_ddp( # N
img_, idx_shuffle=kwargs.get("idx_shuffle_mix", None), no_repeat=True)
else:
assert len(img.size()) == 4 # normal img [N, C, H, w]
img_, idx_shuffle, idx_unshuffle = batch_shuffle_ddp( # N
img, idx_shuffle=kwargs.get("idx_shuffle_mix", None), no_repeat=True)
# mixup by mask
img = mask * img + (1 - mask) * img_
if gt_label is not None:
y_a = gt_label
y_b, _, _ = batch_shuffle_ddp(gt_label, idx_shuffle=idx_shuffle, no_repeat=True)
return img, (y_a, y_b, lam)
else:
return img, (idx_shuffle, idx_unshuffle, lam)
|
[
"scipy.stats.beta.rvs",
"torch.from_numpy",
"numpy.random.randn",
"math.ceil",
"numpy.fft.irfftn",
"openmixup.models.utils.batch_shuffle_ddp",
"math.floor",
"numpy.expand_dims",
"numpy.ones",
"random.random",
"numpy.fft.fftfreq",
"numpy.linspace",
"numpy.random.permutation",
"torch.no_grad",
"numpy.sqrt"
] |
[((5484, 5499), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5497, 5499), False, 'import torch\n'), ((418, 435), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['h'], {}), '(h)\n', (432, 435), True, 'import numpy as np\n'), ((827, 863), 'numpy.sqrt', 'np.sqrt', (['(fx * fx + fy * fy + fz * fz)'], {}), '(fx * fx + fy * fy + fz * fz)\n', (834, 863), True, 'import numpy as np\n'), ((1508, 1536), 'numpy.random.randn', 'np.random.randn', (['*param_size'], {}), '(*param_size)\n', (1523, 1536), True, 'import numpy as np\n'), ((3676, 3713), 'numpy.linspace', 'np.linspace', (['(1)', '(0)', '(num_high - num_low)'], {}), '(1, 0, num_high - num_low)\n', (3687, 3713), True, 'import numpy as np\n'), ((5370, 5403), 'numpy.random.permutation', 'np.random.permutation', (['x.shape[0]'], {}), '(x.shape[0])\n', (5391, 5403), True, 'import numpy as np\n'), ((472, 494), 'numpy.expand_dims', 'np.expand_dims', (['fy', '(-1)'], {}), '(fy, -1)\n', (486, 494), True, 'import numpy as np\n'), ((667, 689), 'numpy.expand_dims', 'np.expand_dims', (['fy', '(-1)'], {}), '(fy, -1)\n', (681, 689), True, 'import numpy as np\n'), ((1353, 1363), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (1360, 1363), True, 'import numpy as np\n'), ((1550, 1575), 'numpy.expand_dims', 'np.expand_dims', (['scale', '(-1)'], {}), '(scale, -1)\n', (1564, 1575), True, 'import numpy as np\n'), ((2089, 2119), 'numpy.fft.irfftn', 'np.fft.irfftn', (['spectrum', 'shape'], {}), '(spectrum, shape)\n', (2102, 2119), True, 'import numpy as np\n'), ((2705, 2731), 'scipy.stats.beta.rvs', 'beta.rvs', (['(alpha + 1)', 'alpha'], {}), '(alpha + 1, alpha)\n', (2713, 2731), False, 'from scipy.stats import beta\n'), ((2756, 2778), 'scipy.stats.beta.rvs', 'beta.rvs', (['alpha', 'alpha'], {}), '(alpha, alpha)\n', (2764, 2778), False, 'from scipy.stats import beta\n'), ((3279, 3305), 'math.ceil', 'math.ceil', (['(lam * mask.size)'], {}), '(lam * mask.size)\n', (3288, 3305), False, 'import math\n'), ((3352, 3379), 'math.floor', 
'math.floor', (['(lam * mask.size)'], {}), '(lam * mask.size)\n', (3362, 3379), False, 'import math\n'), ((3317, 3332), 'random.random', 'random.random', ([], {}), '()\n', (3330, 3332), False, 'import random\n'), ((8309, 8377), 'openmixup.models.utils.batch_shuffle_ddp', 'batch_shuffle_ddp', (['gt_label'], {'idx_shuffle': 'idx_shuffle', 'no_repeat': '(True)'}), '(gt_label, idx_shuffle=idx_shuffle, no_repeat=True)\n', (8326, 8377), False, 'from openmixup.models.utils import batch_shuffle_ddp\n'), ((536, 553), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['w'], {}), '(w)\n', (550, 553), True, 'import numpy as np\n'), ((599, 616), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['w'], {}), '(w)\n', (613, 616), True, 'import numpy as np\n'), ((730, 747), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['z'], {}), '(z)\n', (744, 747), True, 'import numpy as np\n'), ((788, 805), 'numpy.fft.fftfreq', 'np.fft.fftfreq', (['z'], {}), '(z)\n', (802, 805), True, 'import numpy as np\n'), ((6744, 6766), 'torch.from_numpy', 'torch.from_numpy', (['mask'], {}), '(mask)\n', (6760, 6766), False, 'import torch\n')]
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from uw_canvas.models import CanvasCourse
from restclients_core.exceptions import DataFailureException
from canvas_users.dao.canvas import get_course_sections
from canvas_users.exceptions import MissingSectionException
from canvas_users.views import UserRESTDispatch
import traceback
class CanvasCourseSections(UserRESTDispatch):
""" Performs actions on Canvas Course Sections
GET returns 200 with course sections.
"""
def get(self, request, *args, **kwargs):
course_id = kwargs['canvas_course_id']
user_id = self.blti.canvas_user_id
course_name = self.blti.course_long_name
sis_course_id = self.blti.course_sis_id
try:
course = CanvasCourse(course_id=course_id,
sis_course_id=sis_course_id,
name=course_name)
sections = get_course_sections(course, user_id)
except MissingSectionException as err:
msg = 'Adding users to this course not allowed'
return self.error_response(401, message=msg)
except DataFailureException as err:
return self.error_response(500, message=err.msg)
except Exception as err:
return self.error_response(500, message=traceback.format_exc(err))
return self.json_response({
'sections': sorted(sections, key=lambda k: k['name'])
})
|
[
"canvas_users.dao.canvas.get_course_sections",
"traceback.format_exc",
"uw_canvas.models.CanvasCourse"
] |
[((792, 877), 'uw_canvas.models.CanvasCourse', 'CanvasCourse', ([], {'course_id': 'course_id', 'sis_course_id': 'sis_course_id', 'name': 'course_name'}), '(course_id=course_id, sis_course_id=sis_course_id, name=course_name\n )\n', (804, 877), False, 'from uw_canvas.models import CanvasCourse\n'), ((964, 1000), 'canvas_users.dao.canvas.get_course_sections', 'get_course_sections', (['course', 'user_id'], {}), '(course, user_id)\n', (983, 1000), False, 'from canvas_users.dao.canvas import get_course_sections\n'), ((1356, 1381), 'traceback.format_exc', 'traceback.format_exc', (['err'], {}), '(err)\n', (1376, 1381), False, 'import traceback\n')]
|
from algorithms.graph import Tarjan
from algorithms.graph import check_bipartite
from algorithms.graph.dijkstra import Dijkstra
from algorithms.graph import ford_fulkerson
from algorithms.graph import edmonds_karp
from algorithms.graph import dinic
from algorithms.graph import maximum_flow_bfs
from algorithms.graph import maximum_flow_dfs
from algorithms.graph import all_pairs_shortest_path
from algorithms.graph import bellman_ford
from algorithms.graph import bellman_ford
from algorithms.graph import count_connected_number_of_component
import unittest
class TestTarjan(unittest.TestCase):
"""
Test for the file tarjan.py
Arguments:
unittest {[type]} -- [description]
"""
def test_tarjan_example_1(self):
# Graph from https://en.wikipedia.org/wiki/File:Scc.png
example = {
'A': ['B'],
'B': ['C', 'E', 'F'],
'C': ['D', 'G'],
'D': ['C', 'H'],
'E': ['A', 'F'],
'F': ['G'],
'G': ['F'],
'H': ['D', 'G']
}
g = Tarjan(example)
self.assertEqual(g.sccs, [['F', 'G'], ['C', 'D', 'H'], ['A', 'B', 'E']])
def test_tarjan_example_2(self):
# Graph from https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm#/media/File:Tarjan%27s_Algorithm_Animation.gif
example = {
'A': ['E'],
'B': ['A'],
'C': ['B', 'D'],
'D': ['C'],
'E': ['B'],
'F': ['B', 'E', 'G'],
'G': ['F', 'C'],
'H': ['G', 'H', 'D']
}
g = Tarjan(example)
self.assertEqual(g.sccs, [['A', 'B', 'E'], ['C', 'D'], ['F', 'G'], ['H']])
class TestCheckBipartite(unittest.TestCase):
def test_check_bipartite(self):
adj_list_1 = [[0, 0, 1], [0, 0, 1], [1, 1, 0]]
self.assertEqual(True, check_bipartite(adj_list_1))
adj_list_2 = [[0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 1], [1, 0, 1, 0]]
self.assertEqual(True, check_bipartite(adj_list_2))
adj_list_3 = [[0, 1, 0, 0], [1, 0, 1, 1], [0, 1, 0, 1], [0, 1, 1, 0]]
self.assertEqual(False, check_bipartite(adj_list_3))
class TestDijkstra(unittest.TestCase):
def test_dijkstra(self):
g = Dijkstra(9)
g.graph = [[0, 4, 0, 0, 0, 0, 0, 8, 0],
[4, 0, 8, 0, 0, 0, 0, 11, 0],
[0, 8, 0, 7, 0, 4, 0, 0, 2],
[0, 0, 7, 0, 9, 14, 0, 0, 0],
[0, 0, 0, 9, 0, 10, 0, 0, 0],
[0, 0, 4, 14, 10, 0, 2, 0, 0],
[0, 0, 0, 0, 0, 2, 0, 1, 6],
[8, 11, 0, 0, 0, 0, 1, 0, 7],
[0, 0, 2, 0, 0, 0, 6, 7, 0]
];
self.assertEqual(g.dijkstra(0), [0, 4, 12, 19, 21, 11, 9, 8, 14])
class TestMaximumFlow(unittest.TestCase):
"""
Test for the file maximum_flow.py
Arguments:
unittest {[type]} -- [description]
"""
def test_ford_fulkerson(self):
capacity = [
[0, 10, 10, 0, 0, 0, 0],
[0, 0, 2, 0, 4, 8, 0],
[0, 0, 0, 0, 0, 9, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 10],
[0, 0, 0, 0, 6, 0, 10],
[0, 0, 0, 0, 0, 0, 0]
]
self.assertEqual(19, ford_fulkerson(capacity, 0, 6))
def test_edmonds_karp(self):
capacity = [
[0, 10, 10, 0, 0, 0, 0],
[0, 0, 2, 0, 4, 8, 0],
[0, 0, 0, 0, 0, 9, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 10],
[0, 0, 0, 0, 6, 0, 10],
[0, 0, 0, 0, 0, 0, 0]
]
self.assertEqual(19, edmonds_karp(capacity, 0, 6))
def dinic(self):
capacity = [
[0, 10, 10, 0, 0, 0, 0],
[0, 0, 2, 0, 4, 8, 0],
[0, 0, 0, 0, 0, 9, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 10],
[0, 0, 0, 0, 6, 0, 10],
[0, 0, 0, 0, 0, 0, 0]
]
self.assertEqual(19, dinic(capacity, 0, 6))
class TestMaximum_Flow_Bfs(unittest.TestCase):
"""
Test for the file def maximum_flow_bfs.py
Arguments:
unittest {[type]} -- [description]
"""
def test_maximum_flow_bfs(self):
graph = [
[0, 16, 13, 0, 0, 0],
[0, 0, 10, 12, 0, 0],
[0, 4, 0, 0, 14, 0],
[0, 0, 9, 0, 0, 20],
[0, 0, 0, 7, 0, 4],
[0, 0, 0, 0, 0, 0]
]
maximum_flow = maximum_flow_bfs(graph)
self.assertEqual(maximum_flow, 23)
class TestMaximum_Flow_Dfs(unittest.TestCase):
"""
Test for the file def maximum_flow_dfs.py
Arguments:
unittest {[type]} -- [description]
"""
def test_maximum_flow_dfs(self):
graph = [
[0, 16, 13, 0, 0, 0],
[0, 0, 10, 12, 0, 0],
[0, 4, 0, 0, 14, 0],
[0, 0, 9, 0, 0, 20],
[0, 0, 0, 7, 0, 4],
[0, 0, 0, 0, 0, 0]
]
maximum_flow = maximum_flow_dfs(graph)
self.assertEqual(maximum_flow, 23)
class TestAll_Pairs_Shortest_Path(unittest.TestCase):
def test_all_pairs_shortest_path(self):
graph = [[0, 0.1, 0.101, 0.142, 0.277],
[0.465, 0, 0.191, 0.192, 0.587],
[0.245, 0.554, 0, 0.333, 0.931],
[1.032, 0.668, 0.656, 0, 0.151],
[0.867, 0.119, 0.352, 0.398, 0]]
result = all_pairs_shortest_path(graph)
self.assertEqual(result, [
[0, 0.1, 0.101, 0.142, 0.277],
[0.436, 0, 0.191, 0.192, 0.34299999999999997],
[0.245, 0.345, 0, 0.333, 0.484],
[0.706, 0.27, 0.46099999999999997, 0, 0.151],
[0.5549999999999999, 0.119, 0.31, 0.311, 0],
])
class TestBellmanFord(unittest.TestCase):
def test_bellman_ford(self):
graph1 = {
'a': {'b': 6, 'e': 7},
'b': {'c': 5, 'd': -4, 'e': 8},
'c': {'b': -2},
'd': {'a': 2, 'c': 7},
'e': {'b': -3}
}
self.assertEqual(True, bellman_ford(graph1, 'a'))
graph2 = {
'a': {'d': 3, 'e': 4},
'b': {'a': 7, 'e':2},
'c': {'a': 12, 'd':9, 'e':11},
'd': {'c': 5, 'e': 11},
'e': {'a': 7, 'b': 5, 'd': 1}
}
self.assertEqual(True, bellman_ford(graph2, 'a'))
class TestConnectedComponentInGraph(unittest.TestCase):
"""
Class for testing different cases for connected components in graph
"""
def test_count_connected_components(self):
"""
Test Function that test the different cases of count connected components
0----------2 1--------5 3
|
|
4
output = 3
"""
expected_result = 3
# adjacency list representation of graph
l = [[2],
[5],
[0,4],
[],
[2],
[1]
]
size = 5
result = count_connected_number_of_component.count_components(l,size)
self.assertEqual(result,expected_result)
def test_connected_components_with_empty_graph(self):
"""
input :
output : 0
"""
l = [[]]
expected_result = 0
size = 0
result = count_connected_number_of_component.count_components(l,size)
self.assertEqual(result,expected_result)
def test_connected_components_without_edges_graph(self):
"""
input : 0 2 3 4
output : 4
"""
l = [[0],[],[2],[3],[4]]
size = 4
expected_result = 4
result = count_connected_number_of_component.count_components(l,size)
self.assertEqual(result,expected_result)
|
[
"algorithms.graph.ford_fulkerson",
"algorithms.graph.edmonds_karp",
"algorithms.graph.maximum_flow_dfs",
"algorithms.graph.check_bipartite",
"algorithms.graph.count_connected_number_of_component.count_components",
"algorithms.graph.dinic",
"algorithms.graph.dijkstra.Dijkstra",
"algorithms.graph.maximum_flow_bfs",
"algorithms.graph.bellman_ford",
"algorithms.graph.Tarjan",
"algorithms.graph.all_pairs_shortest_path"
] |
[((1072, 1087), 'algorithms.graph.Tarjan', 'Tarjan', (['example'], {}), '(example)\n', (1078, 1087), False, 'from algorithms.graph import Tarjan\n'), ((1620, 1635), 'algorithms.graph.Tarjan', 'Tarjan', (['example'], {}), '(example)\n', (1626, 1635), False, 'from algorithms.graph import Tarjan\n'), ((2320, 2331), 'algorithms.graph.dijkstra.Dijkstra', 'Dijkstra', (['(9)'], {}), '(9)\n', (2328, 2331), False, 'from algorithms.graph.dijkstra import Dijkstra\n'), ((4615, 4638), 'algorithms.graph.maximum_flow_bfs', 'maximum_flow_bfs', (['graph'], {}), '(graph)\n', (4631, 4638), False, 'from algorithms.graph import maximum_flow_bfs\n'), ((5144, 5167), 'algorithms.graph.maximum_flow_dfs', 'maximum_flow_dfs', (['graph'], {}), '(graph)\n', (5160, 5167), False, 'from algorithms.graph import maximum_flow_dfs\n'), ((5586, 5616), 'algorithms.graph.all_pairs_shortest_path', 'all_pairs_shortest_path', (['graph'], {}), '(graph)\n', (5609, 5616), False, 'from algorithms.graph import all_pairs_shortest_path\n'), ((7248, 7309), 'algorithms.graph.count_connected_number_of_component.count_components', 'count_connected_number_of_component.count_components', (['l', 'size'], {}), '(l, size)\n', (7300, 7309), False, 'from algorithms.graph import count_connected_number_of_component\n'), ((7574, 7635), 'algorithms.graph.count_connected_number_of_component.count_components', 'count_connected_number_of_component.count_components', (['l', 'size'], {}), '(l, size)\n', (7626, 7635), False, 'from algorithms.graph import count_connected_number_of_component\n'), ((7949, 8010), 'algorithms.graph.count_connected_number_of_component.count_components', 'count_connected_number_of_component.count_components', (['l', 'size'], {}), '(l, size)\n', (8001, 8010), False, 'from algorithms.graph import count_connected_number_of_component\n'), ((1910, 1937), 'algorithms.graph.check_bipartite', 'check_bipartite', (['adj_list_1'], {}), '(adj_list_1)\n', (1925, 1937), False, 'from algorithms.graph import 
check_bipartite\n'), ((2057, 2084), 'algorithms.graph.check_bipartite', 'check_bipartite', (['adj_list_2'], {}), '(adj_list_2)\n', (2072, 2084), False, 'from algorithms.graph import check_bipartite\n'), ((2205, 2232), 'algorithms.graph.check_bipartite', 'check_bipartite', (['adj_list_3'], {}), '(adj_list_3)\n', (2220, 2232), False, 'from algorithms.graph import check_bipartite\n'), ((3335, 3365), 'algorithms.graph.ford_fulkerson', 'ford_fulkerson', (['capacity', '(0)', '(6)'], {}), '(capacity, 0, 6)\n', (3349, 3365), False, 'from algorithms.graph import ford_fulkerson\n'), ((3740, 3768), 'algorithms.graph.edmonds_karp', 'edmonds_karp', (['capacity', '(0)', '(6)'], {}), '(capacity, 0, 6)\n', (3752, 3768), False, 'from algorithms.graph import edmonds_karp\n'), ((4131, 4152), 'algorithms.graph.dinic', 'dinic', (['capacity', '(0)', '(6)'], {}), '(capacity, 0, 6)\n', (4136, 4152), False, 'from algorithms.graph import dinic\n'), ((6235, 6260), 'algorithms.graph.bellman_ford', 'bellman_ford', (['graph1', '"""a"""'], {}), "(graph1, 'a')\n", (6247, 6260), False, 'from algorithms.graph import bellman_ford\n'), ((6523, 6548), 'algorithms.graph.bellman_ford', 'bellman_ford', (['graph2', '"""a"""'], {}), "(graph2, 'a')\n", (6535, 6548), False, 'from algorithms.graph import bellman_ford\n')]
|
# Generated by Django 3.2.7 on 2021-11-03 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timewebapp', '0081_timewebmodel_has_alerted_due_date_passed_notice'),
]
operations = [
migrations.AddField(
model_name='settingsmodel',
name='assignment_spacing',
field=models.CharField(choices=[('Comfy', 'Comfy'), ('Compact', 'Compact')], default='Comfy', max_length=7, verbose_name='Assignment Spacing'),
),
]
|
[
"django.db.models.CharField"
] |
[((385, 525), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('Comfy', 'Comfy'), ('Compact', 'Compact')]", 'default': '"""Comfy"""', 'max_length': '(7)', 'verbose_name': '"""Assignment Spacing"""'}), "(choices=[('Comfy', 'Comfy'), ('Compact', 'Compact')],\n default='Comfy', max_length=7, verbose_name='Assignment Spacing')\n", (401, 525), False, 'from django.db import migrations, models\n')]
|
"""
Compare to nested dictionary/list objects. diff() will return a unix diff like
list of lines of the jsonified object to help locate the differences.
"""
import json
from difflib import HtmlDiff
from json import JSONEncoder
from typing import List, Optional, Type
from .formatter import Formatter
from .list_sorter import LIST_SORTERS
from .normalizer import NORMALIZERS
from .sorter import Sorter, NDLElement
class DiffResult:
"""
Result of a compare or diff. Acts like a bool for testing purposes.
Provides supporting information for the match.
"""
def __init__(self, match: bool, support: List[str]):
self._match = match
self.support = support
def __bool__(self) -> bool:
return self._match
class Differ:
"""
Provides comparision and difference methods for two objects of
nested dictionary/lists. The process is to first sort the two objects
and then compare them or jsonify them and compare the individual lines.
"""
@staticmethod
def diff(
left: NDLElement,
right: NDLElement,
cls: Optional[Type[JSONEncoder]] = None,
sorters: LIST_SORTERS = None,
normalizers: NORMALIZERS = None,
max_col_width: Optional[int] = 20,
) -> DiffResult:
"""
Show the difference of two objects. Unix like diff results.
:param left: Test object
:param right: Expected object
:param cls: JSON Encoder if any fields aren't JSON encodable.
:param sorters: Sorters for list elements.
:param normalizers: Normalizers for leaf elements.
:param max_col_width: Maximum column width of diff output.
:return: True if match.
"""
if normalizers:
normalizers = (
normalizers if isinstance(normalizers, list) else [normalizers]
)
sorted_left = Sorter.sorted(left, sorters=sorters, normalizers=normalizers)
sorted_right = Sorter.sorted(right, sorters=sorters, normalizers=normalizers)
differ = HtmlDiff()
result = differ.make_file(
json.dumps(sorted_left, indent=2, cls=cls).split("\n"),
json.dumps(sorted_right, indent=2, cls=cls).split("\n"),
)
match, support = Formatter(max_col_width=max_col_width).format(result)
return DiffResult(match, support)
|
[
"difflib.HtmlDiff",
"json.dumps"
] |
[((2058, 2068), 'difflib.HtmlDiff', 'HtmlDiff', ([], {}), '()\n', (2066, 2068), False, 'from difflib import HtmlDiff\n'), ((2117, 2159), 'json.dumps', 'json.dumps', (['sorted_left'], {'indent': '(2)', 'cls': 'cls'}), '(sorted_left, indent=2, cls=cls)\n', (2127, 2159), False, 'import json\n'), ((2185, 2228), 'json.dumps', 'json.dumps', (['sorted_right'], {'indent': '(2)', 'cls': 'cls'}), '(sorted_right, indent=2, cls=cls)\n', (2195, 2228), False, 'import json\n')]
|
# Copyright (c) OpenMMLab. All rights reserved.
import argparse
import os.path as osp
import cv2
import mmcv
import numpy as np
try:
import imageio
except ImportError:
imageio = None
def parse_args():
parser = argparse.ArgumentParser(
description='Merge images and visualized flow')
parser.add_argument(
'--img_dir', type=str, default=None, help='directory of images')
parser.add_argument(
'--flow_dir',
type=str,
default=None,
help='directory of visualized flow')
parser.add_argument(
'--resize_factor',
type=float,
default=0.5,
help='resize factor for gif')
parser.add_argument(
'--out_dir',
type=str,
default=None,
help='directory to save merged results')
args = parser.parse_args()
return args
def merge_imgs_flow(img_dir: str, flow_dir: str, out_dir: str) -> None:
"""Load images and visualized flow maps and merge them.
Args:
img_dir ([str): The directory of images.
flow_dir (str): The directory of flow maps.
out_dir (str): The directory to save the frames
"""
img_files = list(mmcv.scandir(img_dir))
flow_files = list(mmcv.scandir(flow_dir))
img_files.sort()
flow_files.sort()
# img is longer than flow
for i in range(len(img_files) - 1):
img = mmcv.imread(osp.join(img_dir, img_files[i]))
flow = mmcv.imread(osp.join(flow_dir, flow_files[i]))
frame = np.concatenate((img, flow), axis=1)
cv2.imwrite(osp.join(out_dir, flow_files[i]), frame)
def main():
args = parse_args()
merge_imgs_flow(args.img_dir, args.flow_dir, args.out_dir)
if __name__ == '__main__':
main()
|
[
"os.path.join",
"numpy.concatenate",
"argparse.ArgumentParser",
"mmcv.scandir"
] |
[((226, 297), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Merge images and visualized flow"""'}), "(description='Merge images and visualized flow')\n", (249, 297), False, 'import argparse\n'), ((1182, 1203), 'mmcv.scandir', 'mmcv.scandir', (['img_dir'], {}), '(img_dir)\n', (1194, 1203), False, 'import mmcv\n'), ((1227, 1249), 'mmcv.scandir', 'mmcv.scandir', (['flow_dir'], {}), '(flow_dir)\n', (1239, 1249), False, 'import mmcv\n'), ((1501, 1536), 'numpy.concatenate', 'np.concatenate', (['(img, flow)'], {'axis': '(1)'}), '((img, flow), axis=1)\n', (1515, 1536), True, 'import numpy as np\n'), ((1390, 1421), 'os.path.join', 'osp.join', (['img_dir', 'img_files[i]'], {}), '(img_dir, img_files[i])\n', (1398, 1421), True, 'import os.path as osp\n'), ((1450, 1483), 'os.path.join', 'osp.join', (['flow_dir', 'flow_files[i]'], {}), '(flow_dir, flow_files[i])\n', (1458, 1483), True, 'import os.path as osp\n'), ((1558, 1590), 'os.path.join', 'osp.join', (['out_dir', 'flow_files[i]'], {}), '(out_dir, flow_files[i])\n', (1566, 1590), True, 'import os.path as osp\n')]
|
#!/usr/bin/env python
# Advanced Multi-Mission Operations System (AMMOS) Instrument Toolkit (AIT)
# Bespoke Link to Instruments and Small Satellites (BLISS)
#
# Copyright 2013, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged. Any
# commercial use must be negotiated with the Office of Technology Transfer
# at the California Institute of Technology.
#
# This software may be subject to U.S. export control laws. By accepting
# this software, the user agrees to comply with all applicable U.S. export
# laws and regulations. User has the responsibility to obtain export licenses,
# or other export authority as may be required before exporting such
# information to foreign countries or providing access to foreign persons.
'''
usage: ait-seq-print oco3_seq_SSS_NNN_desc.bin
Prints the given binary relative time command sequence to standard
output as text.
Examples:
$ ait-seq-print seq/oco3_seq_gps_001_reset.bin
'''
import os
import sys
import argparse
from ait.core import gds, log, seq
def main():
log.begin()
parser = argparse.ArgumentParser(
description = __doc__,
formatter_class = argparse.RawDescriptionHelpFormatter)
# Add required command line argument
parser.add_argument('filename')
# Get command line arguments
args = vars(parser.parse_args())
filename = os.path.abspath(args['filename'])
extension = os.path.splitext(filename)[1]
if extension.lower() != '.bin':
log.warn("Filename '%s' does not have a '.bin' extension", filename)
sequence = seq.Seq(filename)
if not sequence.validate():
for msg in sequence.messages:
log.error(msg)
sequence.printText()
log.end()
if __name__ == '__main__':
main()
|
[
"os.path.abspath",
"argparse.ArgumentParser",
"ait.core.seq.Seq",
"ait.core.log.end",
"ait.core.log.begin",
"ait.core.log.warn",
"os.path.splitext",
"ait.core.log.error"
] |
[((1085, 1096), 'ait.core.log.begin', 'log.begin', ([], {}), '()\n', (1094, 1096), False, 'from ait.core import gds, log, seq\n'), ((1111, 1214), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(description=__doc__, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n', (1134, 1214), False, 'import argparse\n'), ((1397, 1430), 'os.path.abspath', 'os.path.abspath', (["args['filename']"], {}), "(args['filename'])\n", (1412, 1430), False, 'import os\n'), ((1607, 1624), 'ait.core.seq.Seq', 'seq.Seq', (['filename'], {}), '(filename)\n', (1614, 1624), False, 'from ait.core import gds, log, seq\n'), ((1754, 1763), 'ait.core.log.end', 'log.end', ([], {}), '()\n', (1761, 1763), False, 'from ait.core import gds, log, seq\n'), ((1447, 1473), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1463, 1473), False, 'import os\n'), ((1522, 1590), 'ait.core.log.warn', 'log.warn', (['"""Filename \'%s\' does not have a \'.bin\' extension"""', 'filename'], {}), '("Filename \'%s\' does not have a \'.bin\' extension", filename)\n', (1530, 1590), False, 'from ait.core import gds, log, seq\n'), ((1708, 1722), 'ait.core.log.error', 'log.error', (['msg'], {}), '(msg)\n', (1717, 1722), False, 'from ait.core import gds, log, seq\n')]
|
from django.core.management.base import BaseCommand
from dashboard.models import Course, CourseViewOption, AcademicTerms
from dashboard.common.db_util import canvas_id_to_incremented_id
from datetime import datetime
import pytz
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--course_id', dest='course_id', type=int, required=True)
parser.add_argument('--term_id', dest='term_id', type=int, required=False)
parser.add_argument('--name', dest='name', type=str, required=True)
parser.add_argument('--date_start', dest='date_start', type=str, required=False)
parser.add_argument('--date_end', dest='date_end', type=str, required=False)
def handle(self, *args, **options):
course_id = options.get('course_id')
term_id = options.get('term_id')
name = options.get('name')
date_start = options.get('date_start')
if date_start is not None:
date_start = datetime.strptime(date_start, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC)
date_end = options.get('date_end')
if date_end is not None:
date_end = datetime.strptime(date_end, '%Y-%m-%d %H:%M:%S').replace(tzinfo=pytz.UTC)
prefixed_course_id = canvas_id_to_incremented_id(course_id)
if term_id is not None:
prefixed_term_id = canvas_id_to_incremented_id(term_id)
try:
term_obj = AcademicTerms.objects.get(id=prefixed_term_id)
except AcademicTerms.DoesNotExist:
self.stdout.write (f"Error: Term {term_id} does not exists.")
return
else:
term_obj = None
is_new = False
try:
course_obj = Course.objects.get(id=prefixed_course_id)
self.stdout.write (f"Updating course {course_id}")
except Course.DoesNotExist:
course_obj = Course(id=prefixed_course_id)
self.stdout.write (f"Creating course {course_id}")
is_new = True
course_obj.canvas_id = course_id
course_obj.term = term_obj
course_obj.name = name
course_obj.date_start = date_start
course_obj.date_end = date_end
course_obj.save()
if is_new:
self.stdout.write ("Creating course view options")
course_view_obj = CourseViewOption(
course_id=prefixed_course_id,
show_resources_accessed=True,
show_assignment_planning=True,
show_grade_distribution=True
)
course_view_obj.save()
|
[
"dashboard.models.CourseViewOption",
"dashboard.models.Course.objects.get",
"dashboard.models.Course",
"dashboard.common.db_util.canvas_id_to_incremented_id",
"datetime.datetime.strptime",
"dashboard.models.AcademicTerms.objects.get"
] |
[((1261, 1299), 'dashboard.common.db_util.canvas_id_to_incremented_id', 'canvas_id_to_incremented_id', (['course_id'], {}), '(course_id)\n', (1288, 1299), False, 'from dashboard.common.db_util import canvas_id_to_incremented_id\n'), ((1363, 1399), 'dashboard.common.db_util.canvas_id_to_incremented_id', 'canvas_id_to_incremented_id', (['term_id'], {}), '(term_id)\n', (1390, 1399), False, 'from dashboard.common.db_util import canvas_id_to_incremented_id\n'), ((1743, 1784), 'dashboard.models.Course.objects.get', 'Course.objects.get', ([], {'id': 'prefixed_course_id'}), '(id=prefixed_course_id)\n', (1761, 1784), False, 'from dashboard.models import Course, CourseViewOption, AcademicTerms\n'), ((2357, 2498), 'dashboard.models.CourseViewOption', 'CourseViewOption', ([], {'course_id': 'prefixed_course_id', 'show_resources_accessed': '(True)', 'show_assignment_planning': '(True)', 'show_grade_distribution': '(True)'}), '(course_id=prefixed_course_id, show_resources_accessed=True,\n show_assignment_planning=True, show_grade_distribution=True)\n', (2373, 2498), False, 'from dashboard.models import Course, CourseViewOption, AcademicTerms\n'), ((1444, 1490), 'dashboard.models.AcademicTerms.objects.get', 'AcademicTerms.objects.get', ([], {'id': 'prefixed_term_id'}), '(id=prefixed_term_id)\n', (1469, 1490), False, 'from dashboard.models import Course, CourseViewOption, AcademicTerms\n'), ((1909, 1938), 'dashboard.models.Course', 'Course', ([], {'id': 'prefixed_course_id'}), '(id=prefixed_course_id)\n', (1915, 1938), False, 'from dashboard.models import Course, CourseViewOption, AcademicTerms\n'), ((982, 1032), 'datetime.datetime.strptime', 'datetime.strptime', (['date_start', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(date_start, '%Y-%m-%d %H:%M:%S')\n", (999, 1032), False, 'from datetime import datetime\n'), ((1157, 1205), 'datetime.datetime.strptime', 'datetime.strptime', (['date_end', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(date_end, '%Y-%m-%d %H:%M:%S')\n", (1174, 1205), False, 'from 
datetime import datetime\n')]
|
import random
import requests
from meowbot.triggers import SimpleResponseCommand
from meowbot.conditions import IsCommand
from meowbot.context import CommandContext
from meowbot.util import get_default_zip_code, get_petfinder_api_key
class AdoptCat(SimpleResponseCommand):
condition = IsCommand(["adoptcat"])
help = "`adoptcat [zipcode]`: get cat adoption info"
def get_message_args(self, context: CommandContext):
if len(context.args) == 1:
(zip_code,) = context.args
if not zip_code.isnumeric():
return {"text": f"Zip code must be a number. Got `{zip_code}`"}
elif len(context.args) > 1:
return {"text": "Usage: `adoptcat [zipcode]`"}
else:
zip_code = get_default_zip_code()
api_key = get_petfinder_api_key()
petfinder_url = "http://api.petfinder.com/pet.find"
r = requests.get(
petfinder_url,
params={
"key": api_key,
"output": "basic",
"animal": "cat",
"count": "25",
"location": zip_code,
"format": "json",
},
)
data = r.json()
def pet_info(pet):
url = (
"https://www.petfinder.com/cat/"
"{short_name}-{pet_id}/state/city/shelter-{shelter_id}/"
).format(
short_name=pet["name"]["$t"].split(" ", 1)[0].lower(),
pet_id=pet["id"]["$t"],
shelter_id=pet["shelterId"]["$t"],
)
photos = [
photo["$t"]
for photo in pet.get("media", {}).get("photos", {}).get("photo", [])
if photo["@size"] == "pn"
]
name = pet["name"]["$t"]
sex = pet["sex"]["$t"]
age = pet["age"]["$t"]
return {
"basic_info": f"{name} sex: {sex} age: {age} {url}",
"photo": None if len(photos) == 0 else photos[0],
}
pets = random.sample(
[pet_info(pet) for pet in data["petfinder"]["pets"]["pet"]], k=5
)
return {
"attachments": [
{"text": pet["basic_info"], "image_url": pet["photo"]} for pet in pets
],
"thread_ts": context.event.ts,
}
|
[
"requests.get",
"meowbot.conditions.IsCommand",
"meowbot.util.get_petfinder_api_key",
"meowbot.util.get_default_zip_code"
] |
[((294, 317), 'meowbot.conditions.IsCommand', 'IsCommand', (["['adoptcat']"], {}), "(['adoptcat'])\n", (303, 317), False, 'from meowbot.conditions import IsCommand\n'), ((802, 825), 'meowbot.util.get_petfinder_api_key', 'get_petfinder_api_key', ([], {}), '()\n', (823, 825), False, 'from meowbot.util import get_default_zip_code, get_petfinder_api_key\n'), ((898, 1045), 'requests.get', 'requests.get', (['petfinder_url'], {'params': "{'key': api_key, 'output': 'basic', 'animal': 'cat', 'count': '25',\n 'location': zip_code, 'format': 'json'}"}), "(petfinder_url, params={'key': api_key, 'output': 'basic',\n 'animal': 'cat', 'count': '25', 'location': zip_code, 'format': 'json'})\n", (910, 1045), False, 'import requests\n'), ((760, 782), 'meowbot.util.get_default_zip_code', 'get_default_zip_code', ([], {}), '()\n', (780, 782), False, 'from meowbot.util import get_default_zip_code, get_petfinder_api_key\n')]
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='pyworking-cz',
version='0.0.1',
description='Website pyworking.cz',
url='https://github.com/pypa/sampleproject',
license='MIT',
packages=find_packages(exclude=['contrib', 'doc*', 'tests']),
install_requires=[
'flask',
'pyyaml',
'markdown',
'markdown-urlize',
],
extras_require={
'test': ['pytest'],
},
include_package_data=True,
#package_data={ 'pyworking-cz': ['pyworking_cz/templates/*', 'pyworking_cz/static/*'] },
)
|
[
"setuptools.find_packages"
] |
[((243, 294), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'doc*', 'tests']"}), "(exclude=['contrib', 'doc*', 'tests'])\n", (256, 294), False, 'from setuptools import setup, find_packages\n')]
|
from flask import Flask, jsonify, request
from backend.core.blockchain import Blockchain
app = Flask(__name__)
app.config.update(
JSONIFY_PRETTYPRINT_REGULAR=True
)
blk = Blockchain(difficulty=3)
blk.add_node("Finney", 9)
blk.add_node("Szabo", 5)
blk.add_node("Back", 4)
blk.add_node("Nakamoto", 7)
blk.add_node("Wei", 8)
blk.mine_genesis()
num_blocks = 5
for _ in range(0, num_blocks):
blk.mine_block()
@app.route('/blocks', methods=['GET'])
def get_blocks():
return jsonify([block.__dict__ for block in blk.blocks])
@app.route('/block/<int:height>', methods=['GET'])
def get_block(height):
if (height >= 0 and height < blk.len):
return blk.blocks[height].to_json()
return "INVALID_BLOCK_HEIGHT", 400
@app.route('/mempool', methods=['GET'])
def get_mempool():
return jsonify([tx.__dict__ for tx in blk.mempool])
@app.route('/nodes', methods=['GET'])
def get_nodes():
return jsonify([node.__dict__ for node in blk.nodes])
@app.route('/mine', methods=['POST'])
def post_mine():
block = blk.mine_block()
return jsonify(block.__dict__), 201
@app.route('/tx', methods=['POST'])
def post_tx():
return "NOT_IMPLEMENTED", 501
@app.route('/node', methods=['POST'])
def post_node():
node_json = request.get_json()
node = blk.add_node(node_json['name'], node_json['weight'])
return jsonify(node.__dict__), 201
if __name__ == '__main__':
app.run()
|
[
"flask.jsonify",
"flask.Flask",
"flask.request.get_json",
"backend.core.blockchain.Blockchain"
] |
[((96, 111), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (101, 111), False, 'from flask import Flask, jsonify, request\n'), ((177, 201), 'backend.core.blockchain.Blockchain', 'Blockchain', ([], {'difficulty': '(3)'}), '(difficulty=3)\n', (187, 201), False, 'from backend.core.blockchain import Blockchain\n'), ((487, 536), 'flask.jsonify', 'jsonify', (['[block.__dict__ for block in blk.blocks]'], {}), '([block.__dict__ for block in blk.blocks])\n', (494, 536), False, 'from flask import Flask, jsonify, request\n'), ((816, 860), 'flask.jsonify', 'jsonify', (['[tx.__dict__ for tx in blk.mempool]'], {}), '([tx.__dict__ for tx in blk.mempool])\n', (823, 860), False, 'from flask import Flask, jsonify, request\n'), ((929, 975), 'flask.jsonify', 'jsonify', (['[node.__dict__ for node in blk.nodes]'], {}), '([node.__dict__ for node in blk.nodes])\n', (936, 975), False, 'from flask import Flask, jsonify, request\n'), ((1262, 1280), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1278, 1280), False, 'from flask import Flask, jsonify, request\n'), ((1073, 1096), 'flask.jsonify', 'jsonify', (['block.__dict__'], {}), '(block.__dict__)\n', (1080, 1096), False, 'from flask import Flask, jsonify, request\n'), ((1356, 1378), 'flask.jsonify', 'jsonify', (['node.__dict__'], {}), '(node.__dict__)\n', (1363, 1378), False, 'from flask import Flask, jsonify, request\n')]
|
import pygame
from pygame.locals import *
import sys
import time
import random
class TypingSpeed:
def __init__(main):
main.width=750
main.height=500
main.input_text=''
main.word = ''
main.reset=True
main.active = False
main.accuracy = '0%'
main.time_start = 0
main.time_total = 0
main.results = 'Time:0 Accuracy:0 % Wpm:0 '
main.wpm = 0
main.end = False
main.HEAD_Area = (255,213,102)
main.TEXT_Area = (240,240,240)
main.RESULT_Area = (255,70,70)
pygame.init()
main.open_img = pygame.image.load('bg0.jpg')
main.open_img = pygame.transform.scale(main.open_img, (main.width,main.height))
main.bg = pygame.image.load('bg1.jpg')
main.bg = pygame.transform.scale(main.bg, (750,500))
main.screen = pygame.display.set_mode((main.width,main.height))
pygame.display.set_caption('Test Your Typing Speed')
def get_sentence(main):
f = open('sentences.txt').read()
sentences = f.split('\n')
sentence = random.choice(sentences)
return sentence
def draw_text(main, screen, msg, y ,fsize, color):
font = pygame.font.Font(None, fsize)
text = font.render(msg, 1,color)
text_rect = text.get_rect(center=(main.width/2, y))
screen.blit(text, text_rect)
pygame.display.update()
def show_results(main, screen):
if(not main.end):
main.time_total = time.time() - main.time_start
#Calculate accuracy
count = 0
for i,users_text in enumerate(main.word):
try:
if main.input_text[i] == users_text:
count += 1
except:
pass
main.accuracy = count/len(main.word)*100
main.wpm = len(main.input_text)*60/( 6*main.time_total)
main.end = True
print("You took ",main.time_total," to complete the test.")
main.results = 'Time:'+str(round(main.time_total)) +" secs Accuracy:"+ str(round(main.accuracy)) + "%" + ' Wpm: ' + str(round(main.wpm))
main.time_img = pygame.image.load('icon.png')
main.time_img = pygame.transform.scale(main.time_img, (50,50))
screen.blit(main.time_img, (main.width/2 -25,main.height-90))
main.draw_text(screen,"Try Again", main.height - 110, 26, (100,100,100))
print(main.results)
pygame.display.update()
def run(main):
main.reset_game()
main.running=True
while(main.running):
clock = pygame.time.Clock()
main.screen.fill((0,0,0), (50,250,650,50))
pygame.draw.rect(main.screen,main.HEAD_Area, (50,250,650,50), 2)
# update the text of user input
main.draw_text(main.screen, main.input_text, 274, 26,(250,250,250))
pygame.display.update()
for event in pygame.event.get():
if event.type == QUIT:
main.running = False
pygame.display.quit()
pygame.quit()
quit()
elif event.type == pygame.KEYDOWN:
if main.active and not main.end:
if event.key == pygame.K_RETURN:
print("Text given : ")
print(main.word)
print("Your input : ")
print(main.input_text)
main.show_results(main.screen)
main.draw_text(main.screen, main.results,350, 28, main.RESULT_Area)
main.end = True
elif event.key == pygame.K_BACKSPACE:
main.input_text = main.input_text[:-1]
else:
try:
main.input_text += event.unicode
except:
pass
elif event.type == pygame.MOUSEBUTTONUP:
x,y = pygame.mouse.get_pos()
# position of input box
if(x>=50 and x<=650 and y>=250 and y<=300):
main.active = True
main.input_text = ''
main.time_start = time.time()
# position of reset box
if(x>=310 and x<=510 and y>=390 and main.end):
main.reset_game()
x,y = pygame.mouse.get_pos()
pygame.display.update()
clock.tick(60)
def reset_game(main):
main.screen.blit(main.open_img, (0,0))
pygame.display.update()
time.sleep(2)
main.active=False
main.reset=False
main.end = False
main.input_text=''
main.word = ''
main.time_start = 0
main.time_total = 0
main.wpm = 0
main.word = main.get_sentence()
if (not main.word): main.reset_game()
#Heading
main.screen.fill((0,0,0))
main.screen.blit(main.bg,(0,0))
msg = "Typing Speed Test"
main.draw_text(main.screen, msg,80, 80,main.HEAD_Area)
# rectangle for input box
pygame.draw.rect(main.screen,(255,192,25), (50,250,650,50), 2)
# sentence string
main.draw_text(main.screen, main.word,200, 28,main.TEXT_Area)
pygame.display.update()
TypingSpeed().run()
|
[
"pygame.quit",
"pygame.draw.rect",
"pygame.display.set_mode",
"pygame.event.get",
"random.choice",
"pygame.init",
"time.sleep",
"time.time",
"pygame.transform.scale",
"pygame.display.update",
"pygame.mouse.get_pos",
"pygame.font.Font",
"pygame.image.load",
"pygame.display.quit",
"pygame.display.set_caption",
"pygame.time.Clock"
] |
[((634, 647), 'pygame.init', 'pygame.init', ([], {}), '()\n', (645, 647), False, 'import pygame\n'), ((673, 701), 'pygame.image.load', 'pygame.image.load', (['"""bg0.jpg"""'], {}), "('bg0.jpg')\n", (690, 701), False, 'import pygame\n'), ((727, 791), 'pygame.transform.scale', 'pygame.transform.scale', (['main.open_img', '(main.width, main.height)'], {}), '(main.open_img, (main.width, main.height))\n', (749, 791), False, 'import pygame\n'), ((814, 842), 'pygame.image.load', 'pygame.image.load', (['"""bg1.jpg"""'], {}), "('bg1.jpg')\n", (831, 842), False, 'import pygame\n'), ((862, 905), 'pygame.transform.scale', 'pygame.transform.scale', (['main.bg', '(750, 500)'], {}), '(main.bg, (750, 500))\n', (884, 905), False, 'import pygame\n'), ((930, 980), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(main.width, main.height)'], {}), '((main.width, main.height))\n', (953, 980), False, 'import pygame\n'), ((989, 1041), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Test Your Typing Speed"""'], {}), "('Test Your Typing Speed')\n", (1015, 1041), False, 'import pygame\n'), ((1177, 1201), 'random.choice', 'random.choice', (['sentences'], {}), '(sentences)\n', (1190, 1201), False, 'import random\n'), ((1301, 1330), 'pygame.font.Font', 'pygame.font.Font', (['None', 'fsize'], {}), '(None, fsize)\n', (1317, 1330), False, 'import pygame\n'), ((1481, 1504), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1502, 1504), False, 'import pygame\n'), ((5233, 5256), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5254, 5256), False, 'import pygame\n'), ((5266, 5279), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (5276, 5279), False, 'import time\n'), ((5834, 5902), 'pygame.draw.rect', 'pygame.draw.rect', (['main.screen', '(255, 192, 25)', '(50, 250, 650, 50)', '(2)'], {}), '(main.screen, (255, 192, 25), (50, 250, 650, 50), 2)\n', (5850, 5902), False, 'import pygame\n'), ((6016, 6039), 'pygame.display.update', 
'pygame.display.update', ([], {}), '()\n', (6037, 6039), False, 'import pygame\n'), ((2429, 2458), 'pygame.image.load', 'pygame.image.load', (['"""icon.png"""'], {}), "('icon.png')\n", (2446, 2458), False, 'import pygame\n'), ((2488, 2535), 'pygame.transform.scale', 'pygame.transform.scale', (['main.time_img', '(50, 50)'], {}), '(main.time_img, (50, 50))\n', (2510, 2535), False, 'import pygame\n'), ((2758, 2781), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2779, 2781), False, 'import pygame\n'), ((2934, 2953), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (2951, 2953), False, 'import pygame\n'), ((3023, 3091), 'pygame.draw.rect', 'pygame.draw.rect', (['main.screen', 'main.HEAD_Area', '(50, 250, 650, 50)', '(2)'], {}), '(main.screen, main.HEAD_Area, (50, 250, 650, 50), 2)\n', (3039, 3091), False, 'import pygame\n'), ((3227, 3250), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (3248, 3250), False, 'import pygame\n'), ((3277, 3295), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3293, 3295), False, 'import pygame\n'), ((5062, 5085), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5083, 5085), False, 'import pygame\n'), ((1631, 1642), 'time.time', 'time.time', ([], {}), '()\n', (1640, 1642), False, 'import time\n'), ((3400, 3421), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (3419, 3421), False, 'import pygame\n'), ((3443, 3456), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3454, 3456), False, 'import pygame\n'), ((4543, 4565), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (4563, 4565), False, 'import pygame\n'), ((4809, 4820), 'time.time', 'time.time', ([], {}), '()\n', (4818, 4820), False, 'import time\n'), ((5012, 5034), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (5032, 5034), False, 'import pygame\n')]
|
#!/usr/bin/env python
import tkinter as tk
from scripts.generate import generate_secret
class Application(tk.Tk):
MODES = [
("Words", "Words"),
("Numbers", "Numbers"),
("Mixed", "Mixed"),
]
def __init__(self, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.grid()
self._create_message()
self._create_buttons()
def _create_buttons(self):
quit_button = tk.Button(self, text='Quit', command=self.quit)
generate_btn = tk.Button(self, text='Generate',
command=generate_secret)
v = tk.StringVar()
v.set("Words")
def _sel():
label = tk.Label()
selection = "You selected {0} type. Click Generate".format(
str(v.get())
)
label.config(text=selection)
for text, mode in self.MODES:
b = tk.Radiobutton(self, text=text,
variable=v, value=mode, command=_sel)
b.pack(anchor=tk.CENTER)
quit_button.pack(anchor=tk.SW)
generate_btn.pack(anchor=tk.SE)
def _create_message(self):
text = "Select a secret type. For the `words` type, select the " \
"the number of dice and how many rolls."
m = tk.Message(self, text=text)
m.pack(anchor=tk.N)
app = Application()
app.call('wm', 'iconphoto', app._w, tk.PhotoImage(file='lock_icon_bkgrd.png'))
app.title('Password Generator')
app.geometry('{}x{}'.format(500, 300))
app.mainloop()
|
[
"tkinter.StringVar",
"tkinter.PhotoImage",
"tkinter.Message",
"tkinter.Tk.__init__",
"tkinter.Button",
"tkinter.Radiobutton",
"tkinter.Label"
] |
[((1439, 1480), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'file': '"""lock_icon_bkgrd.png"""'}), "(file='lock_icon_bkgrd.png')\n", (1452, 1480), True, 'import tkinter as tk\n'), ((278, 315), 'tkinter.Tk.__init__', 'tk.Tk.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (292, 315), True, 'import tkinter as tk\n'), ((454, 501), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""Quit"""', 'command': 'self.quit'}), "(self, text='Quit', command=self.quit)\n", (463, 501), True, 'import tkinter as tk\n'), ((525, 582), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""Generate"""', 'command': 'generate_secret'}), "(self, text='Generate', command=generate_secret)\n", (534, 582), True, 'import tkinter as tk\n'), ((629, 643), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (641, 643), True, 'import tkinter as tk\n'), ((1325, 1352), 'tkinter.Message', 'tk.Message', (['self'], {'text': 'text'}), '(self, text=text)\n', (1335, 1352), True, 'import tkinter as tk\n'), ((708, 718), 'tkinter.Label', 'tk.Label', ([], {}), '()\n', (716, 718), True, 'import tkinter as tk\n'), ((931, 1000), 'tkinter.Radiobutton', 'tk.Radiobutton', (['self'], {'text': 'text', 'variable': 'v', 'value': 'mode', 'command': '_sel'}), '(self, text=text, variable=v, value=mode, command=_sel)\n', (945, 1000), True, 'import tkinter as tk\n')]
|
import requests
import time
import random
import traceback
import logging
import os
import gc
header_close = {'Connection': 'keep-alive'}
class autoLogin():
__url = ['http://www.baidu.com','http://cn.bing.com']
__status = -1
__loginUrl = 'http://119.39.119.2'
__initFlag = False
def getTime3(self):
return time.localtime(time.time())[3]
def isInit(self) -> bool:
return self.__initFlag
# def printNowTime(self):
# print(time.strftime("%Y-%m-%d %H:%M:%S :", time.localtime()),end=' ')
def __del__(self):
del self.__session
logging.warning("Object autoLogin deleted!")
def __init__(self):
self.__status = -1
self.__initFlag = False
self.__session = requests.session()
def check(self) -> bool:
returnVal = True
try:
Connection = self.__session.get(
url=self.__url, timeout=5, headers=header_close)
if Connection.url == 'http://119.39.119.2':
del Connection
returnVal = False
else:
del Connection
# returnVal = False
except:
logging.warning(traceback.format_exc())
finally:
gc.collect()
return returnVal
def login(self):
try:
Connection = self.__session.get(url=self.__url[random.randint(0,1)],timeout=5, headers=header_close)
if Connection.url != 'http://119.39.119.2':
return
except:
logging.warning(traceback.format_exc())
return
logging.info('Try Login!')
post_test = 'http://119.39.119.2/a70.htm'
# print("Try login")
post_header = {
'Host': '172.16.31.10',
'Connection': 'keep-alive',
'Content-Length': '164',
'Cache-Control': 'max-age=0',
'Upgrade-Insecure-Requests': '1',
'Origin': 'http://119.39.119.2',
'Content-Type': 'application/x-www-form-urlencoded',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Referer': 'http://119.39.119.2/a70.htm',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.9',
# 'Server': 'DrcomServer1.0',
# 'Accept-Ranges': 'bytes',
# 'Connection': 'keep-alive',
# 'Content-type': 'text/html; charset=gbk',
# 'Cache-Control': 'no-cache',
# 'Content-length': '4036',
}
user_agent_list = ["Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; …) Gecko/20100101 Firefox/61.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
"Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
]
post_header['User-Agent'] = random.choice(user_agent_list)
post_data = {
'DDDDD': '', # 填账户
'upass': '', # 填密码
'R1': '0',
'R3': '0',
'R6': '0',
'para': '00',
'0MKKey': '123456'
}
if post_data['DDDDD'] == '':
logging.error("Wrong UserName!")
exit()
if post_data['upass'] == '':
logging.error('Wrong Password!')
exit()
try:
loginPost = self.__session.post(
url=post_test, headers=post_header, data=post_data, timeout=10)
checkFlag = loginPost.headers["Content-length"]
if loginPost.status_code == 200 and checkFlag == '4036':
logging.info('Login Succeed!')
else:
logging.info('Login Failed!')
if 3000 <= checkFlag <= 4000 :
logging.warning("Reboot!")
os.system("reboot")
del loginPost
except:
logging.warning(traceback.format_exc())
def run(self):
# self.printNowTime()
logging.info('Start Running!')
def main():
LOG_FORMAT = "[%(asctime)s] - [%(levelname)s] - %(message)s"
logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
for _ in range(1):
try:
autologin = autoLogin()
autologin.run()
del autologin
except Exception:
logging.warning(traceback.format_exc())
finally:
gc.collect()
if __name__ == "__main__":
main()
|
[
"requests.session",
"logging.error",
"random.randint",
"logging.basicConfig",
"logging.warning",
"random.choice",
"os.system",
"time.time",
"logging.info",
"gc.collect",
"traceback.format_exc"
] |
[((5216, 5275), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': 'LOG_FORMAT'}), '(level=logging.DEBUG, format=LOG_FORMAT)\n', (5235, 5275), False, 'import logging\n'), ((627, 671), 'logging.warning', 'logging.warning', (['"""Object autoLogin deleted!"""'], {}), "('Object autoLogin deleted!')\n", (642, 671), False, 'import logging\n'), ((786, 804), 'requests.session', 'requests.session', ([], {}), '()\n', (802, 804), False, 'import requests\n'), ((1677, 1703), 'logging.info', 'logging.info', (['"""Try Login!"""'], {}), "('Try Login!')\n", (1689, 1703), False, 'import logging\n'), ((3939, 3969), 'random.choice', 'random.choice', (['user_agent_list'], {}), '(user_agent_list)\n', (3952, 3969), False, 'import random\n'), ((5087, 5117), 'logging.info', 'logging.info', (['"""Start Running!"""'], {}), "('Start Running!')\n", (5099, 5117), False, 'import logging\n'), ((1302, 1314), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1312, 1314), False, 'import gc\n'), ((4252, 4284), 'logging.error', 'logging.error', (['"""Wrong UserName!"""'], {}), "('Wrong UserName!')\n", (4265, 4284), False, 'import logging\n'), ((4356, 4388), 'logging.error', 'logging.error', (['"""Wrong Password!"""'], {}), "('Wrong Password!')\n", (4369, 4388), False, 'import logging\n'), ((5518, 5530), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5528, 5530), False, 'import gc\n'), ((371, 382), 'time.time', 'time.time', ([], {}), '()\n', (380, 382), False, 'import time\n'), ((4694, 4724), 'logging.info', 'logging.info', (['"""Login Succeed!"""'], {}), "('Login Succeed!')\n", (4706, 4724), False, 'import logging\n'), ((4761, 4790), 'logging.info', 'logging.info', (['"""Login Failed!"""'], {}), "('Login Failed!')\n", (4773, 4790), False, 'import logging\n'), ((1247, 1269), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1267, 1269), False, 'import traceback\n'), ((1624, 1646), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1644, 
1646), False, 'import traceback\n'), ((4860, 4886), 'logging.warning', 'logging.warning', (['"""Reboot!"""'], {}), "('Reboot!')\n", (4875, 4886), False, 'import logging\n'), ((4908, 4927), 'os.system', 'os.system', (['"""reboot"""'], {}), "('reboot')\n", (4917, 4927), False, 'import os\n'), ((5001, 5023), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (5021, 5023), False, 'import traceback\n'), ((5463, 5485), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (5483, 5485), False, 'import traceback\n'), ((1443, 1463), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (1457, 1463), False, 'import random\n')]
|
from rest_framework import viewsets, mixins
from recipe.serializers import TagSerializers
from rest_framework import authentication, permissions
from core.models import Tag
class TagViewSet(viewsets.GenericViewSet, mixins.ListModelMixin):
    """Read-only (list) endpoint exposing the authenticated user's tags."""

    # Token-authenticated, logged-in users only.
    authentication_classes = (authentication.TokenAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Tag.objects.all()
    serializer_class = TagSerializers

    def get_queryset(self):
        """Return only the requesting user's tags, sorted by name descending."""
        user = self.request.user
        return self.queryset.filter(user=user).order_by('-name')
|
[
"core.models.Tag.objects.all"
] |
[((417, 434), 'core.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (432, 434), False, 'from core.models import Tag\n')]
|
"""
-----------------------------------------------------------------------------
U N V E R I F I E D S E I S M S
-----------------------------------------------------------------------------
"""
import json
import datetime
from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash
from flask_login import login_required
import main.forms as f
from main.utilities.api_querying import makeRequest
# Blueprint grouping every unverified-seism view under the /unverified-seisms/ prefix.
u_seism = Blueprint('u_seism', __name__, url_prefix='/unverified-seisms/')
def _to_api_datetime(raw):
    """Convert an HTML ``datetime-local`` value ("%Y-%m-%dT%H:%M") into the
    "%Y-%m-%d %H:%M:%S" format the API expects."""
    parsed = datetime.datetime.strptime(raw, "%Y-%m-%dT%H:%M")
    return datetime.datetime.strftime(parsed, "%Y-%m-%d %H:%M:%S")


@u_seism.route('/')
@login_required
def main():
    """List unverified seisms, applying the filter/sort/pagination options
    received as query-string arguments.

    Returns the rendered list page on success, or a redirect back to this
    view when the API call fails.
    """
    url = current_app.config["API_URL"] + "/unverified-seisms"
    sensors_url = current_app.config["API_URL"] + "/sensors"
    filters = f.USeismsFilterForm(request.args, meta={'csrf': False})
    # Populate the sensor filter choices from the API; 0/"-" means "any sensor".
    # NOTE(review): a failed sensors request is not handled here — presumably
    # makeRequest always yields a JSON body; confirm against its contract.
    query = makeRequest("GET", sensors_url, authenticated_user=True)
    sensors = json.loads(query.text)["sensors"]
    filters.sensor_id.choices = [(int(sensor['id_num']), sensor['name']) for sensor in sensors]
    filters.sensor_id.choices.insert(0, [0, "-"])
    # Translate the submitted filters into the API's query payload,
    # skipping every argument that is absent or empty.
    data = {}
    if 'sensor_id' in request.args:
        data["sensor_id"] = request.args.get('sensor_id', '')
    if request.args.get("from_datetime", "") != "":
        data["from_date"] = _to_api_datetime(request.args.get("from_datetime", ""))
    if request.args.get("to_datetime", "") != "":
        data["to_date"] = _to_api_datetime(request.args.get("to_datetime", ""))
    for key in ("sort_by", "page_num", "elem_per_page"):
        if request.args.get(key, "") != "":
            data[key] = request.args.get(key, "")
    query = makeRequest("GET", url, authenticated_user=True, data=json.dumps(data))
    if query.status_code == 200:
        # Parse the response once (the original re-parsed query.text four times).
        body = json.loads(query.text)
        pagination = {"items_num": body["items_num"],
                      "total_pages": body["total_pages"],
                      "page_num": body["page_num"]}
        return render_template('/derived/unverified-seisms/main.html',
                               unverified_seisms=body["unverified_seisms"],
                               filters=filters,
                               pagination=pagination)
    return redirect(url_for('u_seism.main'))
@u_seism.route('/edit/<int:id>', methods=["POST", "GET"])
@login_required
def edit_useism(id):
    """Display and process the edit form for one unverified seism."""
    url = current_app.config["API_URL"] + "/unverified-seism/" + str(id)
    form = f.SeismForm()
    response = makeRequest("GET", url, authenticated_user=True)
    unverified_seism = response.json()

    if not form.is_submitted():
        # First visit (GET): bail out if the seism does not exist,
        # otherwise pre-populate the form with the current values.
        if response.status_code == 404:
            flash("Seism not found", "warning")
            return redirect(url_for('u_seism.main'))
        form.depth.data = unverified_seism["depth"]
        form.magnitude.data = unverified_seism["magnitude"]

    if form.validate_on_submit():
        # Valid POST: push the edited fields to the API and go back to the list.
        payload = json.dumps({
            "depth": form.depth.data,
            "magnitude": form.magnitude.data
        })
        makeRequest("PUT", url, authenticated_user=True, data=payload)
        return redirect(url_for('u_seism.main'))

    return render_template('/derived/unverified-seisms/edit-useism.html', id=id, form=form,
                           unverified_seism=unverified_seism)
@u_seism.route('/delete/<int:id>')
@login_required
def delete_useism(id):
    """Ask the API to delete the given unverified seism, then show the list."""
    url = current_app.config["API_URL"] + "/unverified-seism/" + str(id)
    response = makeRequest("DELETE", url, authenticated_user=True)
    if response.status_code == 409:
        # API answers 409 when the record cannot be deleted.
        flash("Seism not found", "warning")
    return redirect(url_for('u_seism.main'))
@u_seism.route('/view/<int:id>')
@login_required
def view_useism(id):
    """Render the read-only detail page of one unverified seism."""
    url = current_app.config["API_URL"] + "/unverified-seism/" + str(id)
    response = makeRequest("GET", url, authenticated_user=True)
    if response.status_code == 404:
        flash("Seism not found", "warning")
        return redirect(url_for('u_seism.main'))
    return render_template('/derived/unverified-seisms/view-useism.html',
                           unverified_seism=response.json())
@u_seism.route('/validate/<int:id>')
@login_required
def verify_useism(id):
    """Mark an unverified seism as verified through the API."""
    url = current_app.config["API_URL"] + "/unverified-seism/" + str(id)
    response = makeRequest("GET", url, authenticated_user=True)
    if response.status_code == 404:
        flash("Seism not found", "warning")
        return redirect(url_for('u_seism.main'))
    # Flip the verified flag; the PUT response is intentionally ignored.
    makeRequest("PUT", url, authenticated_user=True,
                data=json.dumps({"verified": True}))
    return redirect(url_for('u_seism.main'))
|
[
"datetime.datetime.strftime",
"main.forms.SeismForm",
"flask.Blueprint",
"json.loads",
"flask.request.args.get",
"flask.flash",
"json.dumps",
"main.forms.USeismsFilterForm",
"main.utilities.api_querying.makeRequest",
"flask.url_for",
"flask.render_template"
] |
[((467, 531), 'flask.Blueprint', 'Blueprint', (['"""u_seism"""', '__name__'], {'url_prefix': '"""/unverified-seisms/"""'}), "('u_seism', __name__, url_prefix='/unverified-seisms/')\n", (476, 531), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((721, 776), 'main.forms.USeismsFilterForm', 'f.USeismsFilterForm', (['request.args'], {'meta': "{'csrf': False}"}), "(request.args, meta={'csrf': False})\n", (740, 776), True, 'import main.forms as f\n'), ((790, 846), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""GET"""', 'sensors_url'], {'authenticated_user': '(True)'}), "('GET', sensors_url, authenticated_user=True)\n", (801, 846), False, 'from main.utilities.api_querying import makeRequest\n'), ((2125, 2141), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2135, 2141), False, 'import json\n'), ((2154, 2213), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""GET"""', 'url'], {'authenticated_user': '(True)', 'data': 'data'}), "('GET', url, authenticated_user=True, data=data)\n", (2165, 2213), False, 'from main.utilities.api_querying import makeRequest\n'), ((3022, 3035), 'main.forms.SeismForm', 'f.SeismForm', ([], {}), '()\n', (3033, 3035), True, 'import main.forms as f\n'), ((3049, 3097), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""GET"""', 'url'], {'authenticated_user': '(True)'}), "('GET', url, authenticated_user=True)\n", (3060, 3097), False, 'from main.utilities.api_querying import makeRequest\n'), ((3799, 3919), 'flask.render_template', 'render_template', (['"""/derived/unverified-seisms/edit-useism.html"""'], {'id': 'id', 'form': 'form', 'unverified_seism': 'unverified_seism'}), "('/derived/unverified-seisms/edit-useism.html', id=id, form=\n form, unverified_seism=unverified_seism)\n", (3814, 3919), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4103, 4154), 
'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""DELETE"""', 'url'], {'authenticated_user': '(True)'}), "('DELETE', url, authenticated_user=True)\n", (4114, 4154), False, 'from main.utilities.api_querying import makeRequest\n'), ((4483, 4531), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""GET"""', 'url'], {'authenticated_user': '(True)'}), "('GET', url, authenticated_user=True)\n", (4494, 4531), False, 'from main.utilities.api_querying import makeRequest\n'), ((4705, 4806), 'flask.render_template', 'render_template', (['"""/derived/unverified-seisms/view-useism.html"""'], {'unverified_seism': 'unverified_seism'}), "('/derived/unverified-seisms/view-useism.html',\n unverified_seism=unverified_seism)\n", (4720, 4806), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4966, 5014), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""GET"""', 'url'], {'authenticated_user': '(True)'}), "('GET', url, authenticated_user=True)\n", (4977, 5014), False, 'from main.utilities.api_querying import makeRequest\n'), ((5210, 5234), 'json.dumps', 'json.dumps', (['verification'], {}), '(verification)\n', (5220, 5234), False, 'import json\n'), ((5248, 5312), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""PUT"""', 'url'], {'authenticated_user': '(True)', 'data': 'data_json'}), "('PUT', url, authenticated_user=True, data=data_json)\n", (5259, 5312), False, 'from main.utilities.api_querying import makeRequest\n'), ((861, 883), 'json.loads', 'json.loads', (['query.text'], {}), '(query.text)\n', (871, 883), False, 'import json\n'), ((1134, 1167), 'flask.request.args.get', 'request.args.get', (['"""sensor_id"""', '""""""'], {}), "('sensor_id', '')\n", (1150, 1167), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((1396, 1449), 'datetime.datetime.strftime', 'datetime.datetime.strftime', (['date', '"""%Y-%m-%d 
%H:%M:%S"""'], {}), "(date, '%Y-%m-%d %H:%M:%S')\n", (1422, 1449), False, 'import datetime\n'), ((1650, 1703), 'datetime.datetime.strftime', 'datetime.datetime.strftime', (['date', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(date, '%Y-%m-%d %H:%M:%S')\n", (1676, 1703), False, 'import datetime\n'), ((1799, 1830), 'flask.request.args.get', 'request.args.get', (['"""sort_by"""', '""""""'], {}), "('sort_by', '')\n", (1815, 1830), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((1929, 1961), 'flask.request.args.get', 'request.args.get', (['"""page_num"""', '""""""'], {}), "('page_num', '')\n", (1945, 1961), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((2075, 2112), 'flask.request.args.get', 'request.args.get', (['"""elem_per_page"""', '""""""'], {}), "('elem_per_page', '')\n", (2091, 2112), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((2556, 2693), 'flask.render_template', 'render_template', (['"""/derived/unverified-seisms/main.html"""'], {'unverified_seisms': 'unverified_seisms', 'filters': 'filters', 'pagination': 'pagination'}), "('/derived/unverified-seisms/main.html', unverified_seisms=\n unverified_seisms, filters=filters, pagination=pagination)\n", (2571, 2693), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((3637, 3654), 'json.dumps', 'json.dumps', (['seism'], {}), '(seism)\n', (3647, 3654), False, 'import json\n'), ((3672, 3737), 'main.utilities.api_querying.makeRequest', 'makeRequest', (['"""PUT"""', 'url'], {'authenticated_user': '(True)', 'data': 'seism_json'}), "('PUT', url, authenticated_user=True, data=seism_json)\n", (3683, 3737), False, 'from main.utilities.api_querying import makeRequest\n'), ((4196, 4231), 'flask.flash', 'flash', (['"""Seism not found"""', '"""warning"""'], {}), "('Seism not found', 'warning')\n", (4201, 
4231), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4301, 4324), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (4308, 4324), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4573, 4608), 'flask.flash', 'flash', (['"""Seism not found"""', '"""warning"""'], {}), "('Seism not found', 'warning')\n", (4578, 4608), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((5056, 5091), 'flask.flash', 'flash', (['"""Seism not found"""', '"""warning"""'], {}), "('Seism not found', 'warning')\n", (5061, 5091), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((5334, 5357), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (5341, 5357), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((1311, 1348), 'flask.request.args.get', 'request.args.get', (['"""from_datetime"""', '""""""'], {}), "('from_datetime', '')\n", (1327, 1348), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((1569, 1604), 'flask.request.args.get', 'request.args.get', (['"""to_datetime"""', '""""""'], {}), "('to_datetime', '')\n", (1585, 1604), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((2277, 2299), 'json.loads', 'json.loads', (['query.text'], {}), '(query.text)\n', (2287, 2299), False, 'import json\n'), ((2816, 2839), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (2823, 2839), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((3268, 3303), 'flask.flash', 'flash', (['"""Seism not found"""', '"""warning"""'], {}), "('Seism not found', 'warning')\n", (3273, 
3303), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((3762, 3785), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (3769, 3785), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4256, 4279), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (4263, 4279), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((4633, 4656), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (4640, 4656), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((5116, 5139), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (5123, 5139), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n'), ((2357, 2379), 'json.loads', 'json.loads', (['query.text'], {}), '(query.text)\n', (2367, 2379), False, 'import json\n'), ((2431, 2453), 'json.loads', 'json.loads', (['query.text'], {}), '(query.text)\n', (2441, 2453), False, 'import json\n'), ((2504, 2526), 'json.loads', 'json.loads', (['query.text'], {}), '(query.text)\n', (2514, 2526), False, 'import json\n'), ((3332, 3355), 'flask.url_for', 'url_for', (['"""u_seism.main"""'], {}), "('u_seism.main')\n", (3339, 3355), False, 'from flask import Blueprint, render_template, current_app, redirect, url_for, request, flash\n')]
|
import os
from hazelcast.predicate import (
equal,
and_,
between,
less,
less_or_equal,
greater,
greater_or_equal,
or_,
not_equal,
not_,
like,
ilike,
regex,
sql,
true,
false,
in_,
instance_of,
paging,
)
from hazelcast.serialization.api import Portable, IdentifiedDataSerializable
from hazelcast.util import IterationType
from tests.base import SingleMemberTestCase, HazelcastTestCase
from tests.integration.backward_compatible.util import (
write_string_to_writer,
read_string_from_reader,
)
from tests.util import random_string, get_abs_path
from hazelcast import HazelcastClient
class PredicateTest(SingleMemberTestCase):
    """Integration tests for the built-in predicate factory functions
    (``sql``, ``and_``, ``between``, ``paging``, ...) against a live member."""

    @classmethod
    def configure_client(cls, config):
        config["cluster_name"] = cls.cluster.id
        return config

    def setUp(self):
        self.map = self.client.get_map(random_string()).blocking()

    def tearDown(self):
        self.map.destroy()

    def fill_map(self, count=10):
        """Put ``key-i -> value-i`` pairs into the map and return them."""
        m = {"key-%d" % x: "value-%d" % x for x in range(0, count)}
        self.map.put_all(m)
        return m

    def fill_map_numeric(self, count=100):
        """Put ``n -> n`` pairs into the map."""
        m = {n: n for n in range(count)}
        self.map.put_all(m)

    def test_key_set(self):
        # Fix: the original materialized the key set twice (one result was
        # discarded) and only asserted that the first key is truthy.
        # Assert the full key set matches what was inserted instead.
        m = self.fill_map()
        key_set = self.map.key_set()
        self.assertCountEqual(m.keys(), key_set)

    def test_sql(self):
        self.fill_map()
        predicate = sql("this == 'value-1'")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1"])

    def test_and(self):
        self.fill_map()
        predicate = and_(equal("this", "value-1"), equal("this", "value-2"))
        self.assertCountEqual(self.map.key_set(predicate), [])

    def test_or(self):
        self.fill_map()
        predicate = or_(equal("this", "value-1"), equal("this", "value-2"))
        self.assertCountEqual(self.map.key_set(predicate), ["key-1", "key-2"])

    def test_not(self):
        self.fill_map(count=3)
        predicate = not_(equal("this", "value-1"))
        self.assertCountEqual(self.map.key_set(predicate), ["key-0", "key-2"])

    def test_between(self):
        self.fill_map_numeric()
        predicate = between("this", 1, 20)
        # between() is inclusive on both bounds.
        self.assertCountEqual(self.map.key_set(predicate), list(range(1, 21)))

    def test_equal(self):
        self.fill_map()
        predicate = equal("this", "value-1")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1"])

    def test_not_equal(self):
        self.fill_map(count=3)
        predicate = not_equal("this", "value-1")
        self.assertCountEqual(self.map.key_set(predicate), ["key-0", "key-2"])

    def test_in(self):
        self.fill_map_numeric(count=10)
        predicate = in_("this", 1, 5, 7)
        self.assertCountEqual(self.map.key_set(predicate), [1, 5, 7])

    def test_less_than(self):
        self.fill_map_numeric()
        predicate = less("this", 10)
        self.assertCountEqual(self.map.key_set(predicate), list(range(0, 10)))

    def test_less_than_or_equal(self):
        self.fill_map_numeric()
        predicate = less_or_equal("this", 10)
        self.assertCountEqual(self.map.key_set(predicate), list(range(0, 11)))

    def test_greater_than(self):
        self.fill_map_numeric()
        predicate = greater("this", 10)
        self.assertCountEqual(self.map.key_set(predicate), list(range(11, 100)))

    def test_greater_than_or_equal(self):
        self.fill_map_numeric()
        predicate = greater_or_equal("this", 10)
        self.assertCountEqual(self.map.key_set(predicate), list(range(10, 100)))

    def test_like(self):
        self.map.put("key-1", "a_value")
        self.map.put("key-2", "b_value")
        self.map.put("key-3", "aa_value")
        self.map.put("key-4", "AA_value")
        # like() is case-sensitive, so "AA_value" must not match.
        predicate = like("this", "a%")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1", "key-3"])

    def test_ilike(self):
        self.map.put("key-1", "a_value")
        self.map.put("key-2", "b_value")
        self.map.put("key-3", "AA_value")
        # ilike() is case-insensitive, so "AA_value" matches "a%".
        predicate = ilike("this", "a%")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1", "key-3"])

    def test_regex(self):
        self.map.put("key-1", "car")
        self.map.put("key-2", "cry")
        self.map.put("key-3", "giraffe")
        predicate = regex("this", "c[ar].*")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1", "key-2"])

    def test_instance_of(self):
        self.map.put("key-1", True)
        self.map.put("key-2", 5)
        self.map.put("key-3", "str")
        predicate = instance_of("java.lang.Boolean")
        self.assertCountEqual(self.map.key_set(predicate), ["key-1"])

    def test_true(self):
        m = self.fill_map()
        predicate = true()
        self.assertCountEqual(self.map.key_set(predicate), list(m.keys()))

    def test_false(self):
        self.fill_map()
        predicate = false()
        self.assertCountEqual(self.map.key_set(predicate), [])

    def test_paging(self):
        self.fill_map_numeric()
        predicate = paging(less("this", 4), 2)
        self.assertCountEqual([0, 1], self.map.key_set(predicate))
        predicate.next_page()
        self.assertCountEqual([2, 3], self.map.key_set(predicate))
        predicate.next_page()
        # Past the last page the result is empty.
        self.assertCountEqual([], self.map.key_set(predicate))
class SimplePortable(Portable):
    """Minimal Portable fixture carrying a single int attribute, ``field``."""

    def __init__(self, field=None):
        self.field = field

    def read_portable(self, reader):
        self.field = reader.read_int("field")

    def write_portable(self, writer):
        writer.write_int("field", self.field)

    def get_factory_id(self):
        return 1

    def get_class_id(self):
        return 1
class PredicatePortableTest(SingleMemberTestCase):
    """Tests SQL predicates over Portable values."""

    @classmethod
    def configure_client(cls, config):
        config["cluster_name"] = cls.cluster.id
        config["portable_factories"] = {1: {1: SimplePortable}}
        return config

    def setUp(self):
        self.map = self.client.get_map(random_string()).blocking()

    def tearDown(self):
        self.map.destroy()

    def fill_map(self, count=1000):
        """Put ``x -> SimplePortable(x)`` pairs into the map and return them."""
        m = {x: SimplePortable(x) for x in range(0, count)}
        self.map.put_all(m)
        return m

    def test_predicate_portable_key(self):
        _map = self.fill_map()
        # Fix: use a set so each of the 100 assertIn checks below is O(1)
        # instead of an O(n) scan of a 1000-element list.
        map_keys = set(_map.keys())
        predicate = sql("field >= 900")
        entries = self.map.entry_set(predicate)
        self.assertEqual(len(entries), 100)
        for k, v in entries:
            self.assertGreaterEqual(v.field, 900)
            self.assertIn(k, map_keys)
class NestedPredicatePortableTest(SingleMemberTestCase):
    """Tests predicates that reach into a nested Portable attribute
    (``limb.name``) of a Portable value (``Body``)."""

    class Body(Portable):
        """Portable with a string ``name`` and a nested Portable ``limb``."""

        def __init__(self, name=None, limb=None):
            self.name = name
            self.limb = limb

        def get_class_id(self):
            return 1

        def get_factory_id(self):
            return 1

        def get_class_version(self):
            # Explicit class version; must match the registered schema.
            return 15

        def write_portable(self, writer):
            # Serialization order matters: name first, then the nested limb.
            write_string_to_writer(writer, "name", self.name)
            writer.write_portable("limb", self.limb)

        def read_portable(self, reader):
            self.name = read_string_from_reader(reader, "name")
            self.limb = reader.read_portable("limb")

        def __eq__(self, other):
            return isinstance(other, self.__class__) and (self.name, self.limb) == (
                other.name,
                other.limb,
            )

    class Limb(Portable):
        """Portable leaf holding only a string ``name``."""

        def __init__(self, name=None):
            self.name = name

        def get_class_id(self):
            return 2

        def get_factory_id(self):
            return 1

        def get_class_version(self):
            return 2

        def write_portable(self, writer):
            write_string_to_writer(writer, "name", self.name)

        def read_portable(self, reader):
            self.name = read_string_from_reader(reader, "name")

        def __eq__(self, other):
            return isinstance(other, self.__class__) and self.name == other.name

    @classmethod
    def configure_client(cls, config):
        config["cluster_name"] = cls.cluster.id
        # Register both portables under factory id 1 with their class ids.
        config["portable_factories"] = {
            1: {
                1: NestedPredicatePortableTest.Body,
                2: NestedPredicatePortableTest.Limb,
            },
        }
        return config

    def setUp(self):
        self.map = self.client.get_map(random_string()).blocking()
        self.map.put(
            1, NestedPredicatePortableTest.Body("body1", NestedPredicatePortableTest.Limb("hand"))
        )
        self.map.put(
            2, NestedPredicatePortableTest.Body("body2", NestedPredicatePortableTest.Limb("leg"))
        )

    def tearDown(self):
        self.map.destroy()

    def test_adding_indexes(self):
        # single-attribute index
        self.map.add_index(attributes=["name"])
        # nested-attribute index
        self.map.add_index(attributes=["limb.name"])

    def test_single_attribute_query_portable_predicates(self):
        predicate = equal("limb.name", "hand")
        values = self.map.values(predicate)
        self.assertEqual(1, len(values))
        self.assertEqual("body1", values[0].name)

    def test_nested_attribute_query_sql_predicate(self):
        predicate = sql("limb.name == 'leg'")
        values = self.map.values(predicate)
        self.assertEqual(1, len(values))
        self.assertEqual("body2", values[0].name)
class PagingPredicateTest(HazelcastTestCase):
    """Integration tests for the paging predicate against a two-member cluster.

    Unlike the other suites, the cluster/client/map are created once per class
    (setUpClass) and the map is only cleared between tests.
    """

    rc = None
    cluster = None
    client = None
    map = None

    @classmethod
    def setUpClass(cls):
        # Two members so the paging predicate aggregates across partitions.
        cls.rc = cls.create_rc()
        cls.cluster = cls.create_cluster(cls.rc, cls.configure_cluster())
        cls.cluster.start_member()
        cls.cluster.start_member()
        cls.client = HazelcastClient(cluster_name=cls.cluster.id)
        cls.map = cls.client.get_map(random_string()).blocking()

    def setUp(self):
        # Shared map: clear between tests instead of recreating it.
        self.map.clear()

    @classmethod
    def tearDownClass(cls):
        cls.map.destroy()
        cls.client.shutdown()
        cls.rc.shutdownCluster(cls.cluster.id)
        cls.rc.exit()

    @staticmethod
    def configure_cluster():
        # Load the cluster XML config shipped next to the proxy tests.
        current_directory = os.path.dirname(__file__)
        with open(
            get_abs_path(os.path.join(current_directory, "proxy"), "hazelcast.xml"), "r"
        ) as f:
            return f.read()

    def test_with_inner_paging_predicate(self):
        # A paging predicate cannot wrap another paging predicate.
        predicate = paging(true(), 1)
        with self.assertRaises(TypeError):
            paging(predicate, 1)

    def test_with_non_positive_page_size(self):
        with self.assertRaises(ValueError):
            paging(true(), 0)
        with self.assertRaises(ValueError):
            paging(true(), -1)

    def test_previous_page_when_index_is_zero(self):
        # previous_page() clamps at page 0.
        predicate = paging(true(), 2)
        self.assertEqual(0, predicate.previous_page())
        self.assertEqual(0, predicate.previous_page())

    def test_entry_set_with_paging_predicate(self):
        self.fill_map(3)
        entry_set = self.map.entry_set(paging(greater_or_equal("this", 2), 1))
        self.assertEqual(len(entry_set), 1)
        self.assertEqual(entry_set[0], ("key-2", 2))

    def test_key_set_with_paging_predicate(self):
        self.fill_map(3)
        key_set = self.map.key_set(paging(greater_or_equal("this", 2), 1))
        self.assertEqual(len(key_set), 1)
        self.assertEqual(key_set[0], "key-2")

    def test_values_with_paging_predicate(self):
        self.fill_map(3)
        values = self.map.values(paging(greater_or_equal("this", 2), 1))
        self.assertEqual(len(values), 1)
        self.assertEqual(values[0], 2)

    def test_with_none_inner_predicate(self):
        # None inner predicate means "match everything".
        self.fill_map(3)
        predicate = paging(None, 10)
        self.assertEqual(self.map.values(predicate), [0, 1, 2])

    def test_first_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        self.assertEqual(self.map.values(predicate), [40, 41])

    def test_next_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [42, 43])

    def test_set_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 4
        self.assertEqual(self.map.values(predicate), [48, 49])

    def test_get_page(self):
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 4
        self.assertEqual(predicate.page, 4)

    def test_page_size(self):
        predicate = paging(greater_or_equal("this", 40), 2)
        self.assertEqual(predicate.page_size, 2)

    def test_previous_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 4
        predicate.previous_page()
        self.assertEqual(self.map.values(predicate), [46, 47])

    def test_get_4th_then_previous_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 4
        self.map.values(predicate)
        predicate.previous_page()
        self.assertEqual(self.map.values(predicate), [46, 47])

    def test_get_3rd_then_next_page(self):
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 3
        self.map.values(predicate)
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [48, 49])

    def test_set_nonexistent_page(self):
        # Trying to get page 10, which is out of range, should return empty list.
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 10
        self.assertEqual(self.map.values(predicate), [])

    def test_nonexistent_previous_page(self):
        # Trying to get previous page while already at first page should return first page.
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.previous_page()
        self.assertEqual(self.map.values(predicate), [40, 41])

    def test_nonexistent_next_page(self):
        # Trying to get next page while already at last page should return empty list.
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        predicate.page = 4
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [])

    def test_get_half_full_last_page(self):
        # Page size set to 2, but last page only has 1 element.
        self.fill_map()
        predicate = paging(greater_or_equal("this", 41), 2)
        predicate.page = 4
        self.assertEqual(self.map.values(predicate), [49])

    def test_reset(self):
        # reset() returns the predicate to its first page.
        self.fill_map()
        predicate = paging(greater_or_equal("this", 40), 2)
        self.assertEqual(self.map.values(predicate), [40, 41])
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [42, 43])
        predicate.reset()
        self.assertEqual(self.map.values(predicate), [40, 41])

    def test_empty_map(self):
        # Empty map should return empty list.
        predicate = paging(greater_or_equal("this", 30), 2)
        self.assertEqual(self.map.values(predicate), [])

    def test_equal_values_paging(self):
        # Duplicate values must page consistently across the two copies.
        self.fill_map()
        # keys[50 - 99], values[0 - 49]:
        m = {"key-%d" % i: i - 50 for i in range(50, 100)}
        self.map.put_all(m)
        predicate = paging(less_or_equal("this", 8), 5)
        self.assertEqual(self.map.values(predicate), [0, 0, 1, 1, 2])
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [2, 3, 3, 4, 4])
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [5, 5, 6, 6, 7])
        predicate.next_page()
        self.assertEqual(self.map.values(predicate), [7, 8, 8])

    def test_entry_set_with_custom_comparator(self):
        # order=1 -> reverse order on the member side (see CustomComparator).
        m = self.fill_map()
        predicate = paging(less("this", 10), 5, CustomComparator(1, IterationType.KEY))

        def entries(start, end):
            # Expected entries with value in [start, end), sorted by value descending.
            return list(
                sorted(
                    map(lambda k: (k, m[k]), filter(lambda k: start <= m[k] < end, m)),
                    key=lambda e: e[1],
                    reverse=True,
                )
            )

        self.assertEqual(entries(5, 10), self.map.entry_set(predicate))
        predicate.next_page()
        self.assertEqual(entries(0, 5), self.map.entry_set(predicate))
        predicate.next_page()
        self.assertEqual([], self.map.entry_set(predicate))

    def test_key_set_with_custom_comparator(self):
        m = self.fill_map()
        predicate = paging(less("this", 10), 5, CustomComparator(1, IterationType.KEY))
        keys = list(sorted(m.keys(), key=lambda k: m[k]))
        # Reverse order: keys with the largest matching values come first.
        self.assertEqual(keys[9:4:-1], self.map.key_set(predicate))
        predicate.next_page()
        self.assertEqual(keys[4::-1], self.map.key_set(predicate))
        predicate.next_page()
        self.assertEqual([], self.map.key_set(predicate))

    def test_values_with_custom_comparator(self):
        m = self.fill_map()
        predicate = paging(less("this", 10), 5, CustomComparator(1, IterationType.KEY))
        values = list(sorted(m.values()))
        self.assertEqual(values[9:4:-1], self.map.values(predicate))
        predicate.next_page()
        self.assertEqual(values[4::-1], self.map.values(predicate))
        predicate.next_page()
        self.assertEqual([], self.map.values(predicate))

    def fill_map(self, count=50):
        # Put "key-i" -> i pairs into the shared map and return them.
        m = {"key-%d" % x: x for x in range(count)}
        self.map.put_all(m)
        return m
class CustomComparator(IdentifiedDataSerializable):
    """
    For type:

    - 0 -> lexicographical order
    - 1 -> reverse lexicographical
    - 2 -> length increasing order

    Iteration type is same as the ``hazelcast.util.IterationType``
    """

    def __init__(self, order, iteration_type):
        self.order = order
        self.iteration_type = iteration_type

    def write_data(self, object_data_output):
        # Serialized as two ints: order first, then the iteration type.
        object_data_output.write_int(self.order)
        object_data_output.write_int(self.iteration_type)

    def read_data(self, object_data_input):
        # Intentionally empty — presumably the comparator is only deserialized
        # and evaluated on the member, never read back by this client.
        # TODO(review): confirm against the server-side counterpart.
        pass

    def get_factory_id(self):
        # NOTE(review): ids must match the comparator registered on the member.
        return 66

    def get_class_id(self):
        return 2
|
[
"tests.integration.backward_compatible.util.write_string_to_writer",
"os.path.join",
"hazelcast.predicate.between",
"hazelcast.predicate.less",
"hazelcast.predicate.not_equal",
"os.path.dirname",
"hazelcast.predicate.true",
"hazelcast.predicate.like",
"hazelcast.predicate.ilike",
"hazelcast.predicate.less_or_equal",
"hazelcast.predicate.greater_or_equal",
"hazelcast.predicate.paging",
"hazelcast.HazelcastClient",
"hazelcast.predicate.sql",
"hazelcast.predicate.false",
"tests.util.random_string",
"hazelcast.predicate.in_",
"hazelcast.predicate.instance_of",
"hazelcast.predicate.greater",
"hazelcast.predicate.regex",
"tests.integration.backward_compatible.util.read_string_from_reader",
"hazelcast.predicate.equal"
] |
[((1487, 1511), 'hazelcast.predicate.sql', 'sql', (['"""this == \'value-1\'"""'], {}), '("this == \'value-1\'")\n', (1490, 1511), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2242, 2264), 'hazelcast.predicate.between', 'between', (['"""this"""', '(1)', '(20)'], {}), "('this', 1, 20)\n", (2249, 2264), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2415, 2439), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-1"""'], {}), "('this', 'value-1')\n", (2420, 2439), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2592, 2620), 'hazelcast.predicate.not_equal', 'not_equal', (['"""this"""', '"""value-1"""'], {}), "('this', 'value-1')\n", (2601, 2620), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2785, 2805), 'hazelcast.predicate.in_', 'in_', (['"""this"""', '(1)', '(5)', '(7)'], {}), "('this', 1, 5, 7)\n", (2788, 2805), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2960, 2976), 'hazelcast.predicate.less', 'less', (['"""this"""', '(10)'], {}), "('this', 10)\n", (2964, 2976), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((3148, 3173), 
'hazelcast.predicate.less_or_equal', 'less_or_equal', (['"""this"""', '(10)'], {}), "('this', 10)\n", (3161, 3173), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((3339, 3358), 'hazelcast.predicate.greater', 'greater', (['"""this"""', '(10)'], {}), "('this', 10)\n", (3346, 3358), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((3535, 3563), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(10)'], {}), "('this', 10)\n", (3551, 3563), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((3858, 3876), 'hazelcast.predicate.like', 'like', (['"""this"""', '"""a%"""'], {}), "('this', 'a%')\n", (3862, 3876), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((4129, 4148), 'hazelcast.predicate.ilike', 'ilike', (['"""this"""', '"""a%"""'], {}), "('this', 'a%')\n", (4134, 4148), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((4392, 4416), 'hazelcast.predicate.regex', 'regex', (['"""this"""', '"""c[ar].*"""'], {}), "('this', 'c[ar].*')\n", (4397, 4416), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((4657, 4689), 'hazelcast.predicate.instance_of', 
'instance_of', (['"""java.lang.Boolean"""'], {}), "('java.lang.Boolean')\n", (4668, 4689), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((4835, 4841), 'hazelcast.predicate.true', 'true', ([], {}), '()\n', (4839, 4841), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((4988, 4995), 'hazelcast.predicate.false', 'false', ([], {}), '()\n', (4993, 4995), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((6442, 6461), 'hazelcast.predicate.sql', 'sql', (['"""field >= 900"""'], {}), "('field >= 900')\n", (6445, 6461), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((9137, 9163), 'hazelcast.predicate.equal', 'equal', (['"""limb.name"""', '"""hand"""'], {}), "('limb.name', 'hand')\n", (9142, 9163), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((9378, 9403), 'hazelcast.predicate.sql', 'sql', (['"""limb.name == \'leg\'"""'], {}), '("limb.name == \'leg\'")\n', (9381, 9403), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((9895, 9939), 'hazelcast.HazelcastClient', 'HazelcastClient', ([], {'cluster_name': 'cls.cluster.id'}), '(cluster_name=cls.cluster.id)\n', (9910, 9939), 
False, 'from hazelcast import HazelcastClient\n'), ((10299, 10324), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (10314, 10324), False, 'import os\n'), ((11855, 11871), 'hazelcast.predicate.paging', 'paging', (['None', '(10)'], {}), '(None, 10)\n', (11861, 11871), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((1656, 1680), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-1"""'], {}), "('this', 'value-1')\n", (1661, 1680), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((1682, 1706), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-2"""'], {}), "('this', 'value-2')\n", (1687, 1706), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((1843, 1867), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-1"""'], {}), "('this', 'value-1')\n", (1848, 1867), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((1869, 1893), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-2"""'], {}), "('this', 'value-2')\n", (1874, 1893), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((2055, 2079), 'hazelcast.predicate.equal', 'equal', (['"""this"""', '"""value-1"""'], {}), "('this', 'value-1')\n", (2060, 2079), False, 'from hazelcast.predicate import equal, 
and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((5146, 5161), 'hazelcast.predicate.less', 'less', (['"""this"""', '(4)'], {}), "('this', 4)\n", (5150, 5161), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((7090, 7139), 'tests.integration.backward_compatible.util.write_string_to_writer', 'write_string_to_writer', (['writer', '"""name"""', 'self.name'], {}), "(writer, 'name', self.name)\n", (7112, 7139), False, 'from tests.integration.backward_compatible.util import write_string_to_writer, read_string_from_reader\n'), ((7259, 7298), 'tests.integration.backward_compatible.util.read_string_from_reader', 'read_string_from_reader', (['reader', '"""name"""'], {}), "(reader, 'name')\n", (7282, 7298), False, 'from tests.integration.backward_compatible.util import write_string_to_writer, read_string_from_reader\n'), ((7860, 7909), 'tests.integration.backward_compatible.util.write_string_to_writer', 'write_string_to_writer', (['writer', '"""name"""', 'self.name'], {}), "(writer, 'name', self.name)\n", (7882, 7909), False, 'from tests.integration.backward_compatible.util import write_string_to_writer, read_string_from_reader\n'), ((7976, 8015), 'tests.integration.backward_compatible.util.read_string_from_reader', 'read_string_from_reader', (['reader', '"""name"""'], {}), "(reader, 'name')\n", (7999, 8015), False, 'from tests.integration.backward_compatible.util import write_string_to_writer, read_string_from_reader\n'), ((10553, 10559), 'hazelcast.predicate.true', 'true', ([], {}), '()\n', (10557, 10559), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((10620, 10640), 
'hazelcast.predicate.paging', 'paging', (['predicate', '(1)'], {}), '(predicate, 1)\n', (10626, 10640), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((10921, 10927), 'hazelcast.predicate.true', 'true', ([], {}), '()\n', (10925, 10927), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12019, 12047), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12035, 12047), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12197, 12225), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12213, 12225), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12404, 12432), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12420, 12432), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12584, 12612), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12600, 12612), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12746, 12774), 
'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12762, 12774), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((12914, 12942), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (12930, 12942), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((13170, 13198), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (13186, 13198), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((13457, 13485), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (13473, 13485), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((13820, 13848), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (13836, 13848), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((14128, 14156), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (14144, 14156), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, 
in_, instance_of, paging\n'), ((14439, 14467), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (14455, 14467), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((14746, 14774), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(41)'], {}), "('this', 41)\n", (14762, 14774), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((14943, 14971), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(40)'], {}), "('this', 40)\n", (14959, 14971), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((15325, 15353), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(30)'], {}), "('this', 30)\n", (15341, 15353), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((15636, 15660), 'hazelcast.predicate.less_or_equal', 'less_or_equal', (['"""this"""', '(8)'], {}), "('this', 8)\n", (15649, 15660), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((16139, 16155), 'hazelcast.predicate.less', 'less', (['"""this"""', '(10)'], {}), "('this', 10)\n", (16143, 16155), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, 
sql, true, false, in_, instance_of, paging\n'), ((16848, 16864), 'hazelcast.predicate.less', 'less', (['"""this"""', '(10)'], {}), "('this', 10)\n", (16852, 16864), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((17328, 17344), 'hazelcast.predicate.less', 'less', (['"""this"""', '(10)'], {}), "('this', 10)\n", (17332, 17344), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((10753, 10759), 'hazelcast.predicate.true', 'true', ([], {}), '()\n', (10757, 10759), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((10828, 10834), 'hazelcast.predicate.true', 'true', ([], {}), '()\n', (10832, 10834), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((11166, 11193), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(2)'], {}), "('this', 2)\n", (11182, 11193), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((11414, 11441), 'hazelcast.predicate.greater_or_equal', 'greater_or_equal', (['"""this"""', '(2)'], {}), "('this', 2)\n", (11430, 11441), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((11650, 11677), 'hazelcast.predicate.greater_or_equal', 
'greater_or_equal', (['"""this"""', '(2)'], {}), "('this', 2)\n", (11666, 11677), False, 'from hazelcast.predicate import equal, and_, between, less, less_or_equal, greater, greater_or_equal, or_, not_equal, not_, like, ilike, regex, sql, true, false, in_, instance_of, paging\n'), ((897, 912), 'tests.util.random_string', 'random_string', ([], {}), '()\n', (910, 912), False, 'from tests.util import random_string, get_abs_path\n'), ((6087, 6102), 'tests.util.random_string', 'random_string', ([], {}), '()\n', (6100, 6102), False, 'from tests.util import random_string, get_abs_path\n'), ((8508, 8523), 'tests.util.random_string', 'random_string', ([], {}), '()\n', (8521, 8523), False, 'from tests.util import random_string, get_abs_path\n'), ((9977, 9992), 'tests.util.random_string', 'random_string', ([], {}), '()\n', (9990, 9992), False, 'from tests.util import random_string, get_abs_path\n'), ((10369, 10409), 'os.path.join', 'os.path.join', (['current_directory', '"""proxy"""'], {}), "(current_directory, 'proxy')\n", (10381, 10409), False, 'import os\n')]
|
from server.methods.transaction import Transaction
from server import utils
from server import cache
import config
class Block():
    """Read-only helpers that fetch block information over the node's RPC."""

    @classmethod
    def height(cls, height: int):
        """Fetch a full block by its chain height.

        Returns the RPC envelope; on success ``result`` is the decoded
        block with a ``txcount`` field added and the raw ``nTx`` removed.
        """
        response = utils.make_request('getblockhash', [height])
        if response['error'] is None:
            block_hash = response.pop('result')
            block = utils.make_request('getblock', [block_hash])['result']
            block['txcount'] = len(block['tx'])
            block.pop('nTx')
            response['result'] = block
        return response

    @classmethod
    def hash(cls, bhash: str):
        """Fetch a full block by its hash, normalizing ``nTx`` into ``txcount``."""
        response = utils.make_request('getblock', [bhash])
        if response['error'] is None:
            block = response['result']
            block['txcount'] = len(block['tx'])
            block.pop('nTx')
        return response

    @classmethod
    @cache.memoize(timeout=config.cache)
    def get(cls, height: int):
        """Cached lookup of a block hash by height (raw RPC envelope)."""
        return utils.make_request('getblockhash', [height])

    @classmethod
    def range(cls, height: int, offset: int):
        """Return the last ``offset`` blocks ending at ``height``, newest first.

        Blocks whose hash or network-hashrate lookup fails are skipped.
        """
        blocks = []
        for number in range(height - (offset - 1), height + 1):
            hash_data = utils.make_request('getblockhash', [number])
            nethash = utils.make_request('getnetworkhashps', [120, number])
            if hash_data['error'] is None and nethash['error'] is None:
                block_hash = hash_data.pop('result')
                block = utils.make_request('getblock', [block_hash])['result']
                block['txcount'] = len(block['tx'])
                block['nethash'] = int(nethash['result'])
                block.pop('nTx')
                blocks.append(block)
        return blocks[::-1]

    @classmethod
    @cache.memoize(timeout=config.cache)
    def inputs(cls, bhash: str):
        """Cached: resolve addresses for every transaction in the given block."""
        block = cls.hash(bhash)
        return Transaction().addresses(block['result']['tx'])
|
[
"server.methods.transaction.Transaction",
"server.utils.make_request",
"server.cache.memoize"
] |
[((715, 750), 'server.cache.memoize', 'cache.memoize', ([], {'timeout': 'config.cache'}), '(timeout=config.cache)\n', (728, 750), False, 'from server import cache\n'), ((1478, 1513), 'server.cache.memoize', 'cache.memoize', ([], {'timeout': 'config.cache'}), '(timeout=config.cache)\n', (1491, 1513), False, 'from server import cache\n'), ((185, 229), 'server.utils.make_request', 'utils.make_request', (['"""getblockhash"""', '[height]'], {}), "('getblockhash', [height])\n", (203, 229), False, 'from server import utils\n'), ((528, 567), 'server.utils.make_request', 'utils.make_request', (['"""getblock"""', '[bhash]'], {}), "('getblock', [bhash])\n", (546, 567), False, 'from server import utils\n'), ((788, 832), 'server.utils.make_request', 'utils.make_request', (['"""getblockhash"""', '[height]'], {}), "('getblockhash', [height])\n", (806, 832), False, 'from server import utils\n'), ((972, 1015), 'server.utils.make_request', 'utils.make_request', (['"""getblockhash"""', '[block]'], {}), "('getblockhash', [block])\n", (990, 1015), False, 'from server import utils\n'), ((1029, 1081), 'server.utils.make_request', 'utils.make_request', (['"""getnetworkhashps"""', '[120, block]'], {}), "('getnetworkhashps', [120, block])\n", (1047, 1081), False, 'from server import utils\n'), ((326, 364), 'server.utils.make_request', 'utils.make_request', (['"""getblock"""', '[txid]'], {}), "('getblock', [txid])\n", (344, 364), False, 'from server import utils\n'), ((1578, 1591), 'server.methods.transaction.Transaction', 'Transaction', ([], {}), '()\n', (1589, 1591), False, 'from server.methods.transaction import Transaction\n'), ((1211, 1249), 'server.utils.make_request', 'utils.make_request', (['"""getblock"""', '[txid]'], {}), "('getblock', [txid])\n", (1229, 1249), False, 'from server import utils\n')]
|
#!/usr/bin/env python
# Copyright 2017 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Standard Ansible metadata: community-supported module in preview status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation consumed by ansible-doc.  Fixed wording:
# "allows search for" -> "allows searching for" and two
# "can be obtains" / "obtains" typos -> "obtained".
DOCUMENTATION = '''
---
module: panos_query_rules
short_description: PANOS module that allows searching for security rules in PANW NGFW devices.
description:
    - Security policies allow you to enforce rules and take action, and can be as general or specific as needed.
    - The policy rules are compared against the incoming traffic in sequence, and because the first rule that matches
    - the traffic is applied, the more specific rules must precede the more general ones.
author: "<NAME> (@rnh556)"
version_added: "2.5"
requirements:
    - pan-python can be obtained from PyPI U(https://pypi.python.org/pypi/pan-python)
    - pandevice can be obtained from PyPI U(https://pypi.python.org/pypi/pandevice)
    - xmltodict can be obtained from PyPI U(https://pypi.python.org/pypi/xmltodict)
notes:
    - Checkmode is not supported.
    - Panorama is supported.
options:
    ip_address:
        description:
            - IP address (or hostname) of PAN-OS firewall or Panorama management console being queried.
        required: true
    username:
        description:
            - Username credentials to use for authentication.
        required: false
        default: "admin"
    password:
        description:
            - Password credentials to use for authentication.
        required: true
    api_key:
        description:
            - API key that can be used instead of I(username)/I(password) credentials.
    application:
        description:
            - Name of the application or application group to be queried.
        required: false
        default: None
    source_zone:
        description:
            - Name of the source security zone to be queried.
        required: false
        default: None
    source_ip:
        description:
            - The source IP address to be queried.
        required: false
        default: None
    source_port:
        description:
            - The source port to be queried.
        required: false
        default: None
    destination_zone:
        description:
            - Name of the destination security zone to be queried.
        required: false
        default: None
    destination_ip:
        description:
            - The destination IP address to be queried.
        required: false
        default: None
    destination_port:
        description:
            - The destination port to be queried.
        required: false
        default: None
    protocol:
        description:
            - The protocol used to be queried. Must be either I(tcp) or I(udp).
        required: false
        default: None
    tag_name:
        description:
            - Name of the rule tag to be queried.
        required: false
        default: None
    devicegroup:
        description:
            - The Panorama device group in which to conduct the query.
        required: false
        default: None
'''
# Usage examples shown by ansible-doc.  Fixed the second example: the
# module has no 'address' parameter -- the Panorama device group is
# selected with 'devicegroup' (see argument_spec in main()).
EXAMPLES = '''
- name: search for rules with tcp/3306
  panos_query_rules:
    ip_address: '{{ ip_address }}'
    username: '{{ username }}'
    password: '{{ password }}'
    source_zone: 'DevNet'
    destination_zone: 'DevVPC'
    destination_port: '3306'
    protocol: 'tcp'

- name: search devicegroup for inbound rules to dmz host
  panos_query_rules:
    ip_address: '{{ ip_address }}'
    api_key: '{{ api_key }}'
    destination_zone: 'DMZ'
    destination_ip: '10.100.42.18'
    devicegroup: 'DeviceGroupA'

- name: search for rules containing a specified rule tag
  panos_query_rules:
    ip_address: '{{ ip_address }}'
    username: '{{ username }}'
    password: '{{ password }}'
    tag_name: 'ProjectX'
'''
# Ansible return-value documentation; nothing beyond the defaults is documented.
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
try:
    # Optional third-party requirements are imported lazily so that a
    # missing library does not break module import; main() checks
    # HAS_LIB and reports the failure via fail_json instead.
    import pandevice
    from pandevice import base
    from pandevice import firewall
    from pandevice import panorama
    from pandevice import objects
    from pandevice import policies
    import ipaddress
    import xmltodict
    import json
    HAS_LIB = True
except ImportError:
    # Any missing dependency flips the flag; the specific missing module
    # is not recorded here.
    HAS_LIB = False
def get_devicegroup(device, devicegroup):
    """Return the Panorama DeviceGroup named *devicegroup*, or False.

    Refreshes the device list from the target before searching.
    """
    for candidate in device.refresh_devices():
        if isinstance(candidate, pandevice.panorama.DeviceGroup) and candidate.name == devicegroup:
            return candidate
    return False
def get_rulebase(device, devicegroup):
    """Attach, refresh and return the security rulebase for *device*.

    A Firewall gets a plain Rulebase; a Panorama gets the pre-rulebase of
    the named device group.  Returns False for any other device type.
    """
    if isinstance(device, firewall.Firewall):
        rulebase = policies.Rulebase()
        device.add(rulebase)
    elif isinstance(device, panorama.Panorama):
        device_group = panorama.DeviceGroup(devicegroup)
        device.add(device_group)
        rulebase = policies.PreRulebase()
        device_group.add(rulebase)
    else:
        return False
    # Pull the live security rules into the tree before returning it.
    policies.SecurityRule.refreshall(rulebase)
    return rulebase
def get_object(device, dev_group, obj_name):
    """Find an AddressObject or AddressGroup named *obj_name*.

    Searches shared/global scope first; on Panorama the device-group
    scope is searched as well.  Returns the match, or False if none.
    """
    scopes = [device]
    if isinstance(device, pandevice.panorama.Panorama):
        scopes.append(dev_group)
    for scope in scopes:
        for obj_type in (objects.AddressObject, objects.AddressGroup):
            found = scope.find(obj_name, obj_type)
            if found:
                return found
    return False
def addr_in_obj(addr, obj):
    """Return True if IP address *addr* falls inside AddressObject *obj*.

    Handles the 'ip-netmask' and 'ip-range' object types; any other type
    (e.g. FQDN) yields False.
    """
    ip = ipaddress.ip_address(addr)
    if isinstance(obj, objects.AddressObject):
        if obj.type == 'ip-netmask':
            net = ipaddress.ip_network(obj.value)
            if ip in net:
                return True
        if obj.type == 'ip-range':
            ip_range = obj.value.split('-')
            lower = ipaddress.ip_address(ip_range[0])
            upper = ipaddress.ip_address(ip_range[1])
            # Bug fix: PAN-OS ip-range objects include both endpoints, so
            # the comparison must be inclusive (was strict '<', which
            # wrongly rejected the range's first and last addresses).
            if lower <= ip <= upper:
                return True
    return False
def get_services(device, dev_group, svc_list, obj_list):
    """Resolve every name in *svc_list* to ServiceObjects, appending to *obj_list*.

    Service groups are expanded recursively.  Global scope is searched
    first; on Panorama the device-group scope is searched as well.
    Returns *obj_list* (which is also mutated in place).
    """
    for name in svc_list:
        scopes = [device]
        if isinstance(device, pandevice.panorama.Panorama):
            scopes.append(dev_group)
        for scope in scopes:
            obj_match = scope.find(name, objects.ServiceObject)
            if obj_match:
                obj_list.append(obj_match)
            grp_match = scope.find(name, objects.ServiceGroup)
            if grp_match:
                # Recurse into the group's members.
                get_services(device, dev_group, grp_match.value, obj_list)
    return obj_list
def port_in_svc(orientation, port, protocol, obj):
    """Return True if *port*/*protocol* matches service object *obj*.

    orientation selects which side of the service is inspected:
    'source' checks obj.source_port, 'destination' checks
    obj.destination_port; any other value returns False.  Port specs are
    comma-separated and may contain inclusive 'low-high' ranges.
    """
    # Bug fix: the original compared strings with 'is' ("orientation is
    # 'source'"), which depends on CPython string interning and emits a
    # SyntaxWarning on modern Pythons; use '==' instead.  The two
    # mirrored branches are also collapsed into one loop.
    if orientation == 'source':
        port_spec = obj.source_port
    elif orientation == 'destination':
        port_spec = obj.destination_port
    else:
        return False
    if obj.protocol != protocol:
        return False
    for entry in port_spec.split(','):
        if '-' in entry:
            port_range = entry.split('-')
            if int(port_range[0]) <= int(port) <= int(port_range[1]):
                return True
        elif port == entry:
            return True
    return False
def get_tag(device, dev_group, tag_name):
    """Find a Tag object named *tag_name*; return it, or False if absent.

    Global scope is checked first; on Panorama the device-group scope is
    checked as a fallback.
    """
    tag = device.find(tag_name, objects.Tag)
    if not tag and isinstance(device, panorama.Panorama):
        tag = dev_group.find(tag_name, objects.Tag)
    return tag if tag else False
def main():
argument_spec = dict(
ip_address=dict(required=True),
password=dict(no_log=True),
username=dict(default='admin'),
api_key=dict(no_log=True),
application=dict(default=None),
source_zone=dict(default=None),
destination_zone=dict(default=None),
source_ip=dict(default=None),
destination_ip=dict(default=None),
source_port=dict(default=None),
destination_port=dict(default=None),
protocol=dict(default=None, choices=['tcp', 'udp']),
tag_name=dict(default=None),
devicegroup=dict(default=None)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
required_one_of=[['api_key', 'password']]
)
if not HAS_LIB:
module.fail_json(msg='Missing required libraries.')
ip_address = module.params["ip_address"]
password = module.params["password"]
username = module.params['username']
api_key = module.params['api_key']
# application = module.params['application']
source_zone = module.params['source_zone']
source_ip = module.params['source_ip']
source_port = module.params['source_port']
destination_zone = module.params['destination_zone']
destination_ip = module.params['destination_ip']
destination_port = module.params['destination_port']
protocol = module.params['protocol']
tag_name = module.params['tag_name']
devicegroup = module.params['devicegroup']
# Create the device with the appropriate pandevice type
device = base.PanDevice.create_from_device(ip_address, username, password, api_key=api_key)
# Grab the global objects
objects.AddressObject.refreshall(device)
objects.AddressGroup.refreshall(device)
objects.ServiceObject.refreshall(device)
objects.ServiceGroup.refreshall(device)
objects.Tag.refreshall(device)
# If Panorama, validate the devicegroup and grab the devicegroup objects
dev_group = None
if devicegroup and isinstance(device, panorama.Panorama):
dev_group = get_devicegroup(device, devicegroup)
if dev_group:
device.add(dev_group)
objects.AddressObject.refreshall(dev_group)
objects.AddressGroup.refreshall(dev_group)
objects.ServiceObject.refreshall(dev_group)
objects.ServiceGroup.refreshall(dev_group)
objects.Tag.refreshall(dev_group)
else:
module.fail_json(
failed=1,
msg='\'%s\' device group not found in Panorama. Is the name correct?' % devicegroup
)
# Build the rulebase and produce list
rulebase = get_rulebase(device, dev_group)
rulelist = rulebase.children
hitbase = policies.Rulebase()
loose_match = True
# Process each rule
for rule in rulelist:
hitlist = []
if source_zone:
source_zone_match = False
if loose_match and 'any' in rule.fromzone:
source_zone_match = True
else:
for object_string in rule.fromzone:
if object_string == source_zone:
source_zone_match = True
hitlist.append(source_zone_match)
if destination_zone:
destination_zone_match = False
if loose_match and 'any' in rule.tozone:
destination_zone_match = True
else:
for object_string in rule.tozone:
if object_string == destination_zone:
destination_zone_match = True
hitlist.append(destination_zone_match)
if source_ip:
source_ip_match = False
if loose_match and 'any' in rule.source:
source_ip_match = True
else:
for object_string in rule.source:
# Get a valid AddressObject or AddressGroup
obj = get_object(device, dev_group, object_string)
# Otherwise the object_string is not an object and should be handled differently
if obj is False:
if '-' in object_string:
obj = ipaddress.ip_address(source_ip)
source_range = object_string.split('-')
source_lower = ipaddress.ip_address(source_range[0])
source_upper = ipaddress.ip_address(source_range[1])
if source_lower <= obj <= source_upper:
source_ip_match = True
else:
if source_ip == object_string:
source_ip_match = True
if isinstance(obj, objects.AddressObject) and addr_in_obj(source_ip, obj):
source_ip_match = True
elif isinstance(obj, objects.AddressGroup) and obj.static_value:
for member_string in obj.static_value:
member = get_object(device, dev_group, member_string)
if addr_in_obj(source_ip, member):
source_ip_match = True
hitlist.append(source_ip_match)
if destination_ip:
destination_ip_match = False
if loose_match and 'any' in rule.destination:
destination_ip_match = True
else:
for object_string in rule.destination:
# Get a valid AddressObject or AddressGroup
obj = get_object(device, dev_group, object_string)
# Otherwise the object_string is not an object and should be handled differently
if obj is False:
if '-' in object_string:
obj = ipaddress.ip_address(destination_ip)
destination_range = object_string.split('-')
destination_lower = ipaddress.ip_address(destination_range[0])
destination_upper = ipaddress.ip_address(destination_range[1])
if destination_lower <= obj <= destination_upper:
destination_ip_match = True
else:
if destination_ip == object_string:
destination_ip_match = True
if isinstance(obj, objects.AddressObject) and addr_in_obj(destination_ip, obj):
destination_ip_match = True
elif isinstance(obj, objects.AddressGroup) and obj.static_value:
for member_string in obj.static_value:
member = get_object(device, dev_group, member_string)
if addr_in_obj(destination_ip, member):
destination_ip_match = True
hitlist.append(destination_ip_match)
if source_port:
source_port_match = False
orientation = 'source'
if loose_match and (rule.service[0] == 'any'):
source_port_match = True
elif rule.service[0] == 'application-default':
source_port_match = False # Fix this once apps are supported
else:
service_list = []
service_list = get_services(device, dev_group, rule.service, service_list)
for obj in service_list:
if port_in_svc(orientation, source_port, protocol, obj):
source_port_match = True
break
hitlist.append(source_port_match)
if destination_port:
destination_port_match = False
orientation = 'destination'
if loose_match and (rule.service[0] == 'any'):
destination_port_match = True
elif rule.service[0] == 'application-default':
destination_port_match = False # Fix this once apps are supported
else:
service_list = []
service_list = get_services(device, dev_group, rule.service, service_list)
for obj in service_list:
if port_in_svc(orientation, destination_port, protocol, obj):
destination_port_match = True
break
hitlist.append(destination_port_match)
if tag_name:
tag_match = False
if rule.tag:
for object_string in rule.tag:
obj = get_tag(device, dev_group, object_string)
if obj and (obj.name == tag_name):
tag_match = True
hitlist.append(tag_match)
# Add to hit rulebase
if False not in hitlist:
hitbase.add(rule)
# Dump the hit rulebase
if hitbase.children:
output_string = xmltodict.parse(hitbase.element_str())
module.exit_json(
stdout_lines=json.dumps(output_string, indent=2),
msg='%s of %s rules matched' % (hitbase.children.__len__(), rulebase.children.__len__())
)
else:
module.fail_json(msg='No matching rules found.')
# Standard Ansible-module entry point: execute main() only when run as a script.
if __name__ == '__main__':
    main()
|
[
"pandevice.objects.ServiceObject.refreshall",
"ipaddress.ip_network",
"pandevice.objects.ServiceGroup.refreshall",
"pandevice.policies.PreRulebase",
"ipaddress.ip_address",
"pandevice.policies.SecurityRule.refreshall",
"json.dumps",
"pandevice.objects.Tag.refreshall",
"pandevice.objects.AddressGroup.refreshall",
"pandevice.objects.AddressObject.refreshall",
"pandevice.policies.Rulebase",
"ansible.module_utils.basic.AnsibleModule",
"pandevice.base.PanDevice.create_from_device",
"pandevice.panorama.DeviceGroup"
] |
[((5425, 5467), 'pandevice.policies.SecurityRule.refreshall', 'policies.SecurityRule.refreshall', (['rulebase'], {}), '(rulebase)\n', (5457, 5467), False, 'from pandevice import policies\n'), ((6243, 6269), 'ipaddress.ip_address', 'ipaddress.ip_address', (['addr'], {}), '(addr)\n', (6263, 6269), False, 'import ipaddress\n'), ((9835, 9951), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(False)', 'required_one_of': "[['api_key', 'password']]"}), "(argument_spec=argument_spec, supports_check_mode=False,\n required_one_of=[['api_key', 'password']])\n", (9848, 9951), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((10806, 10893), 'pandevice.base.PanDevice.create_from_device', 'base.PanDevice.create_from_device', (['ip_address', 'username', 'password'], {'api_key': 'api_key'}), '(ip_address, username, password, api_key=\n api_key)\n', (10839, 10893), False, 'from pandevice import base\n'), ((10924, 10964), 'pandevice.objects.AddressObject.refreshall', 'objects.AddressObject.refreshall', (['device'], {}), '(device)\n', (10956, 10964), False, 'from pandevice import objects\n'), ((10969, 11008), 'pandevice.objects.AddressGroup.refreshall', 'objects.AddressGroup.refreshall', (['device'], {}), '(device)\n', (11000, 11008), False, 'from pandevice import objects\n'), ((11013, 11053), 'pandevice.objects.ServiceObject.refreshall', 'objects.ServiceObject.refreshall', (['device'], {}), '(device)\n', (11045, 11053), False, 'from pandevice import objects\n'), ((11058, 11097), 'pandevice.objects.ServiceGroup.refreshall', 'objects.ServiceGroup.refreshall', (['device'], {}), '(device)\n', (11089, 11097), False, 'from pandevice import objects\n'), ((11102, 11132), 'pandevice.objects.Tag.refreshall', 'objects.Tag.refreshall', (['device'], {}), '(device)\n', (11124, 11132), False, 'from pandevice import objects\n'), ((11996, 12015), 'pandevice.policies.Rulebase', 'policies.Rulebase', ([], {}), 
'()\n', (12013, 12015), False, 'from pandevice import policies\n'), ((5156, 5175), 'pandevice.policies.Rulebase', 'policies.Rulebase', ([], {}), '()\n', (5173, 5175), False, 'from pandevice import policies\n'), ((5266, 5299), 'pandevice.panorama.DeviceGroup', 'panorama.DeviceGroup', (['devicegroup'], {}), '(devicegroup)\n', (5286, 5299), False, 'from pandevice import panorama\n'), ((5342, 5364), 'pandevice.policies.PreRulebase', 'policies.PreRulebase', ([], {}), '()\n', (5362, 5364), False, 'from pandevice import policies\n'), ((6402, 6433), 'ipaddress.ip_network', 'ipaddress.ip_network', (['obj.value'], {}), '(obj.value)\n', (6422, 6433), False, 'import ipaddress\n'), ((6587, 6620), 'ipaddress.ip_address', 'ipaddress.ip_address', (['ip_range[0]'], {}), '(ip_range[0])\n', (6607, 6620), False, 'import ipaddress\n'), ((6641, 6674), 'ipaddress.ip_address', 'ipaddress.ip_address', (['ip_range[1]'], {}), '(ip_range[1])\n', (6661, 6674), False, 'import ipaddress\n'), ((11419, 11462), 'pandevice.objects.AddressObject.refreshall', 'objects.AddressObject.refreshall', (['dev_group'], {}), '(dev_group)\n', (11451, 11462), False, 'from pandevice import objects\n'), ((11475, 11517), 'pandevice.objects.AddressGroup.refreshall', 'objects.AddressGroup.refreshall', (['dev_group'], {}), '(dev_group)\n', (11506, 11517), False, 'from pandevice import objects\n'), ((11530, 11573), 'pandevice.objects.ServiceObject.refreshall', 'objects.ServiceObject.refreshall', (['dev_group'], {}), '(dev_group)\n', (11562, 11573), False, 'from pandevice import objects\n'), ((11586, 11628), 'pandevice.objects.ServiceGroup.refreshall', 'objects.ServiceGroup.refreshall', (['dev_group'], {}), '(dev_group)\n', (11617, 11628), False, 'from pandevice import objects\n'), ((11641, 11674), 'pandevice.objects.Tag.refreshall', 'objects.Tag.refreshall', (['dev_group'], {}), '(dev_group)\n', (11663, 11674), False, 'from pandevice import objects\n'), ((18342, 18377), 'json.dumps', 'json.dumps', (['output_string'], 
{'indent': '(2)'}), '(output_string, indent=2)\n', (18352, 18377), False, 'import json\n'), ((13466, 13497), 'ipaddress.ip_address', 'ipaddress.ip_address', (['source_ip'], {}), '(source_ip)\n', (13486, 13497), False, 'import ipaddress\n'), ((13609, 13646), 'ipaddress.ip_address', 'ipaddress.ip_address', (['source_range[0]'], {}), '(source_range[0])\n', (13629, 13646), False, 'import ipaddress\n'), ((13690, 13727), 'ipaddress.ip_address', 'ipaddress.ip_address', (['source_range[1]'], {}), '(source_range[1])\n', (13710, 13727), False, 'import ipaddress\n'), ((15129, 15165), 'ipaddress.ip_address', 'ipaddress.ip_address', (['destination_ip'], {}), '(destination_ip)\n', (15149, 15165), False, 'import ipaddress\n'), ((15287, 15329), 'ipaddress.ip_address', 'ipaddress.ip_address', (['destination_range[0]'], {}), '(destination_range[0])\n', (15307, 15329), False, 'import ipaddress\n'), ((15378, 15420), 'ipaddress.ip_address', 'ipaddress.ip_address', (['destination_range[1]'], {}), '(destination_range[1])\n', (15398, 15420), False, 'import ipaddress\n')]
|
#
# Copyright(c) 2019 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import pytest
from api.cas import casadm
from api.cas.cache_config import CacheMode
from core.test_run import TestRun
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools.dd import Dd
from test_utils.size import Size, Unit
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_core_inactive():
    """
    Verify that a cache loaded with one missing core device reports that
    core as inactive:
      1. Start cache with 3 cores and confirm all are active.
      2. Stop cache and re-partition the core disk so one core is gone.
      3. Load cache and confirm 3 core devices, 1 of them inactive.
    """
    cache, core_disk = prepare()
    cache_dev = cache.cache_device

    before = cache.get_cache_statistics()
    assert before["core devices"] == 3
    assert before["inactive core devices"] == 0

    TestRun.LOGGER.info("Stopping cache")
    cache.stop()

    TestRun.LOGGER.info("Removing one of core devices")
    core_disk.remove_partitions()
    core_disk.create_partitions([Size(1, Unit.GibiByte), Size(1, Unit.GibiByte)])

    TestRun.LOGGER.info("Loading cache with missing core device")
    cache = casadm.start_cache(cache_dev, load=True)

    after = cache.get_cache_statistics()
    assert after["core devices"] == 3
    assert after["inactive core devices"] == 1
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_core_inactive_stats():
    """
    Verify cache statistics with inactive cores:
      1. Start cache with 3 cores.
      2. Switch cache into WB mode.
      3. Issue IO to each core.
      4. Stop cache without flush.
      5. Remove two core devices.
      6. Load cache.
      7. Check if cache stats are equal to sum of valid and inactive cores stats.
      8. Check if percentage values are calculated properly.
    """
    cache, core_device = prepare()
    cache_device = cache.cache_device

    TestRun.LOGGER.info("Switching cache mode to WB")
    cache.set_cache_mode(cache_mode=CacheMode.WB)
    cores = cache.get_core_devices()

    TestRun.LOGGER.info("Issue IO to each core")
    for core in cores:
        # dd is run for its side effect only; the original bound the result of
        # .run() to an unused `dd` variable, which has been dropped.
        (
            Dd()
            .input("/dev/zero")
            .output(core.system_path)
            .count(1000)
            .block_size(Size(4, Unit.KibiByte))
        ).run()

    TestRun.LOGGER.info("Stopping cache with dirty data")
    cores[2].flush_core()
    cache.stop(no_data_flush=True)

    TestRun.LOGGER.info("Removing two of core devices")
    core_device.remove_partitions()
    core_device.create_partitions([Size(1, Unit.GibiByte)])

    TestRun.LOGGER.info("Loading cache with missing core device")
    cache = casadm.start_cache(cache_device, load=True)

    # Accumulate stats of the cores that are still present (valid cores).
    cores_occupancy = 0
    cores_clean = 0
    cores_dirty = 0
    cores = cache.get_core_devices()
    for core in cores:
        core_stats = core.get_core_statistics()
        cores_occupancy += core_stats["occupancy"].value
        cores_clean += core_stats["clean"].value
        cores_dirty += core_stats["dirty"].value

    cache_stats = cache.get_cache_statistics()
    # Add stats attributed to the inactive (removed) cores.
    cores_occupancy += cache_stats["inactive occupancy"].value
    cores_clean += cache_stats["inactive clean"].value
    cores_dirty += cache_stats["inactive dirty"].value

    # Cache-wide stats must equal the sum of valid + inactive core stats.
    assert cache_stats["occupancy"].value == cores_occupancy
    assert cache_stats["dirty"].value == cores_dirty
    assert cache_stats["clean"].value == cores_clean

    cache_stats_percentage = cache.get_cache_statistics(percentage_val=True)
    # Expected percentage values of inactive stats, rounded to one decimal
    # place to match how the statistics are reported.
    inactive_occupancy_perc = round(
        100 * cache_stats["inactive occupancy"].value / cache_stats["cache size"].value, 1
    )
    inactive_clean_perc = round(
        100 * cache_stats["inactive clean"].value / cache_stats["occupancy"].value, 1
    )
    inactive_dirty_perc = round(
        100 * cache_stats["inactive dirty"].value / cache_stats["occupancy"].value, 1
    )

    TestRun.LOGGER.info(str(cache_stats_percentage))
    assert inactive_occupancy_perc == cache_stats_percentage["inactive occupancy"]
    assert inactive_clean_perc == cache_stats_percentage["inactive clean"]
    assert inactive_dirty_perc == cache_stats_percentage["inactive dirty"]
def prepare():
    """Start a cache on the 'cache' disk and add three cores from the 'core' disk.

    Returns:
        Tuple ``(cache, core_device)`` where ``core_device`` is the raw disk
        whose partitions back the three cores; tests re-partition it to
        simulate missing core devices.
    """
    cache_device = TestRun.disks['cache']
    core_device = TestRun.disks['core']

    cache_device.create_partitions([Size(500, Unit.MebiByte)])
    core_device.create_partitions(
        [Size(1, Unit.GibiByte), Size(1, Unit.GibiByte), Size(1, Unit.GibiByte)]
    )

    cache_device = cache_device.partitions[0]

    # Fixed log-message typo: was "Staring cache".
    TestRun.LOGGER.info("Starting cache")
    cache = casadm.start_cache(cache_device, force=True)
    TestRun.LOGGER.info("Adding core device")
    # The returned core handles were previously bound to unused locals
    # (core_1..core_3); the add_core calls are kept for their side effect.
    for core_part in core_device.partitions:
        cache.add_core(core_dev=core_part)
    return cache, core_device
|
[
"storage_devices.disk.DiskTypeLowerThan",
"storage_devices.disk.DiskTypeSet",
"core.test_run.TestRun.LOGGER.info",
"api.cas.casadm.start_cache",
"test_utils.size.Size",
"test_tools.dd.Dd"
] |
[((952, 989), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Stopping cache"""'], {}), "('Stopping cache')\n", (971, 989), False, 'from core.test_run import TestRun\n'), ((1012, 1063), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Removing one of core devices"""'], {}), "('Removing one of core devices')\n", (1031, 1063), False, 'from core.test_run import TestRun\n'), ((1189, 1250), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Loading cache with missing core device"""'], {}), "('Loading cache with missing core device')\n", (1208, 1250), False, 'from core.test_run import TestRun\n'), ((1263, 1306), 'api.cas.casadm.start_cache', 'casadm.start_cache', (['cache_device'], {'load': '(True)'}), '(cache_device, load=True)\n', (1281, 1306), False, 'from api.cas import casadm\n'), ((388, 433), 'storage_devices.disk.DiskTypeSet', 'DiskTypeSet', (['[DiskType.optane, DiskType.nand]'], {}), '([DiskType.optane, DiskType.nand])\n', (399, 433), False, 'from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan\n'), ((469, 495), 'storage_devices.disk.DiskTypeLowerThan', 'DiskTypeLowerThan', (['"""cache"""'], {}), "('cache')\n", (486, 495), False, 'from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan\n'), ((2059, 2108), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Switching cache mode to WB"""'], {}), "('Switching cache mode to WB')\n", (2078, 2108), False, 'from core.test_run import TestRun\n'), ((2200, 2244), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Issue IO to each core"""'], {}), "('Issue IO to each core')\n", (2219, 2244), False, 'from core.test_run import TestRun\n'), ((2464, 2517), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Stopping cache with dirty data"""'], {}), "('Stopping cache with dirty data')\n", (2483, 2517), False, 'from core.test_run import TestRun\n'), ((2584, 2635), 'core.test_run.TestRun.LOGGER.info', 
'TestRun.LOGGER.info', (['"""Removing two of core devices"""'], {}), "('Removing two of core devices')\n", (2603, 2635), False, 'from core.test_run import TestRun\n'), ((2737, 2798), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Loading cache with missing core device"""'], {}), "('Loading cache with missing core device')\n", (2756, 2798), False, 'from core.test_run import TestRun\n'), ((2811, 2854), 'api.cas.casadm.start_cache', 'casadm.start_cache', (['cache_device'], {'load': '(True)'}), '(cache_device, load=True)\n', (2829, 2854), False, 'from api.cas import casadm\n'), ((1471, 1516), 'storage_devices.disk.DiskTypeSet', 'DiskTypeSet', (['[DiskType.optane, DiskType.nand]'], {}), '([DiskType.optane, DiskType.nand])\n', (1482, 1516), False, 'from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan\n'), ((1552, 1578), 'storage_devices.disk.DiskTypeLowerThan', 'DiskTypeLowerThan', (['"""cache"""'], {}), "('cache')\n", (1569, 1578), False, 'from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan\n'), ((5079, 5115), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Staring cache"""'], {}), "('Staring cache')\n", (5098, 5115), False, 'from core.test_run import TestRun\n'), ((5128, 5172), 'api.cas.casadm.start_cache', 'casadm.start_cache', (['cache_device'], {'force': '(True)'}), '(cache_device, force=True)\n', (5146, 5172), False, 'from api.cas import casadm\n'), ((5177, 5218), 'core.test_run.TestRun.LOGGER.info', 'TestRun.LOGGER.info', (['"""Adding core device"""'], {}), "('Adding core device')\n", (5196, 5218), False, 'from core.test_run import TestRun\n'), ((1135, 1157), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (1139, 1157), False, 'from test_utils.size import Size, Unit\n'), ((1159, 1181), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (1163, 1181), False, 'from test_utils.size import Size, Unit\n'), ((2707, 
2729), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (2711, 2729), False, 'from test_utils.size import Size, Unit\n'), ((4740, 4764), 'test_utils.size.Size', 'Size', (['(500)', 'Unit.MebiByte'], {}), '(500, Unit.MebiByte)\n', (4744, 4764), False, 'from test_utils.size import Size, Unit\n'), ((4811, 4833), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (4815, 4833), False, 'from test_utils.size import Size, Unit\n'), ((4835, 4857), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (4839, 4857), False, 'from test_utils.size import Size, Unit\n'), ((4859, 4881), 'test_utils.size.Size', 'Size', (['(1)', 'Unit.GibiByte'], {}), '(1, Unit.GibiByte)\n', (4863, 4881), False, 'from test_utils.size import Size, Unit\n'), ((2419, 2441), 'test_utils.size.Size', 'Size', (['(4)', 'Unit.KibiByte'], {}), '(4, Unit.KibiByte)\n', (2423, 2441), False, 'from test_utils.size import Size, Unit\n'), ((2295, 2299), 'test_tools.dd.Dd', 'Dd', ([], {}), '()\n', (2297, 2299), False, 'from test_tools.dd import Dd\n')]
|
import json
from django.contrib.auth.views import LoginView, LogoutView
from django.views.generic import TemplateView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth import authenticate, login, logout
from django.http.response import JsonResponse, HttpResponseRedirect
from django.urls import reverse
from django.contrib.auth.models import User
from django.db.models import Q
# Create your views here.
class Index(TemplateView):
    """Public landing page; responds to GET only."""
    http_method_names = ['get']
    template_name = 'core/index.html'
class Login(LoginView):
    """Login page with two JSON endpoints layered over Django's LoginView.

    - GET with ``?search=<term>``: look a user up by exact username or email
      and return JSON (200 with the username, or 404 if no match).
    - POST: authenticate with the login form and return a JSON payload
      (200 on success, 401 with serialized form errors on invalid form).
    """
    http_method_names = ['get', 'post']
    template_name = 'core/login.html'

    def get(self, request, *args, **kwargs):
        # Plain page render unless a user lookup was requested.
        if 'search' not in request.GET:
            return super().get(request, *args, **kwargs)
        term = request.GET.get('search')
        try:
            found = User.objects.get(Q(username=term) | Q(email=term))
        except User.DoesNotExist:
            return JsonResponse({'error': 'User does not exist'}, safe=False, status=404)
        return JsonResponse({'username': found.username}, safe=False, status=200)

    def post(self, request, *args, **kwargs):
        payload, code = None, None
        form = self.form_class(data=request.POST)
        if form.is_valid():
            user = authenticate(
                username=form.cleaned_data.get('username'),
                password=form.cleaned_data.get('password'),
            )
            # NOTE(review): when the form is valid but authentication fails,
            # payload/code stay None — preserved from the original behavior.
            if user and user.is_active:
                login(request, user)
                payload = {'success': True}
                code = 200
        else:
            # Serialize the errors ourselves so non-ASCII messages pass through.
            payload = json.dumps(dict(form.errors.items()), ensure_ascii=False)
            code = 401
        return JsonResponse(payload, safe=False, status=code)
class Home(LoginRequiredMixin, TemplateView):
    """Authenticated home page; anonymous users are redirected to login."""
    http_method_names = ['get']
    template_name = 'core/home.html'

    def get_login_url(self):
        # LoginRequiredMixin sends unauthenticated users to this URL.
        return reverse('core:login')
class Logout(LogoutView):
    """Log the current user out on GET and redirect to the index page."""
    http_method_names = ['get']

    def get(self, request, *args, **kwargs):
        logout(request)
        destination = reverse('core:index')
        return HttpResponseRedirect(destination)
|
[
"django.contrib.auth.models.User.objects.get",
"django.http.response.JsonResponse",
"django.db.models.Q",
"django.urls.reverse",
"django.contrib.auth.logout",
"django.contrib.auth.authenticate",
"django.contrib.auth.login"
] |
[((1934, 1979), 'django.http.response.JsonResponse', 'JsonResponse', (['data'], {'safe': '(False)', 'status': 'status'}), '(data, safe=False, status=status)\n', (1946, 1979), False, 'from django.http.response import JsonResponse, HttpResponseRedirect\n'), ((2142, 2163), 'django.urls.reverse', 'reverse', (['"""core:login"""'], {}), "('core:login')\n", (2149, 2163), False, 'from django.urls import reverse\n'), ((2278, 2293), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (2284, 2293), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((1200, 1245), 'django.http.response.JsonResponse', 'JsonResponse', (['data'], {'safe': '(False)', 'status': 'status'}), '(data, safe=False, status=status)\n', (1212, 1245), False, 'from django.http.response import JsonResponse, HttpResponseRedirect\n'), ((1603, 1653), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1615, 1653), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((2330, 2351), 'django.urls.reverse', 'reverse', (['"""core:index"""'], {}), "('core:index')\n", (2337, 2351), False, 'from django.urls import reverse\n'), ((953, 976), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (['query'], {}), '(query)\n', (969, 976), False, 'from django.contrib.auth.models import User\n'), ((1710, 1730), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (1715, 1730), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((863, 881), 'django.db.models.Q', 'Q', ([], {'username': 'search'}), '(username=search)\n', (864, 881), False, 'from django.db.models import Q\n'), ((911, 926), 'django.db.models.Q', 'Q', ([], {'email': 'search'}), '(email=search)\n', (912, 926), False, 'from django.db.models import Q\n')]
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
IMPORTANT: This code is taken directly from Tensorflow
(https://github.com/tensorflow/tensorflow) and is copied temporarily
until it is available in a packaged Tensorflow version on pypi.
TODO(dennybritz): Delete this code when it becomes available in TF.
A library of helpers for use with SamplingDecoders.
"""
# pylint: skip-file
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
#from tensorflow.contrib.distributions.python.ops import Bernoulli
#from tensorflow.contrib.distributions.python.ops import Categorical
from tensorflow.python.ops.distributions import bernoulli
from tensorflow.python.ops.distributions import categorical
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import base as layers_base
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.util import nest
from seq2seq.contrib.seq2seq import decoder
# Public API of this module.  NOTE(review): GreedyEmbeddingHelper is exported
# here but not defined in this chunk — presumably defined later in the file.
__all__ = [
    "Helper",
    "TrainingHelper",
    "GreedyEmbeddingHelper",
    "CustomHelper",
    "ScheduledEmbeddingTrainingHelper",
    "ScheduledOutputTrainingHelper",
]

# Convenience alias: decoder's helper that swaps the batch and time axes.
_transpose_batch_time = decoder._transpose_batch_time  # pylint: disable=protected-access
def _unstack_ta(inp):
  """Unstack a time-major tensor into a TensorArray of per-time-step slices."""
  ta = tensor_array_ops.TensorArray(
      dtype=inp.dtype,
      size=array_ops.shape(inp)[0],
      element_shape=inp.get_shape()[1:])
  return ta.unstack(inp)
@six.add_metaclass(abc.ABCMeta)
class Helper(object):
  """Helper interface.  Helper instances are used by SamplingDecoder.

  A helper decides what to feed the decoder at each step and when each
  sequence in the batch is finished.
  """

  @abc.abstractproperty
  def batch_size(self):
    """Returns a scalar int32 tensor."""
    raise NotImplementedError("batch_size has not been implemented")

  @abc.abstractmethod
  def initialize(self, name=None):
    """Returns `(initial_finished, initial_inputs)` for time step 0."""
    pass

  @abc.abstractmethod
  def sample(self, time, outputs, state, name=None):
    """Returns `sample_ids` drawn from the decoder `outputs`."""
    pass

  @abc.abstractmethod
  def next_inputs(self, time, outputs, state, sample_ids, name=None):
    """Returns `(finished, next_inputs, next_state)` for the next step."""
    pass
class CustomHelper(Helper):
  """Helper whose initialize/sample/next_inputs behavior is user-supplied."""

  def __init__(self, initialize_fn, sample_fn, next_inputs_fn):
    """Store the three user callbacks.

    Args:
      initialize_fn: callable returning `(finished, next_inputs)` for the
        first iteration.
      sample_fn: callable taking `(time, outputs, state)` and emitting a
        tensor `sample_ids`.
      next_inputs_fn: callable taking `(time, outputs, state, sample_ids)`
        and emitting `(finished, next_inputs, next_state)`.
    """
    self._initialize_fn = initialize_fn
    self._sample_fn = sample_fn
    self._next_inputs_fn = next_inputs_fn
    # Resolved lazily on the first initialize() call.
    self._batch_size = None

  @property
  def batch_size(self):
    if self._batch_size is None:
      raise ValueError("batch_size accessed before initialize was called")
    return self._batch_size

  def initialize(self, name=None):
    scope = "%sInitialize" % type(self).__name__
    with ops.name_scope(name, scope):
      finished, first_inputs = self._initialize_fn()
      if self._batch_size is None:
        # Infer batch size from the per-sequence `finished` flags.
        self._batch_size = array_ops.size(finished)
      return (finished, first_inputs)

  def sample(self, time, outputs, state, name=None):
    scope = "%sSample" % type(self).__name__
    with ops.name_scope(name, scope, (time, outputs, state)):
      return self._sample_fn(time=time, outputs=outputs, state=state)

  def next_inputs(self, time, outputs, state, sample_ids, name=None):
    scope = "%sNextInputs" % type(self).__name__
    with ops.name_scope(name, scope, (time, outputs, state)):
      return self._next_inputs_fn(
          time=time, outputs=outputs, state=state, sample_ids=sample_ids)
class TrainingHelper(Helper):
  """A helper for use during training.  Only reads inputs.

  Returned sample_ids are the argmax of the RNN output logits.
  """

  def __init__(self, inputs, sequence_length, time_major=False, name=None):
    """Initializer.

    Args:
      inputs: A (structure of) input tensors.
      sequence_length: An int32 vector tensor.
      time_major: Python bool.  Whether the tensors in `inputs` are time major.
        If `False` (default), they are assumed to be batch major.
      name: Name scope for any created operations.

    Raises:
      ValueError: if `sequence_length` is not a 1D tensor.
    """
    with ops.name_scope(name, "TrainingHelper", [inputs, sequence_length]):
      inputs = ops.convert_to_tensor(inputs, name="inputs")
      if not time_major:
        # The TensorArrays below are indexed by time, so force time-major.
        inputs = nest.map_structure(_transpose_batch_time, inputs)

      # One TensorArray per input tensor, one element per time step.
      self._input_tas = nest.map_structure(_unstack_ta, inputs)
      self._sequence_length = ops.convert_to_tensor(
          sequence_length, name="sequence_length")
      if self._sequence_length.get_shape().ndims != 1:
        raise ValueError(
            "Expected sequence_length to be a vector, but received shape: %s" %
            self._sequence_length.get_shape())

      # Zeros shaped like a single time step; fed once every sequence is done.
      self._zero_inputs = nest.map_structure(
          lambda inp: array_ops.zeros_like(inp[0, :]), inputs)

      self._batch_size = array_ops.size(sequence_length)

  @property
  def batch_size(self):
    return self._batch_size

  def initialize(self, name=None):
    """Returns `(initial_finished, initial_inputs)` for time step 0."""
    with ops.name_scope(name, "TrainingHelperInitialize"):
      # Sequences of length zero are finished before the first step.
      finished = math_ops.equal(0, self._sequence_length)
      all_finished = math_ops.reduce_all(finished)
      next_inputs = control_flow_ops.cond(
          all_finished, lambda: self._zero_inputs,
          lambda: nest.map_structure(lambda inp: inp.read(0), self._input_tas))
      return (finished, next_inputs)

  def sample(self, time, outputs, name=None, **unused_kwargs):
    """Returns the argmax of `outputs` along the last axis as int32 ids."""
    with ops.name_scope(name, "TrainingHelperSample", [time, outputs]):
      sample_ids = math_ops.cast(
          math_ops.argmax(outputs, axis=-1), dtypes.int32)
      return sample_ids

  def next_inputs(self, time, outputs, state, name=None, **unused_kwargs):
    """next_inputs_fn for TrainingHelper."""
    with ops.name_scope(name, "TrainingHelperNextInputs",
                        [time, outputs, state]):
      next_time = time + 1
      finished = (next_time >= self._sequence_length)
      all_finished = math_ops.reduce_all(finished)

      def read_from_ta(inp):
        # Read the next time step's slice from one TensorArray.
        return inp.read(next_time)

      next_inputs = control_flow_ops.cond(
          all_finished, lambda: self._zero_inputs,
          lambda: nest.map_structure(read_from_ta, self._input_tas))
      return (finished, next_inputs, state)
class ScheduledEmbeddingTrainingHelper(TrainingHelper):
  """A training helper that adds scheduled sampling.

  Returns -1s for sample_ids where no sampling took place; valid sample id
  values elsewhere.
  """

  def __init__(self, inputs, sequence_length, embedding, sampling_probability,
               time_major=False, seed=None, scheduling_seed=None, name=None):
    """Initializer.

    Args:
      inputs: A (structure of) input tensors.
      sequence_length: An int32 vector tensor.
      embedding: A callable that takes a vector tensor of `ids` (argmax ids),
        or the `params` argument for `embedding_lookup`.
      sampling_probability: A 0D `float32` tensor: the probability of sampling
        categorically from the output ids instead of reading directly from the
        inputs.
      time_major: Python bool.  Whether the tensors in `inputs` are time major.
        If `False` (default), they are assumed to be batch major.
      seed: The sampling seed.
      scheduling_seed: The schedule decision rule sampling seed.
      name: Name scope for any created operations.

    Raises:
      ValueError: if `sampling_probability` is not a scalar or vector.
    """
    with ops.name_scope(name, "ScheduledEmbeddingSamplingWrapper",
                        [embedding, sampling_probability]):
      if callable(embedding):
        self._embedding_fn = embedding
      else:
        # Treat `embedding` as an embedding matrix (params of embedding_lookup).
        self._embedding_fn = (
            lambda ids: embedding_ops.embedding_lookup(embedding, ids))
      self._sampling_probability = ops.convert_to_tensor(
          sampling_probability, name="sampling_probability")
      if self._sampling_probability.get_shape().ndims not in (0, 1):
        raise ValueError(
            "sampling_probability must be either a scalar or a vector. "
            "saw shape: %s" % (self._sampling_probability.get_shape()))
      self._seed = seed
      self._scheduling_seed = scheduling_seed
      super(ScheduledEmbeddingTrainingHelper, self).__init__(
          inputs=inputs,
          sequence_length=sequence_length,
          time_major=time_major,
          name=name)

  def initialize(self, name=None):
    return super(ScheduledEmbeddingTrainingHelper, self).initialize(name=name)

  def sample(self, time, outputs, state, name=None):
    with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample",
                        [time, outputs, state]):
      # Return -1s where we did not sample, and sample_ids elsewhere.
      # Draw one uniform number per batch entry to decide where to sample.
      select_sample_noise = random_ops.random_uniform(
          [self.batch_size], seed=self._scheduling_seed)
      select_sample = (self._sampling_probability > select_sample_noise)
      sample_id_sampler = categorical.Categorical(logits=outputs)
      return array_ops.where(
          select_sample,
          sample_id_sampler.sample(seed=self._seed),
          array_ops.tile([-1], [self.batch_size]))

  def next_inputs(self, time, outputs, state, sample_ids, name=None):
    # NOTE(review): the scope name reuses "...Sample" here as well — looks
    # like copy-paste; renaming would change graph op names, so it is kept.
    with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample",
                        [time, outputs, state, sample_ids]):
      # Start from the plain TrainingHelper next inputs (read from inputs).
      (finished, base_next_inputs, state) = (
          super(ScheduledEmbeddingTrainingHelper, self).next_inputs(
              time=time,
              outputs=outputs,
              state=state,
              sample_ids=sample_ids,
              name=name))

      def maybe_sample():
        """Perform scheduled sampling."""
        # sample_ids > -1 marks positions where sampling happened.
        where_sampling = math_ops.cast(
            array_ops.where(sample_ids > -1), dtypes.int32)
        where_not_sampling = math_ops.cast(
            array_ops.where(sample_ids <= -1), dtypes.int32)
        where_sampling_flat = array_ops.reshape(where_sampling, [-1])
        where_not_sampling_flat = array_ops.reshape(where_not_sampling, [-1])
        sample_ids_sampling = array_ops.gather(sample_ids, where_sampling_flat)
        inputs_not_sampling = array_ops.gather(
            base_next_inputs, where_not_sampling_flat)
        # Embed the sampled ids; read-through positions keep base inputs.
        sampled_next_inputs = self._embedding_fn(sample_ids_sampling)
        base_shape = array_ops.shape(base_next_inputs)
        # Scatter both partitions back into a tensor of the original shape.
        return (array_ops.scatter_nd(indices=where_sampling,
                                     updates=sampled_next_inputs,
                                     shape=base_shape)
                + array_ops.scatter_nd(indices=where_not_sampling,
                                       updates=inputs_not_sampling,
                                       shape=base_shape))

      all_finished = math_ops.reduce_all(finished)
      # Skip the sampling work entirely once every sequence has finished.
      next_inputs = control_flow_ops.cond(
          all_finished, lambda: base_next_inputs, maybe_sample)
      return (finished, next_inputs, state)
class ScheduledOutputTrainingHelper(TrainingHelper):
  """A training helper that adds scheduled sampling directly to outputs.
  Returns False for sample_ids where no sampling took place; True elsewhere.
  """
  def __init__(self, inputs, sequence_length, sampling_probability,
               time_major=False, seed=None, next_input_layer=None,
               auxiliary_inputs=None, name=None):
    """Initializer.
    Args:
      inputs: A (structure) of input tensors.
      sequence_length: An int32 vector tensor.
      sampling_probability: A 0D `float32` tensor: the probability of sampling
        from the outputs instead of reading directly from the inputs.
      time_major: Python bool. Whether the tensors in `inputs` are time major.
        If `False` (default), they are assumed to be batch major.
      seed: The sampling seed.
      next_input_layer: (Optional) An instance of `tf.layers.Layer`, i.e.,
        `tf.layers.Dense`. Optional layer to apply to the RNN output to create
        the next input.
      auxiliary_inputs: An optional (structure of) auxiliary input tensors with
        a shape that matches `inputs` in all but (potentially) the final
        dimension. These tensors will be concatenated to the sampled output or
        the `inputs` when not sampling for use as the next input.
      name: Name scope for any created operations.
    Raises:
      ValueError: if `sampling_probability` is not a scalar or vector.
    """
    with ops.name_scope(name, "ScheduledOutputTrainingHelper",
                        [inputs, auxiliary_inputs, sampling_probability]):
      self._sampling_probability = ops.convert_to_tensor(
          sampling_probability, name="sampling_probability")
      # Only a scalar (shared) or vector (per-example) probability is valid.
      if self._sampling_probability.get_shape().ndims not in (0, 1):
        raise ValueError(
            "sampling_probability must be either a scalar or a vector. "
            "saw shape: %s" % (self._sampling_probability.get_shape()))
      if auxiliary_inputs is None:
        # Nothing to concatenate; feed `inputs` to the base helper unchanged.
        maybe_concatenated_inputs = inputs
      else:
        inputs = ops.convert_to_tensor(inputs, name="inputs")
        auxiliary_inputs = ops.convert_to_tensor(
            auxiliary_inputs, name="auxiliary_inputs")
        # Concatenate the auxiliary inputs onto the last dimension of `inputs`.
        maybe_concatenated_inputs = nest.map_structure(
            lambda x, y: array_ops.concat((x, y), -1),
            inputs, auxiliary_inputs)
        if not time_major:
          # The TensorArrays below are unstacked along time, so bring the
          # time dimension to the front first.
          auxiliary_inputs = nest.map_structure(
              _transpose_batch_time, auxiliary_inputs)
      # Per-time-step TensorArrays of the auxiliary inputs (None if absent).
      self._auxiliary_input_tas = (
          nest.map_structure(_unstack_ta, auxiliary_inputs)
          if auxiliary_inputs is not None else None)
      self._seed = seed
      if (next_input_layer is not None and not isinstance(next_input_layer,
                                                          layers_base._Layer)):  # pylint: disable=protected-access
        raise TypeError("next_input_layer must be a Layer, received: %s" %
                        type(next_input_layer))
      self._next_input_layer = next_input_layer
      super(ScheduledOutputTrainingHelper, self).__init__(
          inputs=maybe_concatenated_inputs,
          sequence_length=sequence_length,
          time_major=time_major,
          name=name)
  def initialize(self, name=None):
    return super(ScheduledOutputTrainingHelper, self).initialize(name=name)
  def sample(self, time, outputs, state, name=None):
    # Per-example Bernoulli draw: True means "use the outputs" this step.
    with ops.name_scope(name, "ScheduledOutputTrainingHelperSample",
                        [time, outputs, state]):
      sampler = bernoulli.Bernoulli(probs=self._sampling_probability)
      return math_ops.cast(
          sampler.sample(sample_shape=self.batch_size, seed=self._seed),
          dtypes.bool)
  def next_inputs(self, time, outputs, state, sample_ids, name=None):
    # sample_ids is a boolean mask: True rows take (possibly transformed)
    # outputs as the next input, False rows keep the teacher-forced input.
    with ops.name_scope(name, "ScheduledOutputTrainingHelperNextInputs",
                        [time, outputs, state, sample_ids]):
      (finished, base_next_inputs, state) = (
          super(ScheduledOutputTrainingHelper, self).next_inputs(
              time=time,
              outputs=outputs,
              state=state,
              sample_ids=sample_ids,
              name=name))
      def maybe_sample():
        """Perform scheduled sampling."""
        def maybe_concatenate_auxiliary_inputs(outputs_, indices=None):
          """Concatenate outputs with auxiliary inputs, if they exist."""
          if self._auxiliary_input_tas is None:
            return outputs_
          next_time = time + 1
          auxiliary_inputs = nest.map_structure(
              lambda ta: ta.read(next_time), self._auxiliary_input_tas)
          if indices is not None:
            auxiliary_inputs = array_ops.gather_nd(auxiliary_inputs, indices)
          return nest.map_structure(
              lambda x, y: array_ops.concat((x, y), -1),
              outputs_, auxiliary_inputs)
        if self._next_input_layer is None:
          # No projection layer: elementwise select between raw outputs and
          # the base next inputs.
          return array_ops.where(
              sample_ids, maybe_concatenate_auxiliary_inputs(outputs),
              base_next_inputs)
        # With a projection layer, only the sampled rows are transformed,
        # then scattered back together with the non-sampled base inputs.
        where_sampling = math_ops.cast(
            array_ops.where(sample_ids), dtypes.int32)
        where_not_sampling = math_ops.cast(
            array_ops.where(math_ops.logical_not(sample_ids)), dtypes.int32)
        outputs_sampling = array_ops.gather_nd(outputs, where_sampling)
        inputs_not_sampling = array_ops.gather_nd(base_next_inputs,
                                                  where_not_sampling)
        sampled_next_inputs = maybe_concatenate_auxiliary_inputs(
            self._next_input_layer(outputs_sampling), where_sampling)
        base_shape = array_ops.shape(base_next_inputs)
        return (array_ops.scatter_nd(indices=where_sampling,
                                     updates=sampled_next_inputs,
                                     shape=base_shape)
                + array_ops.scatter_nd(indices=where_not_sampling,
                                       updates=inputs_not_sampling,
                                       shape=base_shape))
      all_finished = math_ops.reduce_all(finished)
      # Skip the sampling work entirely once every sequence is finished.
      next_inputs = control_flow_ops.cond(
          all_finished, lambda: base_next_inputs, maybe_sample)
      return (finished, next_inputs, state)
class GreedyEmbeddingHelper(Helper):
  """A helper for use during inference.
  Uses the argmax of the output (treated as logits) and passes the
  result through an embedding layer to get the next input.
  """
  def __init__(self, embedding, start_tokens, end_token):
    """Initializer.
    Args:
      embedding: A callable that takes a vector tensor of `ids` (argmax ids),
        or the `params` argument for `embedding_lookup`.
      start_tokens: `int32` vector shaped `[batch_size]`, the start tokens.
      end_token: `int32` scalar, the token that marks end of decoding.
    Raises:
      ValueError: if `start_tokens` is not a 1D tensor or `end_token` is not
        a scalar.
    """
    # Accept either a ready-made embedding function or an embedding matrix
    # to be used with `embedding_lookup`.
    if callable(embedding):
      self._embedding_fn = embedding
    else:
      self._embedding_fn = (
          lambda ids: embedding_ops.embedding_lookup(embedding, ids))
    self._start_tokens = ops.convert_to_tensor(
        start_tokens, dtype=dtypes.int32, name="start_tokens")
    self._end_token = ops.convert_to_tensor(
        end_token, dtype=dtypes.int32, name="end_token")
    if self._start_tokens.get_shape().ndims != 1:
      raise ValueError("start_tokens must be a vector")
    # Batch size is implied by the number of start tokens.
    self._batch_size = array_ops.size(start_tokens)
    if self._end_token.get_shape().ndims != 0:
      raise ValueError("end_token must be a scalar")
    self._start_inputs = self._embedding_fn(self._start_tokens)
  @property
  def batch_size(self):
    return self._batch_size
  def initialize(self, name=None):
    # Nothing is finished at step 0; feed the embedded start tokens first.
    finished = array_ops.tile([False], [self._batch_size])
    return (finished, self._start_inputs)
  def sample(self, time, outputs, state, name=None):
    """sample for GreedyEmbeddingHelper."""
    del time, state  # unused by sample_fn
    # Outputs are logits, use argmax to get the most probable id
    if not isinstance(outputs, ops.Tensor):
      raise TypeError("Expected outputs to be a single Tensor, got: %s" %
                      type(outputs))
    sample_ids = math_ops.cast(
        math_ops.argmax(outputs, axis=-1), dtypes.int32)
    return sample_ids
  def next_inputs(self, time, outputs, state, sample_ids, name=None):
    """next_inputs_fn for GreedyEmbeddingHelper."""
    del time, outputs  # unused by next_inputs_fn
    # A sequence is finished once it emits the end token.
    finished = math_ops.equal(sample_ids, self._end_token)
    all_finished = math_ops.reduce_all(finished)
    next_inputs = control_flow_ops.cond(
        all_finished,
        # If we're finished, the next_inputs value doesn't matter
        lambda: self._start_inputs,
        lambda: self._embedding_fn(sample_ids))
    return (finished, next_inputs, state)
|
[
"tensorflow.python.ops.array_ops.where",
"tensorflow.python.ops.array_ops.reshape",
"six.add_metaclass",
"tensorflow.python.ops.distributions.categorical.Categorical",
"tensorflow.python.util.nest.map_structure",
"tensorflow.python.ops.random_ops.random_uniform",
"tensorflow.python.ops.array_ops.shape",
"tensorflow.python.ops.array_ops.tile",
"tensorflow.python.ops.array_ops.gather_nd",
"tensorflow.python.ops.array_ops.size",
"tensorflow.python.framework.ops.convert_to_tensor",
"tensorflow.python.ops.math_ops.argmax",
"tensorflow.python.ops.control_flow_ops.cond",
"tensorflow.python.ops.array_ops.zeros_like",
"tensorflow.python.ops.math_ops.equal",
"tensorflow.python.ops.distributions.bernoulli.Bernoulli",
"tensorflow.python.ops.array_ops.gather",
"tensorflow.python.ops.embedding_ops.embedding_lookup",
"tensorflow.python.ops.math_ops.reduce_all",
"tensorflow.python.ops.array_ops.concat",
"tensorflow.python.ops.math_ops.logical_not",
"tensorflow.python.framework.ops.name_scope",
"tensorflow.python.ops.array_ops.scatter_nd"
] |
[((2375, 2405), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (2392, 2405), False, 'import six\n'), ((19254, 19330), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['start_tokens'], {'dtype': 'dtypes.int32', 'name': '"""start_tokens"""'}), "(start_tokens, dtype=dtypes.int32, name='start_tokens')\n", (19275, 19330), False, 'from tensorflow.python.framework import ops\n'), ((19362, 19432), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['end_token'], {'dtype': 'dtypes.int32', 'name': '"""end_token"""'}), "(end_token, dtype=dtypes.int32, name='end_token')\n", (19383, 19432), False, 'from tensorflow.python.framework import ops\n'), ((19571, 19599), 'tensorflow.python.ops.array_ops.size', 'array_ops.size', (['start_tokens'], {}), '(start_tokens)\n', (19585, 19599), False, 'from tensorflow.python.ops import array_ops\n'), ((19880, 19923), 'tensorflow.python.ops.array_ops.tile', 'array_ops.tile', (['[False]', '[self._batch_size]'], {}), '([False], [self._batch_size])\n', (19894, 19923), False, 'from tensorflow.python.ops import array_ops\n'), ((20626, 20669), 'tensorflow.python.ops.math_ops.equal', 'math_ops.equal', (['sample_ids', 'self._end_token'], {}), '(sample_ids, self._end_token)\n', (20640, 20669), False, 'from tensorflow.python.ops import math_ops\n'), ((20689, 20718), 'tensorflow.python.ops.math_ops.reduce_all', 'math_ops.reduce_all', (['finished'], {}), '(finished)\n', (20708, 20718), False, 'from tensorflow.python.ops import math_ops\n'), ((5352, 5417), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""TrainingHelper"""', '[inputs, sequence_length]'], {}), "(name, 'TrainingHelper', [inputs, sequence_length])\n", (5366, 5417), False, 'from tensorflow.python.framework import ops\n'), ((5434, 5478), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['inputs'], {'name': '"""inputs"""'}), "(inputs, 
name='inputs')\n", (5455, 5478), False, 'from tensorflow.python.framework import ops\n'), ((5596, 5635), 'tensorflow.python.util.nest.map_structure', 'nest.map_structure', (['_unstack_ta', 'inputs'], {}), '(_unstack_ta, inputs)\n', (5614, 5635), False, 'from tensorflow.python.util import nest\n'), ((5666, 5728), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['sequence_length'], {'name': '"""sequence_length"""'}), "(sequence_length, name='sequence_length')\n", (5687, 5728), False, 'from tensorflow.python.framework import ops\n'), ((6084, 6115), 'tensorflow.python.ops.array_ops.size', 'array_ops.size', (['sequence_length'], {}), '(sequence_length)\n', (6098, 6115), False, 'from tensorflow.python.ops import array_ops\n'), ((6226, 6274), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""TrainingHelperInitialize"""'], {}), "(name, 'TrainingHelperInitialize')\n", (6240, 6274), False, 'from tensorflow.python.framework import ops\n'), ((6293, 6333), 'tensorflow.python.ops.math_ops.equal', 'math_ops.equal', (['(0)', 'self._sequence_length'], {}), '(0, self._sequence_length)\n', (6307, 6333), False, 'from tensorflow.python.ops import math_ops\n'), ((6355, 6384), 'tensorflow.python.ops.math_ops.reduce_all', 'math_ops.reduce_all', (['finished'], {}), '(finished)\n', (6374, 6384), False, 'from tensorflow.python.ops import math_ops\n'), ((6669, 6730), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""TrainingHelperSample"""', '[time, outputs]'], {}), "(name, 'TrainingHelperSample', [time, outputs])\n", (6683, 6730), False, 'from tensorflow.python.framework import ops\n'), ((6979, 7051), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""TrainingHelperNextInputs"""', '[time, outputs, state]'], {}), "(name, 'TrainingHelperNextInputs', [time, outputs, state])\n", (6993, 7051), False, 'from tensorflow.python.framework import ops\n'), ((7179, 7208), 
'tensorflow.python.ops.math_ops.reduce_all', 'math_ops.reduce_all', (['finished'], {}), '(finished)\n', (7198, 7208), False, 'from tensorflow.python.ops import math_ops\n'), ((8678, 8774), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledEmbeddingSamplingWrapper"""', '[embedding, sampling_probability]'], {}), "(name, 'ScheduledEmbeddingSamplingWrapper', [embedding,\n sampling_probability])\n", (8692, 8774), False, 'from tensorflow.python.framework import ops\n'), ((9015, 9087), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['sampling_probability'], {'name': '"""sampling_probability"""'}), "(sampling_probability, name='sampling_probability')\n", (9036, 9087), False, 'from tensorflow.python.framework import ops\n'), ((9771, 9861), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledEmbeddingTrainingHelperSample"""', '[time, outputs, state]'], {}), "(name, 'ScheduledEmbeddingTrainingHelperSample', [time,\n outputs, state])\n", (9785, 9861), False, 'from tensorflow.python.framework import ops\n'), ((9980, 10052), 'tensorflow.python.ops.random_ops.random_uniform', 'random_ops.random_uniform', (['[self.batch_size]'], {'seed': 'self._scheduling_seed'}), '([self.batch_size], seed=self._scheduling_seed)\n', (10005, 10052), False, 'from tensorflow.python.ops import random_ops\n'), ((10163, 10202), 'tensorflow.python.ops.distributions.categorical.Categorical', 'categorical.Categorical', ([], {'logits': 'outputs'}), '(logits=outputs)\n', (10186, 10202), False, 'from tensorflow.python.ops.distributions import categorical\n'), ((10442, 10544), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledEmbeddingTrainingHelperSample"""', '[time, outputs, state, sample_ids]'], {}), "(name, 'ScheduledEmbeddingTrainingHelperSample', [time,\n outputs, state, sample_ids])\n", (10456, 10544), False, 'from tensorflow.python.framework import ops\n'), 
((11954, 11983), 'tensorflow.python.ops.math_ops.reduce_all', 'math_ops.reduce_all', (['finished'], {}), '(finished)\n', (11973, 11983), False, 'from tensorflow.python.ops import math_ops\n'), ((12004, 12080), 'tensorflow.python.ops.control_flow_ops.cond', 'control_flow_ops.cond', (['all_finished', '(lambda : base_next_inputs)', 'maybe_sample'], {}), '(all_finished, lambda : base_next_inputs, maybe_sample)\n', (12025, 12080), False, 'from tensorflow.python.ops import control_flow_ops\n'), ((13612, 13719), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledOutputTrainingHelper"""', '[inputs, auxiliary_inputs, sampling_probability]'], {}), "(name, 'ScheduledOutputTrainingHelper', [inputs,\n auxiliary_inputs, sampling_probability])\n", (13626, 13719), False, 'from tensorflow.python.framework import ops\n'), ((13776, 13848), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['sampling_probability'], {'name': '"""sampling_probability"""'}), "(sampling_probability, name='sampling_probability')\n", (13797, 13848), False, 'from tensorflow.python.framework import ops\n'), ((15553, 15640), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledOutputTrainingHelperSample"""', '[time, outputs, state]'], {}), "(name, 'ScheduledOutputTrainingHelperSample', [time, outputs,\n state])\n", (15567, 15640), False, 'from tensorflow.python.framework import ops\n'), ((15678, 15731), 'tensorflow.python.ops.distributions.bernoulli.Bernoulli', 'bernoulli.Bernoulli', ([], {'probs': 'self._sampling_probability'}), '(probs=self._sampling_probability)\n', (15697, 15731), False, 'from tensorflow.python.ops.distributions import bernoulli\n'), ((15936, 16039), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""ScheduledOutputTrainingHelperNextInputs"""', '[time, outputs, state, sample_ids]'], {}), "(name, 'ScheduledOutputTrainingHelperNextInputs', [time,\n outputs, state, 
sample_ids])\n", (15950, 16039), False, 'from tensorflow.python.framework import ops\n'), ((18209, 18238), 'tensorflow.python.ops.math_ops.reduce_all', 'math_ops.reduce_all', (['finished'], {}), '(finished)\n', (18228, 18238), False, 'from tensorflow.python.ops import math_ops\n'), ((18259, 18335), 'tensorflow.python.ops.control_flow_ops.cond', 'control_flow_ops.cond', (['all_finished', '(lambda : base_next_inputs)', 'maybe_sample'], {}), '(all_finished, lambda : base_next_inputs, maybe_sample)\n', (18280, 18335), False, 'from tensorflow.python.ops import control_flow_ops\n'), ((20367, 20400), 'tensorflow.python.ops.math_ops.argmax', 'math_ops.argmax', (['outputs'], {'axis': '(-1)'}), '(outputs, axis=-1)\n', (20382, 20400), False, 'from tensorflow.python.ops import math_ops\n'), ((4140, 4164), 'tensorflow.python.ops.array_ops.size', 'array_ops.size', (['finished'], {}), '(finished)\n', (4154, 4164), False, 'from tensorflow.python.ops import array_ops\n'), ((5521, 5570), 'tensorflow.python.util.nest.map_structure', 'nest.map_structure', (['_transpose_batch_time', 'inputs'], {}), '(_transpose_batch_time, inputs)\n', (5539, 5570), False, 'from tensorflow.python.util import nest\n'), ((6776, 6809), 'tensorflow.python.ops.math_ops.argmax', 'math_ops.argmax', (['outputs'], {'axis': '(-1)'}), '(outputs, axis=-1)\n', (6791, 6809), False, 'from tensorflow.python.ops import math_ops\n'), ((10321, 10360), 'tensorflow.python.ops.array_ops.tile', 'array_ops.tile', (['[-1]', '[self.batch_size]'], {}), '([-1], [self.batch_size])\n', (10335, 10360), False, 'from tensorflow.python.ops import array_ops\n'), ((11131, 11170), 'tensorflow.python.ops.array_ops.reshape', 'array_ops.reshape', (['where_sampling', '[-1]'], {}), '(where_sampling, [-1])\n', (11148, 11170), False, 'from tensorflow.python.ops import array_ops\n'), ((11205, 11248), 'tensorflow.python.ops.array_ops.reshape', 'array_ops.reshape', (['where_not_sampling', '[-1]'], {}), '(where_not_sampling, [-1])\n', (11222, 11248), 
False, 'from tensorflow.python.ops import array_ops\n'), ((11279, 11328), 'tensorflow.python.ops.array_ops.gather', 'array_ops.gather', (['sample_ids', 'where_sampling_flat'], {}), '(sample_ids, where_sampling_flat)\n', (11295, 11328), False, 'from tensorflow.python.ops import array_ops\n'), ((11359, 11418), 'tensorflow.python.ops.array_ops.gather', 'array_ops.gather', (['base_next_inputs', 'where_not_sampling_flat'], {}), '(base_next_inputs, where_not_sampling_flat)\n', (11375, 11418), False, 'from tensorflow.python.ops import array_ops\n'), ((11523, 11556), 'tensorflow.python.ops.array_ops.shape', 'array_ops.shape', (['base_next_inputs'], {}), '(base_next_inputs)\n', (11538, 11556), False, 'from tensorflow.python.ops import array_ops\n'), ((14208, 14252), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['inputs'], {'name': '"""inputs"""'}), "(inputs, name='inputs')\n", (14229, 14252), False, 'from tensorflow.python.framework import ops\n'), ((14280, 14344), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['auxiliary_inputs'], {'name': '"""auxiliary_inputs"""'}), "(auxiliary_inputs, name='auxiliary_inputs')\n", (14301, 14344), False, 'from tensorflow.python.framework import ops\n'), ((14685, 14734), 'tensorflow.python.util.nest.map_structure', 'nest.map_structure', (['_unstack_ta', 'auxiliary_inputs'], {}), '(_unstack_ta, auxiliary_inputs)\n', (14703, 14734), False, 'from tensorflow.python.util import nest\n'), ((17437, 17481), 'tensorflow.python.ops.array_ops.gather_nd', 'array_ops.gather_nd', (['outputs', 'where_sampling'], {}), '(outputs, where_sampling)\n', (17456, 17481), False, 'from tensorflow.python.ops import array_ops\n'), ((17512, 17569), 'tensorflow.python.ops.array_ops.gather_nd', 'array_ops.gather_nd', (['base_next_inputs', 'where_not_sampling'], {}), '(base_next_inputs, where_not_sampling)\n', (17531, 17569), False, 'from tensorflow.python.ops import array_ops\n'), ((17778, 17811), 
'tensorflow.python.ops.array_ops.shape', 'array_ops.shape', (['base_next_inputs'], {}), '(base_next_inputs)\n', (17793, 17811), False, 'from tensorflow.python.ops import array_ops\n'), ((19180, 19226), 'tensorflow.python.ops.embedding_ops.embedding_lookup', 'embedding_ops.embedding_lookup', (['embedding', 'ids'], {}), '(embedding, ids)\n', (19210, 19226), False, 'from tensorflow.python.ops import embedding_ops\n'), ((6017, 6048), 'tensorflow.python.ops.array_ops.zeros_like', 'array_ops.zeros_like', (['inp[0, :]'], {}), '(inp[0, :])\n', (6037, 6048), False, 'from tensorflow.python.ops import array_ops\n'), ((7385, 7434), 'tensorflow.python.util.nest.map_structure', 'nest.map_structure', (['read_from_ta', 'self._input_tas'], {}), '(read_from_ta, self._input_tas)\n', (7403, 7434), False, 'from tensorflow.python.util import nest\n'), ((8932, 8978), 'tensorflow.python.ops.embedding_ops.embedding_lookup', 'embedding_ops.embedding_lookup', (['embedding', 'ids'], {}), '(embedding, ids)\n', (8962, 8978), False, 'from tensorflow.python.ops import embedding_ops\n'), ((10948, 10980), 'tensorflow.python.ops.array_ops.where', 'array_ops.where', (['(sample_ids > -1)'], {}), '(sample_ids > -1)\n', (10963, 10980), False, 'from tensorflow.python.ops import array_ops\n'), ((11052, 11085), 'tensorflow.python.ops.array_ops.where', 'array_ops.where', (['(sample_ids <= -1)'], {}), '(sample_ids <= -1)\n', (11067, 11085), False, 'from tensorflow.python.ops import array_ops\n'), ((11573, 11668), 'tensorflow.python.ops.array_ops.scatter_nd', 'array_ops.scatter_nd', ([], {'indices': 'where_sampling', 'updates': 'sampled_next_inputs', 'shape': 'base_shape'}), '(indices=where_sampling, updates=sampled_next_inputs,\n shape=base_shape)\n', (11593, 11668), False, 'from tensorflow.python.ops import array_ops\n'), ((11757, 11857), 'tensorflow.python.ops.array_ops.scatter_nd', 'array_ops.scatter_nd', ([], {'indices': 'where_not_sampling', 'updates': 'inputs_not_sampling', 'shape': 'base_shape'}), 
'(indices=where_not_sampling, updates=\n inputs_not_sampling, shape=base_shape)\n', (11777, 11857), False, 'from tensorflow.python.ops import array_ops\n'), ((14563, 14622), 'tensorflow.python.util.nest.map_structure', 'nest.map_structure', (['_transpose_batch_time', 'auxiliary_inputs'], {}), '(_transpose_batch_time, auxiliary_inputs)\n', (14581, 14622), False, 'from tensorflow.python.util import nest\n'), ((17246, 17273), 'tensorflow.python.ops.array_ops.where', 'array_ops.where', (['sample_ids'], {}), '(sample_ids)\n', (17261, 17273), False, 'from tensorflow.python.ops import array_ops\n'), ((17828, 17923), 'tensorflow.python.ops.array_ops.scatter_nd', 'array_ops.scatter_nd', ([], {'indices': 'where_sampling', 'updates': 'sampled_next_inputs', 'shape': 'base_shape'}), '(indices=where_sampling, updates=sampled_next_inputs,\n shape=base_shape)\n', (17848, 17923), False, 'from tensorflow.python.ops import array_ops\n'), ((18012, 18112), 'tensorflow.python.ops.array_ops.scatter_nd', 'array_ops.scatter_nd', ([], {'indices': 'where_not_sampling', 'updates': 'inputs_not_sampling', 'shape': 'base_shape'}), '(indices=where_not_sampling, updates=\n inputs_not_sampling, shape=base_shape)\n', (18032, 18112), False, 'from tensorflow.python.ops import array_ops\n'), ((2293, 2313), 'tensorflow.python.ops.array_ops.shape', 'array_ops.shape', (['inp'], {}), '(inp)\n', (2308, 2313), False, 'from tensorflow.python.ops import array_ops\n'), ((14439, 14467), 'tensorflow.python.ops.array_ops.concat', 'array_ops.concat', (['(x, y)', '(-1)'], {}), '((x, y), -1)\n', (14455, 14467), False, 'from tensorflow.python.ops import array_ops\n'), ((16829, 16875), 'tensorflow.python.ops.array_ops.gather_nd', 'array_ops.gather_nd', (['auxiliary_inputs', 'indices'], {}), '(auxiliary_inputs, indices)\n', (16848, 16875), False, 'from tensorflow.python.ops import array_ops\n'), ((17361, 17393), 'tensorflow.python.ops.math_ops.logical_not', 'math_ops.logical_not', (['sample_ids'], {}), '(sample_ids)\n', 
(17381, 17393), False, 'from tensorflow.python.ops import math_ops\n'), ((16940, 16968), 'tensorflow.python.ops.array_ops.concat', 'array_ops.concat', (['(x, y)', '(-1)'], {}), '((x, y), -1)\n', (16956, 16968), False, 'from tensorflow.python.ops import array_ops\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: tshzzz
"""
import numpy as np
import torch
from src.utils import py_cpu_nms,bbox_iou
def gen_yolo_box(featmaps, anchor_wh):
    """Build the per-cell anchor grid for a YOLO feature map.

    Args:
        featmaps: (H, W) size of the feature map grid.
        anchor_wh: iterable of (w, h) anchor priors, in grid units.
    Returns:
        ndarray of shape (H, W, len(anchor_wh), 4) where entry [i, j, k]
        is (cx, cy, w, h) = (j, i, w_k, h_k) — the cell's column/row index
        followed by the k-th anchor's width and height.
    """
    grid = np.zeros((featmaps[0], featmaps[1], len(anchor_wh), 4))
    for row in range(featmaps[0]):
        for col in range(featmaps[1]):
            for idx, (width, height) in enumerate(anchor_wh):
                grid[row, col, idx] = (col, row, width, height)
    return grid
class yolo_box_encoder(object):
    """Encode ground-truth boxes into YOLO training targets for one feature map."""
    def __init__(self,anchor,class_num,featmap_size):
        # Anchor grid of shape (H, W, num_anchors, 4) holding (cx, cy, w, h).
        self.anchor = gen_yolo_box(featmap_size,anchor)
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)
    def __call__(self,bs):
        """Build (class, confidence, box) target maps from ground-truth rows.

        Args:
            bs: array of ground-truth rows [x, y, w, h, class_id] in
                normalized coordinates; the box center is taken as
                (x + w/2, y + h/2).
        Returns:
            (bb_class, bb_conf, bb_boxes) arrays with shapes
            (H, W, num_anchors, class_num), (H, W, num_anchors, 1)
            and (H, W, num_anchors, 4).
        """
        bb_class = np.zeros((self.featmap_size[0],self.featmap_size[1],self.boxes_num,self.class_num))
        bb_boxes = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))
        bb_conf = np.zeros((self.featmap_size[0],self.featmap_size[1],self.boxes_num,1))
        for i in range(bs.shape[0]):
            # Grid cell containing the box center (clamped inside the map).
            local_x = int(min(0.999, max(0, bs[i, 0] + bs[i, 2] / 2)) * (self.featmap_size[0]) )
            local_y = int(min(0.999, max(0, bs[i, 1] + bs[i, 3] / 2)) * (self.featmap_size[1]) )
            ious = []
            for k in range(self.boxes_num):
                temp_x,temp_y,temp_w,temp_h = self.anchor[local_y,local_x,k,:]
                # Anchor w/h are in grid units; normalize before the IoU test.
                temp_w = temp_w / self.featmap_size[0]
                temp_h = temp_h / self.featmap_size[1]
                anchor_ = np.array([[0,0,temp_w,temp_h]])
                gt = np.array([[0,0,bs[i,2],bs[i,3]]])
                ious.append(bbox_iou(anchor_, gt)[0])
            # Try anchors in order of decreasing IoU with the ground truth.
            selected_ = np.argsort(ious)[::-1]
            for kk,selected_anchor in enumerate(selected_):
                # Assign to the first still-free anchor; skip tiny boxes.
                if bb_conf[local_y,local_x, selected_anchor,0] == 0 and bs[i,2]>0.02 and bs[i,3]>0.02 :
                    tx = (bs[i, 0] + bs[i, 2] / 2) * self.featmap_size[0] \
                         - (self.anchor[local_y,local_x,selected_anchor,0] )
                    ty = (bs[i, 1] + bs[i, 3] / 2) * self.featmap_size[1] \
                         - (self.anchor[local_y,local_x,selected_anchor,1] )
                    # Log-space w/h offsets, clamped to avoid log(<= 0).
                    tw = np.log(max(0.01,bs[i,2]* self.featmap_size[0] / self.anchor[local_y,local_x,selected_anchor,2]) )
                    th = np.log(max(0.01,bs[i,3]* self.featmap_size[1] / self.anchor[local_y,local_x,selected_anchor,3]) )
                    bb_boxes[local_y,local_x, selected_anchor,:] = np.array([tx,ty,tw,th])
                    # Consider a background class / use softmax (original note).
                    #bb_class[local_x, local_y, selected_anchor,:] = 0
                    bb_class[local_y, local_x, selected_anchor, int(bs[i, 4])] = 1
                    bb_conf[local_y,local_x, selected_anchor,0] = 1
                    break
        target = (bb_class,bb_conf,bb_boxes)
        return target
class yolo_box_decoder(object):
    """Decode raw YOLO predictions into boxes and per-class scores with NMS."""
    def __init__(self, anchor, class_num,featmap_size,conf=0.05,nms_thresh=0.5):
        # conf: objectness/class-score threshold; nms_thresh: NMS IoU cutoff.
        self.class_num = class_num
        self.anchor = torch.from_numpy(gen_yolo_box(featmap_size, anchor)).float()
        self.boxes_num = len(anchor)
        self.featmap_size = featmap_size
        self.conf_thresh = conf
        self.nms_thresh = nms_thresh
    def __call__(self, pred):
        """Decode one head's (class, confidence, box) maps.

        Returns:
            (boxes, classes): boxes is an (N, 4) numpy array of decoded
            [x, y, w, h]; classes is an (N, 2) numpy array of
            [class_id, score].
        """
        boxes = []
        classes = []
        pred_cls, pred_conf, pred_bboxes = pred
        featmap_size = torch.Tensor([pred_cls.shape[1], pred_cls.shape[2]])
        # Flatten all grid cells/anchors into rows.
        pred_cls = pred_cls.cpu().float().view(-1,self.class_num)
        pred_conf = pred_conf.cpu().float().view(-1,1)
        pred_bboxes = pred_bboxes.cpu().float().view(-1,4)
        anchor = self.anchor.repeat(1, 1, 1, 1, 1).cpu().view(-1,4)
        # Keep only predictions whose objectness clears the threshold.
        pred_mask = (pred_conf>self.conf_thresh).view(-1)
        pred_bboxes = pred_bboxes[pred_mask]
        pred_conf = pred_conf[pred_mask]
        pred_cls = pred_cls[pred_mask]
        anchor = anchor[pred_mask]
        for cls in range(self.class_num):
            # Class score = class probability * objectness.
            cls_prob = pred_cls[:, cls].float() * pred_conf[:, 0]
            mask_a = cls_prob.gt(self.conf_thresh)
            bbox = pred_bboxes[mask_a]
            anchor_ = anchor[mask_a]
            cls_prob = cls_prob[mask_a]
            if bbox.shape[0] > 0:
                # Decode w/h first: the x/y decode below reads the decoded w/h.
                bbox[:, 2:4] = torch.exp(bbox[:, 2:4]) * anchor_[:, 2:4] / (featmap_size[0:2])
                bbox[:, 0:2] = (bbox[:, 0:2] + (anchor_[:, 0:2]))/ (featmap_size[0:2]) - bbox[:, 2:4] / 2
                #bbox[:, 0:2] = (bbox[:, 0:2] + (anchor_[:, 0:2])) - bbox[:, 2:4] / 2
                pre_cls_box = bbox.data.numpy()
                pre_cls_score = cls_prob.data.view(-1).numpy()
                keep = py_cpu_nms(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)
                for conf_keep, loc_keep in zip(pre_cls_score[keep], pre_cls_box[keep]):
                    boxes.append(loc_keep)
                    classes.append([cls, conf_keep])
        boxes = np.array(boxes)
        classes = np.array(classes)
        return boxes,classes
class single_decoder(object):
    """Decode one YOLO head's raw outputs into box space (no NMS)."""
    def __init__(self, anchor, class_num, featmap_size, conf=0.01):
        """anchor: list of (w, h) priors; featmap_size: (H, W) of the head."""
        self.class_num = class_num
        self.boxes_num = len(anchor)
        self.featmap_size = featmap_size
        self.conf_thresh = conf
        prior_grid = gen_yolo_box(featmap_size, anchor)
        self.anchor = torch.from_numpy(prior_grid).float()
    def __call__(self, pred):
        """Return (class probs, objectness, xywh boxes) above the threshold."""
        cls_map, conf_map, box_map = pred
        grid_wh = torch.Tensor([cls_map.shape[1], cls_map.shape[2]])
        # Flatten every grid cell/anchor into one row each.
        cls_flat = cls_map.cpu().float().view(-1, self.class_num)
        conf_flat = conf_map.cpu().float().view(-1, 1)
        box_flat = box_map.cpu().float().view(-1, 4)
        prior_flat = self.anchor.repeat(1, 1, 1, 1, 1).cpu().view(-1, 4)
        # Drop rows whose objectness does not clear the threshold.
        keep = (conf_flat > self.conf_thresh).view(-1)
        box_flat = box_flat[keep]
        conf_flat = conf_flat[keep]
        cls_flat = cls_flat[keep]
        prior_flat = prior_flat[keep]
        # Decode w/h first: the x/y decode below reads the decoded w/h.
        box_flat[:, 2:4] = torch.exp(box_flat[:, 2:4]) * prior_flat[:, 2:4] / (grid_wh[0:2])
        box_flat[:, 0:2] = (box_flat[:, 0:2] + (prior_flat[:, 0:2])) / (grid_wh[0:2]) - box_flat[:, 2:4] / 2
        return cls_flat, conf_flat, box_flat
class group_decoder(object):
    """Decode predictions from several YOLO heads and merge them with per-class NMS."""
    def __init__(self, anchor, class_num, featmap_size, conf=0.01, nms_thresh=0.5):
        """
        Args:
            anchor: per-head anchor lists; anchor[i] belongs to head i.
            class_num: number of object classes.
            featmap_size: per-head feature-map sizes, parallel to `anchor`.
            conf: confidence threshold (applied per head and per class).
            nms_thresh: IoU threshold for non-maximum suppression.
        """
        # One single_decoder per detection head.
        self.decoder = [single_decoder(head_anchor, class_num, head_size, conf)
                        for head_anchor, head_size in zip(anchor, featmap_size)]
        self.class_num = class_num
        self.conf_thresh = conf
        self.nms_thresh = nms_thresh
    def __call__(self, preds):
        """Decode each head, then threshold + NMS per class.

        Returns:
            (boxes, classes): boxes is an (N, 4) numpy array of [x, y, w, h];
            classes is an (N, 2) numpy array of [class_id, score].
        """
        decoded = [decoder(pred) for pred, decoder in zip(preds, self.decoder)]
        # Concatenate rows across heads. (torch.cat takes the lists directly;
        # the original wrapped them in redundant comprehensions.)
        pred_cls = torch.cat([d[0] for d in decoded])
        pred_conf = torch.cat([d[1] for d in decoded])
        pred_bboxes = torch.cat([d[2] for d in decoded])
        boxes = []
        classes = []
        for cls in range(self.class_num):
            # Class score = class probability * objectness.
            cls_prob = pred_cls[:, cls].float() * pred_conf[:, 0]
            mask_a = cls_prob.gt(self.conf_thresh)
            bbox = pred_bboxes[mask_a]
            cls_prob = cls_prob[mask_a]
            # NOTE: the original also gathered pred_conf[mask_a] into an
            # `iou_prob` local that was never used; removed.
            if bbox.shape[0] > 0:
                pre_cls_box = bbox.data.numpy()
                pre_cls_score = cls_prob.data.view(-1).numpy()
                keep = py_cpu_nms(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)
                for conf_keep, loc_keep in zip(pre_cls_score[keep], pre_cls_box[keep]):
                    boxes.append(loc_keep)
                    classes.append([cls, conf_keep])
        return np.array(boxes), np.array(classes)
class single_encoder(object):
    """Encode ground-truth boxes into YOLO training targets for one scale.

    Targets accumulate across calls in three arrays shaped
    (H, W, num_anchors, ...): one-hot class labels, objectness confidence,
    and regression offsets (tx, ty, tw, th).
    """
    def __init__(self, anchor, class_num, featmap_size):
        # anchor B,13,13,5
        self.anchor = gen_yolo_box(featmap_size, anchor)
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)
        # Fix: the zeroed target arrays were allocated here with code
        # duplicated from clean_target(); delegate so the two can't drift.
        self.clean_target()
    def get_target(self):
        """Return the accumulated (class, conf, box) target arrays."""
        return (self.bb_class, self.bb_conf, self.bb_boxes)
    def clean_target(self):
        """Reset (or initially allocate) every target array to zeros."""
        self.bb_class = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num))
        self.bb_boxes = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))
        self.bb_conf = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))
        return
    def __call__(self, bs):
        """Write the target for one ground-truth row ``bs``.

        ``bs`` appears to be (x, y, w, h, class) with coordinates
        normalized to [0, 1] and (x, y) the top-left corner --
        TODO confirm against the caller.
        """
        # Grid cell containing the box centre (clamped inside the map).
        local_x = int(min(0.999, max(0, bs[0] + bs[2] / 2)) * (self.featmap_size[0]))
        local_y = int(min(0.999, max(0, bs[1] + bs[3] / 2)) * (self.featmap_size[1]))
        # IoU of the ground-truth shape against each anchor shape
        # (both translated to the origin, so position-independent).
        ious = []
        for k in range(self.boxes_num):
            temp_x, temp_y, temp_w, temp_h = self.anchor[local_y, local_x, k, :]
            temp_w = temp_w / self.featmap_size[0]
            temp_h = temp_h / self.featmap_size[1]
            anchor_ = np.array([[0, 0, temp_w, temp_h]])
            gt = np.array([[0, 0, bs[2], bs[3]]])
            ious.append(bbox_iou(anchor_, gt)[0])
        # Best-matching anchors first.
        selected_ = np.argsort(ious)[::-1]
        for kk, selected_anchor in enumerate(selected_):
            # Assign to the best still-free anchor; skip degenerate boxes.
            if self.bb_conf[local_y, local_x, selected_anchor, 0] == 0 and bs[2] > 0.02 and bs[3] > 0.02:
                tx = (bs[0] + bs[2] / 2) * self.featmap_size[0] - (self.anchor[local_y, local_x, selected_anchor, 0])
                ty = (bs[1] + bs[3] / 2) * self.featmap_size[1] - (self.anchor[local_y, local_x, selected_anchor, 1])
                tw = np.log(max(0.01, bs[2] * self.featmap_size[0] / self.anchor[local_y, local_x, selected_anchor, 2]))
                th = np.log(max(0.01, bs[3] * self.featmap_size[1] / self.anchor[local_y, local_x, selected_anchor, 3]))
                self.bb_boxes[local_y, local_x, selected_anchor, :] = np.array([tx, ty, tw, th])
                # Background is handled via softmax over the classes.
                self.bb_class[local_y, local_x, selected_anchor, int(bs[4])] = 1
                self.bb_conf[local_y, local_x, selected_anchor, 0] = 1
                break
        return
class group_encoder(object):
    """Encode ground-truth boxes into training targets for every scale.

    Owns one ``single_encoder`` per feature-map scale; calling the
    instance feeds a whole batch of ground-truth rows through each
    encoder, snapshots the per-scale targets, and resets the encoders.
    """
    def __init__(self, anchor, class_num, featmap_size):
        # anchor B,13,13,5
        self.anchor = anchor
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)
        self.featmap_num = len(featmap_size)
        self.encoder = [single_encoder(anchor[i], class_num, featmap_size[i])
                        for i in range(len(anchor))]
    def __call__(self, bs):
        # Feed every ground-truth row to every per-scale encoder.
        for row in range(bs.shape[0]):
            for enc in self.encoder:
                enc(bs[row])
        # Collect the accumulated targets, then reset for the next batch.
        target = [enc.get_target() for enc in self.encoder]
        for enc in self.encoder:
            enc.clean_target()
        return target
|
[
"numpy.zeros",
"torch.cat",
"numpy.argsort",
"torch.exp",
"torch.Tensor",
"numpy.array",
"src.utils.py_cpu_nms",
"src.utils.bbox_iou"
] |
[((936, 1027), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.\n class_num))\n', (944, 1027), True, 'import numpy as np\n'), ((1039, 1112), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))\n', (1047, 1112), True, 'import numpy as np\n'), ((1131, 1204), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))\n', (1139, 1204), True, 'import numpy as np\n'), ((3620, 3672), 'torch.Tensor', 'torch.Tensor', (['[pred_cls.shape[1], pred_cls.shape[2]]'], {}), '([pred_cls.shape[1], pred_cls.shape[2]])\n', (3632, 3672), False, 'import torch\n'), ((5170, 5185), 'numpy.array', 'np.array', (['boxes'], {}), '(boxes)\n', (5178, 5185), True, 'import numpy as np\n'), ((5204, 5221), 'numpy.array', 'np.array', (['classes'], {}), '(classes)\n', (5212, 5221), True, 'import numpy as np\n'), ((5683, 5735), 'torch.Tensor', 'torch.Tensor', (['[pred_cls.shape[1], pred_cls.shape[2]]'], {}), '([pred_cls.shape[1], pred_cls.shape[2]])\n', (5695, 5735), False, 'import torch\n'), ((7209, 7245), 'torch.cat', 'torch.cat', (['[cls for cls in pred_cls]'], {}), '([cls for cls in pred_cls])\n', (7218, 7245), False, 'import torch\n'), ((7268, 7309), 'torch.cat', 'torch.cat', (['[bbox for bbox in pred_bboxes]'], {}), '([bbox for bbox in pred_bboxes])\n', (7277, 7309), False, 'import torch\n'), ((7330, 7369), 'torch.cat', 'torch.cat', (['[conf for conf in pred_conf]'], {}), '([conf for conf in pred_conf])\n', (7339, 7369), False, 'import torch\n'), ((8186, 8201), 'numpy.array', 'np.array', (['boxes'], {}), '(boxes)\n', (8194, 8201), True, 'import numpy as np\n'), ((8220, 8237), 'numpy.array', 'np.array', (['classes'], {}), '(classes)\n', (8228, 
8237), True, 'import numpy as np\n'), ((8584, 8675), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.\n class_num))\n', (8592, 8675), True, 'import numpy as np\n'), ((8695, 8768), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))\n', (8703, 8768), True, 'import numpy as np\n'), ((8792, 8865), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))\n', (8800, 8865), True, 'import numpy as np\n'), ((9005, 9096), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.\n class_num))\n', (9013, 9096), True, 'import numpy as np\n'), ((9116, 9189), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))\n', (9124, 9189), True, 'import numpy as np\n'), ((9213, 9286), 'numpy.zeros', 'np.zeros', (['(self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1)'], {}), '((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))\n', (9221, 9286), True, 'import numpy as np\n'), ((9768, 9802), 'numpy.array', 'np.array', (['[[0, 0, temp_w, temp_h]]'], {}), '([[0, 0, temp_w, temp_h]])\n', (9776, 9802), True, 'import numpy as np\n'), ((9820, 9852), 'numpy.array', 'np.array', (['[[0, 0, bs[2], bs[3]]]'], {}), '([[0, 0, bs[2], bs[3]]])\n', (9828, 9852), True, 'import numpy as np\n'), ((9924, 9940), 'numpy.argsort', 'np.argsort', (['ious'], {}), '(ious)\n', (9934, 9940), True, 'import numpy as np\n'), ((1717, 1751), 'numpy.array', 'np.array', (['[[0, 0, temp_w, temp_h]]'], {}), 
'([[0, 0, temp_w, temp_h]])\n', (1725, 1751), True, 'import numpy as np\n'), ((1770, 1808), 'numpy.array', 'np.array', (['[[0, 0, bs[i, 2], bs[i, 3]]]'], {}), '([[0, 0, bs[i, 2], bs[i, 3]]])\n', (1778, 1808), True, 'import numpy as np\n'), ((1884, 1900), 'numpy.argsort', 'np.argsort', (['ious'], {}), '(ious)\n', (1894, 1900), True, 'import numpy as np\n'), ((4906, 4968), 'src.utils.py_cpu_nms', 'py_cpu_nms', (['pre_cls_box', 'pre_cls_score'], {'thresh': 'self.nms_thresh'}), '(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)\n', (4916, 4968), False, 'from src.utils import py_cpu_nms, bbox_iou\n'), ((6270, 6300), 'torch.exp', 'torch.exp', (['pred_bboxes[:, 2:4]'], {}), '(pred_bboxes[:, 2:4])\n', (6279, 6300), False, 'import torch\n'), ((7922, 7984), 'src.utils.py_cpu_nms', 'py_cpu_nms', (['pre_cls_box', 'pre_cls_score'], {'thresh': 'self.nms_thresh'}), '(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)\n', (7932, 7984), False, 'from src.utils import py_cpu_nms, bbox_iou\n'), ((10663, 10689), 'numpy.array', 'np.array', (['[tx, ty, tw, th]'], {}), '([tx, ty, tw, th])\n', (10671, 10689), True, 'import numpy as np\n'), ((2718, 2744), 'numpy.array', 'np.array', (['[tx, ty, tw, th]'], {}), '([tx, ty, tw, th])\n', (2726, 2744), True, 'import numpy as np\n'), ((9877, 9898), 'src.utils.bbox_iou', 'bbox_iou', (['anchor_', 'gt'], {}), '(anchor_, gt)\n', (9885, 9898), False, 'from src.utils import py_cpu_nms, bbox_iou\n'), ((1832, 1853), 'src.utils.bbox_iou', 'bbox_iou', (['anchor_', 'gt'], {}), '(anchor_, gt)\n', (1840, 1853), False, 'from src.utils import py_cpu_nms, bbox_iou\n'), ((4514, 4537), 'torch.exp', 'torch.exp', (['bbox[:, 2:4]'], {}), '(bbox[:, 2:4])\n', (4523, 4537), False, 'import torch\n')]
|
#!/usr/bin/env python3
# Copyright 2018 ckb-next Development Team <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import re
# Firmware image layout constants (byte sizes / offset).
# NOTE(review): none of these are referenced in this chunk; presumably
# consumed by image-parsing code outside this view -- confirm before reuse.
BLD_LEN = 0x6000       # bootloader region length
APP_LEN = 0xC000       # application region length
ISP_LEN = 0x20000      # ISP (in-system programming) region length
FWINFO_OFFSET = 0x102  # offset of the VID/PID/version record, per printFWInfo's field layout
def printFWInfo(ids, data_str, prefix):
    """Print vendor/product/version info extracted from a firmware image.

    ids      -- byte sequence holding little-endian VID, PID and version words
    data_str -- raw image bytes, searched for a UTF-16 protocol-version marker
    prefix   -- label prepended to every printed line
    """
    def word(lo):
        # Render a little-endian 16-bit value as four hex digits.
        return format(ids[lo + 1], "02x") + format(ids[lo], "02x")
    print(prefix + " Vendor ID: 0x" + word(0))
    print(prefix + " Product ID: 0x" + word(2))
    print(prefix + " Version: 0x" + word(4))
    # Try to find the protocol version
    pattern = re.compile(b"P\x00[0x]\x00[0-9x]\x00")
    matches = pattern.findall(data_str)
    if not matches:
        print("Could not detect " + prefix + " Protocol Version")
    elif len(matches) > 1:
        print("Possible " + prefix + " Protocol Versions:")
        for m in matches:
            print(m.decode("utf-16"))
    else:
        print(prefix + " Protocol Version: " + matches[0].decode("utf-16"))
|
[
"re.compile"
] |
[((2053, 2091), 're.compile', 're.compile', (["b'P\\x00[0x]\\x00[0-9x]\\x00'"], {}), "(b'P\\x00[0x]\\x00[0-9x]\\x00')\n", (2063, 2091), False, 'import re\n')]
|
from flask import current_app, render_template
from flask_mail import Message
# Code from
# https://github.com/lingthio/Flask-User/blob/master/flask_user/emails.py
def render_email(filename, **kwargs):
    """Render one email template in both HTML and plain-text form.

    Looks up ``<filename>.html`` and ``<filename>.txt`` and returns the
    rendered bodies as a ``(html_message, text_message)`` tuple.
    """
    html_body, text_body = (render_template(filename + ext, **kwargs)
                            for ext in ('.html', '.txt'))
    return (html_body, text_body)
def send_email(recipient, subject, html_message, text_message):
    """
    Send the email to a specific recipient.

    Builds a Flask-Mail message with both HTML and plain-text bodies and
    sends it through the ``mail`` extension registered on the app.
    """
    mail_engine = current_app.extensions.get('mail')
    # Bug fix: the class imported from flask_mail at the top of this file
    # is ``Message``; ``EmailMessage`` was undefined and raised NameError.
    message = Message(
        subject, recipients=[recipient], html=html_message, body=text_message)
    mail_engine.send(message)
def send_activate_account_email(user_email, token):
    """Email the user a link for activating their account."""
    server_ip = current_app.config.get('SERVER_IP')
    activate_account_link = (
        'http://{0}:5000/activate_account?token='.format(server_ip) + token)
    subject = 'Action Required: Activate Your Account!'
    # Render both HTML and plain-text bodies from the template pair.
    html_message, text_message = render_email(
        'activate_account', activate_account_link=activate_account_link)
    send_email(user_email, subject, html_message, text_message)
def send_forget_password_email(user_email, token):
    """Email the user a link for resetting their password."""
    server_ip = current_app.config.get('SERVER_IP')
    forget_password_link = (
        'http://{0}:5000/forget_password?token='.format(server_ip) + token)
    subject = 'Alerts: You Requested to Reset Your Password!'
    # Render both HTML and plain-text bodies from the template pair.
    html_message, text_message = render_email(
        'forget_password', forget_password_link=forget_password_link)
    send_email(user_email, subject, html_message, text_message)
|
[
"flask.current_app.extensions.get",
"flask.current_app.config.get",
"flask.render_template"
] |
[((288, 333), 'flask.render_template', 'render_template', (["(filename + '.html')"], {}), "(filename + '.html', **kwargs)\n", (303, 333), False, 'from flask import current_app, render_template\n'), ((353, 397), 'flask.render_template', 'render_template', (["(filename + '.txt')"], {}), "(filename + '.txt', **kwargs)\n", (368, 397), False, 'from flask import current_app, render_template\n'), ((582, 616), 'flask.current_app.extensions.get', 'current_app.extensions.get', (['"""mail"""'], {}), "('mail')\n", (608, 616), False, 'from flask import current_app, render_template\n'), ((967, 1002), 'flask.current_app.config.get', 'current_app.config.get', (['"""SERVER_IP"""'], {}), "('SERVER_IP')\n", (989, 1002), False, 'from flask import current_app, render_template\n'), ((1472, 1507), 'flask.current_app.config.get', 'current_app.config.get', (['"""SERVER_IP"""'], {}), "('SERVER_IP')\n", (1494, 1507), False, 'from flask import current_app, render_template\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for custom SQLAlchemy types via Magnum DB."""
from oslo_db import exception as db_exc
from oslo_utils import uuidutils
import magnum.db.sqlalchemy.api as sa_api
from magnum.db.sqlalchemy import models
from magnum.tests.unit.db import base
class SqlAlchemyCustomTypesTestCase(base.DbTestCase):
    """Exercise the JSON-encoded dict/list column types through the DB API."""

    @staticmethod
    def _fetch_cluster_template(uuid):
        # Load a ClusterTemplate row directly through SQLAlchemy.
        return sa_api.model_query(
            models.ClusterTemplate).filter_by(uuid=uuid).one()

    @staticmethod
    def _fetch_nodegroup(uuid):
        # Load a NodeGroup row directly through SQLAlchemy.
        return sa_api.model_query(
            models.NodeGroup).filter_by(uuid=uuid).one()

    def test_JSONEncodedDict_default_value(self):
        # Without labels the column must default to an empty dict.
        tpl_uuid = uuidutils.generate_uuid()
        self.dbapi.create_cluster_template({'uuid': tpl_uuid})
        self.assertEqual({}, self._fetch_cluster_template(tpl_uuid).labels)

        # With labels the stored dict must round-trip intact.
        tpl_uuid = uuidutils.generate_uuid()
        self.dbapi.create_cluster_template(
            {'uuid': tpl_uuid, 'labels': {'bar': 'foo'}})
        self.assertEqual(
            'foo', self._fetch_cluster_template(tpl_uuid).labels['bar'])

    def test_JSONEncodedDict_type_check(self):
        # A non-dict value must be rejected by the column type.
        self.assertRaises(db_exc.DBError,
                          self.dbapi.create_cluster_template,
                          {'labels': ['this is not a dict']})

    def test_JSONEncodedList_default_value(self):
        # Without node_addresses the column must default to an empty list.
        ng_uuid = uuidutils.generate_uuid()
        self.dbapi.create_nodegroup({'uuid': ng_uuid})
        self.assertEqual([], self._fetch_nodegroup(ng_uuid).node_addresses)

        # With node_addresses the stored list must round-trip intact.
        ng_uuid = uuidutils.generate_uuid()
        self.dbapi.create_nodegroup({
            'uuid': ng_uuid,
            'node_addresses': ['mynode_address1',
                               'mynode_address2']
        })
        self.assertEqual(['mynode_address1', 'mynode_address2'],
                         self._fetch_nodegroup(ng_uuid).node_addresses)

    def test_JSONEncodedList_type_check(self):
        # A non-list value must be rejected by the column type.
        self.assertRaises(db_exc.DBError,
                          self.dbapi.create_nodegroup,
                          {'node_addresses':
                              {'this is not a list': 'test'}})
|
[
"magnum.db.sqlalchemy.api.model_query",
"oslo_utils.uuidutils.generate_uuid"
] |
[((1005, 1030), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (1028, 1030), False, 'from oslo_utils import uuidutils\n'), ((1365, 1390), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (1388, 1390), False, 'from oslo_utils import uuidutils\n'), ((2059, 2084), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (2082, 2084), False, 'from oslo_utils import uuidutils\n'), ((2381, 2406), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (2404, 2406), False, 'from oslo_utils import uuidutils\n'), ((1134, 1176), 'magnum.db.sqlalchemy.api.model_query', 'sa_api.model_query', (['models.ClusterTemplate'], {}), '(models.ClusterTemplate)\n', (1152, 1176), True, 'import magnum.db.sqlalchemy.api as sa_api\n'), ((1533, 1575), 'magnum.db.sqlalchemy.api.model_query', 'sa_api.model_query', (['models.ClusterTemplate'], {}), '(models.ClusterTemplate)\n', (1551, 1575), True, 'import magnum.db.sqlalchemy.api as sa_api\n'), ((2167, 2203), 'magnum.db.sqlalchemy.api.model_query', 'sa_api.model_query', (['models.NodeGroup'], {}), '(models.NodeGroup)\n', (2185, 2203), True, 'import magnum.db.sqlalchemy.api as sa_api\n'), ((2612, 2648), 'magnum.db.sqlalchemy.api.model_query', 'sa_api.model_query', (['models.NodeGroup'], {}), '(models.NodeGroup)\n', (2630, 2648), True, 'import magnum.db.sqlalchemy.api as sa_api\n')]
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
from aws_cdk import (
aws_iam as iam,
# aws_lambda as _lambda,
# aws_sagemaker as sm,
core
)
import os
ROLE_NAME_PREFIX = os.environ["ROLE_NAME_PREFIX"]
class SageMakerStudioStack(core.Stack):
    """CDK stack that provisions a SageMaker Studio domain plus two user profiles.

    Supports two authentication modes (IAM users vs. account federation),
    selected via the StudioAuthentication template parameter; the chosen
    mode decides which username/role each Studio profile is tagged with.
    """
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # CloudFormation Parameters
        studio_authentication = core.CfnParameter(self, "StudioAuthentication",
            type="String",
            description="Authentication method for SageMaker Studio.",
            allowed_values=[
                "AWS IAM with IAM users",
                "AWS IAM with AWS account federation (external IdP)"
            ],
            default = "AWS IAM with IAM users"
        )

        user_data_scientist_1 = core.CfnParameter(self, "DataScientistFullAccessUsername",
            type="String",
            description="Username for Data Scientist with full access to Amazon Reviews.",
            allowed_pattern="^[a-zA-Z0-9](-*[a-zA-Z0-9])*",
            default = "data-scientist-full"
        )

        user_data_scientist_2 = core.CfnParameter(self, "DataScientistLimitedAccessUsername",
            type="String",
            description="Username for Data Scientist with limited access to Amazon Reviews.",
            allowed_pattern="^[a-zA-Z0-9](-*[a-zA-Z0-9])*",
            default = "data-scientist-limited"
        )

        federated_user_data_scientist_1 = core.CfnParameter(self, "FederatedDataScientistFullAccess",
            type="String",
            description="\
IdP user name for data scientist with full access to Amazon Reviews (e.g., \"username\", or \"username@domain\").",
        )

        federated_user_data_scientist_2 = core.CfnParameter(self, "FederatedDataScientistLimitedAccess",
            type="String",
            description="\
IdP user name for data scientist with limited access to Amazon Reviews (e.g., \"username\", or \"<EMAIL>\").",
        )

        sagemaker_studio_vpc = core.CfnParameter(self, "SageMakerStudioVpcId",
            type="String",
            description="VPC that SageMaker Studio will use for communication with the EFS volume."
        )

        sagemaker_studio_subnets = core.CfnParameter(self, "SageMakerStudiosubnetIds",
            type="CommaDelimitedList",
            description="Subnet(s) that SageMaker Studio will use for communication with the EFS volume. Must be in the selected VPC and in different AZs."
        )

        # Synthesized-template metadata (format version, description, license).
        self.template_options.template_format_version = "2010-09-09"
        self.template_options.description = "SageMaker Studio and Studio User Profiles."
        self.template_options.metadata = { "License": "MIT-0" }

        # Conditions for SageMaker Studio authentication
        aws_iam_users = core.CfnCondition(self, "IsIAMUserAuthentication",
            expression = core.Fn.condition_equals("AWS IAM with IAM users", studio_authentication)
        )
        aws_federation = core.CfnCondition(self, "IsFederatedAuthentication",
            expression = core.Fn.condition_equals("AWS IAM with AWS account federation (external IdP)", studio_authentication)
        )

        # IAM Users and Roles for Data Scientists
        # Fn::If picks the IAM username or the federated username depending
        # on which authentication condition is true at deploy time.
        data_scientist_role_1 = core.Fn.condition_if(
            aws_iam_users.logical_id,
            user_data_scientist_1.value_as_string,
            core.Fn.condition_if(aws_federation.logical_id, federated_user_data_scientist_1.value_as_string, "")
        )

        data_scientist_role_2 = core.Fn.condition_if(
            aws_iam_users.logical_id,
            user_data_scientist_2.value_as_string,
            core.Fn.condition_if(aws_federation.logical_id, federated_user_data_scientist_2.value_as_string, "")
        )

        # Existing roles named ROLE_NAME_PREFIX + username are looked up,
        # not created — NOTE(review): they must pre-exist in the account.
        role_1 = iam.Role.from_role_arn(self, "DataScientistFullIAMRole",
            role_arn = f"arn:aws:iam::{core.Aws.ACCOUNT_ID}:role/{ROLE_NAME_PREFIX}{data_scientist_role_1.to_string()}"
        )

        role_2 = iam.Role.from_role_arn(self, "DataScientistLimitedIAMRole",
            role_arn = f"arn:aws:iam::{core.Aws.ACCOUNT_ID}:role/{ROLE_NAME_PREFIX}{data_scientist_role_2.to_string()}"
        )

        # Create SageMaker Studio Domain (as CfnResource)
        sm_default_execution_role = iam.Role(self, "SageMakerStudioDefaultExecutionRole",
            role_name = ROLE_NAME_PREFIX + "Default",
            assumed_by = iam.ServicePrincipal('sagemaker.amazonaws.com'),
            managed_policies = [iam.ManagedPolicy.from_aws_managed_policy_name("AmazonSageMakerFullAccess")]
        )

        sm_domain = core.CfnResource(self, "SageMakerDomain",
            type = "AWS::SageMaker::Domain",
            properties = {
                "AuthMode" : "IAM",
                "DefaultUserSettings" : {
                    "ExecutionRole": sm_default_execution_role.role_arn
                },
                "DomainName" : "default-domain",
                "SubnetIds" : sagemaker_studio_subnets.value_as_list,
                "VpcId" : sagemaker_studio_vpc.value_as_string
        })

        sm_domain_id = sm_domain.ref

        # Create SageMaker Studio User Profiles (as CfnResources)
        # The studiouserid tag ties each profile to the resolved username.
        sm_profile_full = core.CfnResource(self, "SageMakerUserProfileDataScientistFull",
            type = "AWS::SageMaker::UserProfile",
            properties = {
                "DomainId" : sm_domain_id,
                "Tags" : [{
                    "Key" : "studiouserid",
                    "Value" : data_scientist_role_1
                    }],
                "UserProfileName" : user_data_scientist_1.value_as_string,
                "UserSettings" : {
                    "ExecutionRole" : role_1.role_arn,
                }
        })

        sm_profile_limited = core.CfnResource(self, "SageMakerUserProfileDataScientistLimited",
            type = "AWS::SageMaker::UserProfile",
            properties = {
                "DomainId" : sm_domain_id,
                "Tags" : [{
                    "Key" : "studiouserid",
                    "Value" : data_scientist_role_2
                    }],
                "UserProfileName" : user_data_scientist_2.value_as_string,
                "UserSettings" : {
                    "ExecutionRole" : role_2.role_arn,
                }
        })

        # Profiles must not be created before the domain exists.
        sm_profile_full.node.add_dependency(sm_domain)
        sm_profile_limited.node.add_dependency(sm_domain)
|
[
"aws_cdk.core.CfnParameter",
"aws_cdk.aws_iam.ManagedPolicy.from_aws_managed_policy_name",
"aws_cdk.core.CfnResource",
"aws_cdk.core.Fn.condition_equals",
"aws_cdk.aws_iam.ServicePrincipal",
"aws_cdk.core.Fn.condition_if"
] |
[((472, 743), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""StudioAuthentication"""'], {'type': '"""String"""', 'description': '"""Authentication method for SageMaker Studio."""', 'allowed_values': "['AWS IAM with IAM users', 'AWS IAM with AWS account federation (external IdP)'\n ]", 'default': '"""AWS IAM with IAM users"""'}), "(self, 'StudioAuthentication', type='String', description=\n 'Authentication method for SageMaker Studio.', allowed_values=[\n 'AWS IAM with IAM users',\n 'AWS IAM with AWS account federation (external IdP)'], default=\n 'AWS IAM with IAM users')\n", (489, 743), False, 'from aws_cdk import aws_iam as iam, core\n'), ((791, 1040), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""DataScientistFullAccessUsername"""'], {'type': '"""String"""', 'description': '"""Username for Data Scientist with full access to Amazon Reviews."""', 'allowed_pattern': '"""^[a-zA-Z0-9](-*[a-zA-Z0-9])*"""', 'default': '"""data-scientist-full"""'}), "(self, 'DataScientistFullAccessUsername', type='String',\n description=\n 'Username for Data Scientist with full access to Amazon Reviews.',\n allowed_pattern='^[a-zA-Z0-9](-*[a-zA-Z0-9])*', default=\n 'data-scientist-full')\n", (808, 1040), False, 'from aws_cdk import aws_iam as iam, core\n'), ((1073, 1331), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""DataScientistLimitedAccessUsername"""'], {'type': '"""String"""', 'description': '"""Username for Data Scientist with limited access to Amazon Reviews."""', 'allowed_pattern': '"""^[a-zA-Z0-9](-*[a-zA-Z0-9])*"""', 'default': '"""data-scientist-limited"""'}), "(self, 'DataScientistLimitedAccessUsername', type='String',\n description=\n 'Username for Data Scientist with limited access to Amazon Reviews.',\n allowed_pattern='^[a-zA-Z0-9](-*[a-zA-Z0-9])*', default=\n 'data-scientist-limited')\n", (1090, 1331), False, 'from aws_cdk import aws_iam as iam, core\n'), ((1374, 1587), 'aws_cdk.core.CfnParameter', 
'core.CfnParameter', (['self', '"""FederatedDataScientistFullAccess"""'], {'type': '"""String"""', 'description': '"""IdP user name for data scientist with full access to Amazon Reviews (e.g., "username", or "username@domain")."""'}), '(self, \'FederatedDataScientistFullAccess\', type=\'String\',\n description=\n \'IdP user name for data scientist with full access to Amazon Reviews (e.g., "username", or "username@domain").\'\n )\n', (1391, 1587), False, 'from aws_cdk import aws_iam as iam, core\n'), ((1631, 1843), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""FederatedDataScientistLimitedAccess"""'], {'type': '"""String"""', 'description': '"""IdP user name for data scientist with limited access to Amazon Reviews (e.g., "username", or "<EMAIL>")."""'}), '(self, \'FederatedDataScientistLimitedAccess\', type=\n \'String\', description=\n \'IdP user name for data scientist with limited access to Amazon Reviews (e.g., "username", or "<EMAIL>").\'\n )\n', (1648, 1843), False, 'from aws_cdk import aws_iam as iam, core\n'), ((1875, 2036), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""SageMakerStudioVpcId"""'], {'type': '"""String"""', 'description': '"""VPC that SageMaker Studio will use for communication with the EFS volume."""'}), "(self, 'SageMakerStudioVpcId', type='String', description=\n 'VPC that SageMaker Studio will use for communication with the EFS volume.'\n )\n", (1892, 2036), False, 'from aws_cdk import aws_iam as iam, core\n'), ((2070, 2308), 'aws_cdk.core.CfnParameter', 'core.CfnParameter', (['self', '"""SageMakerStudiosubnetIds"""'], {'type': '"""CommaDelimitedList"""', 'description': '"""Subnet(s) that SageMaker Studio will use for communication with the EFS volume. Must be in the selected VPC and in different AZs."""'}), "(self, 'SageMakerStudiosubnetIds', type=\n 'CommaDelimitedList', description=\n 'Subnet(s) that SageMaker Studio will use for communication with the EFS volume. 
Must be in the selected VPC and in different AZs.'\n )\n", (2087, 2308), False, 'from aws_cdk import aws_iam as iam, core\n'), ((4171, 4501), 'aws_cdk.core.CfnResource', 'core.CfnResource', (['self', '"""SageMakerDomain"""'], {'type': '"""AWS::SageMaker::Domain"""', 'properties': "{'AuthMode': 'IAM', 'DefaultUserSettings': {'ExecutionRole':\n sm_default_execution_role.role_arn}, 'DomainName': 'default-domain',\n 'SubnetIds': sagemaker_studio_subnets.value_as_list, 'VpcId':\n sagemaker_studio_vpc.value_as_string}"}), "(self, 'SageMakerDomain', type='AWS::SageMaker::Domain',\n properties={'AuthMode': 'IAM', 'DefaultUserSettings': {'ExecutionRole':\n sm_default_execution_role.role_arn}, 'DomainName': 'default-domain',\n 'SubnetIds': sagemaker_studio_subnets.value_as_list, 'VpcId':\n sagemaker_studio_vpc.value_as_string})\n", (4187, 4501), False, 'from aws_cdk import aws_iam as iam, core\n'), ((4654, 4986), 'aws_cdk.core.CfnResource', 'core.CfnResource', (['self', '"""SageMakerUserProfileDataScientistFull"""'], {'type': '"""AWS::SageMaker::UserProfile"""', 'properties': "{'DomainId': sm_domain_id, 'Tags': [{'Key': 'studiouserid', 'Value':\n data_scientist_role_1}], 'UserProfileName': user_data_scientist_1.\n value_as_string, 'UserSettings': {'ExecutionRole': role_1.role_arn}}"}), "(self, 'SageMakerUserProfileDataScientistFull', type=\n 'AWS::SageMaker::UserProfile', properties={'DomainId': sm_domain_id,\n 'Tags': [{'Key': 'studiouserid', 'Value': data_scientist_role_1}],\n 'UserProfileName': user_data_scientist_1.value_as_string,\n 'UserSettings': {'ExecutionRole': role_1.role_arn}})\n", (4670, 4986), False, 'from aws_cdk import aws_iam as iam, core\n'), ((5061, 5396), 'aws_cdk.core.CfnResource', 'core.CfnResource', (['self', '"""SageMakerUserProfileDataScientistLimited"""'], {'type': '"""AWS::SageMaker::UserProfile"""', 'properties': "{'DomainId': sm_domain_id, 'Tags': [{'Key': 'studiouserid', 'Value':\n data_scientist_role_2}], 'UserProfileName': 
user_data_scientist_2.\n value_as_string, 'UserSettings': {'ExecutionRole': role_2.role_arn}}"}), "(self, 'SageMakerUserProfileDataScientistLimited', type=\n 'AWS::SageMaker::UserProfile', properties={'DomainId': sm_domain_id,\n 'Tags': [{'Key': 'studiouserid', 'Value': data_scientist_role_2}],\n 'UserProfileName': user_data_scientist_2.value_as_string,\n 'UserSettings': {'ExecutionRole': role_2.role_arn}})\n", (5077, 5396), False, 'from aws_cdk import aws_iam as iam, core\n'), ((3093, 3197), 'aws_cdk.core.Fn.condition_if', 'core.Fn.condition_if', (['aws_federation.logical_id', 'federated_user_data_scientist_1.value_as_string', '""""""'], {}), "(aws_federation.logical_id,\n federated_user_data_scientist_1.value_as_string, '')\n", (3113, 3197), False, 'from aws_cdk import aws_iam as iam, core\n'), ((3323, 3427), 'aws_cdk.core.Fn.condition_if', 'core.Fn.condition_if', (['aws_federation.logical_id', 'federated_user_data_scientist_2.value_as_string', '""""""'], {}), "(aws_federation.logical_id,\n federated_user_data_scientist_2.value_as_string, '')\n", (3343, 3427), False, 'from aws_cdk import aws_iam as iam, core\n'), ((2650, 2723), 'aws_cdk.core.Fn.condition_equals', 'core.Fn.condition_equals', (['"""AWS IAM with IAM users"""', 'studio_authentication'], {}), "('AWS IAM with IAM users', studio_authentication)\n", (2674, 2723), False, 'from aws_cdk import aws_iam as iam, core\n'), ((2818, 2923), 'aws_cdk.core.Fn.condition_equals', 'core.Fn.condition_equals', (['"""AWS IAM with AWS account federation (external IdP)"""', 'studio_authentication'], {}), "('AWS IAM with AWS account federation (external IdP)',\n studio_authentication)\n", (2842, 2923), False, 'from aws_cdk import aws_iam as iam, core\n'), ((4002, 4049), 'aws_cdk.aws_iam.ServicePrincipal', 'iam.ServicePrincipal', (['"""sagemaker.amazonaws.com"""'], {}), "('sagemaker.amazonaws.com')\n", (4022, 4049), True, 'from aws_cdk import aws_iam as iam, core\n'), ((4074, 4149), 
'aws_cdk.aws_iam.ManagedPolicy.from_aws_managed_policy_name', 'iam.ManagedPolicy.from_aws_managed_policy_name', (['"""AmazonSageMakerFullAccess"""'], {}), "('AmazonSageMakerFullAccess')\n", (4120, 4149), True, 'from aws_cdk import aws_iam as iam, core\n')]
|
from brownie import FundMe, network, config, MockV3Aggregator
from scripts.utils import get_account, deploy_mock_priceFeed, LOCAL_BLOCKCHAIN_ENV
from scripts.fund_and_withdraw import fund, withdraw
from web3 import Web3
def deploy_fund_me():
    """Deploy the FundMe contract with an appropriate price-feed address.

    On live/test networks the Chainlink ETH/USD feed address comes from the
    brownie config; on a local development chain a MockV3Aggregator is
    deployed first and its address is used instead.

    Returns:
        The deployed FundMe contract instance.
    """
    account = get_account()
    # After the changes in the contract's constructor, it now needs an
    # additional parameter (Rinkeby Chainlink contract address).
    # This allows dynamically changing the used address depending on the env
    # (Test, Local, Live, ...).
    active_network = network.show_active()
    if active_network in LOCAL_BLOCKCHAIN_ENV:
        # If in development, deploy a mock
        print(
            f"Active network: {active_network}.\nDeploying mock priceFeed contract..."
        )
        # Check whether the mock contract has already been deployed
        deploy_mock_priceFeed()
        priceFeed_address = MockV3Aggregator[-1].address
    else:
        priceFeed_address = config["networks"][active_network]["eth_usd_priceFeed"]
    contract_fm = FundMe.deploy(
        priceFeed_address,
        {"from": account},
        publish_source=config["networks"][active_network]["verify"],
    )
    print(f"Contract deployed to: {contract_fm.address}")
    return contract_fm
def main():
    """Script entry point: deploy FundMe, then run one fund/withdraw cycle."""
    deploy_fund_me()
    fund()
    withdraw()
|
[
"scripts.utils.deploy_mock_priceFeed",
"brownie.FundMe.deploy",
"scripts.fund_and_withdraw.fund",
"scripts.fund_and_withdraw.withdraw",
"scripts.utils.get_account",
"brownie.network.show_active"
] |
[((260, 273), 'scripts.utils.get_account', 'get_account', ([], {}), '()\n', (271, 273), False, 'from scripts.utils import get_account, deploy_mock_priceFeed, LOCAL_BLOCKCHAIN_ENV\n'), ((531, 552), 'brownie.network.show_active', 'network.show_active', ([], {}), '()\n', (550, 552), False, 'from brownie import FundMe, network, config, MockV3Aggregator\n'), ((1029, 1146), 'brownie.FundMe.deploy', 'FundMe.deploy', (['priceFeed_address', "{'from': account}"], {'publish_source': "config['networks'][active_network]['verify']"}), "(priceFeed_address, {'from': account}, publish_source=config[\n 'networks'][active_network]['verify'])\n", (1042, 1146), False, 'from brownie import FundMe, network, config, MockV3Aggregator\n'), ((1293, 1299), 'scripts.fund_and_withdraw.fund', 'fund', ([], {}), '()\n', (1297, 1299), False, 'from scripts.fund_and_withdraw import fund, withdraw\n'), ((1304, 1314), 'scripts.fund_and_withdraw.withdraw', 'withdraw', ([], {}), '()\n', (1312, 1314), False, 'from scripts.fund_and_withdraw import fund, withdraw\n'), ((929, 952), 'scripts.utils.deploy_mock_priceFeed', 'deploy_mock_priceFeed', ([], {}), '()\n', (950, 952), False, 'from scripts.utils import get_account, deploy_mock_priceFeed, LOCAL_BLOCKCHAIN_ENV\n')]
|
"""The SSDP integration."""
import asyncio
from datetime import timedelta
import logging
from urllib.parse import urlparse
import aiohttp
from defusedxml import ElementTree
from netdisco import ssdp, util
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.generated.ssdp import SSDP
DOMAIN = "ssdp"
# How often to rescan the network for SSDP devices.
SCAN_INTERVAL = timedelta(seconds=60)
# Keys of the discovery-info dict produced by info_from_entry() and handed
# to integration config flows.
ATTR_HOST = "host"
ATTR_PORT = "port"
ATTR_SSDP_DESCRIPTION = "ssdp_description"
ATTR_ST = "ssdp_st"
# The following keys are filled from the device's UPnP XML description.
ATTR_NAME = "name"
ATTR_MODEL_NAME = "model_name"
ATTR_MODEL_NUMBER = "model_number"
ATTR_SERIAL = "serial_number"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MANUFACTURERURL = "manufacturerURL"
ATTR_UDN = "udn"
ATTR_UPNP_DEVICE_TYPE = "upnp_device_type"
ATTR_PRESENTATIONURL = "presentation_url"
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
    """Set up the SSDP integration."""

    async def initialize():
        # Run one scan immediately, then repeat every SCAN_INTERVAL.
        scanner = Scanner(hass)
        await scanner.async_scan(None)
        async_track_time_interval(hass, scanner.async_scan, SCAN_INTERVAL)

    # Schedule as a background task so setup itself does not block on the
    # initial network scan.
    hass.loop.create_task(initialize())
    return True
class Scanner:
    """Class to manage SSDP scanning."""

    def __init__(self, hass):
        """Initialize class."""
        self.hass = hass
        # (st, location) pairs already dispatched to a config flow; entries
        # are never removed, so each device is only discovered once.
        self.seen = set()
        # location -> pending/finished description-fetch task; cleared after
        # every scan cycle (see async_scan).
        self._description_cache = {}

    async def async_scan(self, _):
        """Scan for new entries."""
        _LOGGER.debug("Scanning")
        # Run 3 times as packets can get lost
        for _ in range(3):
            # ssdp.scan is blocking network I/O, so run it in the executor.
            entries = await self.hass.async_add_executor_job(ssdp.scan)
            await self._process_entries(entries)
        # We clear the cache after each run. We track discovered entries
        # so will never need a description twice.
        self._description_cache.clear()

    async def _process_entries(self, entries):
        """Process SSDP entries."""
        tasks = []
        for entry in entries:
            key = (entry.st, entry.location)
            # Skip anything already handled in this or a previous scan.
            if key in self.seen:
                continue
            self.seen.add(key)
            tasks.append(self._process_entry(entry))
        if not tasks:
            return
        # _process_entry returns None for entries matching no integration.
        to_load = [
            result for result in await asyncio.gather(*tasks) if result is not None
        ]
        if not to_load:
            return
        tasks = []
        for entry, info, domains in to_load:
            for domain in domains:
                _LOGGER.debug("Discovered %s at %s", domain, entry.location)
                # Start a discovery-sourced config flow for each matching
                # integration domain.
                tasks.append(
                    self.hass.config_entries.flow.async_init(
                        domain, context={"source": DOMAIN}, data=info
                    )
                )
        await asyncio.wait(tasks)

    async def _process_entry(self, entry):
        """Process a single entry."""
        info = {"st": entry.st}
        if entry.location:
            # Multiple entries usually share same location. Make sure
            # we fetch it only once.
            info_req = self._description_cache.get(entry.location)
            if info_req is None:
                info_req = self._description_cache[
                    entry.location
                ] = self.hass.async_create_task(self._fetch_description(entry.location))
            info.update(await info_req)
        domains = set()
        # SSDP maps integration domain -> list of matchers; a matcher is a
        # dict whose key/value pairs must all equal the discovered info.
        for domain, matchers in SSDP.items():
            for matcher in matchers:
                if all(info.get(k) == v for (k, v) in matcher.items()):
                    domains.add(domain)
        if domains:
            return (entry, info_from_entry(entry, info), domains)
        return None

    async def _fetch_description(self, xml_location):
        """Fetch an XML description."""
        session = self.hass.helpers.aiohttp_client.async_get_clientsession()
        try:
            resp = await session.get(xml_location, timeout=5)
            xml = await resp.text()
            # Samsung Smart TV sometimes returns an empty document the
            # first time. Retry once.
            if not xml:
                resp = await session.get(xml_location, timeout=5)
                xml = await resp.text()
        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
            _LOGGER.debug("Error fetching %s: %s", xml_location, err)
            return {}
        try:
            # defusedxml guards against XML bombs in untrusted descriptions.
            tree = ElementTree.fromstring(xml)
        except ElementTree.ParseError as err:
            _LOGGER.debug("Error parsing %s: %s", xml_location, err)
            return {}
        # Return only the <device> section of the UPnP description (empty
        # dict when the document lacks the expected root/device structure).
        return util.etree_to_dict(tree).get("root", {}).get("device", {})
return util.etree_to_dict(tree).get("root", {}).get("device", {})
def info_from_entry(entry, device_info):
    """Flatten an SSDP entry plus its optional device description.

    Always includes host, port, description URL, and ST. When *device_info*
    is a non-empty dict, also copies the well-known UPnP device fields —
    each key is set even when the description lacks it (value None).
    """
    parsed = urlparse(entry.location)
    info = {
        ATTR_HOST: parsed.hostname,
        ATTR_PORT: parsed.port,
        ATTR_SSDP_DESCRIPTION: entry.location,
        ATTR_ST: entry.st,
    }
    if device_info:
        # (discovery-info key, UPnP description key) pairs.
        upnp_fields = (
            (ATTR_NAME, "friendlyName"),
            (ATTR_MODEL_NAME, "modelName"),
            (ATTR_MODEL_NUMBER, "modelNumber"),
            (ATTR_SERIAL, "serialNumber"),
            (ATTR_MANUFACTURER, "manufacturer"),
            (ATTR_MANUFACTURERURL, "manufacturerURL"),
            (ATTR_UDN, "UDN"),
            (ATTR_UPNP_DEVICE_TYPE, "deviceType"),
            (ATTR_PRESENTATIONURL, "presentationURL"),
        )
        for attr, source_key in upnp_fields:
            info[attr] = device_info.get(source_key)
    return info
|
[
"asyncio.gather",
"homeassistant.generated.ssdp.SSDP.items",
"logging.getLogger",
"netdisco.util.etree_to_dict",
"datetime.timedelta",
"homeassistant.helpers.event.async_track_time_interval",
"asyncio.wait",
"urllib.parse.urlparse",
"defusedxml.ElementTree.fromstring"
] |
[((352, 373), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (361, 373), False, 'from datetime import timedelta\n'), ((780, 807), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (797, 807), False, 'import logging\n'), ((4686, 4710), 'urllib.parse.urlparse', 'urlparse', (['entry.location'], {}), '(entry.location)\n', (4694, 4710), False, 'from urllib.parse import urlparse\n'), ((994, 1060), 'homeassistant.helpers.event.async_track_time_interval', 'async_track_time_interval', (['hass', 'scanner.async_scan', 'SCAN_INTERVAL'], {}), '(hass, scanner.async_scan, SCAN_INTERVAL)\n', (1019, 1060), False, 'from homeassistant.helpers.event import async_track_time_interval\n'), ((3358, 3370), 'homeassistant.generated.ssdp.SSDP.items', 'SSDP.items', ([], {}), '()\n', (3368, 3370), False, 'from homeassistant.generated.ssdp import SSDP\n'), ((2712, 2731), 'asyncio.wait', 'asyncio.wait', (['tasks'], {}), '(tasks)\n', (2724, 2731), False, 'import asyncio\n'), ((4344, 4371), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (['xml'], {}), '(xml)\n', (4366, 4371), False, 'from defusedxml import ElementTree\n'), ((2218, 2240), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (2232, 2240), False, 'import asyncio\n'), ((4525, 4549), 'netdisco.util.etree_to_dict', 'util.etree_to_dict', (['tree'], {}), '(tree)\n', (4543, 4549), False, 'from netdisco import ssdp, util\n')]
|
"""Test functions for datetime_utils.py.
"""
from datetime import datetime
from aracnid_utils.datetime_utils import isoweek, fromisoweek
# initialize module variables
REF_ISO_WEEK = '2020-W25'  # ISO week under test (week 25 of 2020)
REF_WEEK_DATE1 = '2020-06-15T00:00:00-04:00'  # Monday 00:00 — first instant of the week
REF_WEEK_DATE2 = '2020-06-21T23:59:59-04:00'  # Sunday 23:59:59 — last instant of the week
REF_WEEK_DATE3 = '2020-06-22T00:00:00-04:00'  # Monday 00:00 of the following week
def test_get_isoweek_from_date_start():
    """Tests isoweek() given the earliest possible date in the week.
    """
    moment = datetime.fromisoformat(REF_WEEK_DATE1)
    assert isoweek(moment) == REF_ISO_WEEK
def test_get_isoweek_from_date_end():
    """Tests isoweek() given the latest possible date in the week.
    """
    moment = datetime.fromisoformat(REF_WEEK_DATE2)
    assert isoweek(moment) == REF_ISO_WEEK
def test_get_isoweek_from_date_next_week():
    """Tests isoweek() given a date in the following week.
    """
    moment = datetime.fromisoformat(REF_WEEK_DATE3)
    assert isoweek(moment) != REF_ISO_WEEK
def test_get_date_from_isoweek():
    """Tests fromisoweek() given a standard ISO Week date string.
    """
    assert fromisoweek(REF_ISO_WEEK).isoformat() == REF_WEEK_DATE1
|
[
"aracnid_utils.datetime_utils.isoweek",
"datetime.datetime.fromisoformat",
"aracnid_utils.datetime_utils.fromisoweek"
] |
[((464, 502), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['REF_WEEK_DATE1'], {}), '(REF_WEEK_DATE1)\n', (486, 502), False, 'from datetime import datetime\n'), ((518, 536), 'aracnid_utils.datetime_utils.isoweek', 'isoweek', (['week_date'], {}), '(week_date)\n', (525, 536), False, 'from aracnid_utils.datetime_utils import isoweek, fromisoweek\n'), ((703, 741), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['REF_WEEK_DATE2'], {}), '(REF_WEEK_DATE2)\n', (725, 741), False, 'from datetime import datetime\n'), ((757, 775), 'aracnid_utils.datetime_utils.isoweek', 'isoweek', (['week_date'], {}), '(week_date)\n', (764, 775), False, 'from aracnid_utils.datetime_utils import isoweek, fromisoweek\n'), ((940, 978), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['REF_WEEK_DATE3'], {}), '(REF_WEEK_DATE3)\n', (962, 978), False, 'from datetime import datetime\n'), ((994, 1012), 'aracnid_utils.datetime_utils.isoweek', 'isoweek', (['week_date'], {}), '(week_date)\n', (1001, 1012), False, 'from aracnid_utils.datetime_utils import isoweek, fromisoweek\n'), ((1174, 1199), 'aracnid_utils.datetime_utils.fromisoweek', 'fromisoweek', (['REF_ISO_WEEK'], {}), '(REF_ISO_WEEK)\n', (1185, 1199), False, 'from aracnid_utils.datetime_utils import isoweek, fromisoweek\n')]
|
from django.db.models.expressions import F
from .filters import *
from . models import *
from .parsers import *
from .permissions import *
from .serializers import *
import csv
import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
import jwt
import os
from rest_framework import permissions
from rest_framework.authtoken.views import Token
from rest_framework.exceptions import PermissionDenied
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
import shutil
def send_response(token):
    """Build the login response for *token*'s user.

    The body carries the user's profile, role flags, and a 12-hour JWT signed
    with the Django secret key; the DRF token key is additionally set as an
    httponly 'user' cookie with the same 12-hour lifetime so the session can
    be refreshed (see LoginView.get).
    """
    user = token.user
    # NOTE(review): timezone.now().utcnow() invokes the datetime classmethod
    # utcnow() through an instance — the timezone.now() value is discarded and
    # the current naive UTC time is used. Works, but confirm intent before
    # simplifying.
    access_token = jwt.encode(
        {'id': user.pk, 'exp': timezone.now().utcnow() + datetime.timedelta(hours=12)},
        settings.SECRET_KEY,
    )
    response = Response({
        'id': user.pk,
        'email': user.email,
        'name': user.get_full_name(),
        'token': access_token,
        'active': user.is_active,
        'staff': user.is_staff,
        'admin': user.is_superuser,
    })
    response.set_cookie(
        'user',
        token.key,
        expires=(timezone.now().utcnow() + datetime.timedelta(hours=12)),
        httponly=True,
    )
    return response
def change_password(user, password):
    """Set *password* on *user*, rejecting reuse of the current password.

    Raises:
        PermissionDenied: when *password* matches the user's current password.
    """
    if user.check_password(password):
        raise PermissionDenied("New password cannot be old password")
    user.set_password(password)
    user.save()
class ChildViewSet(ModelViewSet):
    """CRUD endpoints for Child records, restricted to authenticated staff.

    Accepts multipart payloads (alongside plain JSON) via MultipartJsonParser.
    """
    permission_classes = [permissions.IsAuthenticated, StaffMixin]
    serializer_class = ChildSerializer
    parser_classes = [MultipartJsonParser, JSONParser]
    filter_backends = [DjangoFilterBackend,]
    filterset_class = ChildFilter
    queryset = Child.objects.all().order_by('-pk')

    def perform_update(self, serializer):
        """On a partial update, optionally link/unlink a guardian.

        The PATCH body may carry a Guardian primary key under 'add' or
        'remove'; an unknown pk yields 404 before any save happens.
        """
        if serializer.partial:
            child = serializer.instance
            if 'add' in self.request.data.keys():
                guardian = get_object_or_404(Guardian, pk=self.request.data['add'])
                child.guardians.add(guardian)
            if 'remove' in self.request.data.keys():
                guardian = get_object_or_404(Guardian, pk=self.request.data['remove'])
                child.guardians.remove(guardian)
        serializer.save()
class GuardianViewSet(ModelViewSet):
    """CRUD endpoints for Guardian records, restricted to authenticated staff."""
    permission_classes = [permissions.IsAuthenticated, StaffMixin]
    serializer_class = GuardianSerializer
    filter_backends = [DjangoFilterBackend,]
    filterset_class = GuardianFilter
    queryset = Guardian.objects.all().order_by('-pk')
class CheckViewSet(ModelViewSet):
    """CRUD endpoints for check-in/check-out records (staff only)."""
    permission_classes = [permissions.IsAuthenticated, StaffMixin]
    serializer_class = CheckSerializer
    filter_backends = [DjangoFilterBackend,]
    filterset_class = CheckFilter
    queryset = Check.objects.all().order_by('-pk')

    def perform_update(self, serializer):
        """Stamp checkout metadata when a PATCH supplies 'out_guardian'.

        The requesting staff user is recorded as out_supervisor and the
        current local wall-clock time as out_time.
        """
        if serializer.partial and 'out_guardian' in self.request.data.keys():
            serializer.save(out_supervisor=self.request.user, out_time=timezone.localtime(timezone.now()).time())
        else:
            serializer.save()
class UserViewSet(ModelViewSet):
    """CRUD endpoints for user accounts (access controlled by UserMixin)."""
    permission_classes = [permissions.IsAuthenticated, UserMixin]
    serializer_class = UserSerializer
    queryset = User.objects.all().order_by('-pk')

    def perform_update(self, serializer):
        """Allow a password change on PATCH when both password keys are sent.

        Requires 'old_password' and 'new_password' together; raises
        PermissionDenied when the old password does not verify (and, via
        change_password, when the new password equals the old one).
        """
        if serializer.partial:
            user = serializer.instance
            data = self.request.data
            if all(x in data.keys() for x in ['old_password', 'new_password']):
                if user.check_password(data['old_password']):
                    change_password(user, data['new_password'])
                else:
                    raise PermissionDenied("Current Password is Incorrect")
        serializer.save()
class ScanView(APIView):
    """Resolve a scanned id to a Child or Guardian record (staff only)."""
    permission_classes = [permissions.IsAuthenticated, StaffMixin]

    def get(self, request, format=None):
        """Look up the ?id= query parameter.

        Tries Child.child_id first, then Guardian.guardian_id; returns the
        serialized match, or 404 when neither matches.
        """
        code = request.query_params.get('id', None)
        child = Child.objects.filter(child_id=code)
        if child.exists():
            child = child.first()
            data = ChildSerializer(child).to_representation(child)
            return Response(data)
        guardian = Guardian.objects.filter(guardian_id=code)
        if guardian.exists():
            guardian = guardian.first()
            data = GuardianSerializer(guardian).to_representation(guardian)
            return Response(data)
        return Response("Id not Found", 404)
class ReportView(APIView):
    """CSV attendance report of completed (checked-out) visits (staff only)."""
    permission_classes = [permissions.IsAuthenticated, StaffMixin]

    def post(self, request, format=None):
        """Return a CSV of per-child visit durations.

        The request body must contain either 'date' (a single day) or
        'range' (a [start, end] pair); anything else is rejected with 400.
        Only checks with a recorded out_time are included.
        """
        if 'date' in request.data.keys():
            checks = Check.objects.filter(date=request.data['date'], out_time__isnull=False).order_by('child__last_name', 'child__first_name', 'date')
        elif 'range' in request.data.keys():
            checks = Check.objects.filter(date__range=request.data['range'], out_time__isnull=False).order_by('child__last_name', 'child__first_name', 'date')
        else:
            return Response('Invalid Input', 400)
        response = HttpResponse(
            content_type='text/csv',
            headers={'Content-Disposition': 'attachment; filename="childcheck_report.csv"'},
        )
        writer = csv.writer(response)
        writer.writerow(['First Name', 'Last Name', 'date', 'Time (hr)'])
        # in_time/out_time are time objects: anchor both on the same (loop-
        # invariant) date so they can be subtracted as datetimes. Hoisted out
        # of the loop — the previous version called date.today() twice per row.
        anchor = datetime.date.today()
        for check in checks:
            delta = datetime.datetime.combine(anchor, check.out_time) - datetime.datetime.combine(anchor, check.in_time)
            writer.writerow([
                check.child.first_name,
                check.child.last_name,
                check.date,
                # Duration in hours, rounded to 2 decimals.
                round(delta.days * 24 + delta.seconds / 3600.0, 2)
            ])
        return response
class BackupView(APIView):
    """Download (GET) or restore (POST) a zip backup of the db and media tree."""
    permission_classes = [permissions.IsAuthenticated, StaffMixin]

    def get(self, request, format=None):
        """Zip media/ plus db.sqlite3 and stream the archive as a download."""
        dir = settings.APP_DATA or settings.BASE_DIR
        dir = str(dir)
        # Stage media + db into a temporary 'backup' dir, archive it to
        # backup.zip, then remove both the staging dir and the archive.
        shutil.copytree(dir + '/media', dir + '/backup')
        shutil.copy(dir + '/db.sqlite3', dir + '/backup/db.sqlite3')
        shutil.make_archive(dir + '/backup', 'zip', dir + '/backup')
        shutil.rmtree(dir + '/backup')
        # NOTE(review): the archive is read fully into memory and the file
        # handle is never explicitly closed.
        file = open(dir + '/backup.zip', 'rb').read()
        response = HttpResponse(
            file,
            content_type='application/zip',
            headers={'Content-Disposition': 'attachment; filename="backup.zip"'},
        )
        os.remove(dir + '/backup.zip')
        return response

    def post(self, request, format=None):
        """Restore from an uploaded 'archive' zip, replacing db and media.

        A valid archive must contain db.sqlite3 at its root; otherwise the
        upload is discarded and 400 returned.
        """
        dir = settings.APP_DATA or settings.BASE_DIR
        dir = str(dir)
        # The upload lands in media storage first, then is moved out before
        # media/ itself gets replaced below.
        default_storage.save('backup.zip', ContentFile(request.data['archive'].read()))
        shutil.move(dir + '/media/backup.zip', dir + '/backup.zip')
        shutil.unpack_archive(dir + '/backup.zip', dir + '/backup')
        if os.path.exists(dir + '/backup/db.sqlite3'):
            # Recognized backup: swap in the restored db and media tree.
            os.remove(dir + '/db.sqlite3')
            shutil.rmtree(dir + '/media')
            shutil.move(dir + '/backup/db.sqlite3', dir + '/db.sqlite3')
            shutil.move(dir + '/backup', dir + '/media')
            os.remove(dir + '/backup.zip')
        else:
            # Not a recognizable backup: discard everything.
            shutil.rmtree(dir + '/backup')
            os.remove(dir + '/backup.zip')
            return Response('Incorrect Archive', 400)
        return Response('Success')
class LoginView(APIView):
    """Session endpoints: refresh (GET), register (POST), login (PUT),
    logout (DELETE). Deliberately unauthenticated.
    """
    authentication_classes = []
    permission_classes = []

    def get(self, request, format=None):
        """Refresh a session from the httponly 'user' cookie (DRF token key)."""
        refresh = request.COOKIES.get('user', None)
        token = get_object_or_404(Token, key=refresh)
        return send_response(token)

    def post(self, request, format=None):
        """Register a new account (inactive by default).

        The very first account ever created is auto-promoted to an active
        staff superuser.
        """
        user = User.objects.create_user(
            email=request.data['email'],
            password=request.data['password'],
            first_name=request.data['first_name'],
            last_name=request.data['last_name'],
            is_active=False,
        )
        if User.objects.all().count() == 1:
            user.is_active = True
            user.is_staff = True
            user.is_superuser = True
            user.save()
        token = Token.objects.get(user=user)
        return send_response(token)

    def put(self, request, format=None):
        """Log in with email/password; raises PermissionDenied on mismatch."""
        user = get_object_or_404(User, email=request.data.get('email', None))
        if user.check_password(request.data.get('password', None)):
            return send_response(get_object_or_404(Token, user=user))
        else:
            raise PermissionDenied("Invalid Email or Password")

    def delete(self, request, format=None):
        """Log out by deleting the 'user' cookie."""
        response = Response('Logout Successful')
        response.delete_cookie('user')
        return response
|
[
"os.remove",
"shutil.unpack_archive",
"csv.writer",
"django.http.HttpResponse",
"shutil.make_archive",
"shutil.rmtree",
"django.utils.timezone.now",
"os.path.exists",
"datetime.date.today",
"rest_framework.exceptions.PermissionDenied",
"django.shortcuts.get_object_or_404",
"rest_framework.response.Response",
"datetime.timedelta",
"shutil.move",
"shutil.copytree",
"rest_framework.authtoken.views.Token.objects.get",
"shutil.copy"
] |
[((1670, 1725), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (['"""New password cannot be old password"""'], {}), "('New password cannot be old password')\n", (1686, 1725), False, 'from rest_framework.exceptions import PermissionDenied\n'), ((4724, 4753), 'rest_framework.response.Response', 'Response', (['"""Id not Found"""', '(404)'], {}), "('Id not Found', 404)\n", (4732, 4753), False, 'from rest_framework.response import Response\n'), ((5374, 5496), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""', 'headers': '{\'Content-Disposition\': \'attachment; filename="childcheck_report.csv"\'}'}), '(content_type=\'text/csv\', headers={\'Content-Disposition\':\n \'attachment; filename="childcheck_report.csv"\'})\n', (5386, 5496), False, 'from django.http import HttpResponse\n'), ((5545, 5565), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (5555, 5565), False, 'import csv\n'), ((6285, 6333), 'shutil.copytree', 'shutil.copytree', (["(dir + '/media')", "(dir + '/backup')"], {}), "(dir + '/media', dir + '/backup')\n", (6300, 6333), False, 'import shutil\n'), ((6342, 6402), 'shutil.copy', 'shutil.copy', (["(dir + '/db.sqlite3')", "(dir + '/backup/db.sqlite3')"], {}), "(dir + '/db.sqlite3', dir + '/backup/db.sqlite3')\n", (6353, 6402), False, 'import shutil\n'), ((6411, 6471), 'shutil.make_archive', 'shutil.make_archive', (["(dir + '/backup')", '"""zip"""', "(dir + '/backup')"], {}), "(dir + '/backup', 'zip', dir + '/backup')\n", (6430, 6471), False, 'import shutil\n'), ((6480, 6510), 'shutil.rmtree', 'shutil.rmtree', (["(dir + '/backup')"], {}), "(dir + '/backup')\n", (6493, 6510), False, 'import shutil\n'), ((6584, 6709), 'django.http.HttpResponse', 'HttpResponse', (['file'], {'content_type': '"""application/zip"""', 'headers': '{\'Content-Disposition\': \'attachment; filename="backup.zip"\'}'}), '(file, content_type=\'application/zip\', headers={\n \'Content-Disposition\': \'attachment; 
filename="backup.zip"\'})\n', (6596, 6709), False, 'from django.http import HttpResponse\n'), ((6760, 6790), 'os.remove', 'os.remove', (["(dir + '/backup.zip')"], {}), "(dir + '/backup.zip')\n", (6769, 6790), False, 'import os\n'), ((7030, 7089), 'shutil.move', 'shutil.move', (["(dir + '/media/backup.zip')", "(dir + '/backup.zip')"], {}), "(dir + '/media/backup.zip', dir + '/backup.zip')\n", (7041, 7089), False, 'import shutil\n'), ((7098, 7157), 'shutil.unpack_archive', 'shutil.unpack_archive', (["(dir + '/backup.zip')", "(dir + '/backup')"], {}), "(dir + '/backup.zip', dir + '/backup')\n", (7119, 7157), False, 'import shutil\n'), ((7169, 7211), 'os.path.exists', 'os.path.exists', (["(dir + '/backup/db.sqlite3')"], {}), "(dir + '/backup/db.sqlite3')\n", (7183, 7211), False, 'import os\n'), ((7640, 7659), 'rest_framework.response.Response', 'Response', (['"""Success"""'], {}), "('Success')\n", (7648, 7659), False, 'from rest_framework.response import Response\n'), ((7858, 7895), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Token'], {'key': 'refresh'}), '(Token, key=refresh)\n', (7875, 7895), False, 'from django.shortcuts import get_object_or_404\n'), ((8431, 8459), 'rest_framework.authtoken.views.Token.objects.get', 'Token.objects.get', ([], {'user': 'user'}), '(user=user)\n', (8448, 8459), False, 'from rest_framework.authtoken.views import Token\n'), ((8896, 8925), 'rest_framework.response.Response', 'Response', (['"""Logout Successful"""'], {}), "('Logout Successful')\n", (8904, 8925), False, 'from rest_framework.response import Response\n'), ((4453, 4467), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (4461, 4467), False, 'from rest_framework.response import Response\n'), ((4694, 4708), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (4702, 4708), False, 'from rest_framework.response import Response\n'), ((7225, 7255), 'os.remove', 'os.remove', (["(dir + '/db.sqlite3')"], {}), "(dir + 
'/db.sqlite3')\n", (7234, 7255), False, 'import os\n'), ((7268, 7297), 'shutil.rmtree', 'shutil.rmtree', (["(dir + '/media')"], {}), "(dir + '/media')\n", (7281, 7297), False, 'import shutil\n'), ((7310, 7370), 'shutil.move', 'shutil.move', (["(dir + '/backup/db.sqlite3')", "(dir + '/db.sqlite3')"], {}), "(dir + '/backup/db.sqlite3', dir + '/db.sqlite3')\n", (7321, 7370), False, 'import shutil\n'), ((7383, 7427), 'shutil.move', 'shutil.move', (["(dir + '/backup')", "(dir + '/media')"], {}), "(dir + '/backup', dir + '/media')\n", (7394, 7427), False, 'import shutil\n'), ((7440, 7470), 'os.remove', 'os.remove', (["(dir + '/backup.zip')"], {}), "(dir + '/backup.zip')\n", (7449, 7470), False, 'import os\n'), ((7497, 7527), 'shutil.rmtree', 'shutil.rmtree', (["(dir + '/backup')"], {}), "(dir + '/backup')\n", (7510, 7527), False, 'import shutil\n'), ((7540, 7570), 'os.remove', 'os.remove', (["(dir + '/backup.zip')"], {}), "(dir + '/backup.zip')\n", (7549, 7570), False, 'import os\n'), ((7590, 7624), 'rest_framework.response.Response', 'Response', (['"""Incorrect Archive"""', '(400)'], {}), "('Incorrect Archive', 400)\n", (7598, 7624), False, 'from rest_framework.response import Response\n'), ((8786, 8831), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (['"""Invalid Email or Password"""'], {}), "('Invalid Email or Password')\n", (8802, 8831), False, 'from rest_framework.exceptions import PermissionDenied\n'), ((1004, 1032), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(12)'}), '(hours=12)\n', (1022, 1032), False, 'import datetime\n'), ((1430, 1458), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(12)'}), '(hours=12)\n', (1448, 1458), False, 'import datetime\n'), ((2244, 2300), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Guardian'], {'pk': "self.request.data['add']"}), "(Guardian, pk=self.request.data['add'])\n", (2261, 2300), False, 'from django.shortcuts import get_object_or_404\n'), ((2427, 2486), 
'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Guardian'], {'pk': "self.request.data['remove']"}), "(Guardian, pk=self.request.data['remove'])\n", (2444, 2486), False, 'from django.shortcuts import get_object_or_404\n'), ((5323, 5353), 'rest_framework.response.Response', 'Response', (['"""Invalid Input"""', '(400)'], {}), "('Invalid Input', 400)\n", (5331, 5353), False, 'from rest_framework.response import Response\n'), ((8717, 8752), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Token'], {'user': 'user'}), '(Token, user=user)\n', (8734, 8752), False, 'from django.shortcuts import get_object_or_404\n'), ((3990, 4039), 'rest_framework.exceptions.PermissionDenied', 'PermissionDenied', (['"""Current Password is Incorrect"""'], {}), "('Current Password is Incorrect')\n", (4006, 4039), False, 'from rest_framework.exceptions import PermissionDenied\n'), ((5715, 5736), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5734, 5736), False, 'import datetime\n'), ((5782, 5803), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5801, 5803), False, 'import datetime\n'), ((978, 992), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (990, 992), False, 'from django.utils import timezone\n'), ((1404, 1418), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1416, 1418), False, 'from django.utils import timezone\n'), ((3329, 3343), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3341, 3343), False, 'from django.utils import timezone\n')]
|
from functools import partial
import albumentations as A
import cv2
import matplotlib.pyplot as plt
import tensorflow as tf
from watch_recognition.targets_encoding import (
add_sample_weights,
encode_keypoints_to_angle,
encode_keypoints_to_mask,
set_shapes,
set_shapes_with_sample_weight,
)
# No-op pipeline: keeps the albumentations keypoint bookkeeping (xyas format)
# without applying any augmentation.
EMPTY_TRANSFORMS = A.Compose(
    [],
    # format="xyas" is required while using tf.Data pipielines, otherwise
    # tf cannot validate the output shapes # TODO check if this is correct
    # remove_invisible=False is required to preserve the order and number of keypoints
    keypoint_params=A.KeypointParams(format="xyas", remove_invisible=False),
)
# Color-only augmentation for keypoint training data (geometry untouched, so
# keypoint coordinates remain valid without transformation).
DEFAULT_TRANSFORMS = A.Compose(
    [
        A.OneOf(
            [
                A.HueSaturationValue(),
                A.RGBShift(),
                A.ChannelShuffle(),
            ],
            p=1,
        ),
        # A.MotionBlur(),
    ],
    # format="xyas" is required while using tf.Data pipielines, otherwise
    # tf cannot validate the output shapes # TODO check if this is correct
    # remove_invisible=False is required to preserve the order and number of keypoints
    keypoint_params=A.KeypointParams(format="xyas", remove_invisible=False),
)
# Geometric + color augmentation for segmentation-mask training data; no
# keypoint_params here because masks are transformed alongside the image.
DEFAULT_TRANSFORMS_FOR_MASKS = A.Compose(
    [
        # A.HorizontalFlip(),
        # A.VerticalFlip(),
        A.RandomRotate90(),
        A.RandomSizedCrop(
            (96, 128),
            128,
            128,
            # interpolation=cv2.INTER_NEAREST,
            p=0.5,
        ),
        # A.Transpose(),
        # A.OneOf(
        #     [
        #         # A.Downscale(),
        #         A.ElasticTransform(),
        #         A.GridDistortion(),
        #         # A.OpticalDistortion(),
        #     ],
        # ),
        A.OneOf(
            [
                A.HueSaturationValue(p=0.8),
                A.RGBShift(p=0.8),
                A.ChannelShuffle(p=0.8),
                A.ChannelDropout(p=0.8),
                A.CLAHE(p=0.8),
                A.ISONoise(p=0.8),
                # A.ImageCompression(p=0.8),
                A.InvertImg(p=0.8),
            ],
            p=1,
        ),
    ],
)
# A.VerticalFlip(p=0.5),
# A.RandomRotate90(p=0.5),
# A.OneOf([
#     A.ElasticTransform(p=0.5, alpha=120, sigma=120 * 0.05, alpha_affine=120 * 0.03),
#     A.GridDistortion(p=0.5),
#     A.OpticalDistortion(distort_limit=1, shift_limit=0.5, p=1),
# ], p=0.8)])
# TODO deduplicate with DEFAULT_TRANSFORMS
# Rotation + color augmentation for the angle classifier; rotations are fine
# here because the target (angle class) is derived from the keypoints after
# augmentation.
DEFAULT_TRANSFORMS_FOR_ANGLE_CLASSIFIER = A.Compose(
    [
        A.ShiftScaleRotate(
            p=0.8,
            rotate_limit=180,
        ),
        A.OneOf(
            [
                A.HueSaturationValue(p=1),
                A.RGBShift(p=1),
                A.ChannelShuffle(p=1),
                A.RandomBrightnessContrast(p=1),
            ],
            p=1,
        ),
        A.MotionBlur(),
    ],
    # format="xyas" is required while using tf.Data pipielines, otherwise
    # tf cannot validate the output shapes # TODO check if this is correct
    # remove_invisible=False is required to preserve the order and number of keypoints
    keypoint_params=A.KeypointParams(format="xyas", remove_invisible=False),
)
def kp_aug_fn(image, keypoints):
    """Apply DEFAULT_TRANSFORMS to an (image, keypoints) pair.

    Keypoints are returned as float32 so downstream tf ops get a stable
    dtype. Intended to be wrapped by tf.numpy_function.
    """
    augmented = DEFAULT_TRANSFORMS(image=image, keypoints=keypoints)
    return augmented["image"], tf.cast(augmented["keypoints"], tf.float32)
def kp_angle_fn(image, keypoints):
    """Apply DEFAULT_TRANSFORMS_FOR_ANGLE_CLASSIFIER to an (image, keypoints) pair.

    Keypoints are returned as float32 so downstream tf ops get a stable
    dtype. Intended to be wrapped by tf.numpy_function.
    """
    augmented = DEFAULT_TRANSFORMS_FOR_ANGLE_CLASSIFIER(image=image, keypoints=keypoints)
    return augmented["image"], tf.cast(augmented["keypoints"], tf.float32)
def mask_aug_fn(image, mask):
    """Apply DEFAULT_TRANSFORMS_FOR_MASKS to an (image, mask) pair.

    The mask is returned as float32 so downstream tf ops get a stable
    dtype. Intended to be wrapped by tf.numpy_function.
    """
    augmented = DEFAULT_TRANSFORMS_FOR_MASKS(image=image, mask=mask)
    return augmented["image"], tf.cast(augmented["mask"], tf.float32)
def augment_kp_data(image, kp):
    """Graph-compatible wrapper running kp_aug_fn via tf.numpy_function."""
    aug_image, aug_kp = tf.numpy_function(
        func=kp_aug_fn, inp=[image, kp], Tout=(tf.uint8, tf.float32)
    )
    return aug_image, aug_kp
def augment_mask_data(image, mask):
    """Graph-compatible wrapper running mask_aug_fn via tf.numpy_function."""
    aug_image, aug_mask = tf.numpy_function(
        func=mask_aug_fn, inp=[image, mask], Tout=(tf.uint8, tf.float32)
    )
    return aug_image, aug_mask
def augment_kp_angle_cls_data(image, kp):
    """Graph-compatible wrapper running kp_angle_fn via tf.numpy_function."""
    aug_image, aug_kp = tf.numpy_function(
        func=kp_angle_fn, inp=[image, kp], Tout=(tf.uint8, tf.float32)
    )
    return aug_image, aug_kp
def view_image(ds):
    """Plot the first 5 samples of one batch: image, mask channel(s), overlay.

    Handles both multi-channel masks (one subplot column per channel) and
    single-channel masks. Assumes the batch holds at least 5 samples —
    TODO confirm against the dataset batch size.
    """
    batch = next(iter(ds))  # extract 1 batch from the dataset
    image, masks = batch[0], batch[1]
    image = image.numpy()
    masks = masks.numpy()
    if masks.shape[-1] > 1:
        # One column per mask channel, plus the image and the blended overlay.
        fig, axarr = plt.subplots(5, masks.shape[-1] + 2, figsize=(15, 15))
        for i in range(5):
            ax = axarr[i]
            img = image[i]
            ax_idx = 0
            ax[ax_idx].imshow(img.astype("uint8"))
            ax[ax_idx].set_xticks([])
            ax[ax_idx].set_yticks([])
            ax[ax_idx].set_title("Image")
            for j in range(masks.shape[-1]):
                ax_idx = j + 1
                ax[ax_idx].imshow(masks[i, :, :, j])
                ax[ax_idx].set_title("Masks")
            # 50/50 alpha blend of image and mask for a quick visual check.
            merged = cv2.addWeighted(
                img, 0.5, (masks[i] * 255).astype("uint8"), 0.5, 0.0
            )
            ax_idx += 1
            ax[ax_idx].imshow(merged)
            ax[ax_idx].set_title("Masks + Image")
    else:
        # Single-channel mask: 3 columns (image, mask, overlay).
        fig, axarr = plt.subplots(5, 3, figsize=(15, 15))
        for i in range(5):
            ax = axarr[i]
            img = image[i]
            ax_idx = 0
            ax[ax_idx].imshow(img.astype("uint8"))
            ax[ax_idx].set_xticks([])
            ax[ax_idx].set_yticks([])
            ax[ax_idx].set_title("Image")
            ax_idx += 1
            ax[ax_idx].imshow(masks[i, :, :, -1].astype("uint8"))
            ax[ax_idx].set_title("Masks")
            # 50/50 alpha blend of image and mask for a quick visual check.
            merged = cv2.addWeighted(
                img, 0.5, (masks[i, :, :, -1] * 255).astype("uint8"), 0.5, 0.0
            )
            ax_idx += 1
            ax[ax_idx].imshow(merged)
            ax[ax_idx].set_title("Masks + Image")
def get_watch_angle_dataset(
    X, y, augment: bool = True, bin_size=90, image_size=(224, 224)
) -> tf.data.Dataset:
    """Build a batched tf.data pipeline for the watch-angle classifier.

    Keypoints are encoded into one of 360 // bin_size angle classes.
    Batch size is fixed at 32.
    """
    encode_kp = partial(encode_keypoints_to_angle, bin_size=bin_size)
    set_shape_f = partial(
        set_shapes, img_shape=(*image_size, 3), target_shape=(360 // bin_size,)
    )
    dataset = tf.data.Dataset.from_tensor_slices((X, y))
    AUTOTUNE = tf.data.experimental.AUTOTUNE
    if augment:
        # Augment before encoding so keypoints are still in coordinate form.
        dataset = dataset.map(
            augment_kp_angle_cls_data,
            num_parallel_calls=AUTOTUNE,
        )
    dataset = (
        dataset.map(encode_kp, num_parallel_calls=AUTOTUNE)
        .map(set_shape_f, num_parallel_calls=AUTOTUNE)
        .shuffle(8 * 32)
        .batch(32)
        .prefetch(AUTOTUNE)
    )
    return dataset
def get_watch_hands_mask_dataset(
    X, y, augment: bool = True, image_size=(224, 224), batch_size=32, class_weights=None
) -> tf.data.Dataset:
    """Build the tf.data pipeline for the watch-hands segmentation task.

    Args:
        X: input images.
        y: per-pixel mask targets.
        augment: apply ``augment_mask_data`` when True.
        image_size: (height, width) of the input images.
        batch_size: batch size; the shuffle buffer is ``8 * batch_size``.
        class_weights: optional per-class weights.  When given, a
            sample-weight channel is attached to every example.

    Returns:
        A shuffled, batched, prefetching ``tf.data.Dataset``.
    """
    dataset = tf.data.Dataset.from_tensor_slices((X, y))
    AUTOTUNE = tf.data.experimental.AUTOTUNE
    if augment:
        dataset = dataset.map(
            augment_mask_data,
            num_parallel_calls=AUTOTUNE,
        )
    if class_weights:
        # Bug fix: this previously hard-coded ``class_weights=[1, 10]`` and
        # silently ignored the caller-supplied ``class_weights`` argument.
        add_sample_weights_f = partial(add_sample_weights, class_weights=class_weights)
        dataset = dataset.map(add_sample_weights_f)
        set_shape_f = partial(
            set_shapes_with_sample_weight,
            img_shape=(*image_size, 3),
            target_shape=(*image_size, 1),
        )
    else:
        set_shape_f = partial(
            set_shapes, img_shape=(*image_size, 3), target_shape=(*image_size, 1)
        )
    dataset = (
        dataset.map(set_shape_f, num_parallel_calls=AUTOTUNE)
        .shuffle(8 * batch_size)
        .batch(batch_size)
        .prefetch(AUTOTUNE)
    )
    return dataset
def get_watch_keypoints_dataset(
    X,
    y,
    augment: bool = True,
    batch_size=32,
    image_size=None,
    mask_size=None,
    shuffle=True,
) -> tf.data.Dataset:
    """Build the tf.data pipeline for watch keypoint heatmap regression.

    Keypoints are rendered into 3-channel target masks of ``mask_size``;
    shuffling and batching are optional (``batch_size <= 0`` skips batching).
    """
    mask_encoder = partial(
        encode_keypoints_to_mask,
        image_size=(*image_size, 3),
        mask_size=mask_size,
        radius=4,
        include_background=False,
        separate_hour_and_minute_hands=False,
        add_perimeter=True,
        with_perimeter_for_hands=True,
        sparse=False,
        blur=True,
        hands_as_lines=True,
    )
    shape_setter = partial(
        set_shapes, img_shape=(*image_size, 3), target_shape=(*mask_size, 3)
    )
    autotune = tf.data.experimental.AUTOTUNE
    ds = tf.data.Dataset.from_tensor_slices((X, y))
    if augment:
        ds = ds.map(augment_kp_data, num_parallel_calls=autotune)
    ds = ds.map(mask_encoder, num_parallel_calls=autotune)
    ds = ds.map(shape_setter, num_parallel_calls=autotune)
    if shuffle:
        ds = ds.shuffle(8 * batch_size)
    if batch_size > 0:
        ds = ds.batch(batch_size)
    return ds.prefetch(autotune)
|
[
"functools.partial",
"albumentations.ISONoise",
"albumentations.InvertImg",
"albumentations.RGBShift",
"albumentations.RandomSizedCrop",
"tensorflow.data.Dataset.from_tensor_slices",
"albumentations.HueSaturationValue",
"albumentations.RandomBrightnessContrast",
"tensorflow.cast",
"albumentations.ChannelShuffle",
"albumentations.ShiftScaleRotate",
"albumentations.ChannelDropout",
"tensorflow.numpy_function",
"albumentations.CLAHE",
"matplotlib.pyplot.subplots",
"albumentations.KeypointParams",
"albumentations.MotionBlur",
"albumentations.RandomRotate90"
] |
[((3463, 3490), 'tensorflow.cast', 'tf.cast', (['aug_kp', 'tf.float32'], {}), '(aug_kp, tf.float32)\n', (3470, 3490), True, 'import tensorflow as tf\n'), ((3773, 3800), 'tensorflow.cast', 'tf.cast', (['aug_kp', 'tf.float32'], {}), '(aug_kp, tf.float32)\n', (3780, 3800), True, 'import tensorflow as tf\n'), ((4056, 4085), 'tensorflow.cast', 'tf.cast', (['aug_mask', 'tf.float32'], {}), '(aug_mask, tf.float32)\n', (4063, 4085), True, 'import tensorflow as tf\n'), ((4165, 4244), 'tensorflow.numpy_function', 'tf.numpy_function', ([], {'func': 'kp_aug_fn', 'inp': '[image, kp]', 'Tout': '(tf.uint8, tf.float32)'}), '(func=kp_aug_fn, inp=[image, kp], Tout=(tf.uint8, tf.float32))\n', (4182, 4244), True, 'import tensorflow as tf\n'), ((4353, 4441), 'tensorflow.numpy_function', 'tf.numpy_function', ([], {'func': 'mask_aug_fn', 'inp': '[image, mask]', 'Tout': '(tf.uint8, tf.float32)'}), '(func=mask_aug_fn, inp=[image, mask], Tout=(tf.uint8, tf.\n float32))\n', (4370, 4441), True, 'import tensorflow as tf\n'), ((4551, 4637), 'tensorflow.numpy_function', 'tf.numpy_function', ([], {'func': 'kp_angle_fn', 'inp': '[image, kp]', 'Tout': '(tf.uint8, tf.float32)'}), '(func=kp_angle_fn, inp=[image, kp], Tout=(tf.uint8, tf.\n float32))\n', (4568, 4637), True, 'import tensorflow as tf\n'), ((6501, 6554), 'functools.partial', 'partial', (['encode_keypoints_to_angle'], {'bin_size': 'bin_size'}), '(encode_keypoints_to_angle, bin_size=bin_size)\n', (6508, 6554), False, 'from functools import partial\n'), ((6573, 6658), 'functools.partial', 'partial', (['set_shapes'], {'img_shape': '(*image_size, 3)', 'target_shape': '(360 // bin_size,)'}), '(set_shapes, img_shape=(*image_size, 3), target_shape=(360 // bin_size,)\n )\n', (6580, 6658), False, 'from functools import partial\n'), ((6683, 6725), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(X, y)'], {}), '((X, y))\n', (6717, 6725), True, 'import tensorflow as tf\n'), ((7298, 7340), 
'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(X, y)'], {}), '((X, y))\n', (7332, 7340), True, 'import tensorflow as tf\n'), ((8354, 8626), 'functools.partial', 'partial', (['encode_keypoints_to_mask'], {'image_size': '(*image_size, 3)', 'mask_size': 'mask_size', 'radius': '(4)', 'include_background': '(False)', 'separate_hour_and_minute_hands': '(False)', 'add_perimeter': '(True)', 'with_perimeter_for_hands': '(True)', 'sparse': '(False)', 'blur': '(True)', 'hands_as_lines': '(True)'}), '(encode_keypoints_to_mask, image_size=(*image_size, 3), mask_size=\n mask_size, radius=4, include_background=False,\n separate_hour_and_minute_hands=False, add_perimeter=True,\n with_perimeter_for_hands=True, sparse=False, blur=True, hands_as_lines=True\n )\n', (8361, 8626), False, 'from functools import partial\n'), ((8722, 8799), 'functools.partial', 'partial', (['set_shapes'], {'img_shape': '(*image_size, 3)', 'target_shape': '(*mask_size, 3)'}), '(set_shapes, img_shape=(*image_size, 3), target_shape=(*mask_size, 3))\n', (8729, 8799), False, 'from functools import partial\n'), ((8828, 8870), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(X, y)'], {}), '((X, y))\n', (8862, 8870), True, 'import tensorflow as tf\n'), ((608, 663), 'albumentations.KeypointParams', 'A.KeypointParams', ([], {'format': '"""xyas"""', 'remove_invisible': '(False)'}), "(format='xyas', remove_invisible=False)\n", (624, 663), True, 'import albumentations as A\n'), ((1175, 1230), 'albumentations.KeypointParams', 'A.KeypointParams', ([], {'format': '"""xyas"""', 'remove_invisible': '(False)'}), "(format='xyas', remove_invisible=False)\n", (1191, 1230), True, 'import albumentations as A\n'), ((1349, 1367), 'albumentations.RandomRotate90', 'A.RandomRotate90', ([], {}), '()\n', (1365, 1367), True, 'import albumentations as A\n'), ((1377, 1422), 'albumentations.RandomSizedCrop', 'A.RandomSizedCrop', (['(96, 128)', '(128)', '(128)'], 
{'p': '(0.5)'}), '((96, 128), 128, 128, p=0.5)\n', (1394, 1422), True, 'import albumentations as A\n'), ((2567, 2610), 'albumentations.ShiftScaleRotate', 'A.ShiftScaleRotate', ([], {'p': '(0.8)', 'rotate_limit': '(180)'}), '(p=0.8, rotate_limit=180)\n', (2585, 2610), True, 'import albumentations as A\n'), ((2893, 2907), 'albumentations.MotionBlur', 'A.MotionBlur', ([], {}), '()\n', (2905, 2907), True, 'import albumentations as A\n'), ((3172, 3227), 'albumentations.KeypointParams', 'A.KeypointParams', ([], {'format': '"""xyas"""', 'remove_invisible': '(False)'}), "(format='xyas', remove_invisible=False)\n", (3188, 3227), True, 'import albumentations as A\n'), ((4910, 4964), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(5)', '(masks.shape[-1] + 2)'], {'figsize': '(15, 15)'}), '(5, masks.shape[-1] + 2, figsize=(15, 15))\n', (4922, 4964), True, 'import matplotlib.pyplot as plt\n'), ((5678, 5714), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(5)', '(3)'], {'figsize': '(15, 15)'}), '(5, 3, figsize=(15, 15))\n', (5690, 5714), True, 'import matplotlib.pyplot as plt\n'), ((7569, 7619), 'functools.partial', 'partial', (['add_sample_weights'], {'class_weights': '[1, 10]'}), '(add_sample_weights, class_weights=[1, 10])\n', (7576, 7619), False, 'from functools import partial\n'), ((7694, 7795), 'functools.partial', 'partial', (['set_shapes_with_sample_weight'], {'img_shape': '(*image_size, 3)', 'target_shape': '(*image_size, 1)'}), '(set_shapes_with_sample_weight, img_shape=(*image_size, 3),\n target_shape=(*image_size, 1))\n', (7701, 7795), False, 'from functools import partial\n'), ((7871, 7949), 'functools.partial', 'partial', (['set_shapes'], {'img_shape': '(*image_size, 3)', 'target_shape': '(*image_size, 1)'}), '(set_shapes, img_shape=(*image_size, 3), target_shape=(*image_size, 1))\n', (7878, 7949), False, 'from functools import partial\n'), ((753, 775), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {}), '()\n', (773, 775), True, 'import 
albumentations as A\n'), ((793, 805), 'albumentations.RGBShift', 'A.RGBShift', ([], {}), '()\n', (803, 805), True, 'import albumentations as A\n'), ((823, 841), 'albumentations.ChannelShuffle', 'A.ChannelShuffle', ([], {}), '()\n', (839, 841), True, 'import albumentations as A\n'), ((1823, 1850), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.8)'}), '(p=0.8)\n', (1843, 1850), True, 'import albumentations as A\n'), ((1868, 1885), 'albumentations.RGBShift', 'A.RGBShift', ([], {'p': '(0.8)'}), '(p=0.8)\n', (1878, 1885), True, 'import albumentations as A\n'), ((1903, 1926), 'albumentations.ChannelShuffle', 'A.ChannelShuffle', ([], {'p': '(0.8)'}), '(p=0.8)\n', (1919, 1926), True, 'import albumentations as A\n'), ((1944, 1967), 'albumentations.ChannelDropout', 'A.ChannelDropout', ([], {'p': '(0.8)'}), '(p=0.8)\n', (1960, 1967), True, 'import albumentations as A\n'), ((1985, 1999), 'albumentations.CLAHE', 'A.CLAHE', ([], {'p': '(0.8)'}), '(p=0.8)\n', (1992, 1999), True, 'import albumentations as A\n'), ((2017, 2034), 'albumentations.ISONoise', 'A.ISONoise', ([], {'p': '(0.8)'}), '(p=0.8)\n', (2027, 2034), True, 'import albumentations as A\n'), ((2097, 2115), 'albumentations.InvertImg', 'A.InvertImg', ([], {'p': '(0.8)'}), '(p=0.8)\n', (2108, 2115), True, 'import albumentations as A\n'), ((2694, 2719), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(1)'}), '(p=1)\n', (2714, 2719), True, 'import albumentations as A\n'), ((2737, 2752), 'albumentations.RGBShift', 'A.RGBShift', ([], {'p': '(1)'}), '(p=1)\n', (2747, 2752), True, 'import albumentations as A\n'), ((2770, 2791), 'albumentations.ChannelShuffle', 'A.ChannelShuffle', ([], {'p': '(1)'}), '(p=1)\n', (2786, 2791), True, 'import albumentations as A\n'), ((2809, 2840), 'albumentations.RandomBrightnessContrast', 'A.RandomBrightnessContrast', ([], {'p': '(1)'}), '(p=1)\n', (2835, 2840), True, 'import albumentations as A\n')]
|
# aoc.py
from typing import List
from itertools import tee, islice
from collections import deque
def input_as_string(filename:str) -> str:
    """Read the whole input file and return it minus any trailing newlines."""
    with open(filename, encoding="utf-8") as handle:
        contents = handle.read()
    return contents.rstrip("\n")
def input_as_lines(filename:str) -> List[str]:
    """Return the input file as a list of lines (trailing newlines stripped)."""
    contents = input_as_string(filename)
    return contents.split("\n")
def input_as_ints(filename:str) -> List[int]:
    """Return each line of the input file converted to an integer.

    The lines returned by ``input_as_lines`` are already newline-free (the
    file is split on newlines), so the old per-line ``rstrip`` was redundant;
    ``int`` also tolerates surrounding whitespace.  A comprehension replaces
    the lambda-assignment + ``map`` anti-pattern.
    """
    return [int(line) for line in input_as_lines(filename)]
def pairwise(iterable):
    """Yield consecutive overlapping pairs: s -> (s0,s1), (s1,s2), (s2,s3), ..."""
    leading, trailing = tee(iterable)
    # Advance the second iterator one step so the two run offset by one.
    next(trailing, None)
    return zip(leading, trailing)
def sliding_window_iter(iterable, size):
    """Yield tuples forming a sliding window of ``size`` over ``iterable``.

    If the iterable holds fewer than ``size`` items (but at least one), a
    single shorter tuple is yielded; an empty iterable yields nothing.
    """
    it = iter(iterable)
    window = deque(islice(it, size), maxlen=size)
    if window:
        yield tuple(window)
    for element in it:
        window.append(element)
        yield tuple(window)
|
[
"itertools.tee",
"itertools.islice"
] |
[((854, 867), 'itertools.tee', 'tee', (['iterable'], {}), '(iterable)\n', (857, 867), False, 'from itertools import tee, islice\n'), ((1027, 1049), 'itertools.islice', 'islice', (['iterable', 'size'], {}), '(iterable, size)\n', (1033, 1049), False, 'from itertools import tee, islice\n')]
|
#plots.py
import os
import pandas
import numpy as np
import matplotlib.pyplot as plt
#plots.py
# . . .
def plot_lines(df, linewidth = 1, figsize = (40,20),secondary_y = None, legend=True, pp = None, save_fig = False):
    """Plot every column of ``df`` as a line against the index.

    Args:
        df: DataFrame whose columns are plotted.
        linewidth: width of the plotted lines.
        figsize: figure size in inches.
        secondary_y: column name(s) to plot on a secondary y-axis.
        legend: whether to draw the legend.
        pp: optional PdfPages object; the figure is appended to it when given.
        save_fig: when True, save the figure under ``plots/``.
    """
    fig, ax = plt.subplots(figsize = figsize)
    # Drop rows that are entirely NaN, then plot all variables at once
    df.dropna(axis=0, how = "all").plot.line(linewidth = linewidth, ax = ax, secondary_y=secondary_y, legend = legend)
    # Turn the text on the x-axis so that it reads vertically
    ax.tick_params(axis='x', rotation=90)
    # Get rid of tick lines perpendicular to the axis for aesthetic
    ax.tick_params('both', length=0, which='both')
    # transform y-axis values from sci notation to rounded decimals
    vals = ax.get_yticks()
    ax.set_yticklabels([round(x,2) for x in vals])
    # Build an image filename from the column names, stripping characters
    # that are awkward or illegal in file names.
    remove_chars = "[]:$'\\"
    filename = str(list(df.keys()))
    for char in remove_chars:
        filename = filename.replace(char, "")
    if save_fig:
        # Fix: previously ``os.mkdir`` wrapped in a bare ``except: pass``,
        # which silently swallowed real errors (e.g. permission problems).
        os.makedirs("plots", exist_ok=True)
        plt.savefig("plots/" + filename[:50] + " line.png",
                    bbox_inches = "tight")
    # save image if PdfPages object was passed
    if pp != None: pp.savefig(fig, bbox_inches = "tight")
def plot_scatter(data, s = 75, figsize = (40, 20), save_fig = False, pp = None):
    """Scatter-plot every ordered pair of distinct columns in ``data``.

    Point color encodes the year, taken from the first four characters of the
    index values.

    Args:
        data: DataFrame to plot (copied; the caller's frame is not mutated).
        s: marker size.
        figsize: figure size in inches.
        save_fig: when True, save each figure under ``plots/``.
        pp: optional PdfPages object; each figure is appended to it when given.
    """
    # Work on a copy so adding the helper "Year" column never mutates the input.
    df = data.copy()
    # Create plot for every unique pair of variables
    for var1 in df:
        for var2 in df:
            if var1 != var2:
                fig, ax = plt.subplots(figsize = figsize)
                # Create list of years from index
                # Year will be represented by color
                if "Year" not in df.keys():
                    df["Year"] = [int(str(ind)[:4]) for ind in df.index]
                df.plot.scatter(x = var1, y = var2, s = s, ax = ax,
                                c = "Year", cmap = "viridis")
                # Turn the text on the x-axis so that it reads vertically
                ax.tick_params(axis='x', rotation=90)
                # Get rid of tick lines perpendicular to the axis for aesthetic
                ax.tick_params('both', length=0, which='both')
                if save_fig:
                    # Fix: previously ``os.mkdir`` wrapped in a bare
                    # ``except: pass``; makedirs(exist_ok=True) only ignores
                    # the directory already existing.
                    os.makedirs("plots", exist_ok=True)
                    plt.savefig("plots/" + str(list(df.keys())).replace("[", "").replace("]","")[:40] + " scatter.png",
                                bbox_inches = "tight")
                # save image if PdfPages object was passed
                if pp != None: pp.savefig(fig, bbox_inches = "tight")
def corr_matrix_heatmap(df, save_fig = False, pp = None, title = "Correlation"):
    """Visualize a correlation matrix as a blue/red heatmap with cell labels.

    Args:
        df: square DataFrame of correlation values in [-1, 1].
        save_fig: when True, save the figure under ``plots/``.
        pp: optional PdfPages object; the figure is appended to it when given.
        title: plot title.
    """
    #Create a figure to visualize a corr matrix
    fig, ax = plt.subplots(figsize=(20,20))
    # use ax.imshow() to create a heatmap of correlation values
    # seismic mapping shows negative values as blue and positive values as red
    im = ax.imshow(df, norm = plt.cm.colors.Normalize(-1,1), cmap = "seismic")
    # create a list of labels, stacking each word in a label by replacing " "
    # with "\n"
    labels = df.keys()
    num_vars = len(labels)
    tick_labels = [lab.replace(" ", "\n") for lab in labels]
    # adjust font size according to the number of variables visualized
    tick_font_size = 120 / num_vars
    val_font_size = 200 / num_vars
    plt.rcParams.update({'font.size': tick_font_size})
    # prepare space for label of each column
    x_ticks = np.arange(num_vars)
    # select labels and rotate them 90 degrees so that they are vertical
    plt.xticks(x_ticks, tick_labels, fontsize = tick_font_size, rotation = 90)
    # prepare space for label of each row
    y_ticks = np.arange(len(labels))
    # select labels
    plt.yticks(y_ticks, tick_labels, fontsize = tick_font_size)
    # show values in each tile of the heatmap
    # NOTE(review): ``values[i][j]`` at position (x=i, y=j) is the transpose
    # of what imshow displays; harmless only because a correlation matrix is
    # symmetric.
    for i in range(len(labels)):
        for j in range(len(labels)):
            # The returned Text object is unused, so it is not kept.
            ax.text(i, j, str(round(df.values[i][j],2)),
                    fontsize= val_font_size, ha="center",
                    va="center", color = "w")
    #Create title with Times New Roman Font
    title_font = {"fontname":"Times New Roman"}
    plt.title(title, fontsize = 50, **title_font)
    #Call scale to show value of colors
    cbar = fig.colorbar(im)
    plt.show()
    if save_fig:
        # Fix: previously ``os.mkdir`` wrapped in a bare ``except: pass``,
        # which silently swallowed real errors.
        os.makedirs("plots", exist_ok=True)
        plt.savefig("plots/" + str(list(df.keys())).replace("[", "").replace("]","")[:40] + " corrMatrix.png",
                    bbox_inches = "tight")
    if pp != None: pp.savefig(fig, bbox_inches="tight")
    plt.close()
def plot_stacked_lines(df, plot_vars, linewidth = 1,
                       figsize = (40, 20),
                       pp = None, total_var = False,
                       title = False):
    """Draw a stacked area chart of ``plot_vars``, optionally overlaying a
    dashed black line for ``total_var`` and adding a title."""
    fig, ax = plt.subplots(figsize = figsize)
    # Stacked area chart of the selected columns.
    df[plot_vars].plot.area(stacked = True, linewidth = linewidth, ax = ax)
    if total_var != False:
        # Overlay the total as a dashed black line for comparison.
        df[total_var].plot.line(linewidth = linewidth, ax = ax, c = "k",
                                label = total_var, ls = "--")
    # Legend in the top-left corner, formatted as two columns of names.
    ax.legend(loc = 2, ncol = 2)
    if title != False:
        plt.title(title)
|
[
"matplotlib.pyplot.title",
"os.mkdir",
"matplotlib.pyplot.show",
"matplotlib.pyplot.close",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.rcParams.update",
"numpy.arange",
"matplotlib.pyplot.cm.colors.Normalize",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.savefig"
] |
[((239, 268), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (251, 268), True, 'import matplotlib.pyplot as plt\n'), ((2829, 2859), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(20, 20)'}), '(figsize=(20, 20))\n', (2841, 2859), True, 'import matplotlib.pyplot as plt\n'), ((3456, 3506), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'font.size': tick_font_size}"], {}), "({'font.size': tick_font_size})\n", (3475, 3506), True, 'import matplotlib.pyplot as plt\n'), ((3569, 3588), 'numpy.arange', 'np.arange', (['num_vars'], {}), '(num_vars)\n', (3578, 3588), True, 'import numpy as np\n'), ((3670, 3740), 'matplotlib.pyplot.xticks', 'plt.xticks', (['x_ticks', 'tick_labels'], {'fontsize': 'tick_font_size', 'rotation': '(90)'}), '(x_ticks, tick_labels, fontsize=tick_font_size, rotation=90)\n', (3680, 3740), True, 'import matplotlib.pyplot as plt\n'), ((3856, 3913), 'matplotlib.pyplot.yticks', 'plt.yticks', (['y_ticks', 'tick_labels'], {'fontsize': 'tick_font_size'}), '(y_ticks, tick_labels, fontsize=tick_font_size)\n', (3866, 3913), True, 'import matplotlib.pyplot as plt\n'), ((4329, 4372), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(50)'}), '(title, fontsize=50, **title_font)\n', (4338, 4372), True, 'import matplotlib.pyplot as plt\n'), ((4450, 4460), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4458, 4460), True, 'import matplotlib.pyplot as plt\n'), ((4765, 4776), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4774, 4776), True, 'import matplotlib.pyplot as plt\n'), ((4980, 5009), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (4992, 5009), True, 'import matplotlib.pyplot as plt\n'), ((1093, 1165), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('plots/' + filename[:50] + ' line.png')"], {'bbox_inches': '"""tight"""'}), "('plots/' + filename[:50] + ' line.png', bbox_inches='tight')\n", 
(1104, 1165), True, 'import matplotlib.pyplot as plt\n'), ((5523, 5539), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (5532, 5539), True, 'import matplotlib.pyplot as plt\n'), ((1034, 1051), 'os.mkdir', 'os.mkdir', (['"""plots"""'], {}), "('plots')\n", (1042, 1051), False, 'import os\n'), ((3038, 3068), 'matplotlib.pyplot.cm.colors.Normalize', 'plt.cm.colors.Normalize', (['(-1)', '(1)'], {}), '(-1, 1)\n', (3061, 3068), True, 'import matplotlib.pyplot as plt\n'), ((4503, 4520), 'os.mkdir', 'os.mkdir', (['"""plots"""'], {}), "('plots')\n", (4511, 4520), False, 'import os\n'), ((1570, 1599), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (1582, 1599), True, 'import matplotlib.pyplot as plt\n'), ((2361, 2378), 'os.mkdir', 'os.mkdir', (['"""plots"""'], {}), "('plots')\n", (2369, 2378), False, 'import os\n')]
|
from django.db import models
# Create your models here.
class Switch(models.Model):
    """A mechanical keyboard switch and its physical characteristics."""
    # Manufacturer and product name of the switch.
    brand = models.CharField(max_length=50)
    model = models.CharField(max_length=50)
    # Switch category (presumably linear/tactile/clicky -- confirm); optional.
    # NOTE(review): the field name shadows the ``type`` builtin.
    type = models.CharField(max_length=50, null=True)
    # Travel distances; units are not stated here -- presumably millimetres.
    actuation_distance = models.DecimalField(max_digits=5, decimal_places=2)
    bottom_distance = models.DecimalField(max_digits=5, decimal_places=2)
    # Actuation/bottom-out forces; units not stated -- presumably gram-force.
    operating_force = models.IntegerField()
    bottom_force = models.IntegerField()
    # Mounting style of the switch.
    mount = models.CharField(max_length=50)
    # Optional product image and a credit for where it came from.
    image = models.FileField(upload_to='', null=True)
    image_source = models.CharField(max_length=50, null=True)
    class Meta:
        # Avoid the default auto-plural "Switchs" in the admin.
        verbose_name_plural = "Switches"
    def __str__(self):
        return f"Switch({self.brand}, {self.model})"
    def __repr__(self):
        return f"Switch({self.brand}, {self.model})"
|
[
"django.db.models.CharField",
"django.db.models.DecimalField",
"django.db.models.IntegerField",
"django.db.models.FileField"
] |
[((98, 129), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (114, 129), False, 'from django.db import models\n'), ((142, 173), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (158, 173), False, 'from django.db import models\n'), ((186, 228), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (202, 228), False, 'from django.db import models\n'), ((255, 306), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (274, 306), False, 'from django.db import models\n'), ((329, 380), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (348, 380), False, 'from django.db import models\n'), ((404, 425), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (423, 425), False, 'from django.db import models\n'), ((445, 466), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (464, 466), False, 'from django.db import models\n'), ((480, 511), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (496, 511), False, 'from django.db import models\n'), ((525, 566), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '""""""', 'null': '(True)'}), "(upload_to='', null=True)\n", (541, 566), False, 'from django.db import models\n'), ((586, 628), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (602, 628), False, 'from django.db import models\n')]
|
import numpy as np
import pytest
import astropy
import astropy.units as u
from astropy.tests.helper import quantity_allclose, assert_quantity_allclose
from astropy.coordinates import (SkyCoord, get_body_barycentric, Angle,
ConvertError, Longitude, CartesianRepresentation,
get_body_barycentric_posvel,
CartesianDifferential, SphericalDifferential)
# Versions of Astropy that do not have HeliocentricMeanEcliptic have the same frame
# with the misleading name HeliocentricTrueEcliptic
try:
from astropy.coordinates import HeliocentricMeanEcliptic
except ImportError:
from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic
from astropy.time import Time
from sunpy.coordinates import (Helioprojective, HeliographicStonyhurst,
HeliographicCarrington, Heliocentric,
HeliocentricEarthEcliptic, GeocentricSolarEcliptic,
HeliocentricInertial, GeocentricEarthEquatorial,
get_earth)
from sunpy.coordinates import sun
from sunpy.coordinates.frames import _J2000
from sunpy.coordinates.transformations import transform_with_sun_center
from sunpy.time import parse_time
def test_hcc_to_hgs():
    """A Heliocentric point on the observer's line of sight must land on the
    observer's latitude/longitude when transformed to HeliographicStonyhurst.
    """
    observer_lat = 10 * u.deg
    observer_lon = 20 * u.deg
    observer = HeliographicStonyhurst(lat=observer_lat, lon=observer_lon)
    toward_observer = Heliocentric(x=0*u.km, y=0*u.km, z=1*u.km, observer=observer)
    result = toward_observer.transform_to(HeliographicStonyhurst)
    assert_quantity_allclose(result.lat, observer_lat)
    assert_quantity_allclose(result.lon, observer_lon)
def test_hpc_hpc():
    """Transform Helioprojective -> Helioprojective between two observers and
    check the resulting parallax angle against a hand-computed expectation."""
    # Use some unphysical values for solar parameters for testing, to make it
    # easier to calculate expected results.
    rsun = 1*u.m
    D0 = 1*u.km
    L0 = 1*u.deg
    observer_in = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    observer_out = HeliographicStonyhurst(lat=0*u.deg, lon=L0, radius=D0)
    hpc_in = Helioprojective(0*u.arcsec, 0*u.arcsec, rsun=rsun, observer=observer_in)
    hpc_out = Helioprojective(observer=observer_out, rsun=rsun)
    hpc_new = hpc_in.transform_to(hpc_out)
    # The output frame's observer must be carried through the transform.
    assert hpc_new.observer == hpc_out.observer
    # Calculate the distance subtended by an angle of L0 from the centre of the
    # Sun.
    dd = -1 * rsun * np.tan(L0)
    # Calculate the angle corresponding to that distance as seen by the new
    # observer.
    theta = np.arctan2(dd, (D0 - rsun))
    assert quantity_allclose(theta, hpc_new.Tx, rtol=1e-3)
def test_hpc_hpc_sc():
    """A SkyCoord-wrapped HPC coordinate must carry the destination frame's
    observer through a frame-to-frame transformation."""
    # Unphysical solar parameters keep the expected geometry trivial.
    rsun = 1*u.m
    D0 = 1*u.km
    L0 = 1*u.deg
    observer_in = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    observer_out = HeliographicStonyhurst(lat=0*u.deg, lon=L0, radius=D0)
    sc_in = SkyCoord(0*u.arcsec, 0*u.arcsec, rsun=rsun, observer=observer_in,
                     frame='helioprojective')
    hpc_out = Helioprojective(observer=observer_out, rsun=rsun)
    transformed = sc_in.transform_to(hpc_out)
    for attr in ('lat', 'lon', 'radius'):
        assert getattr(transformed.observer, attr) == getattr(hpc_out.observer, attr)
def test_hpc_hpc_null():
    """Transforming between two default Helioprojective frames must behave as
    an identity: a new object with the same Tx/Ty and the target observer."""
    source = Helioprojective(0*u.arcsec, 0*u.arcsec)
    destination = Helioprojective()
    result = source.transform_to(destination)
    assert result is not source
    assert quantity_allclose(result.Tx, source.Tx)
    assert quantity_allclose(result.Ty, source.Ty)
    assert destination.observer == result.observer
def test_hcrs_hgs():
    """Transform the Earth's HCRS position to HeliographicStonyhurst and check
    it lands at ~zero longitude, small latitude, and roughly 1 AU."""
    # Get the current Earth location in HCRS
    adate = parse_time('2015/05/01 01:13:00')
    earth_hcrs = SkyCoord(get_body_barycentric('earth', adate), frame='icrs', obstime=adate).hcrs
    # Convert from HCRS to HGS
    earth_hgs = earth_hcrs.transform_to(HeliographicStonyhurst)
    # The HGS longitude of the Earth should be zero within numerical error
    # Due to an issue with wrapping at +-360, we shift it to pass the test.
    assert quantity_allclose((earth_hgs.lon+1*u.deg) % (360*u.deg), 1*u.deg, atol=1e-12*u.deg)
    # The HGS latitude and radius should be within valid ranges
    # (the tolerances presumably bound the solar B0 angle and the Earth-Sun
    # distance variation -- confirm against solar ephemeris references)
    assert quantity_allclose(earth_hgs.lat, 0*u.deg, atol=7.3*u.deg)
    assert quantity_allclose(earth_hgs.radius, 1*u.AU, atol=0.017*u.AU)
def test_hcrs_hgs_array_obstime():
    """A single transform with an array obstime must agree with per-element
    transforms using scalar obstimes."""
    times = Time(['2017-01-01', '2017-06-01'])
    earth_hcrs = SkyCoord(get_body_barycentric('earth', times), frame='icrs', obstime=times).hcrs
    # One call covering both times (array obstime) ...
    earth_hgs = earth_hcrs.transform_to(HeliographicStonyhurst)
    # ... versus one call per time (scalar obstime).
    for idx in range(2):
        scalar_result = earth_hcrs[idx].transform_to(HeliographicStonyhurst)
        assert quantity_allclose(scalar_result.lon, earth_hgs[idx].lon, atol=1e-12*u.deg)
        assert quantity_allclose(scalar_result.lat, earth_hgs[idx].lat, rtol=1e-10)
        assert quantity_allclose(scalar_result.radius, earth_hgs[idx].radius, rtol=1e-10)
def test_hgs_hcrs():
    """Validate HGS -> HCRS against published Astronomical Almanac (2013)
    values by transforming on to HeliocentricMeanEcliptic."""
    # This test checks the HGS->HCRS transformation by transforming from HGS to
    # HeliocentricMeanEcliptic (HME).  It will fail if there are errors in Astropy's
    # HCRS->ICRS or ICRS->HME transformations.
    # Use published HGS coordinates in the Astronomical Almanac (2013), pages C6-C7
    obstime = Time('2013-01-28')
    earth_hgs = SkyCoord(0*u.deg, -5.73*u.deg, 0.9848139*u.AU, frame=HeliographicStonyhurst,
                         obstime=obstime)
    # Transform to HME at observation-time equinox
    earth_hme = earth_hgs.transform_to(HeliocentricMeanEcliptic(equinox=obstime))
    # Validate against published values from the Astronomical Almanac (2013), page C6 per page E2
    # The dominant source of inaccuracy is the limited precision of the published B0 used above
    assert quantity_allclose(earth_hme.lon, Angle('308d13m30.51s') - 180*u.deg, atol=5*u.arcsec)
    assert quantity_allclose(earth_hme.lat, -Angle('-0.27s'), atol=10*u.arcsec)
    assert quantity_allclose(earth_hme.distance, 0.9848139*u.AU, atol=5e-7*u.AU)
def test_hgs_hgc_roundtrip():
    """HGS -> HGC shifts longitude by L0(obstime); the round trip is lossless."""
    obstime = "2011-01-01"
    start = HeliographicStonyhurst(lat=10*u.deg, lon=20*u.deg, obstime=obstime)
    # Forward leg: Stonyhurst to Carrington.
    carrington = start.transform_to(HeliographicCarrington(obstime=obstime))
    assert_quantity_allclose(start.lat, carrington.lat)
    assert_quantity_allclose(start.lon + sun.L0(obstime), carrington.lon)
    # Return leg: Carrington back to Stonyhurst.
    round_tripped = carrington.transform_to(HeliographicStonyhurst(obstime=obstime))
    assert_quantity_allclose(round_tripped.lat, start.lat)
    assert_quantity_allclose(round_tripped.lon, start.lon)
def test_hgs_cartesian_rep_to_hpc():
    """HGS -> HPC must give the same answer whether the source coordinate uses
    a Cartesian or a spherical representation."""
    obstime = "2011-01-01"
    cart = SkyCoord(x=1*u.km, y=0.*u.km, z=0.*u.km,
                    frame=HeliographicStonyhurst(obstime=obstime),
                    representation_type='cartesian')
    # Same coordinate, re-expressed spherically.
    sph = cart.copy()
    sph.representation_type = 'spherical'
    target = Helioprojective(observer='earth', obstime=obstime)
    from_cart = cart.transform_to(target)
    from_sph = sph.transform_to(target)
    assert_quantity_allclose(from_cart.Tx, from_sph.Tx)
    assert_quantity_allclose(from_cart.Ty, from_sph.Ty)
    assert_quantity_allclose(from_cart.distance, from_sph.distance)
def test_hgs_cartesian_rep_to_hcc():
    """HGS -> HCC must give the same answer whether the source coordinate uses
    a Cartesian or a spherical representation."""
    obstime = "2011-01-01"
    cart = SkyCoord(x=1*u.km, y=0.*u.km, z=0.*u.km,
                    frame=HeliographicStonyhurst(obstime=obstime),
                    representation_type='cartesian')
    # Same coordinate, re-expressed spherically.
    sph = cart.copy()
    sph.representation_type = 'spherical'
    target = Heliocentric(observer='earth', obstime=obstime)
    from_cart = cart.transform_to(target)
    from_sph = sph.transform_to(target)
    assert_quantity_allclose(from_cart.x, from_sph.x)
    assert_quantity_allclose(from_cart.y, from_sph.y)
    assert_quantity_allclose(from_cart.z, from_sph.z)
def test_hgs_cartesian_rep_to_hgc():
    """HGS -> HGC must give the same answer whether the source coordinate uses
    a Cartesian or a spherical representation."""
    obstime = "2011-01-01"
    cart = SkyCoord(x=1*u.km, y=0.*u.km, z=0.*u.km,
                    frame=HeliographicStonyhurst(obstime=obstime),
                    representation_type='cartesian')
    # Same coordinate, re-expressed spherically.
    sph = cart.copy()
    sph.representation_type = 'spherical'
    from_cart = cart.transform_to(HeliographicCarrington(obstime=obstime))
    from_sph = sph.transform_to(HeliographicCarrington(obstime=obstime))
    assert_quantity_allclose(from_cart.lat, from_sph.lat)
    assert_quantity_allclose(from_cart.lon, from_sph.lon)
    assert_quantity_allclose(from_cart.radius, from_sph.radius)
def test_hcc_to_hpc_different_observer():
    """Direct HCC -> HPC with differing observers must match the indirect
    route through HeliographicStonyhurst."""
    # Unphysical solar parameters keep the setup simple.
    rsun = 1*u.m
    D0 = 1*u.km
    L0 = 1*u.deg
    observer_1 = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    observer_2 = HeliographicStonyhurst(lat=0*u.deg, lon=L0, radius=D0)
    hcc_frame = Heliocentric(observer=observer_1)
    hpc_frame = Helioprojective(observer=observer_2)
    start = SkyCoord(x=rsun, y=rsun, z=rsun, frame=hcc_frame)
    direct = start.transform_to(hpc_frame)
    via_hgs = start.transform_to(HeliographicStonyhurst).transform_to(hpc_frame)
    assert_quantity_allclose(direct.Tx, via_hgs.Tx)
    assert_quantity_allclose(direct.Ty, via_hgs.Ty)
    assert_quantity_allclose(direct.distance, via_hgs.distance)
def test_hpc_to_hcc_different_observer():
    """HPC->HCC with different observers must agree with routing through HGS."""
    rsun = 1*u.m
    D0 = 1*u.km
    L0 = 1*u.deg
    obs_hcc = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    obs_hpc = HeliographicStonyhurst(lat=0*u.deg, lon=L0, radius=D0)
    hcc_frame = Heliocentric(observer=obs_hcc)
    start = SkyCoord(Tx=0*u.arcsec, Ty=0*u.arcsec,
                     frame=Helioprojective(observer=obs_hpc, rsun=rsun))
    direct = start.transform_to(hcc_frame)
    via_hgs = start.transform_to(HeliographicStonyhurst).transform_to(hcc_frame)
    assert_quantity_allclose(direct.x, via_hgs.x)
    assert_quantity_allclose(direct.y, via_hgs.y)
    assert_quantity_allclose(direct.z, via_hgs.z)
def test_hcc_to_hpc_same_observer():
    """HCC->HPC with a shared observer must agree with routing through HGS."""
    rsun = 1*u.m
    D0 = 1*u.km
    obs = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    start = SkyCoord(x=rsun, y=rsun, z=rsun, frame=Heliocentric(observer=obs))
    hpc_frame = Helioprojective(observer=obs, rsun=rsun)
    direct = start.transform_to(hpc_frame)
    via_hgs = start.transform_to(HeliographicStonyhurst).transform_to(hpc_frame)
    for comp in ('Tx', 'Ty', 'distance'):
        assert_quantity_allclose(getattr(direct, comp), getattr(via_hgs, comp))
def test_hpc_to_hcc_same_observer():
    """HPC->HCC with a shared observer must agree with routing through HGS."""
    rsun = 1*u.m
    D0 = 1*u.km
    obs = HeliographicStonyhurst(lat=0*u.deg, lon=0*u.deg, radius=D0)
    hcc_frame = Heliocentric(observer=obs)
    start = SkyCoord(Tx=0*u.arcsec, Ty=0*u.arcsec,
                     frame=Helioprojective(observer=obs, rsun=rsun))
    direct = start.transform_to(hcc_frame)
    via_hgs = start.transform_to(HeliographicStonyhurst).transform_to(hcc_frame)
    for comp in ('x', 'y', 'z'):
        assert_quantity_allclose(getattr(direct, comp), getattr(via_hgs, comp))
def test_hpc_hcc_different_observer_radius():
    """HPC->HCC with an observer change at a different distance from the Sun."""
    obs1 = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU)
    obs2 = HeliographicStonyhurst(90*u.deg, 0*u.deg, 0.75*u.AU)
    start = Helioprojective(0*u.arcsec, 0*u.arcsec, 0.5*u.AU, observer=obs1)
    hcc = start.transform_to(Heliocentric(observer=obs2))
    # The point is halfway between the Sun and observer 1, so in the frame of
    # the 90-degree-rotated observer 2 it should sit at x = -0.5 AU
    assert_quantity_allclose(hcc.x, -0.5*u.AU)
    assert_quantity_allclose(hcc.y, 0*u.AU, atol=1e-10*u.AU)
    assert_quantity_allclose(hcc.z, 0*u.AU, atol=1e-10*u.AU)
def test_hgs_hgs():
    """HGS loopback: transform to the same frame one day later."""
    obstime = Time('2001-01-01')
    start = SkyCoord(90*u.deg, 10*u.deg, 1*u.AU,
                     frame=HeliographicStonyhurst(obstime=obstime))
    shifted = start.transform_to(HeliographicStonyhurst(obstime=obstime + 1*u.day))
    # Earth's orbital motion moves the HGS prime meridian by about 1 deg/day
    assert_quantity_allclose(shifted.lon, start.lon - 1*u.deg, atol=0.1*u.deg)
    assert_quantity_allclose(shifted.lat, start.lat, atol=1e-3*u.deg)
    assert_quantity_allclose(shifted.radius, start.radius, atol=1e-5*u.AU)
def test_hgc_hgc():
    """HGC loopback: transform to the same frame one day later.

    Longitude changes with solar rotation; latitude and radius change
    slightly because the coordinate stays fixed in inertial space while the
    frame (fixed to the center of the Sun) moves.
    """
    obstime = Time('2001-01-01')
    old = SkyCoord(90*u.deg, 10*u.deg, 1*u.AU, frame=HeliographicCarrington(obstime=obstime))
    new = old.transform_to(HeliographicCarrington(obstime=obstime + 1*u.day))
    assert_quantity_allclose(new.lon, 75.815607 * u.deg, atol=1e-7*u.deg)  # solar rotation
    # These are not equal to the old values, because the coordinates stay fixed
    # in inertial space, whilst the frame (fixed to the center of the Sun)
    # moves slightly.
    assert_quantity_allclose(new.lat, 9.999963 * u.deg, atol=1e-7*u.deg)
    assert_quantity_allclose(new.radius, 1.000009 * u.AU, atol=1e-7*u.AU)
def test_hcc_hcc():
    """HCC loopback: same observer / changing obstime, then changing observer
    / same obstime."""
    # Same observer, different obstime
    observer = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU, obstime='2001-02-01')
    start = Heliocentric(0.2*u.AU, 0.3*u.AU, 0.4*u.AU, observer=observer, obstime='2001-01-01')
    looped = start.transform_to(Heliocentric(observer=observer, obstime='2001-03-31'))
    # With the observer unchanged, the coordinates should be nearly (but not
    # exactly) equal: the origin (the Sun) moves slightly over the interval
    assert np.all(start.cartesian.xyz != looped.cartesian.xyz)
    assert_quantity_allclose(start.cartesian.xyz, looped.cartesian.xyz, rtol=2e-3)
    # Different observer, same obstime
    obs_a = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU, obstime='2001-01-01')
    obs_b = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU, obstime='2001-03-31')
    start = Heliocentric(0.2*u.AU, 0.3*u.AU, 0.4*u.AU, observer=obs_a, obstime='2001-02-01')
    rotated = start.transform_to(Heliocentric(observer=obs_b, obstime='2001-02-01'))
    # This observer change is approximately a 90-degree rotation about Y
    assert_quantity_allclose(rotated.x, -start.z, rtol=2e-3)
    assert_quantity_allclose(rotated.y, start.y, rtol=2e-3)
    assert_quantity_allclose(rotated.z, start.x, rtol=2e-3)
def test_hcc_hgs_observer_mismatch():
    """HCC->HGS must not depend on which obstime the observer coordinate
    itself is expressed in."""
    obs_early = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU, obstime='2001-01-01')
    obs_late = obs_early.transform_to(HeliographicStonyhurst(obstime='2001-03-31'))
    results = []
    for obs in (obs_early, obs_late):
        hcc = Heliocentric(0.2*u.AU, 0.3*u.AU, 0.4*u.AU,
                           observer=obs, obstime=obs_early.obstime)
        results.append(hcc.transform_to(HeliographicStonyhurst(obstime=hcc.obstime)))
    hgs1, hgs2 = results
    assert_quantity_allclose(hgs1.lon, hgs2.lon)
    assert_quantity_allclose(hgs1.lat, hgs2.lat)
    assert_quantity_allclose(hgs1.radius, hgs2.radius)
def test_hgs_hcc_observer_mismatch():
    """HGS->HCC must not depend on which obstime the observer coordinate
    itself is expressed in."""
    obs_early = HeliographicStonyhurst(0*u.deg, 0*u.deg, 1*u.AU, obstime='2001-01-01')
    obs_late = obs_early.transform_to(HeliographicStonyhurst(obstime='2001-03-31'))
    hgs = HeliographicStonyhurst(20*u.deg, 40*u.deg, 0.5*u.AU, obstime=obs_early.obstime)
    hcc1 = hgs.transform_to(Heliocentric(observer=obs_early, obstime=hgs.obstime))
    hcc2 = hgs.transform_to(Heliocentric(observer=obs_late, obstime=hgs.obstime))
    assert_quantity_allclose(hcc1.cartesian.xyz, hcc2.cartesian.xyz)
def test_hgs_hcrs_sunspice():
    """Compare our HGS->HCRS transformation against SunSPICE.

    Done by transforming beyond HCRS: "HEQ" is another name for HEEQ, which
    is equivalent to Heliographic Stonyhurst, and "HAE" is equivalent to
    Astropy's Heliocentric Mean Ecliptic (defaulting to equinox J2000.0).
    """
    # IDL> coord = [1.d, 0.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HEQ', 'HAE', /au, /degrees
    # IDL> print, coord
    # 1.0000000 -108.65371 10.642778
    old = SkyCoord(0*u.deg, 10*u.deg, 1*u.AU, frame=HeliographicStonyhurst(obstime='2019-06-01'))
    new = old.transform_to(HeliocentricMeanEcliptic)
    assert_quantity_allclose(new.lon, Longitude(-108.65371*u.deg), atol=0.1*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 10.642778*u.deg, atol=0.1*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, old.radius)
    # Transform to HAE precessed to the mean ecliptic of date instead of J2000.0
    # IDL> coord = [1.d, 0.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HEQ', 'HAE', /precess, /au, /degrees
    # IDL> print, coord
    # 1.0000000 -108.38240 10.640314
    new = old.transform_to(HeliocentricMeanEcliptic(equinox='2019-06-01'))
    assert_quantity_allclose(new.lon, Longitude(-108.38240*u.deg), atol=0.1*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 10.640314*u.deg, atol=0.1*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, old.radius)
def test_hgs_hgc_sunspice():
    """Compare our HGS->HGC transformation against SunSPICE.

    "HEQ" is another name for HEEQ, which is equivalent to Heliographic
    Stonyhurst.  SunSPICE's "Carrington" is offset by 0.076 degrees in
    longitude from our Heliographic Carrington (HGC) because "Carrington"
    does not include light travel time to the observer, while our HGC
    includes the light travel time to Earth (see Seidelmann et al. 2007).
    """
    # IDL> coord = [1.d, 0.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HEQ', 'Carrington', /au, /degrees
    # IDL> print, coord
    # 1.0000000 16.688242 10.000000
    old = SkyCoord(0*u.deg, 10*u.deg, 1*u.AU, frame=HeliographicStonyhurst(obstime='2019-06-01'))
    new = old.heliographic_carrington
    # The +0.076 deg accounts for the light-travel-time difference noted above
    assert_quantity_allclose(new.lon, 16.688242*u.deg + 0.076*u.deg, atol=1e-2*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, old.lat)
    assert_quantity_allclose(new.radius, old.radius)
def test_hgs_hcc_sunspice():
    """Compare our HGS->HCC transformation against SunSPICE.

    "HEQ" is another name for HEEQ, which is equivalent to Heliographic
    Stonyhurst.  "HGRTN" is equivalent to our Heliocentric frame, but with
    the axes permuted.  SunSPICE, like us, assumes an Earth observer if one
    is not explicitly specified.
    """
    # IDL> coord = [7d5, 8d5, 9d5]
    # IDL> convert_sunspice_coord, '2019-06-01', coord, 'HEQ', 'HGRTN'
    # Assuming Earth observation
    # IDL> print, coord
    # 688539.32 800000.00 908797.89
    old = SkyCoord(CartesianRepresentation([7e5, 8e5, 9e5]*u.km),
                   frame=HeliographicStonyhurst(obstime='2019-06-01'))
    new = old.transform_to(Heliocentric(observer='earth'))
    # Note the axis permutation relative to the HGRTN output above
    assert_quantity_allclose(new.x, 800000.00*u.km, atol=1e-2*u.km)
    assert_quantity_allclose(new.y, 908797.89*u.km, atol=1e-2*u.km)
    assert_quantity_allclose(new.z, 688539.32*u.km, atol=1e-2*u.km)
def test_hpc_hgs_implicit_hcc():
    """HPC->HGS must give the same answer whether the intermediate HCC step
    is implicit or explicit (at either frame's obstime)."""
    start = SkyCoord(0*u.arcsec, 0*u.arcsec, 0.5*u.AU,
                     frame=Helioprojective(obstime='2019-06-01', observer='earth'))
    frame = HeliographicStonyhurst(obstime='2019-12-01')
    implicit = start.transform_to(frame)
    for hcc_obstime in (start.obstime, frame.obstime):
        intermediate = start.transform_to(Heliocentric(obstime=hcc_obstime, observer='earth'))
        explicit = intermediate.transform_to(frame)
        assert_quantity_allclose(implicit.separation_3d(explicit), 0*u.AU, atol=1e-10*u.AU)
@pytest.mark.skipif(astropy.__version__ < '3.2.0', reason="Not supported by Astropy <3.2")
def test_velocity_hcrs_hgs():
    """Check velocity handling in the HCRS<->HGS transformation.

    Earth's velocity expressed in HGS should be essentially zero, and the
    loopback to ICRS should recover the original barycentric velocity.
    """
    # Obtain the position/velocity of Earth in ICRS
    obstime = Time(['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01'])
    pos, vel = get_body_barycentric_posvel('earth', obstime)
    loc = pos.with_differentials(vel.represent_as(CartesianDifferential))
    earth = SkyCoord(loc, frame='icrs', obstime=obstime)
    # The velocity of Earth in HGS should be very close to zero. The velocity in the HGS Y
    # direction is slightly further away from zero because there is true latitudinal motion.
    new = earth.heliographic_stonyhurst
    assert_quantity_allclose(new.velocity.d_x, 0*u.km/u.s, atol=1e-15*u.km/u.s)
    assert_quantity_allclose(new.velocity.d_y, 0*u.km/u.s, atol=1e-14*u.km/u.s)
    # Bug fix: the original asserted d_x twice and never checked d_z.
    # atol chosen loosely (1e-14); tighten to 1e-15 if it holds in practice.
    assert_quantity_allclose(new.velocity.d_z, 0*u.km/u.s, atol=1e-14*u.km/u.s)
    # Test the loopback to ICRS
    newer = new.icrs
    assert_quantity_allclose(newer.velocity.d_x, vel.x)
    assert_quantity_allclose(newer.velocity.d_y, vel.y)
    assert_quantity_allclose(newer.velocity.d_z, vel.z)
def test_velocity_hgs_hgc():
    """The velocity induced in HGC for an HGS-stationary point should be
    purely longitudinal, at roughly one rotation per mean synodic period."""
    # Construct a simple HGS coordinate with zero velocity
    obstime = Time(['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01'])
    pos = CartesianRepresentation(1, 0, 0)*u.AU
    vel = CartesianDifferential(0, 0, 0)*u.km/u.s
    loc = (pos.with_differentials(vel))._apply('repeat', obstime.size)
    coord = SkyCoord(HeliographicStonyhurst(loc, obstime=obstime))
    # The induced velocity in HGC should be entirely longitudinal, and approximately equal to one
    # full rotation every mean synodic period (27.2753 days)
    new = coord.heliographic_carrington
    new_vel = new.data.differentials['s'].represent_as(SphericalDifferential, new.data)
    # NOTE(review): the comment above says 27.2753 days but the code divides by
    # 27.27253 days; rtol=1e-2 hides the discrepancy -- confirm the intended value
    assert_quantity_allclose(new_vel.d_lon, -360*u.deg / (27.27253*u.day), rtol=1e-2)
    assert_quantity_allclose(new_vel.d_lat, 0*u.deg/u.s)
    assert_quantity_allclose(new_vel.d_distance, 0*u.km/u.s, atol=1e-7*u.km/u.s)
def test_hme_hee_sunspice():
    """Compare our HME->HEE transformation against SunSPICE.

    "HAE" is equivalent to Astropy's Heliocentric Mean Ecliptic, and
    defaults to equinox J2000.0.
    """
    # IDL> coord = [1.d, 0.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HAE', 'HEE', /au, /degrees
    # IDL> print, coord
    # 1.0000000 110.01610 10.000300
    old = SkyCoord(0*u.deg, 10*u.deg, 1*u.AU, frame=HeliocentricMeanEcliptic(obstime='2019-06-01'))
    new = old.transform_to(HeliocentricEarthEcliptic)
    assert_quantity_allclose(new.lon, Longitude(110.01610*u.deg), atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 10.000300*u.deg, atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, old.distance)
    # Transform from HAE precessed to the mean ecliptic of date instead of J2000.0
    # IDL> coord = [1.d, 0.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HAE', 'HEE', /au, /degrees, /precess
    # IDL> print, coord
    # 1.0000000 109.74535 10.000070
    old = SkyCoord(0*u.deg, 10*u.deg, 1*u.AU, frame=HeliocentricMeanEcliptic(obstime='2019-06-01',
                                                                            equinox='2019-06-01'))
    new = old.transform_to(HeliocentricEarthEcliptic)
    assert_quantity_allclose(new.lon, Longitude(109.74535*u.deg), atol=0.05*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 10.000070*u.deg, atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, old.distance)
def test_hee_hee():
    """HEE loopback: identity at the same obstime, then a 1-day shift."""
    obstime = Time('2001-01-01')
    start = SkyCoord(90*u.deg, 10*u.deg, 1*u.AU,
                     frame=HeliocentricEarthEcliptic(obstime=obstime))
    same = start.transform_to(HeliocentricEarthEcliptic)
    assert_quantity_allclose(same.lon, start.lon)
    assert_quantity_allclose(same.lat, start.lat)
    assert_quantity_allclose(same.distance, start.distance)
    shifted = start.transform_to(HeliocentricEarthEcliptic(obstime=obstime + 1*u.day))
    # Earth's orbital motion rotates the frame by about 1 degree per day
    assert_quantity_allclose(shifted.lon, start.lon - 1*u.deg, atol=0.1*u.deg)
    assert_quantity_allclose(shifted.lat, start.lat, atol=0.5*u.arcsec)
    assert_quantity_allclose(shifted.distance, start.distance, rtol=1e-5)
def test_hee_gse_sunspice():
    """Compare our HEE->GSE transformation against SunSPICE."""
    # IDL> coord = [0.7d, -20.d, 10.d]
    # IDL> convert_sunspice_coord, '2019-06-01', coord, 'HEE', 'GSE', /au, /degrees
    # IDL> print, coord
    # 0.45215884 32.777377 15.594639
    old = SkyCoord(-20*u.deg, 10*u.deg, 0.7*u.AU,
                   frame=HeliocentricEarthEcliptic(obstime='2019-06-01'))
    new = old.geocentricsolarecliptic
    assert_quantity_allclose(new.lon, 32.777377*u.deg, atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 15.594639*u.deg, atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, 0.45215884*u.AU)
def test_gse_gse():
    """GSE loopback: transforming to the same frame is the identity."""
    start = SkyCoord(90*u.deg, 10*u.deg, 0.7*u.AU,
                     frame=GeocentricSolarEcliptic(obstime='2001-01-01'))
    looped = start.transform_to(GeocentricSolarEcliptic)
    assert_quantity_allclose(looped.lon, start.lon)
    assert_quantity_allclose(looped.lat, start.lat)
    assert_quantity_allclose(looped.distance, start.distance)
def test_hgs_hci_sunspice():
    """Compare our HGS->HCI transformation against SunSPICE.

    "HEQ" is another name for HEEQ, which is equivalent to Heliographic
    Stonyhurst.
    """
    # IDL> coord = [1.d, 120.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HEQ', 'HCI', /au, /degrees
    # IDL> print, coord
    # 1.0000000 -65.736793 10.000000
    old = SkyCoord(120*u.deg, 10*u.deg, 1*u.AU, frame=HeliographicStonyhurst(obstime='2019-06-01'))
    new = old.transform_to(HeliocentricInertial)
    assert_quantity_allclose(new.lon, -65.736793*u.deg, atol=0.5*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, old.lat)
    assert_quantity_allclose(new.distance, old.radius)
def test_hci_hci():
    """HCI loopback: identity at the same obstime, then a 1-day shift."""
    obstime = Time('2001-01-01')
    start = SkyCoord(90*u.deg, 10*u.deg, 0.7*u.AU,
                     frame=HeliocentricInertial(obstime=obstime))
    same = start.transform_to(HeliocentricInertial)
    assert_quantity_allclose(same.lon, start.lon)
    assert_quantity_allclose(same.lat, start.lat)
    assert_quantity_allclose(same.distance, start.distance)
    shifted = start.transform_to(HeliocentricInertial(obstime=obstime + 1*u.day))
    # Small changes only, caused by Earth's motion over the day
    assert_quantity_allclose(shifted.lon, start.lon, atol=0.1*u.deg)
    assert_quantity_allclose(shifted.lat, start.lat, atol=1e-3*u.deg)
    assert_quantity_allclose(shifted.distance, start.distance, atol=1e-5*u.AU)
def test_hme_gei_sunspice():
    """Compare our HME->GEI transformation against SunSPICE.

    "HAE" is equivalent to Astropy's Heliocentric Mean Ecliptic, and
    defaults to equinox J2000.0.
    """
    # IDL> coord = [1.d, 120.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HAE', 'GEI', /au, /degrees
    # IDL> print, coord
    # 1.8197210 95.230617 28.830109
    old = SkyCoord(120*u.deg, 10*u.deg, 1*u.AU,
                   frame=HeliocentricMeanEcliptic(obstime='2019-06-01'))
    new = old.transform_to(GeocentricEarthEquatorial)
    assert_quantity_allclose(new.lon, Longitude(95.230617*u.deg), atol=0.01*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 28.830109*u.deg, atol=0.05*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, 1.8197210*u.AU)
    # Transform from HAE precessed to the mean ecliptic of date instead of J2000.0
    # IDL> coord = [1.d, 120.d, 10.d]
    # IDL> convert_sunspice_lonlat, '2019-06-01', coord, 'HAE', 'GEI', /au, /degrees, /precess
    # IDL> print, coord
    # 1.8217103 95.079030 28.827750
    old = SkyCoord(120*u.deg, 10*u.deg, 1*u.AU,
                   frame=HeliocentricMeanEcliptic(obstime='2019-06-01', equinox='2019-06-01'))
    new = old.transform_to(GeocentricEarthEquatorial(equinox=_J2000))
    assert_quantity_allclose(new.lon, Longitude(95.079030*u.deg), atol=0.05*u.arcsec, rtol=0)
    assert_quantity_allclose(new.lat, 28.827750*u.deg, atol=0.05*u.arcsec, rtol=0)
    assert_quantity_allclose(new.distance, 1.8217103*u.AU)
def test_gei_gei():
    """GEI loopback using the example from the 2017 revision of
    Franz & Harper (2002): J2000.0 equinox vs. equinox of date."""
    t = Time('1996-08-28 16:46:00', scale='tt')
    # Reference positions in units of Earth radii (6378.14 km)
    gei_j2000 = CartesianRepresentation([-5.7840451, -4.1082375, 1.9146822] * (6378.14*u.km))
    gei_d = CartesianRepresentation([-5.7864918, -4.1039136, 1.9165612] * (6378.14*u.km))
    old = SkyCoord(gei_j2000, frame=GeocentricEarthEquatorial(obstime=t))
    new = old.transform_to(GeocentricEarthEquatorial(equinox=t, obstime=t)).cartesian
    assert_quantity_allclose(new.xyz, gei_d.xyz)
def test_no_observer():
    """Transformations involving observer-based frames with no observer (or
    mismatched obstimes) must raise ConvertError; self-transformations via
    replicate_without_data() must succeed."""
    observer_frames = [Heliocentric(0*u.km, 0*u.km, 0*u.km, observer=None),
                       Heliocentric(0*u.km, 0*u.km, 0*u.km, observer=None, obstime='2001-01-01'),
                       Helioprojective(0*u.deg, 0*u.deg, observer=None),
                       Helioprojective(0*u.deg, 0*u.deg, observer=None, obstime='2001-01-01')]
    target_frames = observer_frames + [
        HeliographicStonyhurst(0*u.deg, 0*u.deg, obstime=None),
        HeliographicStonyhurst(0*u.deg, 0*u.deg, obstime='2001-01-01'),
        Heliocentric(0*u.km, 0*u.km, 0*u.km, observer=None, obstime='2012-12-12'),
        Heliocentric(0*u.km, 0*u.km, 0*u.km, observer="earth", obstime=None),
        Heliocentric(0*u.km, 0*u.km, 0*u.km, observer="earth", obstime='2001-01-01'),
        Helioprojective(0*u.deg, 0*u.deg, observer=None, obstime='2012-12-12'),
        Helioprojective(0*u.deg, 0*u.deg, observer="earth", obstime=None),
        Helioprojective(0*u.deg, 0*u.deg, observer="earth", obstime='2001-01-01')]
    # Self-transformations should succeed
    for frame in observer_frames:
        frame.transform_to(frame.replicate_without_data())
    # Every other pairing should raise, in both directions
    for index, source in enumerate(observer_frames):
        for target in target_frames[index + 1:]:
            with pytest.raises(ConvertError):
                source.transform_to(target)
            with pytest.raises(ConvertError):
                target.transform_to(source)
def test_array_obstime():
    """A coordinate with an array of obstimes can be transformed to a frame
    with no obstime, or with different obstimes."""
    start = SkyCoord([10]*2, [10]*2, unit=u.deg,
                     observer="earth",
                     obstime=["2019-01-01", "2019-01-02"],
                     frame="heliographic_carrington")
    no_obstime = start.transform_to(Helioprojective)
    assert isinstance(no_obstime.frame, Helioprojective)
    new_obstime = start.transform_to(Helioprojective(obstime=["2019-01-03", "2019-01-04"]))
    assert isinstance(new_obstime.frame, Helioprojective)
# Frame classes used to parametrize test_no_obstime_on_one_end below:
# end frames are drawn from _frameset1 only; start frames from both sets.
_frameset1 = [HeliographicStonyhurst, HeliographicCarrington, HeliocentricInertial]
_frameset2 = [Heliocentric, Helioprojective]
@pytest.mark.parametrize("start_class", _frameset1 + _frameset2)
@pytest.mark.parametrize("end_class", _frameset1)
def test_no_obstime_on_one_end(start_class, end_class):
    """Transforming to a bare frame class (no obstime) keeps the start obstime."""
    start_obstime = Time("2001-01-01")
    origin = CartesianRepresentation(0, 0, 0)*u.km
    # Observer-based start frames additionally need an observer
    extra = {'observer': "earth"} if hasattr(start_class, 'observer') else {}
    coord = start_class(origin, obstime=start_obstime, **extra)
    result = coord.transform_to(end_class)
    assert result.obstime == start_obstime
def test_transform_with_sun_center():
    """Inside transform_with_sun_center(), Sun center stays at Sun center,
    and other points keep their latitude and distance from Sun center."""
    sun_center = SkyCoord(0*u.deg, 0*u.deg, 0*u.AU,
                          frame=HeliographicStonyhurst(obstime="2001-01-01"))
    with transform_with_sun_center():
        result1 = sun_center.transform_to(HeliographicStonyhurst(obstime="2001-02-01"))
    # The coordinate should stay pointing at Sun center
    for comp in ('lon', 'lat', 'radius'):
        assert_quantity_allclose(getattr(result1, comp), getattr(sun_center, comp))
    other = SkyCoord(10*u.deg, 20*u.deg, 1*u.AU,
                     frame=HeliographicStonyhurst(obstime="2001-01-01"))
    with transform_with_sun_center():
        result2 = other.transform_to(HeliographicCarrington(obstime="2001-02-01"))
    # The coordinate should keep the same latitude and distance from Sun center
    assert_quantity_allclose(result2.lat, other.lat)
    assert_quantity_allclose(result2.radius, other.radius)
def test_transform_with_sun_center_reset():
    """The transform_with_sun_center() context manager must not leave any
    permanent change in transformation behavior."""
    sun_center = SkyCoord(0*u.deg, 0*u.deg, 0*u.AU,
                          frame=HeliographicStonyhurst(obstime="2001-01-01"))
    end_frame = HeliocentricInertial(obstime="2001-02-01")
    # Without the context manager, the coordinate should not point at Sun center
    before = sun_center.transform_to(end_frame)
    assert before.lon != sun_center.lon
    assert before.lat != sun_center.lat
    assert before.distance != sun_center.radius
    # Using the context manager, the coordinate should point at Sun center
    with transform_with_sun_center():
        inside = sun_center.transform_to(end_frame)
    assert_quantity_allclose(inside.lon, sun_center.lon)
    assert_quantity_allclose(inside.lat, sun_center.lat)
    assert_quantity_allclose(inside.distance, sun_center.radius)
    # After the context manager, behavior must match the first (pre-context) result
    after = sun_center.transform_to(end_frame)
    assert_quantity_allclose(after.lon, before.lon)
    assert_quantity_allclose(after.lat, before.lat)
    assert_quantity_allclose(after.distance, before.distance)
|
[
"astropy.coordinates.Longitude",
"numpy.arctan2",
"astropy.coordinates.get_body_barycentric_posvel",
"sunpy.coordinates.HeliocentricInertial",
"sunpy.coordinates.Heliocentric",
"pytest.mark.skipif",
"astropy.coordinates.CartesianRepresentation",
"sunpy.coordinates.transformations.transform_with_sun_center",
"pytest.mark.parametrize",
"astropy.coordinates.Angle",
"astropy.coordinates.HeliocentricTrueEcliptic",
"pytest.raises",
"numpy.tan",
"sunpy.coordinates.GeocentricEarthEquatorial",
"astropy.tests.helper.quantity_allclose",
"sunpy.time.parse_time",
"astropy.coordinates.get_body_barycentric",
"astropy.time.Time",
"astropy.tests.helper.assert_quantity_allclose",
"sunpy.coordinates.HeliocentricEarthEcliptic",
"sunpy.coordinates.GeocentricSolarEcliptic",
"sunpy.coordinates.sun.L0",
"numpy.all",
"astropy.coordinates.CartesianDifferential",
"sunpy.coordinates.HeliographicCarrington",
"sunpy.coordinates.Helioprojective",
"sunpy.coordinates.HeliographicStonyhurst",
"astropy.coordinates.SkyCoord"
] |
[((22357, 22451), 'pytest.mark.skipif', 'pytest.mark.skipif', (["(astropy.__version__ < '3.2.0')"], {'reason': '"""Not supported by Astropy <3.2"""'}), "(astropy.__version__ < '3.2.0', reason=\n 'Not supported by Astropy <3.2')\n", (22375, 22451), False, 'import pytest\n'), ((33417, 33480), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""start_class"""', '(_frameset1 + _frameset2)'], {}), "('start_class', _frameset1 + _frameset2)\n", (33440, 33480), False, 'import pytest\n'), ((33482, 33530), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""end_class"""', '_frameset1'], {}), "('end_class', _frameset1)\n", (33505, 33530), False, 'import pytest\n'), ((1595, 1635), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': 'lat', 'lon': 'lon'}), '(lat=lat, lon=lon)\n', (1617, 1635), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((1649, 1716), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'x': '(0 * u.km)', 'y': '(0 * u.km)', 'z': '(1 * u.km)', 'observer': 'observer'}), '(x=0 * u.km, y=0 * u.km, z=1 * u.km, observer=observer)\n', (1661, 1716), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((1774, 1816), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgs_out.lat', 'lat'], {}), '(hgs_out.lat, lat)\n', (1798, 1816), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((1821, 1863), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgs_out.lon', 'lon'], {}), '(hgs_out.lon, lon)\n', (1845, 1863), False, 'from astropy.tests.helper import 
quantity_allclose, assert_quantity_allclose\n'), ((2076, 2139), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (2098, 2139), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((2155, 2211), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': 'L0', 'radius': 'D0'}), '(lat=0 * u.deg, lon=L0, radius=D0)\n', (2177, 2211), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((2224, 2300), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.arcsec)', '(0 * u.arcsec)'], {'rsun': 'rsun', 'observer': 'observer_in'}), '(0 * u.arcsec, 0 * u.arcsec, rsun=rsun, observer=observer_in)\n', (2239, 2300), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((2311, 2360), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer_out', 'rsun': 'rsun'}), '(observer=observer_out, rsun=rsun)\n', (2326, 2360), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((2682, 2707), 'numpy.arctan2', 'np.arctan2', (['dd', '(D0 - rsun)'], {}), '(dd, D0 - rsun)\n', (2692, 2707), True, 'import numpy as np\n'), ((2722, 2770), 
'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['theta', 'hpc_new.Tx'], {'rtol': '(0.001)'}), '(theta, hpc_new.Tx, rtol=0.001)\n', (2739, 2770), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((2985, 3048), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (3007, 3048), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((3064, 3120), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': 'L0', 'radius': 'D0'}), '(lat=0 * u.deg, lon=L0, radius=D0)\n', (3086, 3120), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((3132, 3231), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(0 * u.arcsec)', '(0 * u.arcsec)'], {'rsun': 'rsun', 'observer': 'observer_in', 'frame': '"""helioprojective"""'}), "(0 * u.arcsec, 0 * u.arcsec, rsun=rsun, observer=observer_in, frame\n ='helioprojective')\n", (3140, 3231), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((3258, 3307), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer_out', 'rsun': 'rsun'}), '(observer=observer_out, rsun=rsun)\n', (3273, 3307), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, 
GeocentricEarthEquatorial, get_earth\n'), ((3566, 3609), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.arcsec)', '(0 * u.arcsec)'], {}), '(0 * u.arcsec, 0 * u.arcsec)\n', (3581, 3609), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((3620, 3637), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {}), '()\n', (3635, 3637), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((3727, 3767), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['hpc_new.Tx', 'hpc_in.Tx'], {}), '(hpc_new.Tx, hpc_in.Tx)\n', (3744, 3767), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((3779, 3819), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['hpc_new.Ty', 'hpc_in.Ty'], {}), '(hpc_new.Ty, hpc_in.Ty)\n', (3796, 3819), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((3948, 3981), 'sunpy.time.parse_time', 'parse_time', (['"""2015/05/01 01:13:00"""'], {}), "('2015/05/01 01:13:00')\n", (3958, 3981), False, 'from sunpy.time import parse_time\n'), ((4339, 4436), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['((earth_hgs.lon + 1 * u.deg) % (360 * u.deg))', '(1 * u.deg)'], {'atol': '(1e-12 * u.deg)'}), '((earth_hgs.lon + 1 * u.deg) % (360 * u.deg), 1 * u.deg,\n atol=1e-12 * u.deg)\n', (4356, 4436), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((4499, 4560), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs.lat', '(0 * u.deg)'], {'atol': '(7.3 * u.deg)'}), '(earth_hgs.lat, 0 * u.deg, atol=7.3 * 
u.deg)\n', (4516, 4560), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((4568, 4632), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs.radius', '(1 * u.AU)'], {'atol': '(0.017 * u.AU)'}), '(earth_hgs.radius, 1 * u.AU, atol=0.017 * u.AU)\n', (4585, 4632), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((4728, 4762), 'astropy.time.Time', 'Time', (["['2017-01-01', '2017-06-01']"], {}), "(['2017-01-01', '2017-06-01'])\n", (4732, 4762), False, 'from astropy.time import Time\n'), ((5266, 5338), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_0.lon', 'earth_hgs[0].lon'], {'atol': '(1e-12 * u.deg)'}), '(earth_hgs_0.lon, earth_hgs[0].lon, atol=1e-12 * u.deg)\n', (5283, 5338), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((5348, 5412), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_0.lat', 'earth_hgs[0].lat'], {'rtol': '(1e-10)'}), '(earth_hgs_0.lat, earth_hgs[0].lat, rtol=1e-10)\n', (5365, 5412), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((5424, 5494), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_0.radius', 'earth_hgs[0].radius'], {'rtol': '(1e-10)'}), '(earth_hgs_0.radius, earth_hgs[0].radius, rtol=1e-10)\n', (5441, 5494), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((5506, 5578), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_1.lon', 'earth_hgs[1].lon'], {'atol': '(1e-12 * u.deg)'}), '(earth_hgs_1.lon, earth_hgs[1].lon, atol=1e-12 * u.deg)\n', (5523, 5578), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((5588, 5652), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_1.lat', 'earth_hgs[1].lat'], {'rtol': '(1e-10)'}), '(earth_hgs_1.lat, 
earth_hgs[1].lat, rtol=1e-10)\n', (5605, 5652), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((5664, 5734), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hgs_1.radius', 'earth_hgs[1].radius'], {'rtol': '(1e-10)'}), '(earth_hgs_1.radius, earth_hgs[1].radius, rtol=1e-10)\n', (5681, 5734), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((6069, 6087), 'astropy.time.Time', 'Time', (['"""2013-01-28"""'], {}), "('2013-01-28')\n", (6073, 6087), False, 'from astropy.time import Time\n'), ((6104, 6208), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(0 * u.deg)', '(-5.73 * u.deg)', '(0.9848139 * u.AU)'], {'frame': 'HeliographicStonyhurst', 'obstime': 'obstime'}), '(0 * u.deg, -5.73 * u.deg, 0.9848139 * u.AU, frame=\n HeliographicStonyhurst, obstime=obstime)\n', (6112, 6208), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((6740, 6814), 'astropy.tests.helper.quantity_allclose', 'quantity_allclose', (['earth_hme.distance', '(0.9848139 * u.AU)'], {'atol': '(5e-07 * u.AU)'}), '(earth_hme.distance, 0.9848139 * u.AU, atol=5e-07 * u.AU)\n', (6757, 6814), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((6882, 6953), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(10 * u.deg)', 'lon': '(20 * u.deg)', 'obstime': 'obstime'}), '(lat=10 * u.deg, lon=20 * u.deg, obstime=obstime)\n', (6904, 6953), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((7028, 7075), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgsin.lat', 
'hgcout.lat'], {}), '(hgsin.lat, hgcout.lat)\n', (7052, 7075), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((7226, 7273), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgsout.lat', 'hgsin.lat'], {}), '(hgsout.lat, hgsin.lat)\n', (7250, 7273), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((7278, 7325), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgsout.lon', 'hgsin.lon'], {}), '(hgsout.lon, hgsin.lon)\n', (7302, 7325), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((7814, 7864), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': '"""earth"""', 'obstime': 'obstime'}), "(observer='earth', obstime=obstime)\n", (7829, 7864), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((8074, 8133), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_cart.Tx', 'hpccoord_sph.Tx'], {}), '(hpccoord_cart.Tx, hpccoord_sph.Tx)\n', (8098, 8133), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((8138, 8197), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_cart.Ty', 'hpccoord_sph.Ty'], {}), '(hpccoord_cart.Ty, hpccoord_sph.Ty)\n', (8162, 8197), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((8202, 8273), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_cart.distance', 'hpccoord_sph.distance'], {}), '(hpccoord_cart.distance, hpccoord_sph.distance)\n', (8226, 8273), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((8762, 8809), 
'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': '"""earth"""', 'obstime': 'obstime'}), "(observer='earth', obstime=obstime)\n", (8774, 8809), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((9019, 9076), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_cart.x', 'hcccoord_sph.x'], {}), '(hcccoord_cart.x, hcccoord_sph.x)\n', (9043, 9076), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((9081, 9138), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_cart.y', 'hcccoord_sph.y'], {}), '(hcccoord_cart.y, hcccoord_sph.y)\n', (9105, 9138), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((9143, 9200), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_cart.z', 'hcccoord_sph.z'], {}), '(hcccoord_cart.z, hcccoord_sph.z)\n', (9167, 9200), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((9952, 10013), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgccoord_cart.lat', 'hgccoord_sph.lat'], {}), '(hgccoord_cart.lat, hgccoord_sph.lat)\n', (9976, 10013), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((10018, 10079), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgccoord_cart.lon', 'hgccoord_sph.lon'], {}), '(hgccoord_cart.lon, hgccoord_sph.lon)\n', (10042, 10079), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((10084, 10151), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgccoord_cart.radius', 'hgccoord_sph.radius'], {}), '(hgccoord_cart.radius, 
hgccoord_sph.radius)\n', (10108, 10151), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((10394, 10457), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (10416, 10457), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((10471, 10527), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': 'L0', 'radius': 'D0'}), '(lat=0 * u.deg, lon=L0, radius=D0)\n', (10493, 10527), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((10542, 10575), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer_1'}), '(observer=observer_1)\n', (10554, 10575), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((10592, 10628), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer_2'}), '(observer=observer_2)\n', (10607, 10628), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((10644, 10693), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'x': 'rsun', 'y': 'rsun', 'z': 'rsun', 'frame': 'hcc_frame'}), '(x=rsun, y=rsun, z=rsun, frame=hcc_frame)\n', (10652, 10693), False, 'from 
astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((10844, 10907), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.Tx', 'hpccoord_expected.Tx'], {}), '(hpccoord_out.Tx, hpccoord_expected.Tx)\n', (10868, 10907), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((10912, 10975), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.Ty', 'hpccoord_expected.Ty'], {}), '(hpccoord_out.Ty, hpccoord_expected.Ty)\n', (10936, 10975), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((10980, 11055), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.distance', 'hpccoord_expected.distance'], {}), '(hpccoord_out.distance, hpccoord_expected.distance)\n', (11004, 11055), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((11298, 11361), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (11320, 11361), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((11375, 11431), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': 'L0', 'radius': 'D0'}), '(lat=0 * u.deg, lon=L0, radius=D0)\n', (11397, 11431), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), 
((11446, 11479), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer_1'}), '(observer=observer_1)\n', (11458, 11479), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((11496, 11543), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer_2', 'rsun': 'rsun'}), '(observer=observer_2, rsun=rsun)\n', (11511, 11543), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((11559, 11618), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'Tx': '(0 * u.arcsec)', 'Ty': '(0 * u.arcsec)', 'frame': 'hpc_frame'}), '(Tx=0 * u.arcsec, Ty=0 * u.arcsec, frame=hpc_frame)\n', (11567, 11618), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((11765, 11826), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.x', 'hcccoord_expected.x'], {}), '(hcccoord_out.x, hcccoord_expected.x)\n', (11789, 11826), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((11831, 11892), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.y', 'hcccoord_expected.y'], {}), '(hcccoord_out.y, hcccoord_expected.y)\n', (11855, 11892), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((11897, 11958), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.z', 'hcccoord_expected.z'], {}), '(hcccoord_out.z, hcccoord_expected.z)\n', (11921, 11958), False, 
'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((12123, 12186), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (12145, 12186), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((12199, 12230), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer'}), '(observer=observer)\n', (12211, 12230), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((12247, 12292), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer', 'rsun': 'rsun'}), '(observer=observer, rsun=rsun)\n', (12262, 12292), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((12308, 12357), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'x': 'rsun', 'y': 'rsun', 'z': 'rsun', 'frame': 'hcc_frame'}), '(x=rsun, y=rsun, z=rsun, frame=hcc_frame)\n', (12316, 12357), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((12508, 12571), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.Tx', 'hpccoord_expected.Tx'], {}), '(hpccoord_out.Tx, hpccoord_expected.Tx)\n', (12532, 12571), False, 'from astropy.tests.helper import quantity_allclose, 
assert_quantity_allclose\n'), ((12576, 12639), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.Ty', 'hpccoord_expected.Ty'], {}), '(hpccoord_out.Ty, hpccoord_expected.Ty)\n', (12600, 12639), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((12644, 12719), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hpccoord_out.distance', 'hpccoord_expected.distance'], {}), '(hpccoord_out.distance, hpccoord_expected.distance)\n', (12668, 12719), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((12886, 12949), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'lat': '(0 * u.deg)', 'lon': '(0 * u.deg)', 'radius': 'D0'}), '(lat=0 * u.deg, lon=0 * u.deg, radius=D0)\n', (12908, 12949), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((12966, 12997), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer'}), '(observer=observer)\n', (12978, 12997), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13014, 13059), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'observer': 'observer', 'rsun': 'rsun'}), '(observer=observer, rsun=rsun)\n', (13029, 13059), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13075, 13134), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'Tx': '(0 * u.arcsec)', 'Ty': '(0 * u.arcsec)', 'frame': 
'hpc_frame'}), '(Tx=0 * u.arcsec, Ty=0 * u.arcsec, frame=hpc_frame)\n', (13083, 13134), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((13285, 13346), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.x', 'hcccoord_expected.x'], {}), '(hcccoord_out.x, hcccoord_expected.x)\n', (13309, 13346), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((13351, 13412), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.y', 'hcccoord_expected.y'], {}), '(hcccoord_out.y, hcccoord_expected.y)\n', (13375, 13412), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((13417, 13478), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcccoord_out.z', 'hcccoord_expected.z'], {}), '(hcccoord_out.z, hcccoord_expected.z)\n', (13441, 13478), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((13626, 13680), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {}), '(0 * u.deg, 0 * u.deg, 1 * u.AU)\n', (13648, 13680), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13685, 13760), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.arcsec)', '(0 * u.arcsec)', '(0.5 * u.AU)'], {'observer': 'observer1'}), '(0 * u.arcsec, 0 * u.arcsec, 0.5 * u.AU, observer=observer1)\n', (13700, 13760), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, 
GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13772, 13830), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(90 * u.deg)', '(0 * u.deg)', '(0.75 * u.AU)'], {}), '(90 * u.deg, 0 * u.deg, 0.75 * u.AU)\n', (13794, 13830), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13891, 13935), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcc.x', '(-0.5 * u.AU)'], {}), '(hcc.x, -0.5 * u.AU)\n', (13915, 13935), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((13938, 13998), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcc.y', '(0 * u.AU)'], {'atol': '(1e-10 * u.AU)'}), '(hcc.y, 0 * u.AU, atol=1e-10 * u.AU)\n', (13962, 13998), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((13999, 14059), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcc.z', '(0 * u.AU)'], {'atol': '(1e-10 * u.AU)'}), '(hcc.z, 0 * u.AU, atol=1e-10 * u.AU)\n', (14023, 14059), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((14131, 14149), 'astropy.time.Time', 'Time', (['"""2001-01-01"""'], {}), "('2001-01-01')\n", (14135, 14149), False, 'from astropy.time import Time\n'), ((14327, 14399), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(old.lon - 1 * u.deg)'], {'atol': '(0.1 * u.deg)'}), '(new.lon, old.lon - 1 * u.deg, atol=0.1 * u.deg)\n', (14351, 14399), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((14423, 14485), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {'atol': '(0.001 * u.deg)'}), 
'(new.lat, old.lat, atol=0.001 * u.deg)\n', (14447, 14485), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((14487, 14554), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.radius', 'old.radius'], {'atol': '(1e-05 * u.AU)'}), '(new.radius, old.radius, atol=1e-05 * u.AU)\n', (14511, 14554), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((14627, 14645), 'astropy.time.Time', 'Time', (['"""2001-01-01"""'], {}), "('2001-01-01')\n", (14631, 14645), False, 'from astropy.time import Time\n'), ((14823, 14895), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(75.815607 * u.deg)'], {'atol': '(1e-07 * u.deg)'}), '(new.lon, 75.815607 * u.deg, atol=1e-07 * u.deg)\n', (14847, 14895), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((15092, 15163), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(9.999963 * u.deg)'], {'atol': '(1e-07 * u.deg)'}), '(new.lat, 9.999963 * u.deg, atol=1e-07 * u.deg)\n', (15116, 15163), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((15165, 15237), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.radius', '(1.000009 * u.AU)'], {'atol': '(1e-07 * u.AU)'}), '(new.radius, 1.000009 * u.AU, atol=1e-07 * u.AU)\n', (15189, 15237), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((15318, 15394), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {'obstime': '"""2001-02-01"""'}), "(0 * u.deg, 0 * u.deg, 1 * u.AU, obstime='2001-02-01')\n", (15340, 15394), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, 
HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((15404, 15498), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0.2 * u.AU)', '(0.3 * u.AU)', '(0.4 * u.AU)'], {'observer': 'observer', 'obstime': '"""2001-01-01"""'}), "(0.2 * u.AU, 0.3 * u.AU, 0.4 * u.AU, observer=observer, obstime\n ='2001-01-01')\n", (15416, 15498), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((15736, 15790), 'numpy.all', 'np.all', (['(from_hcc.cartesian.xyz != to_hcc.cartesian.xyz)'], {}), '(from_hcc.cartesian.xyz != to_hcc.cartesian.xyz)\n', (15742, 15790), True, 'import numpy as np\n'), ((15795, 15882), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['from_hcc.cartesian.xyz', 'to_hcc.cartesian.xyz'], {'rtol': '(0.002)'}), '(from_hcc.cartesian.xyz, to_hcc.cartesian.xyz, rtol\n =0.002)\n', (15819, 15882), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((15940, 16016), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, 1 * u.AU, obstime='2001-01-01')\n", (15962, 16016), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16027, 16103), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {'obstime': '"""2001-03-31"""'}), "(0 * u.deg, 0 * u.deg, 1 * u.AU, obstime='2001-03-31')\n", (16049, 16103), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, 
GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16113, 16207), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0.2 * u.AU)', '(0.3 * u.AU)', '(0.4 * u.AU)'], {'observer': 'observer1', 'obstime': '"""2001-02-01"""'}), "(0.2 * u.AU, 0.3 * u.AU, 0.4 * u.AU, observer=observer1,\n obstime='2001-02-01')\n", (16125, 16207), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16379, 16438), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['to_hcc.x', '(-from_hcc.z)'], {'rtol': '(0.002)'}), '(to_hcc.x, -from_hcc.z, rtol=0.002)\n', (16403, 16438), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((16442, 16500), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['to_hcc.y', 'from_hcc.y'], {'rtol': '(0.002)'}), '(to_hcc.y, from_hcc.y, rtol=0.002)\n', (16466, 16500), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((16504, 16562), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['to_hcc.z', 'from_hcc.x'], {'rtol': '(0.002)'}), '(to_hcc.z, from_hcc.x, rtol=0.002)\n', (16528, 16562), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((16753, 16829), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, 1 * u.AU, obstime='2001-01-01')\n", (16775, 16829), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16921, 17020), 
'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0.2 * u.AU)', '(0.3 * u.AU)', '(0.4 * u.AU)'], {'observer': 'observer1', 'obstime': 'observer1.obstime'}), '(0.2 * u.AU, 0.3 * u.AU, 0.4 * u.AU, observer=observer1,\n obstime=observer1.obstime)\n', (16933, 17020), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17098, 17197), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0.2 * u.AU)', '(0.3 * u.AU)', '(0.4 * u.AU)'], {'observer': 'observer2', 'obstime': 'observer1.obstime'}), '(0.2 * u.AU, 0.3 * u.AU, 0.4 * u.AU, observer=observer2,\n obstime=observer1.obstime)\n', (17110, 17197), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17268, 17312), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgs1.lon', 'hgs2.lon'], {}), '(hgs1.lon, hgs2.lon)\n', (17292, 17312), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((17317, 17361), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgs1.lat', 'hgs2.lat'], {}), '(hgs1.lat, hgs2.lat)\n', (17341, 17361), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((17366, 17416), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hgs1.radius', 'hgs2.radius'], {}), '(hgs1.radius, hgs2.radius)\n', (17390, 17416), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((17608, 17684), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)', '(1 * u.AU)'], {'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, 1 
* u.AU, obstime='2001-01-01')\n", (17630, 17684), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17775, 17865), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(20 * u.deg)', '(40 * u.deg)', '(0.5 * u.AU)'], {'obstime': 'observer1.obstime'}), '(20 * u.deg, 40 * u.deg, 0.5 * u.AU, obstime=\n observer1.obstime)\n', (17797, 17865), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((18026, 18090), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['hcc1.cartesian.xyz', 'hcc2.cartesian.xyz'], {}), '(hcc1.cartesian.xyz, hcc2.cartesian.xyz)\n', (18050, 18090), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((18842, 18927), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(10.642778 * u.deg)'], {'atol': '(0.1 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 10.642778 * u.deg, atol=0.1 * u.arcsec,\n rtol=0)\n', (18866, 18927), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((18924, 18974), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.radius'], {}), '(new.distance, old.radius)\n', (18948, 18974), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((19442, 19527), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(10.640314 * u.deg)'], {'atol': '(0.1 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 10.640314 * u.deg, atol=0.1 * u.arcsec,\n rtol=0)\n', (19466, 19527), False, 'from astropy.tests.helper import 
quantity_allclose, assert_quantity_allclose\n'), ((19524, 19574), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.radius'], {}), '(new.distance, old.radius)\n', (19548, 19574), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((20376, 20479), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(16.688242 * u.deg + 0.076 * u.deg)'], {'atol': '(0.01 * u.arcsec)', 'rtol': '(0)'}), '(new.lon, 16.688242 * u.deg + 0.076 * u.deg, atol=\n 0.01 * u.arcsec, rtol=0)\n', (20400, 20479), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((20473, 20515), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {}), '(new.lat, old.lat)\n', (20497, 20515), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((20520, 20568), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.radius', 'old.radius'], {}), '(new.radius, old.radius)\n', (20544, 20568), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((21325, 21391), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.x', '(800000.0 * u.km)'], {'atol': '(0.01 * u.km)'}), '(new.x, 800000.0 * u.km, atol=0.01 * u.km)\n', (21349, 21391), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((21393, 21460), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.y', '(908797.89 * u.km)'], {'atol': '(0.01 * u.km)'}), '(new.y, 908797.89 * u.km, atol=0.01 * u.km)\n', (21417, 21460), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((21461, 21528), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.z', '(688539.32 * u.km)'], {'atol': '(0.01 * u.km)'}), 
'(new.z, 688539.32 * u.km, atol=0.01 * u.km)\n', (21485, 21528), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((21848, 21892), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2019-12-01"""'}), "(obstime='2019-12-01')\n", (21870, 21892), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((22543, 22605), 'astropy.time.Time', 'Time', (["['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01']"], {}), "(['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01'])\n", (22547, 22605), False, 'from astropy.time import Time\n'), ((22621, 22666), 'astropy.coordinates.get_body_barycentric_posvel', 'get_body_barycentric_posvel', (['"""earth"""', 'obstime'], {}), "('earth', obstime)\n", (22648, 22666), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((22753, 22797), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['loc'], {'frame': '"""icrs"""', 'obstime': 'obstime'}), "(loc, frame='icrs', obstime=obstime)\n", (22761, 22797), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((23028, 23116), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.velocity.d_x', '(0 * u.km / u.s)'], {'atol': '(1e-15 * u.km / u.s)'}), '(new.velocity.d_x, 0 * u.km / u.s, atol=1e-15 * u.\n km / u.s)\n', (23052, 23116), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23108, 23196), 'astropy.tests.helper.assert_quantity_allclose', 
'assert_quantity_allclose', (['new.velocity.d_y', '(0 * u.km / u.s)'], {'atol': '(1e-14 * u.km / u.s)'}), '(new.velocity.d_y, 0 * u.km / u.s, atol=1e-14 * u.\n km / u.s)\n', (23132, 23196), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23188, 23276), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.velocity.d_x', '(0 * u.km / u.s)'], {'atol': '(1e-15 * u.km / u.s)'}), '(new.velocity.d_x, 0 * u.km / u.s, atol=1e-15 * u.\n km / u.s)\n', (23212, 23276), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23322, 23373), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['newer.velocity.d_x', 'vel.x'], {}), '(newer.velocity.d_x, vel.x)\n', (23346, 23373), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23378, 23429), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['newer.velocity.d_y', 'vel.y'], {}), '(newer.velocity.d_y, vel.y)\n', (23402, 23429), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23434, 23485), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['newer.velocity.d_z', 'vel.z'], {}), '(newer.velocity.d_z, vel.z)\n', (23458, 23485), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((23590, 23652), 'astropy.time.Time', 'Time', (["['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01']"], {}), "(['2019-01-01', '2019-04-01', '2019-07-01', '2019-10-01'])\n", (23594, 23652), False, 'from astropy.time import Time\n'), ((24181, 24270), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new_vel.d_lon', '(-360 * u.deg / (27.27253 * u.day))'], {'rtol': '(0.01)'}), '(new_vel.d_lon, -360 * u.deg / (27.27253 * u.day),\n rtol=0.01)\n', (24205, 24270), False, 'from astropy.tests.helper import quantity_allclose, 
assert_quantity_allclose\n'), ((24267, 24323), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new_vel.d_lat', '(0 * u.deg / u.s)'], {}), '(new_vel.d_lat, 0 * u.deg / u.s)\n', (24291, 24323), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((24324, 24414), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new_vel.d_distance', '(0 * u.km / u.s)'], {'atol': '(1e-07 * u.km / u.s)'}), '(new_vel.d_distance, 0 * u.km / u.s, atol=1e-07 * u\n .km / u.s)\n', (24348, 24414), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((25042, 25127), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(10.0003 * u.deg)'], {'atol': '(0.01 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 10.0003 * u.deg, atol=0.01 * u.arcsec, rtol=0\n )\n', (25066, 25127), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((25125, 25177), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {}), '(new.distance, old.distance)\n', (25149, 25177), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((25825, 25910), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(10.00007 * u.deg)'], {'atol': '(0.01 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 10.00007 * u.deg, atol=0.01 * u.arcsec,\n rtol=0)\n', (25849, 25910), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((25908, 25960), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {}), '(new.distance, old.distance)\n', (25932, 25960), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26036, 26054), 'astropy.time.Time', 'Time', (['"""2001-01-01"""'], {}), 
"('2001-01-01')\n", (26040, 26054), False, 'from astropy.time import Time\n'), ((26212, 26254), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', 'old.lon'], {}), '(new.lon, old.lon)\n', (26236, 26254), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26259, 26301), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {}), '(new.lat, old.lat)\n', (26283, 26301), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26306, 26358), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {}), '(new.distance, old.distance)\n', (26330, 26358), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26446, 26518), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(old.lon - 1 * u.deg)'], {'atol': '(0.1 * u.deg)'}), '(new.lon, old.lon - 1 * u.deg, atol=0.1 * u.deg)\n', (26470, 26518), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26542, 26605), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {'atol': '(0.5 * u.arcsec)'}), '(new.lat, old.lat, atol=0.5 * u.arcsec)\n', (26566, 26605), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((26608, 26672), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {'rtol': '(1e-05)'}), '(new.distance, old.distance, rtol=1e-05)\n', (26632, 26672), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27138, 27224), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(32.777377 * u.deg)'], {'atol': '(0.01 * u.arcsec)', 'rtol': '(0)'}), '(new.lon, 32.777377 * u.deg, 
atol=0.01 * u.arcsec,\n rtol=0)\n', (27162, 27224), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27221, 27307), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(15.594639 * u.deg)'], {'atol': '(0.01 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 15.594639 * u.deg, atol=0.01 * u.arcsec,\n rtol=0)\n', (27245, 27307), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27304, 27361), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', '(0.45215884 * u.AU)'], {}), '(new.distance, 0.45215884 * u.AU)\n', (27328, 27361), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27599, 27641), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', 'old.lon'], {}), '(new.lon, old.lon)\n', (27623, 27641), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27646, 27688), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {}), '(new.lat, old.lat)\n', (27670, 27688), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((27693, 27745), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {}), '(new.distance, old.distance)\n', (27717, 27745), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28284, 28370), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', '(-65.736793 * u.deg)'], {'atol': '(0.5 * u.arcsec)', 'rtol': '(0)'}), '(new.lon, -65.736793 * u.deg, atol=0.5 * u.arcsec,\n rtol=0)\n', (28308, 28370), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28367, 28409), 'astropy.tests.helper.assert_quantity_allclose', 
'assert_quantity_allclose', (['new.lat', 'old.lat'], {}), '(new.lat, old.lat)\n', (28391, 28409), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28414, 28464), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.radius'], {}), '(new.distance, old.radius)\n', (28438, 28464), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28540, 28558), 'astropy.time.Time', 'Time', (['"""2001-01-01"""'], {}), "('2001-01-01')\n", (28544, 28558), False, 'from astropy.time import Time\n'), ((28707, 28749), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', 'old.lon'], {}), '(new.lon, old.lon)\n', (28731, 28749), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28754, 28796), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {}), '(new.lat, old.lat)\n', (28778, 28796), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28801, 28853), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {}), '(new.distance, old.distance)\n', (28825, 28853), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((28936, 28996), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lon', 'old.lon'], {'atol': '(0.1 * u.deg)'}), '(new.lon, old.lon, atol=0.1 * u.deg)\n', (28960, 28996), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((29022, 29084), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', 'old.lat'], {'atol': '(0.001 * u.deg)'}), '(new.lat, old.lat, atol=0.001 * u.deg)\n', (29046, 29084), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((29086, 
29157), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', 'old.distance'], {'atol': '(1e-05 * u.AU)'}), '(new.distance, old.distance, atol=1e-05 * u.AU)\n', (29110, 29157), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((29819, 29905), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(28.830109 * u.deg)'], {'atol': '(0.05 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 28.830109 * u.deg, atol=0.05 * u.arcsec,\n rtol=0)\n', (29843, 29905), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((29902, 29957), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', '(1.819721 * u.AU)'], {}), '(new.distance, 1.819721 * u.AU)\n', (29926, 29957), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((30566, 30651), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.lat', '(28.82775 * u.deg)'], {'atol': '(0.05 * u.arcsec)', 'rtol': '(0)'}), '(new.lat, 28.82775 * u.deg, atol=0.05 * u.arcsec,\n rtol=0)\n', (30590, 30651), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((30649, 30705), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.distance', '(1.8217103 * u.AU)'], {}), '(new.distance, 1.8217103 * u.AU)\n', (30673, 30705), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((30820, 30859), 'astropy.time.Time', 'Time', (['"""1996-08-28 16:46:00"""'], {'scale': '"""tt"""'}), "('1996-08-28 16:46:00', scale='tt')\n", (30824, 30859), False, 'from astropy.time import Time\n'), ((30876, 30955), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['([-5.7840451, -4.1082375, 1.9146822] * (6378.14 * u.km))'], {}), '([-5.7840451, -4.1082375, 1.9146822] * (6378.14 * u.km))\n', 
(30899, 30955), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((30966, 31045), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['([-5.7864918, -4.1039136, 1.9165612] * (6378.14 * u.km))'], {}), '([-5.7864918, -4.1039136, 1.9165612] * (6378.14 * u.km))\n', (30989, 31045), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((31210, 31254), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['new.xyz', 'gei_d.xyz'], {}), '(new.xyz, gei_d.xyz)\n', (31234, 31254), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((32888, 33022), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['([10] * 2)', '([10] * 2)'], {'unit': 'u.deg', 'observer': '"""earth"""', 'obstime': "['2019-01-01', '2019-01-02']", 'frame': '"""heliographic_carrington"""'}), "([10] * 2, [10] * 2, unit=u.deg, observer='earth', obstime=[\n '2019-01-01', '2019-01-02'], frame='heliographic_carrington')\n", (32896, 33022), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((33607, 33625), 'astropy.time.Time', 'Time', (['"""2001-01-01"""'], {}), "('2001-01-01')\n", (33611, 33625), False, 'from astropy.time import Time\n'), ((34349, 34402), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result1.lon', 'sun_center.lon'], {}), '(result1.lon, sun_center.lon)\n', (34373, 34402), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((34407, 34460), 'astropy.tests.helper.assert_quantity_allclose', 
'assert_quantity_allclose', (['result1.lat', 'sun_center.lat'], {}), '(result1.lat, sun_center.lat)\n', (34431, 34460), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((34465, 34524), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result1.radius', 'sun_center.radius'], {}), '(result1.radius, sun_center.radius)\n', (34489, 34524), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((34867, 34915), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result2.lat', 'other.lat'], {}), '(result2.lat, other.lat)\n', (34891, 34915), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((34920, 34974), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result2.radius', 'other.radius'], {}), '(result2.radius, other.radius)\n', (34944, 34974), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((35263, 35305), 'sunpy.coordinates.HeliocentricInertial', 'HeliocentricInertial', ([], {'obstime': '"""2001-02-01"""'}), "(obstime='2001-02-01')\n", (35283, 35305), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((35739, 35792), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result2.lon', 'sun_center.lon'], {}), '(result2.lon, sun_center.lon)\n', (35763, 35792), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((35797, 35850), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result2.lat', 'sun_center.lat'], {}), '(result2.lat, sun_center.lat)\n', (35821, 35850), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), 
((35855, 35916), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result2.distance', 'sun_center.radius'], {}), '(result2.distance, sun_center.radius)\n', (35879, 35916), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((36070, 36120), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result3.lon', 'result1.lon'], {}), '(result3.lon, result1.lon)\n', (36094, 36120), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((36125, 36175), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result3.lat', 'result1.lat'], {}), '(result3.lat, result1.lat)\n', (36149, 36175), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((36180, 36240), 'astropy.tests.helper.assert_quantity_allclose', 'assert_quantity_allclose', (['result3.distance', 'result1.distance'], {}), '(result3.distance, result1.distance)\n', (36204, 36240), False, 'from astropy.tests.helper import quantity_allclose, assert_quantity_allclose\n'), ((2567, 2577), 'numpy.tan', 'np.tan', (['L0'], {}), '(L0)\n', (2573, 2577), True, 'import numpy as np\n'), ((6314, 6355), 'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], {'equinox': 'obstime'}), '(equinox=obstime)\n', (6338, 6355), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((6982, 7021), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (7004, 7021), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((7180, 7219), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 'obstime'}), 
'(obstime=obstime)\n', (7202, 7219), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((9821, 9860), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (9843, 9860), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((9907, 9946), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (9929, 9946), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((13852, 13884), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer2'}), '(observer=observer2)\n', (13864, 13884), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((14271, 14322), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '(obstime + 1 * u.day)'}), '(obstime=obstime + 1 * u.day)\n', (14293, 14322), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((14767, 14818), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': '(obstime + 1 * u.day)'}), '(obstime=obstime + 1 * u.day)\n', 
(14789, 14818), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((15523, 15576), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer', 'obstime': '"""2001-03-31"""'}), "(observer=observer, obstime='2001-03-31')\n", (15535, 15576), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16233, 16287), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer2', 'obstime': '"""2001-02-01"""'}), "(observer=observer2, obstime='2001-02-01')\n", (16245, 16287), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((16863, 16907), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-03-31"""'}), "(obstime='2001-03-31')\n", (16885, 16907), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17040, 17084), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 'hcc1.obstime'}), '(obstime=hcc1.obstime)\n', (17062, 17084), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17217, 17261), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], 
{'obstime': 'hcc2.obstime'}), '(obstime=hcc2.obstime)\n', (17239, 17261), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17718, 17762), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-03-31"""'}), "(obstime='2001-03-31')\n", (17740, 17762), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17883, 17936), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer1', 'obstime': 'hgs.obstime'}), '(observer=observer1, obstime=hgs.obstime)\n', (17895, 17936), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((17966, 18019), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': 'observer2', 'obstime': 'hgs.obstime'}), '(observer=observer2, obstime=hgs.obstime)\n', (17978, 18019), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((18782, 18811), 'astropy.coordinates.Longitude', 'Longitude', (['(-108.65371 * u.deg)'], {}), '(-108.65371 * u.deg)\n', (18791, 18811), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((19295, 19341), 'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], 
{'equinox': '"""2019-06-01"""'}), "(equinox='2019-06-01')\n", (19319, 19341), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((19382, 19410), 'astropy.coordinates.Longitude', 'Longitude', (['(-108.3824 * u.deg)'], {}), '(-108.3824 * u.deg)\n', (19391, 19410), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((21143, 21205), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['([700000.0, 800000.0, 900000.0] * u.km)'], {}), '([700000.0, 800000.0, 900000.0] * u.km)\n', (21166, 21205), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((21288, 21318), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'observer': '"""earth"""'}), "(observer='earth')\n", (21300, 21318), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((23663, 23695), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['(1)', '(0)', '(0)'], {}), '(1, 0, 0)\n', (23686, 23695), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((23843, 23887), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['loc'], {'obstime': 'obstime'}), '(loc, obstime=obstime)\n', (23865, 23887), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, 
HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((24982, 25009), 'astropy.coordinates.Longitude', 'Longitude', (['(110.0161 * u.deg)'], {}), '(110.0161 * u.deg)\n', (24991, 25009), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((25765, 25793), 'astropy.coordinates.Longitude', 'Longitude', (['(109.74535 * u.deg)'], {}), '(109.74535 * u.deg)\n', (25774, 25793), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((26387, 26441), 'sunpy.coordinates.HeliocentricEarthEcliptic', 'HeliocentricEarthEcliptic', ([], {'obstime': '(obstime + 1 * u.day)'}), '(obstime=obstime + 1 * u.day)\n', (26412, 26441), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((28882, 28931), 'sunpy.coordinates.HeliocentricInertial', 'HeliocentricInertial', ([], {'obstime': '(obstime + 1 * u.day)'}), '(obstime=obstime + 1 * u.day)\n', (28902, 28931), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((29759, 29787), 'astropy.coordinates.Longitude', 'Longitude', (['(95.230617 * u.deg)'], {}), '(95.230617 * u.deg)\n', (29768, 29787), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((30424, 30465), 'sunpy.coordinates.GeocentricEarthEquatorial', 
'GeocentricEarthEquatorial', ([], {'equinox': '_J2000'}), '(equinox=_J2000)\n', (30449, 30465), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((30506, 30533), 'astropy.coordinates.Longitude', 'Longitude', (['(95.07903 * u.deg)'], {}), '(95.07903 * u.deg)\n', (30515, 30533), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((31385, 31442), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0 * u.km)', '(0 * u.km)', '(0 * u.km)'], {'observer': 'None'}), '(0 * u.km, 0 * u.km, 0 * u.km, observer=None)\n', (31397, 31442), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31455, 31534), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0 * u.km)', '(0 * u.km)', '(0 * u.km)'], {'observer': 'None', 'obstime': '"""2001-01-01"""'}), "(0 * u.km, 0 * u.km, 0 * u.km, observer=None, obstime='2001-01-01')\n", (31467, 31534), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31547, 31599), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.deg)', '(0 * u.deg)'], {'observer': 'None'}), '(0 * u.deg, 0 * u.deg, observer=None)\n', (31562, 31599), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, 
get_earth\n'), ((31614, 31688), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.deg)', '(0 * u.deg)'], {'observer': 'None', 'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, observer=None, obstime='2001-01-01')\n", (31629, 31688), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((33179, 33232), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'obstime': "['2019-01-03', '2019-01-04']"}), "(obstime=['2019-01-03', '2019-01-04'])\n", (33194, 33232), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((34171, 34198), 'sunpy.coordinates.transformations.transform_with_sun_center', 'transform_with_sun_center', ([], {}), '()\n', (34196, 34198), False, 'from sunpy.coordinates.transformations import transform_with_sun_center\n'), ((34658, 34685), 'sunpy.coordinates.transformations.transform_with_sun_center', 'transform_with_sun_center', ([], {}), '()\n', (34683, 34685), False, 'from sunpy.coordinates.transformations import transform_with_sun_center\n'), ((35653, 35680), 'sunpy.coordinates.transformations.transform_with_sun_center', 'transform_with_sun_center', ([], {}), '()\n', (35678, 35680), False, 'from sunpy.coordinates.transformations import transform_with_sun_center\n'), ((4008, 4044), 'astropy.coordinates.get_body_barycentric', 'get_body_barycentric', (['"""earth"""', 'adate'], {}), "('earth', adate)\n", (4028, 4044), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((4789, 4825), 
'astropy.coordinates.get_body_barycentric', 'get_body_barycentric', (['"""earth"""', 'times'], {}), "('earth', times)\n", (4809, 4825), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((6596, 6618), 'astropy.coordinates.Angle', 'Angle', (['"""308d13m30.51s"""'], {}), "('308d13m30.51s')\n", (6601, 6618), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((6694, 6709), 'astropy.coordinates.Angle', 'Angle', (['"""-0.27s"""'], {}), "('-0.27s')\n", (6699, 6709), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((7117, 7132), 'sunpy.coordinates.sun.L0', 'sun.L0', (['obstime'], {}), '(obstime)\n', (7123, 7132), False, 'from sunpy.coordinates import sun\n'), ((7695, 7734), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (7717, 7734), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((8643, 8682), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (8665, 8682), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((9570, 9609), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], 
{'obstime': 'obstime'}), '(obstime=obstime)\n', (9592, 9609), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((14203, 14242), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (14225, 14242), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((14699, 14738), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (14721, 14738), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((18644, 18688), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (18666, 18688), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((20287, 20331), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (20309, 20331), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((21215, 21259), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': 
'"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (21237, 21259), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((21779, 21834), 'sunpy.coordinates.Helioprojective', 'Helioprojective', ([], {'obstime': '"""2019-06-01"""', 'observer': '"""earth"""'}), "(obstime='2019-06-01', observer='earth')\n", (21794, 21834), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((23711, 23741), 'astropy.coordinates.CartesianDifferential', 'CartesianDifferential', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (23732, 23741), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((24841, 24887), 'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (24865, 24887), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((25525, 25593), 'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], {'obstime': '"""2019-06-01"""', 'equinox': '"""2019-06-01"""'}), "(obstime='2019-06-01', equinox='2019-06-01')\n", (25549, 25593), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((26108, 26150), 'sunpy.coordinates.HeliocentricEarthEcliptic', 'HeliocentricEarthEcliptic', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (26133, 26150), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, 
GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((27046, 27093), 'sunpy.coordinates.HeliocentricEarthEcliptic', 'HeliocentricEarthEcliptic', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (27071, 27093), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((27495, 27540), 'sunpy.coordinates.GeocentricSolarEcliptic', 'GeocentricSolarEcliptic', ([], {'obstime': '"""2001-01-01"""'}), "(obstime='2001-01-01')\n", (27518, 27540), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((28184, 28228), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (28206, 28228), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((28614, 28651), 'sunpy.coordinates.HeliocentricInertial', 'HeliocentricInertial', ([], {'obstime': 'obstime'}), '(obstime=obstime)\n', (28634, 28651), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((29618, 29664), 'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], {'obstime': '"""2019-06-01"""'}), "(obstime='2019-06-01')\n", (29642, 29664), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((30327, 30395), 
'astropy.coordinates.HeliocentricTrueEcliptic', 'HeliocentricMeanEcliptic', ([], {'obstime': '"""2019-06-01"""', 'equinox': '"""2019-06-01"""'}), "(obstime='2019-06-01', equinox='2019-06-01')\n", (30351, 30395), True, 'from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic\n'), ((31081, 31117), 'sunpy.coordinates.GeocentricEarthEquatorial', 'GeocentricEarthEquatorial', ([], {'obstime': 't'}), '(obstime=t)\n', (31106, 31117), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31146, 31193), 'sunpy.coordinates.GeocentricEarthEquatorial', 'GeocentricEarthEquatorial', ([], {'equinox': 't', 'obstime': 't'}), '(equinox=t, obstime=t)\n', (31171, 31193), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31725, 31783), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)'], {'obstime': 'None'}), '(0 * u.deg, 0 * u.deg, obstime=None)\n', (31747, 31783), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31789, 31855), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', (['(0 * u.deg)', '(0 * u.deg)'], {'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, obstime='2001-01-01')\n", (31811, 31855), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31861, 
31940), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0 * u.km)', '(0 * u.km)', '(0 * u.km)'], {'observer': 'None', 'obstime': '"""2012-12-12"""'}), "(0 * u.km, 0 * u.km, 0 * u.km, observer=None, obstime='2012-12-12')\n", (31873, 31940), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((31944, 32018), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0 * u.km)', '(0 * u.km)', '(0 * u.km)'], {'observer': '"""earth"""', 'obstime': 'None'}), "(0 * u.km, 0 * u.km, 0 * u.km, observer='earth', obstime=None)\n", (31956, 32018), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((32022, 32109), 'sunpy.coordinates.Heliocentric', 'Heliocentric', (['(0 * u.km)', '(0 * u.km)', '(0 * u.km)'], {'observer': '"""earth"""', 'obstime': '"""2001-01-01"""'}), "(0 * u.km, 0 * u.km, 0 * u.km, observer='earth', obstime=\n '2001-01-01')\n", (32034, 32109), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((32108, 32182), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.deg)', '(0 * u.deg)'], {'observer': 'None', 'obstime': '"""2012-12-12"""'}), "(0 * u.deg, 0 * u.deg, observer=None, obstime='2012-12-12')\n", (32123, 32182), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((32188, 32257), 'sunpy.coordinates.Helioprojective', 
'Helioprojective', (['(0 * u.deg)', '(0 * u.deg)'], {'observer': '"""earth"""', 'obstime': 'None'}), "(0 * u.deg, 0 * u.deg, observer='earth', obstime=None)\n", (32203, 32257), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((32263, 32340), 'sunpy.coordinates.Helioprojective', 'Helioprojective', (['(0 * u.deg)', '(0 * u.deg)'], {'observer': '"""earth"""', 'obstime': '"""2001-01-01"""'}), "(0 * u.deg, 0 * u.deg, observer='earth', obstime='2001-01-01')\n", (32278, 32340), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((34115, 34159), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-01-01"""'}), "(obstime='2001-01-01')\n", (34137, 34159), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((34242, 34286), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-02-01"""'}), "(obstime='2001-02-01')\n", (34264, 34286), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((34602, 34646), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-01-01"""'}), "(obstime='2001-01-01')\n", (34624, 34646), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, 
HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((34724, 34768), 'sunpy.coordinates.HeliographicCarrington', 'HeliographicCarrington', ([], {'obstime': '"""2001-02-01"""'}), "(obstime='2001-02-01')\n", (34746, 34768), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((35201, 35245), 'sunpy.coordinates.HeliographicStonyhurst', 'HeliographicStonyhurst', ([], {'obstime': '"""2001-01-01"""'}), "(obstime='2001-01-01')\n", (35223, 35245), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((21970, 22023), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'obstime': 'start.obstime', 'observer': '"""earth"""'}), "(obstime=start.obstime, observer='earth')\n", (21982, 22023), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((22090, 22143), 'sunpy.coordinates.Heliocentric', 'Heliocentric', ([], {'obstime': 'frame.obstime', 'observer': '"""earth"""'}), "(obstime=frame.obstime, observer='earth')\n", (22102, 22143), False, 'from sunpy.coordinates import Helioprojective, HeliographicStonyhurst, HeliographicCarrington, Heliocentric, HeliocentricEarthEcliptic, GeocentricSolarEcliptic, HeliocentricInertial, GeocentricEarthEquatorial, get_earth\n'), ((32596, 32623), 'pytest.raises', 'pytest.raises', (['ConvertError'], {}), '(ConvertError)\n', (32609, 32623), False, 'import pytest\n'), ((32678, 32705), 'pytest.raises', 'pytest.raises', (['ConvertError'], {}), 
'(ConvertError)\n', (32691, 32705), False, 'import pytest\n'), ((33696, 33728), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (33719, 33728), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n'), ((33842, 33874), 'astropy.coordinates.CartesianRepresentation', 'CartesianRepresentation', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (33865, 33874), False, 'from astropy.coordinates import SkyCoord, get_body_barycentric, Angle, ConvertError, Longitude, CartesianRepresentation, get_body_barycentric_posvel, CartesianDifferential, SphericalDifferential\n')]
|
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import copy
import awkward as ak
from awkward._v2.contents.content import Content
from awkward._v2.forms.unmaskedform import UnmaskedForm
from awkward._v2.forms.form import _parameters_equal
# Shorthand for NumPy metadata constants (np.int8, np.int64, np.bool_,
# np.newaxis, np.AxisError, ...) without importing numpy directly.
np = ak.nplike.NumpyMetadata.instance()
class UnmaskedArray(Content):
    """Option-type layout node with no mask buffer: every element is valid.

    Wraps a single ``content`` :class:`Content` node.  The array is nominally
    option-type (``is_OptionType``), but because there is no mask, no element
    is ever missing; most operations simply delegate to ``self._content`` and
    re-wrap the result.
    """

    is_OptionType = True
    def __init__(self, content, identifier=None, parameters=None, nplike=None):
        if not isinstance(content, Content):
            raise ak._v2._util.error(
                TypeError(
                    "{} 'content' must be a Content subtype, not {}".format(
                        type(self).__name__, repr(content)
                    )
                )
            )
        if nplike is None:
            # Default to the array library of the wrapped content.
            nplike = content.nplike
        self._content = content
        self._init(identifier, parameters, nplike)
    @property
    def content(self):
        # The wrapped (always-valid) layout node.
        return self._content
    Form = UnmaskedForm
    def _form_with_key(self, getkey):
        """Build the UnmaskedForm of this layout, labeling it via getkey."""
        form_key = getkey(self)
        return self.Form(
            self._content._form_with_key(getkey),
            has_identifier=self._identifier is not None,
            parameters=self._parameters,
            form_key=form_key,
        )
    def _to_buffers(self, form, getkey, container, nplike):
        # No mask buffer of its own; only the content contributes buffers.
        assert isinstance(form, self.Form)
        self._content._to_buffers(form.content, getkey, container, nplike)
    @property
    def typetracer(self):
        # Data-less duplicate of this layout for type/shape inference.
        tt = ak._v2._typetracer.TypeTracer.instance()
        return UnmaskedArray(
            self._content.typetracer,
            self._typetracer_identifier(),
            self._parameters,
            tt,
        )
    @property
    def length(self):
        return self._content.length
    def _forget_length(self):
        return UnmaskedArray(
            self._content._forget_length(),
            self._identifier,
            self._parameters,
            self._nplike,
        )
    def __repr__(self):
        return self._repr("", "", "")
    def _repr(self, indent, pre, post):
        """Render an XML-like debug representation of this node."""
        out = [indent, pre, "<UnmaskedArray len="]
        out.append(repr(str(self.length)))
        out.append(">")
        out.extend(self._repr_extra(indent + "    "))
        out.append("\n")
        out.append(self._content._repr(indent + "    ", "<content>", "</content>\n"))
        out.append(indent + "</UnmaskedArray>")
        out.append(post)
        return "".join(out)
    def merge_parameters(self, parameters):
        return UnmaskedArray(
            self._content,
            self._identifier,
            ak._v2._util.merge_parameters(self._parameters, parameters),
            self._parameters,
        )
    def toByteMaskedArray(self):
        """Return an equivalent ByteMaskedArray with an all-valid mask."""
        return ak._v2.contents.bytemaskedarray.ByteMaskedArray(
            ak._v2.index.Index8(
                self.mask_as_bool(valid_when=True).view(np.int8), nplike=self.nplike
            ),
            self._content,
            True,
            self._identifier,
            self._parameters,
            self._nplike,
        )
    def toIndexedOptionArray64(self):
        """Return an equivalent IndexedOptionArray with the identity index."""
        arange = self._nplike.index_nplike.arange(self._content.length, dtype=np.int64)
        return ak._v2.contents.indexedoptionarray.IndexedOptionArray(
            ak._v2.index.Index64(arange, nplike=self.nplike),
            self._content,
            self._identifier,
            self._parameters,
            self._nplike,
        )
    def mask_as_bool(self, valid_when=True, nplike=None):
        """Return the mask as booleans: all True (or all False if valid_when=False)."""
        if nplike is None:
            nplike = self._nplike
        if valid_when:
            return nplike.index_nplike.ones(self._content.length, dtype=np.bool_)
        else:
            return nplike.index_nplike.zeros(self._content.length, dtype=np.bool_)
    def _getitem_nothing(self):
        # Zero-length slice of the content (an "empty" result of the same type).
        return self._content._getitem_range(slice(0, 0))
    def _getitem_at(self, where):
        if not self._nplike.known_data:
            # Typetracer path: result is only potentially missing.
            return ak._v2._typetracer.MaybeNone(self._content._getitem_at(where))
        return self._content._getitem_at(where)
    def _getitem_range(self, where):
        if not self._nplike.known_shape:
            return self
        start, stop, step = where.indices(self.length)
        assert step == 1
        return UnmaskedArray(
            self._content._getitem_range(slice(start, stop)),
            self._range_identifier(start, stop),
            self._parameters,
            self._nplike,
        )
    def _getitem_field(self, where, only_fields=()):
        # Field selection drops parameters (None), per record-field semantics.
        return UnmaskedArray(
            self._content._getitem_field(where, only_fields),
            self._field_identifier(where),
            None,
            self._nplike,
        )
    def _getitem_fields(self, where, only_fields=()):
        return UnmaskedArray(
            self._content._getitem_fields(where, only_fields),
            self._fields_identifier(where),
            None,
            self._nplike,
        )
    def _carry(self, carry, allow_lazy):
        return UnmaskedArray(
            self._content._carry(carry, allow_lazy),
            self._carry_identifier(carry),
            self._parameters,
            self._nplike,
        )
    def _getitem_next_jagged(self, slicestarts, slicestops, slicecontent, tail):
        return UnmaskedArray(
            self._content._getitem_next_jagged(
                slicestarts, slicestops, slicecontent, tail
            ),
            self._identifier,
            self._parameters,
            self._nplike,
        )
    def _getitem_next(self, head, tail, advanced):
        """Dispatch one step of advanced indexing on the type of `head`."""
        if head == ():
            return self
        elif isinstance(
            head, (int, slice, ak._v2.index.Index64, ak._v2.contents.ListOffsetArray)
        ):
            return UnmaskedArray(
                self._content._getitem_next(head, tail, advanced),
                self._identifier,
                self._parameters,
                self._nplike,
            ).simplify_optiontype()
        elif ak._util.isstr(head):
            return self._getitem_next_field(head, tail, advanced)
        elif isinstance(head, list):
            return self._getitem_next_fields(head, tail, advanced)
        elif head is np.newaxis:
            return self._getitem_next_newaxis(tail, advanced)
        elif head is Ellipsis:
            return self._getitem_next_ellipsis(tail, advanced)
        elif isinstance(head, ak._v2.contents.IndexedOptionArray):
            return self._getitem_next_missing(head, tail, advanced)
        else:
            raise ak._v2._util.error(AssertionError(repr(head)))
    def project(self, mask=None):
        """Return the content; with an external `mask`, project through it."""
        if mask is not None:
            return ak._v2.contents.bytemaskedarray.ByteMaskedArray(
                mask,
                self._content,
                False,
                None,
                self._parameters,
                self._nplike,
            ).project()
        else:
            return self._content
    def simplify_optiontype(self):
        """Drop this wrapper when content is already indexed or option-type."""
        if isinstance(
            self._content,
            (
                ak._v2.contents.indexedarray.IndexedArray,
                ak._v2.contents.indexedoptionarray.IndexedOptionArray,
                ak._v2.contents.bytemaskedarray.ByteMaskedArray,
                ak._v2.contents.bitmaskedarray.BitMaskedArray,
                ak._v2.contents.unmaskedarray.UnmaskedArray,
            ),
        ):
            return self._content
        else:
            return self
    def num(self, axis, depth=0):
        posaxis = self.axis_wrap_if_negative(axis)
        if posaxis == depth:
            out = self.length
            if ak._v2._util.isint(out):
                return np.int64(out)
            else:
                # e.g. typetracer unknown length
                return out
        else:
            return ak._v2.contents.unmaskedarray.UnmaskedArray(
                self._content.num(posaxis, depth), None, self._parameters, self._nplike
            )
    def _offsets_and_flattened(self, axis, depth):
        posaxis = self.axis_wrap_if_negative(axis)
        if posaxis == depth:
            raise ak._v2._util.error(np.AxisError("axis=0 not allowed for flatten"))
        else:
            offsets, flattened = self._content._offsets_and_flattened(posaxis, depth)
            if offsets.length == 0:
                return (
                    offsets,
                    UnmaskedArray(flattened, None, self._parameters, self._nplike),
                )
            else:
                return (offsets, flattened)
    def mergeable(self, other, mergebool):
        """True if this layout can be concatenated with `other`."""
        if not _parameters_equal(self._parameters, other._parameters):
            return False
        if isinstance(
            other,
            (
                ak._v2.contents.emptyarray.EmptyArray,
                ak._v2.contents.unionarray.UnionArray,
            ),
        ):
            return True
        if isinstance(
            other,
            (
                ak._v2.contents.indexedarray.IndexedArray,
                ak._v2.contents.indexedoptionarray.IndexedOptionArray,
                ak._v2.contents.bytemaskedarray.ByteMaskedArray,
                ak._v2.contents.bitmaskedarray.BitMaskedArray,
                ak._v2.contents.unmaskedarray.UnmaskedArray,
            ),
        ):
            # Compare content-to-content when the other side is also indexed/option.
            return self._content.mergeable(other.content, mergebool)
        else:
            return self._content.mergeable(other, mergebool)
    def _reverse_merge(self, other):
        return self.toIndexedOptionArray64()._reverse_merge(other)
    def mergemany(self, others):
        if len(others) == 0:
            return self
        # Merging is implemented on the IndexedOptionArray equivalent.
        return self.toIndexedOptionArray64().mergemany(others)
    def fill_none(self, value):
        # Nothing is missing, so there is nothing to fill.
        return self._content.fill_none(value)
    def _local_index(self, axis, depth):
        posaxis = self.axis_wrap_if_negative(axis)
        if posaxis == depth:
            return self._local_index_axis0()
        else:
            return UnmaskedArray(
                self._content._local_index(posaxis, depth),
                self._identifier,
                self._parameters,
                self._nplike,
            )
    def numbers_to_type(self, name):
        return ak._v2.contents.unmaskedarray.UnmaskedArray(
            self._content.numbers_to_type(name),
            self._identifier,
            self._parameters,
            self._nplike,
        )
    def _is_unique(self, negaxis, starts, parents, outlength):
        if self._content.length == 0:
            return True
        return self._content._is_unique(negaxis, starts, parents, outlength)
    def _unique(self, negaxis, starts, parents, outlength):
        if self._content.length == 0:
            return self
        return self._content._unique(negaxis, starts, parents, outlength)
    def _argsort_next(
        self,
        negaxis,
        starts,
        shifts,
        parents,
        outlength,
        ascending,
        stable,
        kind,
        order,
    ):
        out = self._content._argsort_next(
            negaxis,
            starts,
            shifts,
            parents,
            outlength,
            ascending,
            stable,
            kind,
            order,
        )
        if isinstance(out, ak._v2.contents.RegularArray):
            # Re-wrap the RegularArray's content as unmasked, then simplify.
            tmp = ak._v2.contents.UnmaskedArray(
                out._content,
                None,
                None,
                self._nplike,
            ).simplify_optiontype()
            return ak._v2.contents.RegularArray(
                tmp,
                out._size,
                out._length,
                None,
                None,
                self._nplike,
            )
        else:
            return out
    def _sort_next(
        self, negaxis, starts, parents, outlength, ascending, stable, kind, order
    ):
        out = self._content._sort_next(
            negaxis,
            starts,
            parents,
            outlength,
            ascending,
            stable,
            kind,
            order,
        )
        if isinstance(out, ak._v2.contents.RegularArray):
            # Same re-wrap as _argsort_next, but preserving identifier/parameters.
            tmp = ak._v2.contents.UnmaskedArray(
                out._content,
                self._identifier,
                self._parameters,
                self._nplike,
            ).simplify_optiontype()
            return ak._v2.contents.RegularArray(
                tmp,
                out._size,
                out._length,
                self._identifier,
                self._parameters,
                self._nplike,
            )
        else:
            return out
    def _combinations(self, n, replacement, recordlookup, parameters, axis, depth):
        posaxis = self.axis_wrap_if_negative(axis)
        if posaxis == depth:
            return self._combinations_axis0(n, replacement, recordlookup, parameters)
        else:
            return ak._v2.contents.unmaskedarray.UnmaskedArray(
                self._content._combinations(
                    n, replacement, recordlookup, parameters, posaxis, depth
                ),
                self._identifier,
                self._parameters,
                self._nplike,
            )
    def _reduce_next(
        self,
        reducer,
        negaxis,
        starts,
        shifts,
        parents,
        outlength,
        mask,
        keepdims,
    ):
        next = self._content
        if isinstance(next, ak._v2.contents.RegularArray):
            # Reduce on the list-offset equivalent of a regular layout.
            next = next.toListOffsetArray64(True)
        return next._reduce_next(
            reducer,
            negaxis,
            starts,
            shifts,
            parents,
            outlength,
            mask,
            keepdims,
        )
    def _validity_error(self, path):
        if isinstance(
            self._content,
            (
                ak._v2.contents.bitmaskedarray.BitMaskedArray,
                ak._v2.contents.bytemaskedarray.ByteMaskedArray,
                ak._v2.contents.indexedarray.IndexedArray,
                ak._v2.contents.indexedoptionarray.IndexedOptionArray,
                ak._v2.contents.unmaskedarray.UnmaskedArray,
            ),
        ):
            # NOTE(review): the {0}/{1} placeholders are never filled in -- this
            # looks like a missing .format(path, type(self._content)); confirm
            # intended arguments before changing the message.
            return "{0} contains \"{1}\", the operation that made it might have forgotten to call 'simplify_optiontype()'"
        else:
            return self._content.validity_error(path + ".content")
    def _nbytes_part(self):
        result = self.content._nbytes_part()
        if self.identifier is not None:
            result = result + self.identifier._nbytes_part()
        return result
    def _pad_none(self, target, axis, depth, clip):
        posaxis = self.axis_wrap_if_negative(axis)
        if posaxis == depth:
            return self.pad_none_axis0(target, clip)
        elif posaxis == depth + 1:
            return self._content._pad_none(target, posaxis, depth, clip)
        else:
            return ak._v2.contents.unmaskedarray.UnmaskedArray(
                self._content._pad_none(target, posaxis, depth, clip),
                None,
                self._parameters,
                self._nplike,
            )
    def _to_arrow(self, pyarrow, mask_node, validbytes, length, options):
        # Pass self as the mask node with no valid-bytes (nothing is masked).
        return self._content._to_arrow(pyarrow, self, None, length, options)
    def _to_numpy(self, allow_missing):
        content = ak._v2.operations.to_numpy(self.content, allow_missing=allow_missing)
        if allow_missing:
            # Option-type requested: wrap in a (mask-less) masked array.
            return self._nplike.ma.MaskedArray(content)
        else:
            return content
    def _completely_flatten(self, nplike, options):
        return self.project()._completely_flatten(nplike, options)
    def _recursively_apply(
        self, action, depth, depth_context, lateral_context, options
    ):
        """Apply `action` to this node; `continuation` descends into content."""
        if options["return_array"]:
            def continuation():
                return UnmaskedArray(
                    self._content._recursively_apply(
                        action,
                        depth,
                        copy.copy(depth_context),
                        lateral_context,
                        options,
                    ),
                    self._identifier,
                    self._parameters if options["keep_parameters"] else None,
                    self._nplike,
                )
        else:
            def continuation():
                self._content._recursively_apply(
                    action,
                    depth,
                    copy.copy(depth_context),
                    lateral_context,
                    options,
                )
        result = action(
            self,
            depth=depth,
            depth_context=depth_context,
            lateral_context=lateral_context,
            continuation=continuation,
            options=options,
        )
        if isinstance(result, Content):
            return result
        elif result is None:
            # Action declined to replace this node: recurse into content.
            return continuation()
        else:
            raise ak._v2._util.error(AssertionError(result))
    def packed(self):
        return UnmaskedArray(
            self._content.packed(), self._identifier, self._parameters, self._nplike
        )
    def _to_list(self, behavior, json_conversions):
        out = self._to_list_custom(behavior, json_conversions)
        if out is not None:
            return out
        return self._content._to_list(behavior, json_conversions)
    def _to_nplike(self, nplike):
        content = self._content._to_nplike(nplike)
        return UnmaskedArray(
            content,
            identifier=self.identifier,
            parameters=self.parameters,
            nplike=nplike,
        )
    def _layout_equal(self, other, index_dtype=True, numpyarray=True):
        return self.content.layout_equal(other.content, index_dtype, numpyarray)
|
[
"awkward._v2._typetracer.TypeTracer.instance",
"awkward._v2._util.isint",
"awkward._v2.operations.to_numpy",
"awkward._v2._util.merge_parameters",
"awkward._util.isstr",
"awkward._v2.index.Index64",
"copy.copy",
"awkward._v2.contents.RegularArray",
"awkward._v2.contents.bytemaskedarray.ByteMaskedArray",
"awkward._v2.forms.form._parameters_equal",
"awkward._v2.contents.UnmaskedArray",
"awkward.nplike.NumpyMetadata.instance"
] |
[((287, 321), 'awkward.nplike.NumpyMetadata.instance', 'ak.nplike.NumpyMetadata.instance', ([], {}), '()\n', (319, 321), True, 'import awkward as ak\n'), ((1518, 1558), 'awkward._v2._typetracer.TypeTracer.instance', 'ak._v2._typetracer.TypeTracer.instance', ([], {}), '()\n', (1556, 1558), True, 'import awkward as ak\n'), ((15304, 15373), 'awkward._v2.operations.to_numpy', 'ak._v2.operations.to_numpy', (['self.content'], {'allow_missing': 'allow_missing'}), '(self.content, allow_missing=allow_missing)\n', (15330, 15373), True, 'import awkward as ak\n'), ((2632, 2691), 'awkward._v2._util.merge_parameters', 'ak._v2._util.merge_parameters', (['self._parameters', 'parameters'], {}), '(self._parameters, parameters)\n', (2661, 2691), True, 'import awkward as ak\n'), ((3310, 3358), 'awkward._v2.index.Index64', 'ak._v2.index.Index64', (['arange'], {'nplike': 'self.nplike'}), '(arange, nplike=self.nplike)\n', (3330, 3358), True, 'import awkward as ak\n'), ((7659, 7682), 'awkward._v2._util.isint', 'ak._v2._util.isint', (['out'], {}), '(out)\n', (7677, 7682), True, 'import awkward as ak\n'), ((8577, 8631), 'awkward._v2.forms.form._parameters_equal', '_parameters_equal', (['self._parameters', 'other._parameters'], {}), '(self._parameters, other._parameters)\n', (8594, 8631), False, 'from awkward._v2.forms.form import _parameters_equal\n'), ((11486, 11574), 'awkward._v2.contents.RegularArray', 'ak._v2.contents.RegularArray', (['tmp', 'out._size', 'out._length', 'None', 'None', 'self._nplike'], {}), '(tmp, out._size, out._length, None, None, self.\n _nplike)\n', (11514, 11574), True, 'import awkward as ak\n'), ((12336, 12447), 'awkward._v2.contents.RegularArray', 'ak._v2.contents.RegularArray', (['tmp', 'out._size', 'out._length', 'self._identifier', 'self._parameters', 'self._nplike'], {}), '(tmp, out._size, out._length, self._identifier,\n self._parameters, self._nplike)\n', (12364, 12447), True, 'import awkward as ak\n'), ((6018, 6038), 'awkward._util.isstr', 'ak._util.isstr', 
(['head'], {}), '(head)\n', (6032, 6038), True, 'import awkward as ak\n'), ((6701, 6818), 'awkward._v2.contents.bytemaskedarray.ByteMaskedArray', 'ak._v2.contents.bytemaskedarray.ByteMaskedArray', (['mask', 'self._content', '(False)', 'None', 'self._parameters', 'self._nplike'], {}), '(mask, self._content, False,\n None, self._parameters, self._nplike)\n', (6748, 6818), True, 'import awkward as ak\n'), ((11295, 11364), 'awkward._v2.contents.UnmaskedArray', 'ak._v2.contents.UnmaskedArray', (['out._content', 'None', 'None', 'self._nplike'], {}), '(out._content, None, None, self._nplike)\n', (11324, 11364), True, 'import awkward as ak\n'), ((12121, 12219), 'awkward._v2.contents.UnmaskedArray', 'ak._v2.contents.UnmaskedArray', (['out._content', 'self._identifier', 'self._parameters', 'self._nplike'], {}), '(out._content, self._identifier, self.\n _parameters, self._nplike)\n', (12150, 12219), True, 'import awkward as ak\n'), ((16434, 16458), 'copy.copy', 'copy.copy', (['depth_context'], {}), '(depth_context)\n', (16443, 16458), False, 'import copy\n'), ((15970, 15994), 'copy.copy', 'copy.copy', (['depth_context'], {}), '(depth_context)\n', (15979, 15994), False, 'import copy\n')]
|
import yaml
import datetime
import re
from .transaction import Transaction
class Block(yaml.YAMLObject):
    """One block of the chain, deserialized from YAML via the ``!Block`` tag.

    Instances come straight from (potentially untrusted) YAML, so every
    attribute must be checked with :meth:`validate` before the block is used.
    """

    yaml_tag = u'!Block'
    # Miner identifier: 3 to 32 alphanumeric characters.
    miner_re = re.compile(r'^[0-9a-zA-Z]{3,32}$')

    def __repr__(self):
        return "%s(Timestamp=%r, Difficulty=%r, Nonce=%r, Miner=%r, Transactions=[%d])" % (
            self.__class__.__name__,
            self.Timestamp.isoformat(),
            self.Difficulty,
            self.Nonce,
            self.Miner,
            len(self.Transactions),
        )

    def validate(self):
        """Check structure and contents; raise RuntimeError on the first problem."""
        if not hasattr(self, 'Timestamp'):
            raise RuntimeError("No 'Timestamp' attribute.")
        if not isinstance(self.Timestamp, datetime.datetime):
            raise RuntimeError("'Timestamp' has to be a Date & Time.")
        if not hasattr(self, 'Difficulty'):
            raise RuntimeError("No 'Difficulty' attribute.")
        if not isinstance(self.Difficulty, int):
            raise RuntimeError("'Difficulty' has to be an Integer.")
        if self.Difficulty <= 0:
            raise RuntimeError("'Difficulty' has to be a positive Integer.")
        if self.Difficulty > 384:
            raise RuntimeError("'Difficulty' is too large.")
        if not hasattr(self, 'Nonce'):
            raise RuntimeError("No 'Nonce' attribute.")
        if not isinstance(self.Nonce, int):
            raise RuntimeError("'Nonce' has to be an Integer.")
        if not hasattr(self, 'Miner'):
            raise RuntimeError("No 'Miner' attribute.")
        if not isinstance(self.Miner, str):
            raise RuntimeError("'Miner' has to be a String.")
        if len(self.Miner) < 3:
            raise RuntimeError("'Miner' length must be at least 3.")
        if len(self.Miner) > 32:
            raise RuntimeError("'Miner' length must be at maximum 32.")
        if self.miner_re.match(self.Miner) is None:
            raise RuntimeError("'Miner: {}' has an incorrect format.".format(self.Miner))
        if not hasattr(self, 'Transactions'):
            raise RuntimeError("No 'Transactions' attribute.")
        if not isinstance(self.Transactions, list):
            raise RuntimeError("'Transactions' has to be a list.")
        if len(self.Transactions) <= 0:
            raise RuntimeError("'Transactions' has to contain at least 1 transaction.")
        if len(self.Transactions) > 1000:
            raise RuntimeError("'Transactions' has to contain at most 1000 transactions.")
        txids = set()
        for transaction in self.Transactions:
            if not isinstance(transaction, Transaction):
                raise RuntimeError("Transaction elements must be of the type Transaction (is {}).".format(type(transaction)))
            transaction.validate()
            if transaction.Id in txids:
                raise RuntimeError("Transaction ID {} is not unique.".format(transaction.Id))
            # BUG FIX: record each seen ID. Previously nothing was ever added
            # to `txids`, so duplicate transaction IDs were never detected.
            txids.add(transaction.Id)
        # Reject unexpected attributes smuggled in through the YAML document.
        keys = set(self.__dict__.keys())
        keys.remove('Timestamp')
        keys.remove('Difficulty')
        keys.remove('Nonce')
        keys.remove('Miner')
        keys.remove('Transactions')
        if len(keys) > 0:
            raise RuntimeError("Unknown attributes provided: {}".format(', '.join(keys)))
|
[
"re.compile"
] |
[((142, 175), 're.compile', 're.compile', (['"""^[0-9a-zA-Z]{3,32}$"""'], {}), "('^[0-9a-zA-Z]{3,32}$')\n", (152, 175), False, 'import re\n')]
|
import os
import argparse
import time
import dgl
from dgl.contrib import KVServer
import torch as th
from train_pytorch import load_model
from dataloader import get_server_partition_dataset
NUM_THREAD = 1  # Threads per kvstore server process; pinned to 1 on kvstore.
class KGEServer(KVServer):
    """KVStore server for DGL-KGE embeddings.

    Overrides the push handler with a row-sparse Adagrad update so that
    gradients pushed by clients are applied server-side.
    """

    def _push_handler(self, name, ID, data, target):
        """Apply a row-sparse Adagrad step to ``target[name]`` at rows ``ID``.

        ``data`` holds the pushed gradient rows. The matching Adagrad state
        tensor is looked up by dropping the 6-character storage suffix from
        ``name`` and appending '_state-data-'.
        """
        base = name[0:-6]
        state = target[base + '_state-data-']
        # Accumulate the per-row mean of squared gradients into the state.
        state.index_add_(0, ID, (data * data).mean(1))
        # Per-row scale sqrt(state) + eps, broadcast over the embedding dim.
        # (state[ID] is an advanced-indexing copy, so the in-place sqrt_
        # does not modify the stored state.)
        scale = state[ID].sqrt_().add_(1e-10).unsqueeze(1)
        target[name].index_add_(0, ID, -self.clr * data / scale)

    def set_clr(self, learning_rate):
        """Store the learning rate used by the sparse Adagrad updater."""
        self.clr = learning_rate
# Note: Most of the args are unnecessary for KVStore, will remove them later
class ArgParser(argparse.ArgumentParser):
    """Command-line options for the DGL-KGE kvstore server.

    Many of the model options are not used by the kvstore itself; they are
    kept so the server builds the same embedding tensors as the trainer.
    """

    def __init__(self):
        # Python-3 zero-argument super() instead of super(ArgParser, self).
        super().__init__()
        self.add_argument('--model_name', default='TransE',
                          choices=['TransE', 'TransE_l1', 'TransE_l2', 'TransR',
                                   'RESCAL', 'DistMult', 'ComplEx', 'RotatE'],
                          help='model to use')
        self.add_argument('--data_path', type=str, default='../data',
                          help='root path of all dataset')
        self.add_argument('--dataset', type=str, default='FB15k',
                          help='dataset name, under data_path')
        self.add_argument('--format', type=str, default='1',
                          help='the format of the dataset.')
        self.add_argument('--hidden_dim', type=int, default=256,
                          help='hidden dim used by relation and entity')
        self.add_argument('--lr', type=float, default=0.0001,
                          help='learning rate')
        self.add_argument('-g', '--gamma', type=float, default=12.0,
                          help='margin value')
        self.add_argument('--gpu', type=int, default=[-1], nargs='+',
                          help='a list of active gpu ids, e.g. 0')
        self.add_argument('--mix_cpu_gpu', action='store_true',
                          help='mix CPU and GPU training')
        self.add_argument('-de', '--double_ent', action='store_true',
                          help='double entity dim for complex number')
        self.add_argument('-dr', '--double_rel', action='store_true',
                          help='double relation dim for complex number')
        self.add_argument('--seed', type=int, default=0,
                          help='set random seed for reproducibility')
        self.add_argument('--rel_part', action='store_true',
                          help='enable relation partitioning')
        self.add_argument('--soft_rel_part', action='store_true',
                          help='enable soft relation partition')
        self.add_argument('--nomp_thread_per_process', type=int, default=-1,
                          help='num of omp threads used per process in multi-process training')
        self.add_argument('--async_update', action='store_true',
                          help='allow async_update on node embedding')
        self.add_argument('--strict_rel_part', action='store_true',
                          help='Strict relation partition')
        self.add_argument('--server_id', type=int, default=0,
                          help='Unique ID of each server')
        self.add_argument('--ip_config', type=str, default='ip_config.txt',
                          help='IP configuration file of kvstore')
        self.add_argument('--total_client', type=int, default=1,
                          help='Total number of client worker nodes')
def get_server_data(args, machine_id):
    """Load the partition owned by ``machine_id`` and build the model tensors.

    Returns a tuple ``(global2local, entity_emb, entity_emb_state,
    relation_emb, relation_emb_state)``.
    """
    global2local, dataset = get_server_partition_dataset(
        args.data_path,
        args.dataset,
        args.format,
        machine_id)
    # The partitioned dataset carries only ID mappings, not the triples.
    print('n_entities: ' + str(dataset.n_entities))
    print('n_relations: ' + str(dataset.n_relations))
    kge_model = load_model(None, args, dataset.n_entities, dataset.n_relations)
    return (global2local,
            kge_model.entity_emb.emb,
            kge_model.entity_emb.state_sum,
            kge_model.relation_emb.emb,
            kge_model.relation_emb.state_sum)
def start_server(args):
    """Launch a kvstore server process for distributed KGE training.

    The first server of each machine group (the "master") loads the
    partition data and owns the tensors; the remaining "backup" servers
    attach to the shared tensors by name only.
    """
    th.set_num_threads(NUM_THREAD)
    namebook = dgl.contrib.read_ip_config(filename=args.ip_config)
    server = KGEServer(server_id=args.server_id,
                       server_namebook=namebook,
                       num_client=args.total_client)
    server.set_clr(args.lr)
    tensor_names = ('relation_emb', 'relation_emb_state',
                    'entity_emb', 'entity_emb_state')
    if server.get_id() % server.get_group_count() == 0:
        # Master server: load the partition and initialize the tensors.
        g2l, entity_emb, entity_state, relation_emb, relation_state = \
            get_server_data(args, server.get_machine_id())
        server.set_global2local(name='entity_emb', global2local=g2l)
        tensors = dict(zip(tensor_names,
                           (relation_emb, relation_state,
                            entity_emb, entity_state)))
        for tensor_name in tensor_names:
            server.init_data(name=tensor_name, data_tensor=tensors[tensor_name])
    else:
        # Backup server: attach to the master's shared tensors by name.
        server.set_global2local(name='entity_emb')
        for tensor_name in tensor_names:
            server.init_data(name=tensor_name)
    print('KVServer %d listen for requests ...' % server.get_id())
    server.start()
if __name__ == '__main__':
    # Parse CLI options and run the kvstore service until shutdown.
    start_server(ArgParser().parse_args())
|
[
"torch.set_num_threads",
"train_pytorch.load_model",
"dgl.contrib.read_ip_config",
"dataloader.get_server_partition_dataset"
] |
[((4184, 4271), 'dataloader.get_server_partition_dataset', 'get_server_partition_dataset', (['args.data_path', 'args.dataset', 'args.format', 'machine_id'], {}), '(args.data_path, args.dataset, args.format,\n machine_id)\n', (4212, 4271), False, 'from dataloader import get_server_partition_dataset\n'), ((4460, 4523), 'train_pytorch.load_model', 'load_model', (['None', 'args', 'dataset.n_entities', 'dataset.n_relations'], {}), '(None, args, dataset.n_entities, dataset.n_relations)\n', (4470, 4523), False, 'from train_pytorch import load_model\n'), ((4710, 4740), 'torch.set_num_threads', 'th.set_num_threads', (['NUM_THREAD'], {}), '(NUM_THREAD)\n', (4728, 4740), True, 'import torch as th\n'), ((4764, 4815), 'dgl.contrib.read_ip_config', 'dgl.contrib.read_ip_config', ([], {'filename': 'args.ip_config'}), '(filename=args.ip_config)\n', (4790, 4815), False, 'import dgl\n')]
|