hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71b5b6b59934a264b9a46047b70741641a2db51 | 10,504 | py | Python | tests/test_clone.py | mohammadroghani/django-clone | 603037194ae43f5e2eb96bd0aa159c1fbcf8c51c | [
"MIT"
] | null | null | null | tests/test_clone.py | mohammadroghani/django-clone | 603037194ae43f5e2eb96bd0aa159c1fbcf8c51c | [
"MIT"
] | null | null | null | tests/test_clone.py | mohammadroghani/django-clone | 603037194ae43f5e2eb96bd0aa159c1fbcf8c51c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Django Clone - https://github.com/mohammadroghani/django-clone
# Copyright © 2016 Mohammad Roghani <mohammadroghani43@gmail.com>
# Copyright © 2016 Amir Keivan Mohtashami <akmohtashami97@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from django.test import TestCase
from django.utils import timezone
from django_clone.clone import Cloner
from tests.models import *
def get_information_list(object_list):
information_list = []
for object in object_list:
information_list.append((object.pk, object.__module__ + "." + object.__class__.__name__))
information_list.sort()
return information_list
class VersionControlTests(TestCase):
def test_get_all_neighbor_objects(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
choice = question.choice_set.create(choice_text='a', votes=0)
c = Choice(question=question, choice_text='b', votes=0)
c.save()
choice.save()
person = Person()
person.save()
person.questions.add(question)
test_list = [(question.pk, question.__module__ + "." + question.__class__.__name__)]
cloner = Cloner()
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(person)), test_list)
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(person)), test_list)
test_list.clear()
test_list = [(choice.pk, choice.__module__ + "." + choice.__class__.__name__), (c.pk, c.__module__ + "." + c.__class__.__name__), (person.pk, person.__module__ + "." + person.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(question)), test_list)
def test_get_all_related_objects(self):
question = Question(question_text='question1', pub_date=timezone.now())
q1 = Question(question_text='q1', pub_date=timezone.now())
q1.save()
question.save()
choice = question.choice_set.create(choice_text='a', votes=0)
c = Choice(question=question, choice_text='b', votes=0)
c.save()
choice.save()
c1 = q1.choice_set.create(choice_text='a', votes=0)
c1.save()
person = Person()
person.save()
person.questions.add(question)
cloner = Cloner()
test_list = [(q1.pk, q1.__module__+ "." + q1.__class__.__name__), (c1.pk, c1.__module__ + "." + c1.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_related_object(q1)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c1)), test_list)
test_list.clear()
test_list = [(question.pk, question.__module__ + "." + question.__class__.__name__), (c.pk, c.__module__ + "." + c.__class__.__name__), (choice.pk, choice.__module__ + "." + choice.__class__.__name__),
(person.pk, person.__module__ + "." + person.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_related_object(question)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(choice)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(person)), test_list)
self.assertNotEqual(get_information_list(Cloner().get_all_related_object(q1)), test_list)
def test_get_all_related_objects_with_circular_relation(self):
a_object = A()
b_object = B()
c_object = C()
a_object.save()
b_object.save()
c_object.save()
a_object.b.add(b_object)
b_object.c.add(c_object)
c_object.a.add(a_object)
test_list = [(b_object.pk, b_object.__module__ + "." + b_object.__class__.__name__), (c_object.pk, c_object.__module__ + "." + c_object.__class__.__name__), (a_object.pk, a_object.__module__ + "." + a_object.__class__.__name__)]
test_list.sort()
cloner = Cloner()
self.assertEqual(get_information_list(cloner.get_all_related_object(a_object)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(b_object)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c_object)), test_list)
def test_clone_with_one_object(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
q = Cloner().clone(question)
self.assertNotEqual(q.pk, question.pk)
self.assertEqual(q.question_text, question.question_text)
self.assertEqual(q.pub_date, question.pub_date)
def test_clone_with_foreign_key(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = Choice(question=question, choice_text='c', votes=0)
choice.save()
cloner = Cloner()
c = cloner.clone(choice)
self.assertNotEqual(choice.id, c.id)
self.assertNotEqual(choice.question.id, c.question.id)
self.assertEqual(choice.question.question_text, c.question.question_text)
q = cloner.clone(question)
self.assertNotEqual(q.id, question.id)
self.assertNotEqual(question.choice_set.get(choice_text='c').pk, q.choice_set.get(choice_text='c').pk)
def test_clone_with_ignore_list(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = Choice(question=question, choice_text='c', votes=0)
choice.save()
c = Cloner(ignored_models=["tests.Question"]).clone(choice)
self.assertNotEqual(choice.id, c.id)
self.assertEqual(choice.question.id, c.question.id)
def test_clone_with_many_to_many_field(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
person = Person()
person.save()
person.questions.add(question)
p = Cloner().clone(person)
self.assertNotEqual(person.id, p.id)
self.assertNotEqual(person.questions.get(question_text='question1').id,
p.questions.get(question_text='question1').id)
def test_clone_many_to_many_field_with_repeated_instance(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
person = Person()
person.save()
person.questions.add(question)
person.questions.add(question)
p = Cloner().clone(person)
self.assertEqual(person.questions.all().count(), p.questions.all().count())
def test_clone_with_through_field(self):
student = Student(name='Ali')
group = Group(name='ACM')
student.save()
group.save()
membership = Membership(student=student, group=group)
membership.save()
g = Cloner().clone(group)
self.assertNotEqual(g.id, group.id)
self.assertNotEqual(group.members.get(name='Ali').id, g.members.get(name='Ali').id)
s = Cloner().clone(student)
self.assertNotEqual(s.id, student.id)
self.assertNotEqual(student.group_set.get(name='ACM').id, s.group_set.get(name='ACM').id)
def test_clone_many_to_many_field_with_through_field_and_repeated_instance(self):
student = Student(name='Ali')
group = Group(name='ACM')
student.save()
group.save()
membership1 = Membership(student=student, group=group)
membership1.save()
membership2 = Membership(student=student, group=group)
membership2.save()
g = Cloner().clone(group)
self.assertEqual(g.members.all().count(), group.members.all().count())
s = Cloner().clone(student)
self.assertEqual(s.group_set.all().count(), student.group_set.all().count())
def test_clone_subclass(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice(question=question, choice_text='c', votes=0)
choice.save()
Cloner().clone(question)
self.assertEqual(Question.objects.count(), 2)
self.assertEqual(Choice.objects.count(), 2)
Cloner().clone(choice)
self.assertEqual(Question.objects.count(), 3)
self.assertEqual(Choice.objects.count(), 3)
def test_clone_subclass_explicit_relation(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice2(question=question, choice_text='c', votes=0)
choice.save()
Cloner().clone(question)
self.assertEqual(Question.objects.count(), 2)
self.assertEqual(Choice.objects.count(), 2)
Cloner().clone(choice)
self.assertEqual(Question.objects.count(), 3)
self.assertEqual(Choice.objects.count(), 3)
def test_clone_unique(self):
def unique_editor(obj):
if isinstance(obj, BigChoice):
obj.unique_value += "S"
return obj
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice(question=question, choice_text='c', votes=0, unique_value="S")
choice.save()
new_choice = Cloner().clone(choice, unique_editor)
self.assertNotEqual(new_choice.pk, choice.pk)
| 47.315315 | 236 | 0.680217 |
from django.test import TestCase
from django.utils import timezone
from django_clone.clone import Cloner
from tests.models import *
def get_information_list(object_list):
information_list = []
for object in object_list:
information_list.append((object.pk, object.__module__ + "." + object.__class__.__name__))
information_list.sort()
return information_list
class VersionControlTests(TestCase):
def test_get_all_neighbor_objects(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
choice = question.choice_set.create(choice_text='a', votes=0)
c = Choice(question=question, choice_text='b', votes=0)
c.save()
choice.save()
person = Person()
person.save()
person.questions.add(question)
test_list = [(question.pk, question.__module__ + "." + question.__class__.__name__)]
cloner = Cloner()
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(person)), test_list)
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(person)), test_list)
test_list.clear()
test_list = [(choice.pk, choice.__module__ + "." + choice.__class__.__name__), (c.pk, c.__module__ + "." + c.__class__.__name__), (person.pk, person.__module__ + "." + person.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_neighbor_objects(question)), test_list)
def test_get_all_related_objects(self):
question = Question(question_text='question1', pub_date=timezone.now())
q1 = Question(question_text='q1', pub_date=timezone.now())
q1.save()
question.save()
choice = question.choice_set.create(choice_text='a', votes=0)
c = Choice(question=question, choice_text='b', votes=0)
c.save()
choice.save()
c1 = q1.choice_set.create(choice_text='a', votes=0)
c1.save()
person = Person()
person.save()
person.questions.add(question)
cloner = Cloner()
test_list = [(q1.pk, q1.__module__+ "." + q1.__class__.__name__), (c1.pk, c1.__module__ + "." + c1.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_related_object(q1)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c1)), test_list)
test_list.clear()
test_list = [(question.pk, question.__module__ + "." + question.__class__.__name__), (c.pk, c.__module__ + "." + c.__class__.__name__), (choice.pk, choice.__module__ + "." + choice.__class__.__name__),
(person.pk, person.__module__ + "." + person.__class__.__name__)]
test_list.sort()
self.assertEqual(get_information_list(cloner.get_all_related_object(question)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(choice)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(person)), test_list)
self.assertNotEqual(get_information_list(Cloner().get_all_related_object(q1)), test_list)
def test_get_all_related_objects_with_circular_relation(self):
a_object = A()
b_object = B()
c_object = C()
a_object.save()
b_object.save()
c_object.save()
a_object.b.add(b_object)
b_object.c.add(c_object)
c_object.a.add(a_object)
test_list = [(b_object.pk, b_object.__module__ + "." + b_object.__class__.__name__), (c_object.pk, c_object.__module__ + "." + c_object.__class__.__name__), (a_object.pk, a_object.__module__ + "." + a_object.__class__.__name__)]
test_list.sort()
cloner = Cloner()
self.assertEqual(get_information_list(cloner.get_all_related_object(a_object)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(b_object)), test_list)
self.assertEqual(get_information_list(cloner.get_all_related_object(c_object)), test_list)
def test_clone_with_one_object(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
q = Cloner().clone(question)
self.assertNotEqual(q.pk, question.pk)
self.assertEqual(q.question_text, question.question_text)
self.assertEqual(q.pub_date, question.pub_date)
def test_clone_with_foreign_key(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = Choice(question=question, choice_text='c', votes=0)
choice.save()
cloner = Cloner()
c = cloner.clone(choice)
self.assertNotEqual(choice.id, c.id)
self.assertNotEqual(choice.question.id, c.question.id)
self.assertEqual(choice.question.question_text, c.question.question_text)
q = cloner.clone(question)
self.assertNotEqual(q.id, question.id)
self.assertNotEqual(question.choice_set.get(choice_text='c').pk, q.choice_set.get(choice_text='c').pk)
def test_clone_with_ignore_list(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = Choice(question=question, choice_text='c', votes=0)
choice.save()
c = Cloner(ignored_models=["tests.Question"]).clone(choice)
self.assertNotEqual(choice.id, c.id)
self.assertEqual(choice.question.id, c.question.id)
def test_clone_with_many_to_many_field(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
person = Person()
person.save()
person.questions.add(question)
p = Cloner().clone(person)
self.assertNotEqual(person.id, p.id)
self.assertNotEqual(person.questions.get(question_text='question1').id,
p.questions.get(question_text='question1').id)
def test_clone_many_to_many_field_with_repeated_instance(self):
question = Question(question_text='question1', pub_date=timezone.now())
question.save()
person = Person()
person.save()
person.questions.add(question)
person.questions.add(question)
p = Cloner().clone(person)
self.assertEqual(person.questions.all().count(), p.questions.all().count())
def test_clone_with_through_field(self):
student = Student(name='Ali')
group = Group(name='ACM')
student.save()
group.save()
membership = Membership(student=student, group=group)
membership.save()
g = Cloner().clone(group)
self.assertNotEqual(g.id, group.id)
self.assertNotEqual(group.members.get(name='Ali').id, g.members.get(name='Ali').id)
s = Cloner().clone(student)
self.assertNotEqual(s.id, student.id)
self.assertNotEqual(student.group_set.get(name='ACM').id, s.group_set.get(name='ACM').id)
def test_clone_many_to_many_field_with_through_field_and_repeated_instance(self):
student = Student(name='Ali')
group = Group(name='ACM')
student.save()
group.save()
membership1 = Membership(student=student, group=group)
membership1.save()
membership2 = Membership(student=student, group=group)
membership2.save()
g = Cloner().clone(group)
self.assertEqual(g.members.all().count(), group.members.all().count())
s = Cloner().clone(student)
self.assertEqual(s.group_set.all().count(), student.group_set.all().count())
def test_clone_subclass(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice(question=question, choice_text='c', votes=0)
choice.save()
Cloner().clone(question)
self.assertEqual(Question.objects.count(), 2)
self.assertEqual(Choice.objects.count(), 2)
Cloner().clone(choice)
self.assertEqual(Question.objects.count(), 3)
self.assertEqual(Choice.objects.count(), 3)
def test_clone_subclass_explicit_relation(self):
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice2(question=question, choice_text='c', votes=0)
choice.save()
Cloner().clone(question)
self.assertEqual(Question.objects.count(), 2)
self.assertEqual(Choice.objects.count(), 2)
Cloner().clone(choice)
self.assertEqual(Question.objects.count(), 3)
self.assertEqual(Choice.objects.count(), 3)
def test_clone_unique(self):
def unique_editor(obj):
if isinstance(obj, BigChoice):
obj.unique_value += "S"
return obj
question = Question(question_text='a', pub_date=timezone.now())
question.save()
choice = BigChoice(question=question, choice_text='c', votes=0, unique_value="S")
choice.save()
new_choice = Cloner().clone(choice, unique_editor)
self.assertNotEqual(new_choice.pk, choice.pk)
| true | true |
f71b5cd7fa3f30ff2ff0a5a2c5acbd05b042c711 | 497 | py | Python | examples/example_proj/dependency_app_o2o/migrations/0001_initial.py | philsupertramp/dj-migration-test | 97ec4513b9848d96436907de7940841866895e3c | [
"MIT"
] | 4 | 2019-07-05T19:32:07.000Z | 2020-02-07T00:47:15.000Z | examples/example_proj/dependency_app_o2o/migrations/0001_initial.py | philsupertramp/dj-migration-test | 97ec4513b9848d96436907de7940841866895e3c | [
"MIT"
] | 17 | 2019-08-23T07:21:23.000Z | 2021-09-22T18:44:26.000Z | examples/example_proj/dependency_app_o2o/migrations/0001_initial.py | philsupertramp/dj-migration-test | 97ec4513b9848d96436907de7940841866895e3c | [
"MIT"
] | null | null | null | # Generated by Django 2.2.3 on 2019-07-27 12:22
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DepModO2O',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('placeholder', models.BooleanField(default=True)),
],
),
]
| 22.590909 | 114 | 0.583501 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DepModO2O',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('placeholder', models.BooleanField(default=True)),
],
),
]
| true | true |
f71b5d0be6cfd82d1e2beb6b6ec45e9a21282a6a | 1,163 | py | Python | src/bot.py | Shignum/ShiggyBot | 292d99300dea55848d1aa458c8b8893a8dd78fc2 | [
"MIT"
] | null | null | null | src/bot.py | Shignum/ShiggyBot | 292d99300dea55848d1aa458c8b8893a8dd78fc2 | [
"MIT"
] | null | null | null | src/bot.py | Shignum/ShiggyBot | 292d99300dea55848d1aa458c8b8893a8dd78fc2 | [
"MIT"
] | null | null | null | import os
from discord import Embed
from discord import Intents
from discord.ext import commands
from dotenv import load_dotenv
load_dotenv()
intents = Intents.default()
bot = commands.Bot(command_prefix=os.getenv('PREFIX'))
TOKEN = os.getenv('BOT_TOKEN')
@bot.event
async def on_ready():
print(f'{bot.user} has logged in.')
initial_extensions = ['cogs.event','cogs.music','cogs.other','cogs.playlist']
for extension in initial_extensions:
bot.load_extension(extension)
@bot.event
async def on_command_error(ctx, error):
if isinstance(error, commands.CommandNotFound):
await ctx.send(embed=Embed(title='Command not found.'))
return
elif isinstance(error, commands.MissingRequiredArgument):
await ctx.send(embed=Embed(title='Command needs an Argument.'))
return
elif isinstance(error, commands.CommandInvokeError):
await ctx.send(embed=Embed(title=f'{error.original}'))
return
elif isinstance(error, commands.MissingPermissions):
await ctx.send(embed=Embed(title="You don't have the permission to use this command."))
return
raise error
bot.run(TOKEN)
| 29.075 | 95 | 0.715391 | import os
from discord import Embed
from discord import Intents
from discord.ext import commands
from dotenv import load_dotenv
load_dotenv()
intents = Intents.default()
bot = commands.Bot(command_prefix=os.getenv('PREFIX'))
TOKEN = os.getenv('BOT_TOKEN')
@bot.event
async def on_ready():
print(f'{bot.user} has logged in.')
initial_extensions = ['cogs.event','cogs.music','cogs.other','cogs.playlist']
for extension in initial_extensions:
bot.load_extension(extension)
@bot.event
async def on_command_error(ctx, error):
if isinstance(error, commands.CommandNotFound):
await ctx.send(embed=Embed(title='Command not found.'))
return
elif isinstance(error, commands.MissingRequiredArgument):
await ctx.send(embed=Embed(title='Command needs an Argument.'))
return
elif isinstance(error, commands.CommandInvokeError):
await ctx.send(embed=Embed(title=f'{error.original}'))
return
elif isinstance(error, commands.MissingPermissions):
await ctx.send(embed=Embed(title="You don't have the permission to use this command."))
return
raise error
bot.run(TOKEN)
| true | true |
f71b5d97598ff53100bfb2598cdb30dd30469fd8 | 10,221 | py | Python | client-py/iotdb/utils/IoTDBRpcDataSet.py | slawr/iotdb | 96b5269f0fc6e02927563d4481da3bfb310fc7b1 | [
"Apache-2.0"
] | null | null | null | client-py/iotdb/utils/IoTDBRpcDataSet.py | slawr/iotdb | 96b5269f0fc6e02927563d4481da3bfb310fc7b1 | [
"Apache-2.0"
] | 27 | 2021-10-19T09:41:40.000Z | 2022-03-30T16:22:17.000Z | client-py/iotdb/utils/IoTDBRpcDataSet.py | slawr/iotdb | 96b5269f0fc6e02927563d4481da3bfb310fc7b1 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# for package
import logging
from thrift.transport import TTransport
from iotdb.thrift.rpc.TSIService import TSFetchResultsReq, TSCloseOperationReq
from iotdb.utils.IoTDBConstants import TSDataType
logger = logging.getLogger("IoTDB")
class IoTDBRpcDataSet(object):
TIMESTAMP_STR = "Time"
# VALUE_IS_NULL = "The value got by %s (column name) is NULL."
START_INDEX = 2
FLAG = 0x80
def __init__(
self,
sql,
column_name_list,
column_type_list,
column_name_index,
ignore_timestamp,
query_id,
client,
session_id,
query_data_set,
fetch_size,
):
self.__session_id = session_id
self.__ignore_timestamp = ignore_timestamp
self.__sql = sql
self.__query_id = query_id
self.__client = client
self.__fetch_size = fetch_size
self.__column_size = len(column_name_list)
self.__default_time_out = 1000
self.__column_name_list = []
self.__column_type_list = []
self.__column_ordinal_dict = {}
if not ignore_timestamp:
self.__column_name_list.append(IoTDBRpcDataSet.TIMESTAMP_STR)
self.__column_type_list.append(TSDataType.INT64)
self.__column_ordinal_dict[IoTDBRpcDataSet.TIMESTAMP_STR] = 1
if column_name_index is not None:
self.__column_type_deduplicated_list = [
None for _ in range(len(column_name_index))
]
for i in range(len(column_name_list)):
name = column_name_list[i]
self.__column_name_list.append(name)
self.__column_type_list.append(TSDataType[column_type_list[i]])
if name not in self.__column_ordinal_dict:
index = column_name_index[name]
self.__column_ordinal_dict[name] = (
index + IoTDBRpcDataSet.START_INDEX
)
self.__column_type_deduplicated_list[index] = TSDataType[
column_type_list[i]
]
else:
index = IoTDBRpcDataSet.START_INDEX
self.__column_type_deduplicated_list = []
for i in range(len(column_name_list)):
name = column_name_list[i]
self.__column_name_list.append(name)
self.__column_type_list.append(TSDataType[column_type_list[i]])
if name not in self.__column_ordinal_dict:
self.__column_ordinal_dict[name] = index
index += 1
self.__column_type_deduplicated_list.append(
TSDataType[column_type_list[i]]
)
self.__time_bytes = bytes(0)
self.__current_bitmap = [
bytes(0) for _ in range(len(self.__column_type_deduplicated_list))
]
self.__value = [None for _ in range(len(self.__column_type_deduplicated_list))]
self.__query_data_set = query_data_set
self.__is_closed = False
self.__empty_resultSet = False
self.__has_cached_record = False
self.__rows_index = 0
def close(self):
if self.__is_closed:
return
if self.__client is not None:
try:
status = self.__client.closeOperation(
TSCloseOperationReq(self.__session_id, self.__query_id)
)
logger.debug(
"close session {}, message: {}".format(
self.__session_id, status.message
)
)
except TTransport.TException as e:
raise RuntimeError(
"close session {} failed because: ".format(self.__session_id), e
)
self.__is_closed = True
self.__client = None
def next(self):
if self.has_cached_result():
self.construct_one_row()
return True
if self.__empty_resultSet:
return False
if self.fetch_results():
self.construct_one_row()
return True
return False
def has_cached_result(self):
return (self.__query_data_set is not None) and (
len(self.__query_data_set.time) != 0
)
def construct_one_row(self):
# simulating buffer, read 8 bytes from data set and discard first 8 bytes which have been read.
self.__time_bytes = self.__query_data_set.time[:8]
self.__query_data_set.time = self.__query_data_set.time[8:]
for i in range(len(self.__query_data_set.bitmapList)):
bitmap_buffer = self.__query_data_set.bitmapList[i]
# another 8 new rows, should move the bitmap buffer position to next byte
if self.__rows_index % 8 == 0:
self.__current_bitmap[i] = bitmap_buffer[0]
self.__query_data_set.bitmapList[i] = bitmap_buffer[1:]
if not self.is_null(i, self.__rows_index):
value_buffer = self.__query_data_set.valueList[i]
data_type = self.__column_type_deduplicated_list[i]
# simulating buffer
if data_type == TSDataType.BOOLEAN:
self.__value[i] = value_buffer[:1]
self.__query_data_set.valueList[i] = value_buffer[1:]
elif data_type == TSDataType.INT32:
self.__value[i] = value_buffer[:4]
self.__query_data_set.valueList[i] = value_buffer[4:]
elif data_type == TSDataType.INT64:
self.__value[i] = value_buffer[:8]
self.__query_data_set.valueList[i] = value_buffer[8:]
elif data_type == TSDataType.FLOAT:
self.__value[i] = value_buffer[:4]
self.__query_data_set.valueList[i] = value_buffer[4:]
elif data_type == TSDataType.DOUBLE:
self.__value[i] = value_buffer[:8]
self.__query_data_set.valueList[i] = value_buffer[8:]
elif data_type == TSDataType.TEXT:
length = int.from_bytes(
value_buffer[:4], byteorder="big", signed=False
)
self.__value[i] = value_buffer[4 : 4 + length]
self.__query_data_set.valueList[i] = value_buffer[4 + length :]
else:
raise RuntimeError("unsupported data type {}.".format(data_type))
self.__rows_index += 1
self.__has_cached_record = True
def fetch_results(self):
self.__rows_index = 0
request = TSFetchResultsReq(
self.__session_id,
self.__sql,
self.__fetch_size,
self.__query_id,
True,
self.__default_time_out,
)
try:
resp = self.__client.fetchResults(request)
if not resp.hasResultSet:
self.__empty_resultSet = True
else:
self.__query_data_set = resp.queryDataSet
return resp.hasResultSet
except TTransport.TException as e:
raise RuntimeError(
"Cannot fetch result from server, because of network connection: ", e
)
def is_null(self, index, row_num):
bitmap = self.__current_bitmap[index]
shift = row_num % 8
return ((IoTDBRpcDataSet.FLAG >> shift) & (bitmap & 0xFF)) == 0
def is_null_by_index(self, column_index):
index = (
self.__column_ordinal_dict[self.find_column_name_by_index(column_index)]
- IoTDBRpcDataSet.START_INDEX
)
# time column will never be None
if index < 0:
return True
return self.is_null(index, self.__rows_index - 1)
def is_null_by_name(self, column_name):
index = self.__column_ordinal_dict[column_name] - IoTDBRpcDataSet.START_INDEX
# time column will never be None
if index < 0:
return True
return self.is_null(index, self.__rows_index - 1)
def find_column_name_by_index(self, column_index):
if column_index <= 0:
raise Exception("Column index should start from 1")
if column_index > len(self.__column_name_list):
raise Exception(
"column index {} out of range {}".format(
column_index, self.__column_size
)
)
return self.__column_name_list[column_index - 1]
def get_fetch_size(self):
return self.__fetch_size
def set_fetch_size(self, fetch_size):
self.__fetch_size = fetch_size
def get_column_names(self):
return self.__column_name_list
def get_column_types(self):
return self.__column_type_list
def get_column_size(self):
return self.__column_size
def get_ignore_timestamp(self):
return self.__ignore_timestamp
def get_column_ordinal_dict(self):
return self.__column_ordinal_dict
def get_column_type_deduplicated_list(self):
return self.__column_type_deduplicated_list
def get_values(self):
return self.__value
def get_time_bytes(self):
return self.__time_bytes
def get_has_cached_record(self):
return self.__has_cached_record
| 37.577206 | 103 | 0.604246 |
import logging
from thrift.transport import TTransport
from iotdb.thrift.rpc.TSIService import TSFetchResultsReq, TSCloseOperationReq
from iotdb.utils.IoTDBConstants import TSDataType
logger = logging.getLogger("IoTDB")
class IoTDBRpcDataSet(object):
    """Client-side cursor over an IoTDB query result fetched via thrift RPC.

    Rows are pulled from the server in batches of ``fetch_size`` and decoded
    lazily: ``next()`` advances one row, after which the raw per-column byte
    buffers are available through the accessor methods.
    """
    # Name of the synthetic timestamp column.
    TIMESTAMP_STR = "Time"
    # Ordinal of the first data column (Time occupies ordinal 1).
    START_INDEX = 2
    # High bit of a bitmap byte; marks the first row encoded in that byte.
    FLAG = 0x80
    def __init__(
        self,
        sql,
        column_name_list,
        column_type_list,
        column_name_index,
        ignore_timestamp,
        query_id,
        client,
        session_id,
        query_data_set,
        fetch_size,
    ):
        """Build the cursor from the metadata returned by the query RPC.

        ``column_name_index`` maps a column name to its position in the
        deduplicated value buffers; when it is None the positions are
        assigned in encounter order.
        """
        self.__session_id = session_id
        self.__ignore_timestamp = ignore_timestamp
        self.__sql = sql
        self.__query_id = query_id
        self.__client = client
        self.__fetch_size = fetch_size
        self.__column_size = len(column_name_list)
        # RPC timeout (ms) used by fetch_results.
        self.__default_time_out = 1000
        self.__column_name_list = []
        self.__column_type_list = []
        self.__column_ordinal_dict = {}
        if not ignore_timestamp:
            # Prepend the Time column; it is always INT64 with ordinal 1.
            self.__column_name_list.append(IoTDBRpcDataSet.TIMESTAMP_STR)
            self.__column_type_list.append(TSDataType.INT64)
            self.__column_ordinal_dict[IoTDBRpcDataSet.TIMESTAMP_STR] = 1
        if column_name_index is not None:
            # Server supplied explicit buffer positions for each unique column.
            self.__column_type_deduplicated_list = [
                None for _ in range(len(column_name_index))
            ]
            for i in range(len(column_name_list)):
                name = column_name_list[i]
                self.__column_name_list.append(name)
                self.__column_type_list.append(TSDataType[column_type_list[i]])
                if name not in self.__column_ordinal_dict:
                    index = column_name_index[name]
                    self.__column_ordinal_dict[name] = (
                        index + IoTDBRpcDataSet.START_INDEX
                    )
                    self.__column_type_deduplicated_list[index] = TSDataType[
                        column_type_list[i]
                    ]
        else:
            # No index mapping: assign ordinals in encounter order,
            # skipping duplicate column names.
            index = IoTDBRpcDataSet.START_INDEX
            self.__column_type_deduplicated_list = []
            for i in range(len(column_name_list)):
                name = column_name_list[i]
                self.__column_name_list.append(name)
                self.__column_type_list.append(TSDataType[column_type_list[i]])
                if name not in self.__column_ordinal_dict:
                    self.__column_ordinal_dict[name] = index
                    index += 1
                    self.__column_type_deduplicated_list.append(
                        TSDataType[column_type_list[i]]
                    )
        self.__time_bytes = bytes(0)
        # One bitmap byte (null flags) per physically transferred column.
        self.__current_bitmap = [
            bytes(0) for _ in range(len(self.__column_type_deduplicated_list))
        ]
        self.__value = [None for _ in range(len(self.__column_type_deduplicated_list))]
        self.__query_data_set = query_data_set
        self.__is_closed = False
        self.__empty_resultSet = False
        self.__has_cached_record = False
        self.__rows_index = 0
    def close(self):
        """Close the server-side operation; idempotent."""
        if self.__is_closed:
            return
        if self.__client is not None:
            try:
                status = self.__client.closeOperation(
                    TSCloseOperationReq(self.__session_id, self.__query_id)
                )
                logger.debug(
                    "close session {}, message: {}".format(
                        self.__session_id, status.message
                    )
                )
            except TTransport.TException as e:
                raise RuntimeError(
                    "close session {} failed because: ".format(self.__session_id), e
                )
        self.__is_closed = True
        self.__client = None
    def next(self):
        """Advance to the next row; return False when the result is exhausted."""
        if self.has_cached_result():
            self.construct_one_row()
            return True
        if self.__empty_resultSet:
            return False
        if self.fetch_results():
            self.construct_one_row()
            return True
        return False
    def has_cached_result(self):
        """True while the current batch still holds undecoded rows."""
        return (self.__query_data_set is not None) and (
            len(self.__query_data_set.time) != 0
        )
    def construct_one_row(self):
        """Decode one row in place by consuming the batch's byte buffers."""
        # Timestamps are fixed 8-byte values; consume one.
        self.__time_bytes = self.__query_data_set.time[:8]
        self.__query_data_set.time = self.__query_data_set.time[8:]
        for i in range(len(self.__query_data_set.bitmapList)):
            bitmap_buffer = self.__query_data_set.bitmapList[i]
            # Every 8 rows a fresh bitmap byte starts; consume it.
            if self.__rows_index % 8 == 0:
                self.__current_bitmap[i] = bitmap_buffer[0]
                self.__query_data_set.bitmapList[i] = bitmap_buffer[1:]
            if not self.is_null(i, self.__rows_index):
                value_buffer = self.__query_data_set.valueList[i]
                data_type = self.__column_type_deduplicated_list[i]
                # Fixed-width types: slice off exactly their byte size.
                if data_type == TSDataType.BOOLEAN:
                    self.__value[i] = value_buffer[:1]
                    self.__query_data_set.valueList[i] = value_buffer[1:]
                elif data_type == TSDataType.INT32:
                    self.__value[i] = value_buffer[:4]
                    self.__query_data_set.valueList[i] = value_buffer[4:]
                elif data_type == TSDataType.INT64:
                    self.__value[i] = value_buffer[:8]
                    self.__query_data_set.valueList[i] = value_buffer[8:]
                elif data_type == TSDataType.FLOAT:
                    self.__value[i] = value_buffer[:4]
                    self.__query_data_set.valueList[i] = value_buffer[4:]
                elif data_type == TSDataType.DOUBLE:
                    self.__value[i] = value_buffer[:8]
                    self.__query_data_set.valueList[i] = value_buffer[8:]
                elif data_type == TSDataType.TEXT:
                    # TEXT is length-prefixed: 4-byte big-endian length,
                    # then that many payload bytes.
                    length = int.from_bytes(
                        value_buffer[:4], byteorder="big", signed=False
                    )
                    self.__value[i] = value_buffer[4 : 4 + length]
                    self.__query_data_set.valueList[i] = value_buffer[4 + length :]
                else:
                    raise RuntimeError("unsupported data type {}.".format(data_type))
        self.__rows_index += 1
        self.__has_cached_record = True
    def fetch_results(self):
        """Request the next batch from the server; return True if rows exist."""
        self.__rows_index = 0
        request = TSFetchResultsReq(
            self.__session_id,
            self.__sql,
            self.__fetch_size,
            self.__query_id,
            True,
            self.__default_time_out,
        )
        try:
            resp = self.__client.fetchResults(request)
            if not resp.hasResultSet:
                self.__empty_resultSet = True
            else:
                self.__query_data_set = resp.queryDataSet
            return resp.hasResultSet
        except TTransport.TException as e:
            raise RuntimeError(
                "Cannot fetch result from server, because of network connection: ", e
            )
    def is_null(self, index, row_num):
        """True when the bitmap flags the value at (index, row_num) as null."""
        bitmap = self.__current_bitmap[index]
        shift = row_num % 8
        return ((IoTDBRpcDataSet.FLAG >> shift) & (bitmap & 0xFF)) == 0
    def is_null_by_index(self, column_index):
        """Null test for the current row, addressed by 1-based column index."""
        index = (
            self.__column_ordinal_dict[self.find_column_name_by_index(column_index)]
            - IoTDBRpcDataSet.START_INDEX
        )
        # Time column (negative index).  NOTE(review): returns True despite
        # the original comment that time is never None; preserved as-is.
        if index < 0:
            return True
        return self.is_null(index, self.__rows_index - 1)
    def is_null_by_name(self, column_name):
        """Null test for the current row, addressed by column name."""
        index = self.__column_ordinal_dict[column_name] - IoTDBRpcDataSet.START_INDEX
        # Time column: see note in is_null_by_index.
        if index < 0:
            return True
        return self.is_null(index, self.__rows_index - 1)
    def find_column_name_by_index(self, column_index):
        """Map a 1-based column index to its column name."""
        if column_index <= 0:
            raise Exception("Column index should start from 1")
        if column_index > len(self.__column_name_list):
            raise Exception(
                "column index {} out of range {}".format(
                    column_index, self.__column_size
                )
            )
        return self.__column_name_list[column_index - 1]
    def get_fetch_size(self):
        """Number of rows requested from the server per fetch."""
        return self.__fetch_size
    def set_fetch_size(self, fetch_size):
        """Set the number of rows requested per fetch."""
        self.__fetch_size = fetch_size
    def get_column_names(self):
        """Column names, including Time unless timestamps are ignored."""
        return self.__column_name_list
    def get_column_types(self):
        """TSDataType of every column, aligned with get_column_names()."""
        return self.__column_type_list
    def get_column_size(self):
        """Number of columns in the original query (Time excluded)."""
        return self.__column_size
    def get_ignore_timestamp(self):
        """True when the result set carries no Time column."""
        return self.__ignore_timestamp
    def get_column_ordinal_dict(self):
        """Mapping column name -> 1-based ordinal (Time is ordinal 1)."""
        return self.__column_ordinal_dict
    def get_column_type_deduplicated_list(self):
        """Types of the physically transferred (deduplicated) columns."""
        return self.__column_type_deduplicated_list
    def get_values(self):
        """Raw value buffers of the current row, one entry per data column."""
        return self.__value
    def get_time_bytes(self):
        """Raw 8-byte timestamp of the current row."""
        return self.__time_bytes
    def get_has_cached_record(self):
        """True once at least one row has been materialized."""
        return self.__has_cached_record
| true | true |
f71b5dbf84e94f967043c63798744db773956c70 | 2,822 | py | Python | awesome-bot.py | ksmirenko/awesome-irc-bot | 2d39da7efc3621d737bcec458fc0f50ee7189e05 | [
"MIT"
] | null | null | null | awesome-bot.py | ksmirenko/awesome-irc-bot | 2d39da7efc3621d737bcec458fc0f50ee7189e05 | [
"MIT"
] | null | null | null | awesome-bot.py | ksmirenko/awesome-irc-bot | 2d39da7efc3621d737bcec458fc0f50ee7189e05 | [
"MIT"
] | null | null | null | import re
import socket
import sys
import threading
from random import randint
# --- IRC connection settings ---
host = 'irc.freenode.org'
port = 6667
nick = 'gabe_the_dog'
real_name = 'Gabe the dog'
channel = '#spbnet'
size = 2048  # recv() buffer size in bytes
# Base watch URL plus the pool of video ids the bot links to at random.
youtube_prefix = 'https://www.youtube.com/watch?v='
gabe_the_dog_sources = [
    'i1H0leZhXcY',
    'i11RMG_U3R4',
    'xK6cUQQ9cJY',
    'b2p8Zxmuq4g',
    'iY4Ci0wg258',
    'd6ysCgOu8N8',
    'dvZGs9QRNIw',
    'TsIZG5QbS1g',
    'gwkRRED5WxY',
    'oFRSLqpq9xk',
    'h4-pHUVthf0',
    'gIx6_Srsrog',
    'eWu5eB62dT8',
    'vwGnXKNGjT0',
    'AeEH5ugJrUU',
    'WCFnvj4Lztg',
    'Gl1uq4tg7YU',
    'rcIpIw4YtZk',
    '9u9vlj8CgS0',
    'gvOWADwCDNg',
    'JtA_WnBP_Co',
    'R78ZxZW_N-o',
    'd1lth7uX02g',
    'onZcB3y2RTM',
    'j20cTvQYe6s',
    'tVznLG3PAdM',
    'muLAN-kP5pE',
    'VJxNv2m7qns',
    'y3PcelCeraw'
]
def send_cmd(sock, cmd):
    """Send one raw IRC command string over the socket.

    `bytes(cmd)` only works on Python 2 (where bytes is str); on Python 3
    it raises TypeError for a str argument, so encode explicitly.  For the
    ASCII IRC protocol traffic this is byte-identical on both versions.
    """
    sock.send(cmd.encode('utf-8'))
def connect():
    """Open a TCP connection to the configured IRC server and return it."""
    irc_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    irc_socket.connect((host, port))
    return irc_socket
def login(sock):
    """Register the nick with the server and join the configured channel."""
    registration = (
        "USER {0} * * :{1}\r\n".format(nick, real_name),
        "NICK {0}\r\n".format(nick),
        "JOIN {0}\r\n".format(channel),
    )
    for command in registration:
        send_cmd(sock, command)
def send_msg(sock, msg):
    """Say *msg* in the configured channel."""
    command = "PRIVMSG {} :{}\r\n".format(channel, msg)
    send_cmd(sock, command)
def magic(sock):
    """Post a link to a randomly chosen video from the pool."""
    pick = randint(0, len(gabe_the_dog_sources) - 1)
    link = "Check this out: {}{}".format(youtube_prefix, gabe_the_dog_sources[pick])
    send_msg(sock, link)
# thread routines
def send_routine(sock):
    """Relay operator-typed lines to the channel until "/q..." is entered.

    On quit a QUIT command is sent and the socket is closed.
    (raw_input is the Python 2 builtin; this script targets Python 2.)
    """
    while True:
        line = raw_input()
        if not line.startswith("/q"):
            send_msg(sock, line)
            continue
        send_cmd(sock, "QUIT")
        sock.close()
        return
def receive_routine(sock):
    """Receive loop: answer PINGs, print traffic, run `magic` on request.

    Runs until the socket fails; any error ends the loop with a notice.
    (Note: `str(sock.recv(...))` matches the script's Python 2 target;
    under Python 3 it would yield a "b'...'" repr string.)
    """
    try:
        while True:
            text = str(sock.recv(size))
            if text.startswith("PING "):
                send_cmd(sock, "PONG {}".format(text[5:]))
                continue
            if len(text) > 1:
                print_message(text, "PRIVMSG" in text and channel in text)
            if "show some magic" in text and nick in text:
                magic(sock)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print("Disconnected!")
def print_message(msg, is_private):
    """Print a received IRC line; unpack nick and text for channel PRIVMSGs."""
    if not is_private:
        print(msg)
        return
    # Both substitutions use the same pattern; build it once.
    pattern = r":(.*)!.*PRIVMSG " + channel + r" :(.*)"
    sender_nick = re.sub(pattern, r"\1", msg)
    msg_text = re.sub(pattern, r"\2", msg)
    # [:-1] trims the trailing carriage return of the IRC line.
    print("<{}>: {}".format(sender_nick[:-1], msg_text[:-1]))
def main():
    """Connect, log in, then run sender and receiver threads to completion."""
    sock = connect()
    login(sock)
    print("Connected!")
    workers = [
        threading.Thread(target=send_routine, args=(sock,)),
        threading.Thread(target=receive_routine, args=(sock,)),
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
main()
| 22.576 | 84 | 0.592488 | import re
import socket
import sys
import threading
from random import randint
host = 'irc.freenode.org'
port = 6667
nick = 'gabe_the_dog'
real_name = 'Gabe the dog'
channel = '#spbnet'
size = 2048
youtube_prefix = 'https://www.youtube.com/watch?v='
gabe_the_dog_sources = [
'i1H0leZhXcY',
'i11RMG_U3R4',
'xK6cUQQ9cJY',
'b2p8Zxmuq4g',
'iY4Ci0wg258',
'd6ysCgOu8N8',
'dvZGs9QRNIw',
'TsIZG5QbS1g',
'gwkRRED5WxY',
'oFRSLqpq9xk',
'h4-pHUVthf0',
'gIx6_Srsrog',
'eWu5eB62dT8',
'vwGnXKNGjT0',
'AeEH5ugJrUU',
'WCFnvj4Lztg',
'Gl1uq4tg7YU',
'rcIpIw4YtZk',
'9u9vlj8CgS0',
'gvOWADwCDNg',
'JtA_WnBP_Co',
'R78ZxZW_N-o',
'd1lth7uX02g',
'onZcB3y2RTM',
'j20cTvQYe6s',
'tVznLG3PAdM',
'muLAN-kP5pE',
'VJxNv2m7qns',
'y3PcelCeraw'
]
def send_cmd(sock, cmd):
sock.send(bytes(cmd))
def connect():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
return sock
def login(sock):
send_cmd(sock, "USER {0} * * :{1}\r\n".format(nick, real_name))
send_cmd(sock, "NICK {0}\r\n".format(nick))
send_cmd(sock, "JOIN {0}\r\n".format(channel))
def send_msg(sock, msg):
send_cmd(sock, "PRIVMSG {} :{}\r\n".format(channel, msg))
def magic(sock):
index = randint(0, len(gabe_the_dog_sources) - 1)
msg = "Check this out: {}{}".format(youtube_prefix, gabe_the_dog_sources[index])
send_msg(sock, msg)
def send_routine(sock):
while True:
msg = raw_input()
if msg.startswith("/q"):
send_cmd(sock, "QUIT")
sock.close()
return
send_msg(sock, msg)
def receive_routine(sock):
try:
while True:
text = str(sock.recv(size))
if text.startswith("PING "):
send_cmd(sock, "PONG {}".format(text[5:]))
continue
if len(text) > 1:
print_message(text, "PRIVMSG" in text and channel in text)
if "show some magic" in text and nick in text:
magic(sock)
except:
print("Disconnected!")
def print_message(msg, is_private):
if is_private:
sender_nick = re.sub(r":(.*)!.*PRIVMSG " + channel + r" :(.*)", r"\1", msg)
msg_text = re.sub(r":(.*)!.*PRIVMSG " + channel + r" :(.*)", r"\2", msg)
print("<{}>: {}".format(sender_nick[:-1], msg_text[:-1]))
else:
print(msg)
def main():
sock = connect()
login(sock)
print("Connected!")
sender_thread = threading.Thread(target=send_routine, args=(sock,))
receiver_thread = threading.Thread(target=receive_routine, args=(sock,))
sender_thread.start()
receiver_thread.start()
sender_thread.join()
receiver_thread.join()
main()
| true | true |
f71b5dcfe6e6dcab397ded91c0b2aed0f4eaaa39 | 3,380 | py | Python | drawSS.py | banroku/analySS | 15ba9e9216f86a1bf74062eae479a3ce1c9c5a11 | [
"MIT"
] | null | null | null | drawSS.py | banroku/analySS | 15ba9e9216f86a1bf74062eae479a3ce1c9c5a11 | [
"MIT"
] | null | null | null | drawSS.py | banroku/analySS | 15ba9e9216f86a1bf74062eae479a3ce1c9c5a11 | [
"MIT"
] | null | null | null | # coding=utf-8
def thinningSS(file, max_strain=10, interval=0.1):
    '''Thin a bending stress-strain curve to a regular strain grid.

    Converts the raw (mm, N) curve of the first specimen to (%, MPa) and
    resamples it at INTERVAL steps over [0, MAX_STRAIN).  Returns a
    pd.Series of stress indexed by strain, named after the sample.
    FILE is a dict with keys:
    'name': sample name like 'RL7785'
    'crv': relative path of the xxx_crv.csv file
    'rlt': relative path of the xxx_rlt.csv file
    'set': relative path of the xxx_set.csv file (unused here)
    '''
    import pandas as pd
    import numpy as np
    # Read the curve and the result summary (Shift-JIS encoded exports).
    data = pd.read_csv(file['crv'], sep=',', encoding='shift_jis', skiprows=1, index_col=0)
    data_rlt = pd.read_csv(file['rlt'], sep=',', encoding='shift_jis')
    L = 64  # span in mm
    b = float(data_rlt.iloc[2, 3])  # width of first specimen
    h = float(data_rlt.iloc[2, 4])  # height of first specimen
    # Cut out the curve of the first specimen (displacement mm, load N).
    col = ['mm', 'N']
    data = data.reindex(columns=col)
    data.dropna(subset=['mm'], inplace=True)
    #%% convert (mm, N) to (%, MPa) via the three-point bending formulas:
    # sigma = 3*F*L / (2*b*h^2)
    # epsilon = 6*100*s*h / (L^2)
    # F: load, L: span = 64 mm, b: width, h: height, s: deflection in mm
    data['strain'] = data['mm'] * 6 * 100 * h / L / L
    data['stress'] = data['N'] * 3 * L / (2 * b * h * h)
    #%% data thinning: outer-merge marker rows at the target strains, then
    # interpolate stress onto them and keep only the marked rows.
    interval_steps = int(max_strain/interval)
    marker = pd.DataFrame({'strain': np.round(np.linspace(0, max_strain, interval_steps, endpoint=False), 2), 'marker': True})
    data_marked = pd.merge(data, marker, on='strain', how='outer')
    # NOTE(review): rename() with a Series mapper looks like it was meant to
    # be set_index('strain'); confirm against the intended pandas behavior.
    data_marked.rename(data_marked['strain'], inplace=True)
    data_marked.sort_values(by=['strain'], inplace=True)
    data_marked.interpolate(method='slinear', limit=1, inplace=True)
    # NOTE(review): fills with the *string* 'False', not the boolean; the
    # equality filter below still selects only marker==True rows.
    data_marked['marker'].fillna('False', inplace=True)
    data_skipped = data_marked[data_marked['marker']==True]
    thinnedSS = data_skipped['stress']
    thinnedSS.name = file['name']
    return thinnedSS
#%%
def parameters(file):
    """Pick averaged bending-test results and their deviations as a pd.Series.

    Reads the xxx_rlt.csv summary (Shift-JIS) and returns a Series named
    after the sample with this index:
    ['width', 'height', 'FM', 'FS_max', 'FS_break', 'FE_max', 'FE_break',
     'd_width', 'd_height', 'd_FM', 'd_FS_max', 'd_FS_break', 'd_FE_max',
     'd_FE_break']  (the d_* entries are the standard deviations).
    FILE is a dict with at least:
    'name': sample name like 'RL7785'
    'rlt': relative path of the xxx_rlt.csv file
    """
    # Local import, consistent with thinningSS; the module has no top-level
    # pandas import, so the original body raised NameError on `pd`.
    import pandas as pd
    data_rlt = pd.read_csv(file['rlt'], sep=',', skiprows=[1, 2], index_col=0,
                           encoding='shift_jis')
    # Japanese source columns: width, thickness, modulus, max / break stress,
    # max / break strain (pandas de-duplicates repeated headers with '.1').
    jp_columns = ['幅', '厚さ', '弾性率', '最大点', '破壊点', '最大点.1', '破壊点.1']
    data_rlt = data_rlt.loc[['単純平均', '標準偏差'], jp_columns]
    data_rlt.index = ['average', 'stdev']
    data_rlt.columns = ['width', 'height', 'FM', 'FS_max', 'FS_break', 'FE_max', 'FE_break']
    # Flatten row-major: averages first, then the deviations.
    values = data_rlt.values
    flattened = [item for row in values for item in row]
    out_index = ['width', 'height', 'FM', 'FS_max', 'FS_break', 'FE_max', 'FE_break',
                 'd_width', 'd_height', 'd_FM', 'd_FS_max', 'd_FS_break', 'd_FE_max', 'd_FE_break']
    result = pd.Series(flattened, index=out_index)
    result.name = file['name']
    return result
def thinningSS(file, max_strain=10, interval=0.1):
import pandas as pd
import numpy as np
data = pd.read_csv(file['crv'], sep=',', encoding='shift_jis', skiprows=1, index_col=0)
data_rlt = pd.read_csv(file['rlt'], sep=',', encoding='shift_jis')
L = 64
b = float(data_rlt.iloc[2, 3])
h = float(data_rlt.iloc[2, 4])
= data.reindex(columns=col)
data.dropna(subset=['mm'], inplace=True)
data['strain'] = data['mm'] * 6 * 100 * h / L / L
data['stress'] = data['N'] * 3 * L / (2 * b * h * h)
interval_steps = int(max_strain/interval)
marker = pd.DataFrame({'strain': np.round(np.linspace(0, max_strain, interval_steps, endpoint=False), 2), 'marker': True})
data_marked = pd.merge(data, marker, on='strain', how='outer')
data_marked.rename(data_marked['strain'], inplace=True)
data_marked.sort_values(by=['strain'], inplace=True)
data_marked.interpolate(method='slinear', limit=1, inplace=True)
data_marked['marker'].fillna('False', inplace=True)
data_skipped = data_marked[data_marked['marker']==True]
thinnedSS = data_skipped['stress']
thinnedSS.name = file['name']
return thinnedSS
def parameters(file):
file_rlt = file['rlt']
data_rlt = pd.read_csv(file_rlt, sep=',', skiprows=[1,2], index_col=0, encoding='shift_jis')
parameters = ['幅', '厚さ', '弾性率', '最大点', '破壊点', '最大点.1', '破壊点.1']
data_rlt = data_rlt.loc[['単純平均', '標準偏差'], parameters]
data_rlt.index = ['average', 'stdev']
data_rlt.columns = ['width', 'height', 'FM', 'FS_max', 'FS_break', 'FE_max', 'FE_break']
data_rlt = data_rlt.values
data_flattened = [item for sublist in data_rlt for item in sublist]
parameters = ['width', 'height', 'FM', 'FS_max', 'FS_break', 'FE_max', 'FE_break',
'd_width', 'd_height', 'd_FM', 'd_FS_max', 'd_FS_break', 'd_FE_max', 'd_FE_break']
data_rlt = pd.Series(data_flattened, index=parameters)
data_rlt.name = file['name']
return data_rlt | true | true |
f71b5dd3b2f1f6ba21eafc9f59670a50d9efc222 | 207 | py | Python | sciencer/expanders/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | [
"MIT"
] | 2 | 2022-03-28T17:27:21.000Z | 2022-03-29T22:27:15.000Z | sciencer/expanders/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | [
"MIT"
] | null | null | null | sciencer/expanders/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | [
"MIT"
] | 1 | 2022-03-28T14:47:53.000Z | 2022-03-28T14:47:53.000Z | """Sciencer Expanders"""
from .expander import Expander
from .expand_by_authors import ExpandByAuthors
from .expand_by_references import ExpandByReferences
from .expand_by_citations import ExpandByCitations
| 34.5 | 52 | 0.864734 | from .expander import Expander
from .expand_by_authors import ExpandByAuthors
from .expand_by_references import ExpandByReferences
from .expand_by_citations import ExpandByCitations
| true | true |
f71b5e233cb62b6fa8ba747a25edcddd0d4c142f | 1,068 | py | Python | get-git-lfs.py | rcmurphy/pre-commit-hooks | 17fcaab5769b7628e872601d852d3dcf13c0930e | [
"MIT"
] | null | null | null | get-git-lfs.py | rcmurphy/pre-commit-hooks | 17fcaab5769b7628e872601d852d3dcf13c0930e | [
"MIT"
] | null | null | null | get-git-lfs.py | rcmurphy/pre-commit-hooks | 17fcaab5769b7628e872601d852d3dcf13c0930e | [
"MIT"
] | 1 | 2016-05-06T15:27:07.000Z | 2016-05-06T15:27:07.000Z | #!/usr/bin/env python3.4
"""This is a script to install git-lfs to a tempdir for use in tests"""
import io
import os.path
import shutil
import tarfile
from urllib.request import urlopen
# Release tarball to fetch (pinned to git-lfs 1.1.0, linux amd64).
DOWNLOAD_PATH = (
    'https://github.com/github/git-lfs/releases/download/'
    'v1.1.0/git-lfs-linux-amd64-1.1.0.tar.gz'
)
# Member path of the binary inside the tarball.
PATH_IN_TAR = 'git-lfs-1.1.0/git-lfs'
# Where the executable is installed for the test run.
DEST_PATH = '/tmp/git-lfs/git-lfs'
DEST_DIR = os.path.dirname(DEST_PATH)
def main():
    """Install git-lfs into DEST_PATH for tests; no-op when already present.

    Returns 0 when an executable copy already exists; otherwise downloads
    the pinned release tarball and extracts the binary (implicit None).
    """
    if (
        os.path.exists(DEST_PATH) and
        os.path.isfile(DEST_PATH) and
        os.access(DEST_PATH, os.X_OK)
    ):
        print('Already installed!')
        return 0

    # Start from a clean destination directory.
    shutil.rmtree(DEST_DIR, ignore_errors=True)
    os.makedirs(DEST_DIR, exist_ok=True)

    # Close the HTTP response deterministically instead of leaking it.
    with urlopen(DOWNLOAD_PATH) as response:
        contents = io.BytesIO(response.read())
    with tarfile.open(fileobj=contents) as tar:
        with tar.extractfile(PATH_IN_TAR) as src_file:
            with open(DEST_PATH, 'wb') as dest_file:
                shutil.copyfileobj(src_file, dest_file)
    # Make the extracted binary executable.
    os.chmod(DEST_PATH, 0o755)
if __name__ == '__main__':
exit(main())
| 27.384615 | 71 | 0.661985 |
import io
import os.path
import shutil
import tarfile
from urllib.request import urlopen
DOWNLOAD_PATH = (
'https://github.com/github/git-lfs/releases/download/'
'v1.1.0/git-lfs-linux-amd64-1.1.0.tar.gz'
)
PATH_IN_TAR = 'git-lfs-1.1.0/git-lfs'
DEST_PATH = '/tmp/git-lfs/git-lfs'
DEST_DIR = os.path.dirname(DEST_PATH)
def main():
if (
os.path.exists(DEST_PATH) and
os.path.isfile(DEST_PATH) and
os.access(DEST_PATH, os.X_OK)
):
print('Already installed!')
return 0
shutil.rmtree(DEST_DIR, ignore_errors=True)
os.makedirs(DEST_DIR, exist_ok=True)
contents = io.BytesIO(urlopen(DOWNLOAD_PATH).read())
with tarfile.open(fileobj=contents) as tar:
with tar.extractfile(PATH_IN_TAR) as src_file:
with open(DEST_PATH, 'wb') as dest_file:
shutil.copyfileobj(src_file, dest_file)
os.chmod(DEST_PATH, 0o755)
if __name__ == '__main__':
exit(main())
| true | true |
f71b5e5ba3ad4fa2190d7a089a3fbcdfd842d9d6 | 4,150 | py | Python | ptvs_virtualenv_proxy.py | SpaceTheArcher/test | 469ba40a6e3a5719e90f521d851252b1d5499dab | [
"Apache-2.0"
] | null | null | null | ptvs_virtualenv_proxy.py | SpaceTheArcher/test | 469ba40a6e3a5719e90f521d851252b1d5499dab | [
"Apache-2.0"
] | 2 | 2020-06-05T18:25:57.000Z | 2021-06-01T22:22:13.000Z | ptvs_virtualenv_proxy.py | bruno-zaccariello/test | 469ba40a6e3a5719e90f521d851252b1d5499dab | [
"Apache-2.0"
] | null | null | null | # ############################################################################
#
# Copyright (c) Microsoft Corporation.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
# ###########################################################################
import datetime
import os
import sys
# Python 2/3 compatibility shims: a bytes<->str converter and, on Python 3,
# a replacement for the removed execfile() builtin.
if sys.version_info[0] == 3:
    def to_str(value):
        # Decode handler names passed as bytes using the filesystem encoding.
        return value.decode(sys.getfilesystemencoding())
    def execfile(path, global_dict):
        """Execute a file's contents in *global_dict* (execfile replacement)."""
        with open(path, 'r') as f:
            code = f.read()
        # Normalize line endings and ensure a trailing newline for exec().
        code = code.replace('\r\n', '\n') + '\n'
        exec(code, global_dict)
else:
    def to_str(value):
        # On Python 2, encode unicode handler names to a filesystem str.
        return value.encode(sys.getfilesystemencoding())
def log(txt):
    """Append *txt*, prefixed with a timestamp, to the WSGI_LOG file.

    No-op when the WSGI_LOG environment variable is missing or empty.
    """
    log_file = os.environ.get('WSGI_LOG')
    if log_file:
        # Context manager replaces the original manual try/finally close.
        with open(log_file, 'a+') as f:
            f.write('%s: %s' % (datetime.datetime.now(), txt))
# Optionally enable remote debugging with ptvsd when the secret is configured.
ptvsd_secret = os.getenv('WSGI_PTVSD_SECRET')
if ptvsd_secret:
    log('Enabling ptvsd ...\n')
    try:
        import ptvsd
        try:
            ptvsd.enable_attach(ptvsd_secret)
            log('ptvsd enabled.\n')
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; any attach failure is logged and ignored.
            log('ptvsd.enable_attach failed\n')
    except ImportError:
        log('error importing ptvsd.\n')
def get_wsgi_handler(handler_name):
    """Resolve a dotted "module.attr" spec (optionally "...attr()") to an object.

    The longest module path is tried first; on ImportError one more dotted
    segment is moved from the module path into the attribute chain and the
    import is retried.  A trailing "()" on a segment means "call the
    resolved object with no arguments".  Raises ValueError when nothing
    resolves (or the final object is None).
    """
    if not handler_name:
        raise Exception('WSGI_HANDLER env var must be set')
    if not isinstance(handler_name, str):
        handler_name = to_str(handler_name)

    def _pop_segment(dotted):
        # "a.b.c()" -> ("a.b", "c", True); an empty module means exhausted.
        mod, _, leaf = dotted.rpartition('.')
        invoke = leaf.endswith('()')
        return mod, (leaf[:-2] if invoke else leaf), invoke

    module_name, leaf, invoke = _pop_segment(handler_name)
    attr_chain = [(leaf, invoke)]
    handler = None
    while module_name:
        try:
            handler = __import__(module_name, fromlist=[attr_chain[0][0]])
            for attr, call_it in attr_chain:
                handler = getattr(handler, attr)
                if call_it:
                    handler = handler()
            break
        except ImportError:
            # Shift one more segment into the attribute chain and retry.
            module_name, leaf, invoke = _pop_segment(module_name)
            attr_chain.insert(0, (leaf, invoke))
            handler = None
    if handler is None:
        raise ValueError('"%s" could not be imported' % handler_name)
    return handler
# Required for both handler flavors below: used as the activate_this.py
# script by get_virtualenv_handler() and as the interpreter path by
# get_venv_handler().  Importing this module fails fast when it is unset.
activate_this = os.getenv('WSGI_ALT_VIRTUALENV_ACTIVATE_THIS')
if not activate_this:
    raise Exception('WSGI_ALT_VIRTUALENV_ACTIVATE_THIS is not set')
def get_virtualenv_handler():
    """Activate a classic virtualenv via its activate_this script, then
    resolve and return the WSGI handler named by WSGI_ALT_VIRTUALENV_HANDLER.
    """
    log('Activating virtualenv with %s\n' % activate_this)
    # Executing activate_this with __file__ set rewires sys.path to the env.
    execfile(activate_this, dict(__file__=activate_this))
    log('Getting handler %s\n' % os.getenv('WSGI_ALT_VIRTUALENV_HANDLER'))
    handler = get_wsgi_handler(os.getenv('WSGI_ALT_VIRTUALENV_HANDLER'))
    log('Got handler: %r\n' % handler)
    return handler
def get_venv_handler():
    """Activate a venv by pointing sys.executable at it, then resolve and
    return the WSGI handler named by WSGI_ALT_VIRTUALENV_HANDLER.

    Rebuilds sys.path from scratch via site.main() so the venv's
    site-packages win, then re-appends any old entries not already present.
    """
    log('Activating venv with executable at %s\n' % activate_this)
    import site
    # site.main() derives paths from sys.executable, so swap it first.
    sys.executable = activate_this
    old_sys_path, sys.path = sys.path, []
    site.main()
    sys.path.insert(0, '')
    # Preserve previous entries (in order) that site.main() did not add.
    for item in old_sys_path:
        if item not in sys.path:
            sys.path.append(item)
    log('Getting handler %s\n' % os.getenv('WSGI_ALT_VIRTUALENV_HANDLER'))
    handler = get_wsgi_handler(os.getenv('WSGI_ALT_VIRTUALENV_HANDLER'))
    log('Got handler: %r\n' % handler)
    return handler
| 34.87395 | 98 | 0.608675 | true | true | |
f71b5f38bc0959d120c19af81b07d70402e40457 | 2,779 | py | Python | bifurcation-diagram/run.py | ExplosiveJam/fickettmodel-reproducibility | e47af1d3e2513d35dad65c16d4fd68c23e505f87 | [
"MIT"
] | 1 | 2019-06-08T20:06:33.000Z | 2019-06-08T20:06:33.000Z | bifurcation-diagram/run.py | ExplosiveJam/fickettmodel-reproducibility | e47af1d3e2513d35dad65c16d4fd68c23e505f87 | [
"MIT"
] | null | null | null | bifurcation-diagram/run.py | ExplosiveJam/fickettmodel-reproducibility | e47af1d3e2513d35dad65c16d4fd68c23e505f87 | [
"MIT"
] | 1 | 2019-06-24T13:00:02.000Z | 2019-06-24T13:00:02.000Z | #!/usr/bin/env python
r""" Run many simulations with varying :math:`\theta`.
The simulations are run.
Separate script should plot bifurcation diagram.
"""
import argparse
import os
import sys
import shutil
import numpy as np
from mpi4py import MPI
from saf.fm.nonlinear import Config
from saf.action import solve
from saf.util import reset_logging
# Number of theta samples swept across the interval in the master process.
TOTAL_THETAS = 251
# Final simulation time handed to the solver config.
FINAL_TIME = 1000
# Assigned to Config.q in _get_config.
Q = 4
# On-disk format for solver output.
IO_FORMAT = 'numpy'
# Format for floating-point numbers (directory names and log messages).
FMT = '.3f'
def _worker(tasks, rank):
    """Run every theta value assigned to this MPI rank, one after another."""
    for theta in tasks:
        _worker_single_task(theta, rank)
def _worker_single_task(task, rank):
    """Run one simulation for theta = *task* inside OUTPUT_DIR/theta=<value>.

    Redirects this process's stdout/stderr into per-run log files before
    solving; failures in either phase are reported and swallowed so the
    remaining tasks of this rank still run.
    """
    theta = task
    worker_name = rank
    try:
        # Fresh output directory per theta value.
        outdir = 'theta={:{fmt}}'.format(theta, fmt=FMT)
        outdir = os.path.join(OUTPUT_DIR, outdir)
        if os.path.exists(outdir):
            shutil.rmtree(outdir)
        os.mkdir(outdir)
        outname = os.path.join(outdir, 'stdout.log')
        errname = os.path.join(outdir, 'stderr.log')
        # Deliberate global redirection: the log files stay open (and stdout
        # redirected) for the remainder of this task's solve.
        sys.stdout = open(outname, 'w')
        sys.stderr = open(errname, 'w')
        msg = 'Worker {} | theta={:{fmt}}'.format(worker_name, theta, fmt=FMT)
        print(msg)
    except Exception as e:
        print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
        return
    try:
        c = _get_config(theta)
        solve('nonlinear', c, outdir, log_to_file=False)
        reset_logging()
    except Exception as e:
        # Report into the per-run log, restore stdout, then report again on
        # the original console.
        print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
        sys.stdout = sys.__stdout__
        print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
def _get_config(theta):
    """Build the nonlinear-solver Config for a single theta value."""
    cfg = Config()
    cfg.n12 = N12
    cfg.final_time = FINAL_TIME
    cfg.dt = 0.005
    cfg.approximator = 'godunov-minmod'
    cfg.time_integrator = 'dopri5'
    cfg.plot_time_step = 0
    cfg.io_format = IO_FORMAT
    cfg.play_animation = False
    cfg.lambda_tol = 1e-6
    cfg.q = Q
    cfg.theta = theta
    # Reaction-rate expression exactly as in FariaEtAl2015.
    cfg.reaction_rate_version = 'v2'
    cfg.f = 1
    cfg.ic_amplitude = 0.0
    cfg.ic_type = 'gaussian'
    cfg.truncation_coef = 1e6
    return cfg
# Command line: a single positional resolution argument.
p = argparse.ArgumentParser()
p.add_argument('N12', help='Resolution', type=int)
args = p.parse_args()
N12 = args.N12
OUTPUT_DIR = os.path.join('_output', 'N12={:04d}'.format(N12))
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
all_tasks = []
# Build `all_tasks` in the master process only; scatter() distributes one
# sublist to each rank (non-root ranks pass the empty placeholder).
if rank == 0:
    # Uniformly spaced values of :math:`\theta`, dealt round-robin so every
    # rank receives a similar share.
    theta_values = np.linspace(0.90, 1.15, num=TOTAL_THETAS)
    for i in range(size):
        all_tasks.append([])
    for i in range(len(theta_values)):
        all_tasks[i % size].append(theta_values[i])
# Now distribute the tasks to each process and run this rank's share.
tasks = comm.scatter(all_tasks, root=0)
_worker(tasks, rank)
| 23.956897 | 78 | 0.640158 |
import argparse
import os
import sys
import shutil
import numpy as np
from mpi4py import MPI
from saf.fm.nonlinear import Config
from saf.action import solve
from saf.util import reset_logging
TOTAL_THETAS = 251
FINAL_TIME = 1000
Q = 4
IO_FORMAT = 'numpy'
FMT = '.3f'
def _worker(tasks, rank):
for t in tasks:
_worker_single_task(t, rank)
def _worker_single_task(task, rank):
theta = task
worker_name = rank
try:
outdir = 'theta={:{fmt}}'.format(theta, fmt=FMT)
outdir = os.path.join(OUTPUT_DIR, outdir)
if os.path.exists(outdir):
shutil.rmtree(outdir)
os.mkdir(outdir)
outname = os.path.join(outdir, 'stdout.log')
errname = os.path.join(outdir, 'stderr.log')
sys.stdout = open(outname, 'w')
sys.stderr = open(errname, 'w')
msg = 'Worker {} | theta={:{fmt}}'.format(worker_name, theta, fmt=FMT)
print(msg)
except Exception as e:
print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
return
try:
c = _get_config(theta)
solve('nonlinear', c, outdir, log_to_file=False)
reset_logging()
except Exception as e:
print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
sys.stdout = sys.__stdout__
print('theta={:{fmt}} | {}'.format(theta, str(e), fmt=FMT))
def _get_config(theta):
c = Config()
c.n12 = N12
c.final_time = FINAL_TIME
c.dt = 0.005
c.approximator = 'godunov-minmod'
c.time_integrator = 'dopri5'
c.plot_time_step = 0
c.io_format = IO_FORMAT
c.play_animation = False
c.lambda_tol = 1e-6
c.q = Q
c.theta = theta
c.reaction_rate_version = 'v2'
c.f = 1
c.ic_amplitude = 0.0
c.ic_type = 'gaussian'
c.truncation_coef = 1e6
return c
p = argparse.ArgumentParser()
p.add_argument('N12', help='Resolution', type=int)
args = p.parse_args()
N12 = args.N12
OUTPUT_DIR = os.path.join('_output', 'N12={:04d}'.format(N12))
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
all_tasks = []
if rank == 0:
theta_values = np.linspace(0.90, 1.15, num=TOTAL_THETAS)
for i in range(size):
all_tasks.append([])
for i in range(len(theta_values)):
all_tasks[i % size].append(theta_values[i])
tasks = comm.scatter(all_tasks, root=0)
_worker(tasks, rank)
| true | true |
f71b5f65fde60a4fce5bcdd06e514fa54d419c62 | 2,762 | py | Python | mwparserfromhell/nodes/wikilink.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | [
"MIT"
] | null | null | null | mwparserfromhell/nodes/wikilink.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | [
"MIT"
] | null | null | null | mwparserfromhell/nodes/wikilink.py | hperala/kontuwikibot | f409e6fb45adf4e553dc326d9fb3c0d29eda6373 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from . import Node
from ..compat import str
from ..utils import parse_anything
__all__ = ["Wikilink"]
class Wikilink(Node):
    """Represents an internal wikilink, like ``[[Foo|Bar]]``."""

    def __init__(self, title, text=None):
        super(Wikilink, self).__init__()
        self._title = title
        self._text = text

    def __unicode__(self):
        if self.text is None:
            return "[[" + str(self.title) + "]]"
        return "[[" + str(self.title) + "|" + str(self.text) + "]]"

    def __children__(self):
        yield self.title
        if self.text is not None:
            yield self.text

    def __strip__(self, normalize, collapse):
        # Prefer the display text when present, otherwise strip the title.
        target = self.title if self.text is None else self.text
        return target.strip_code(normalize, collapse)

    def __showtree__(self, write, get, mark):
        write("[[")
        get(self.title)
        if self.text is not None:
            write("    | ")
            mark()
            get(self.text)
        write("]]")

    @property
    def title(self):
        """The title of the linked page, as a :class:`.Wikicode` object."""
        return self._title

    @title.setter
    def title(self, value):
        self._title = parse_anything(value)

    @property
    def text(self):
        """The text to display (if any), as a :class:`.Wikicode` object."""
        return self._text

    @text.setter
    def text(self, value):
        self._text = None if value is None else parse_anything(value)
| 33.277108 | 79 | 0.654598 |
from __future__ import unicode_literals
from . import Node
from ..compat import str
from ..utils import parse_anything
__all__ = ["Wikilink"]
class Wikilink(Node):
def __init__(self, title, text=None):
super(Wikilink, self).__init__()
self._title = title
self._text = text
def __unicode__(self):
if self.text is not None:
return "[[" + str(self.title) + "|" + str(self.text) + "]]"
return "[[" + str(self.title) + "]]"
def __children__(self):
yield self.title
if self.text is not None:
yield self.text
def __strip__(self, normalize, collapse):
if self.text is not None:
return self.text.strip_code(normalize, collapse)
return self.title.strip_code(normalize, collapse)
def __showtree__(self, write, get, mark):
write("[[")
get(self.title)
if self.text is not None:
write(" | ")
mark()
get(self.text)
write("]]")
@property
def title(self):
return self._title
@property
def text(self):
return self._text
@title.setter
def title(self, value):
self._title = parse_anything(value)
@text.setter
def text(self, value):
if value is None:
self._text = None
else:
self._text = parse_anything(value)
| true | true |
f71b5f8ccdadb4be20d3cb2813522c3537586cb1 | 2,254 | py | Python | bcs-ui/backend/tests/container_service/observability/log_stream/test_log_stream.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 599 | 2019-06-25T03:20:46.000Z | 2022-03-31T12:14:33.000Z | bcs-ui/backend/tests/container_service/observability/log_stream/test_log_stream.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 537 | 2019-06-27T06:03:44.000Z | 2022-03-31T12:10:01.000Z | bcs-ui/backend/tests/container_service/observability/log_stream/test_log_stream.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 214 | 2019-06-25T03:26:05.000Z | 2022-03-31T07:52:03.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import pytest
from channels.testing import WebsocketCommunicator
from backend.accounts.middlewares import BCSChannelAuthMiddlewareStack
from backend.container_service.observability.log_stream.views import LogStreamHandler
@pytest.fixture
def session_id(api_client, project_id, cluster_id, namespace, pod_name, container_name):
response = api_client.post(
f'/api/logs/projects/{project_id}/clusters/{cluster_id}/namespaces/{namespace}/pods/{pod_name}/stdlogs/sessions/', # noqa
{"container_name": container_name},
)
result = response.json()
return result['data']['session_id']
@pytest.mark.skip(reason='暂时跳过标准日志部分单元测试')
@pytest.mark.django_db
@pytest.mark.asyncio
async def test_log_stream(project_id, cluster_id, namespace, pod_name, session_id):
app = BCSChannelAuthMiddlewareStack(LogStreamHandler.as_asgi())
# Test a normal connection
communicator = WebsocketCommunicator(
app,
f'/ws/logs/projects/{project_id}/clusters/{cluster_id}/namespaces/{namespace}/pods/{pod_name}/stdlogs/stream/?session_id={session_id}', # noqa
)
communicator.scope['url_route'] = {
'kwargs': {
'project_id': project_id,
'cluster_id': cluster_id,
'namespace': namespace,
'pod': pod_name,
}
}
connected, _ = await communicator.connect()
assert connected
# Test sending text
await communicator.send_to(text_data="hello")
# Close out
await communicator.disconnect()
| 35.777778 | 151 | 0.733807 |
import pytest
from channels.testing import WebsocketCommunicator
from backend.accounts.middlewares import BCSChannelAuthMiddlewareStack
from backend.container_service.observability.log_stream.views import LogStreamHandler
@pytest.fixture
def session_id(api_client, project_id, cluster_id, namespace, pod_name, container_name):
response = api_client.post(
f'/api/logs/projects/{project_id}/clusters/{cluster_id}/namespaces/{namespace}/pods/{pod_name}/stdlogs/sessions/',
{"container_name": container_name},
)
result = response.json()
return result['data']['session_id']
@pytest.mark.skip(reason='暂时跳过标准日志部分单元测试')
@pytest.mark.django_db
@pytest.mark.asyncio
async def test_log_stream(project_id, cluster_id, namespace, pod_name, session_id):
app = BCSChannelAuthMiddlewareStack(LogStreamHandler.as_asgi())
communicator = WebsocketCommunicator(
app,
f'/ws/logs/projects/{project_id}/clusters/{cluster_id}/namespaces/{namespace}/pods/{pod_name}/stdlogs/stream/?session_id={session_id}',
)
communicator.scope['url_route'] = {
'kwargs': {
'project_id': project_id,
'cluster_id': cluster_id,
'namespace': namespace,
'pod': pod_name,
}
}
connected, _ = await communicator.connect()
assert connected
await communicator.send_to(text_data="hello")
await communicator.disconnect()
| true | true |
f71b5fa3d07b50277b17d00725bcbd1f7fff771e | 6,977 | py | Python | tensorflow/contrib/cmake/tools/create_def_file.py | uve/tensorflow | e08079463bf43e5963acc41da1f57e95603f8080 | [
"Apache-2.0"
] | 6 | 2022-02-04T18:12:24.000Z | 2022-03-21T23:57:12.000Z | Lib/site-packages/tensorflow/contrib/cmake/tools/create_def_file.py | shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings | 1fa4cd6a566c8745f455fc3d2273208f21f88ced | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/tensorflow/contrib/cmake/tools/create_def_file.py | shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings | 1fa4cd6a566c8745f455fc3d2273208f21f88ced | [
"bzip2-1.0.6"
] | 1 | 2022-02-08T03:53:23.000Z | 2022-02-08T03:53:23.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""create_def_file.py - tool to create a windows def file.
The def file can be used to export symbols from the tensorflow dll to enable
tf.load_library().
Because the linker allows only 64K symbols to be exported per dll
we filter the symbols down to the essentials. The regular expressions
we use for this are specific to tensorflow.
TODO: this works fine but there is an issue with exporting
'const char * const' and importing it from a user_ops. The problem is
on the importing end and using __declspec(dllimport) works around it.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import codecs
import os
import re
import subprocess
import sys
import tempfile
# External tools we use that come with visual studio sdk and
# we assume that the caller has the correct PATH to the sdk
UNDNAME = "undname.exe"
DUMPBIN = "dumpbin.exe"
# Exclude if matched
EXCLUDE_RE = re.compile(r"RTTI|deleting destructor|::internal::|Internal|"
r"python_op_gen_internal|grappler")
# Include if matched before exclude
INCLUDEPRE_RE = re.compile(r"google::protobuf::internal::ExplicitlyConstructed|"
r"tensorflow::internal::LogMessage|"
r"tensorflow::internal::LogString|"
r"tensorflow::internal::CheckOpMessageBuilder|"
r"tensorflow::internal::PickUnusedPortOrDie|"
r"tensorflow::internal::ValidateDevice|"
r"tensorflow::ops::internal::Enter|"
r"tensorflow::strings::internal::AppendPieces|"
r"tensorflow::strings::internal::CatPieces|"
r"tensorflow::errors::Internal|"
r"tensorflow::Tensor::CopyFromInternal|"
r"tensorflow::kernel_factory::"
r"OpKernelRegistrar::InitInternal|"
r"tensorflow::io::internal::JoinPathImpl")
# Include if matched after exclude
INCLUDE_RE = re.compile(r"^(TF_\w*)$|"
r"^(TFE_\w*)$|"
r"tensorflow::|"
r"functor::|"
r"\?nsync_|"
r"stream_executor::")
# We want to identify data members explicitly in the DEF file, so that no one
# can implicitly link against the DLL if they use one of the variables exported
# from the DLL and the header they use does not decorate the symbol with
# __declspec(dllimport). It is easier to detect what a data symbol does
# NOT look like, so doing it with the below regex.
DATA_EXCLUDE_RE = re.compile(r"[)(]|"
r"vftable|"
r"vbtable|"
r"vcall|"
r"RTTI|"
r"protobuf::internal::ExplicitlyConstructed")
def get_args():
"""Parse command line."""
filename_list = lambda x: x.split(";")
parser = argparse.ArgumentParser()
parser.add_argument("--input", type=filename_list,
help="paths to input libraries separated by semicolons",
required=True)
parser.add_argument("--output", help="output deffile", required=True)
parser.add_argument("--target", help="name of the target", required=True)
parser.add_argument("--bitness", help="build target bitness", required=True)
args = parser.parse_args()
return args
def main():
"""main."""
args = get_args()
# Pipe dumpbin to extract all linkable symbols from libs.
# Good symbols are collected in candidates and also written to
# a temp file.
candidates = []
tmpfile = tempfile.NamedTemporaryFile(mode="w", delete=False)
for lib_path in args.input:
proc = subprocess.Popen([DUMPBIN, "/nologo", "/linkermember:1", lib_path],
stdout=subprocess.PIPE)
for line in codecs.getreader("utf-8")(proc.stdout):
cols = line.split()
if len(cols) < 2:
continue
sym = cols[1]
tmpfile.file.write(sym + "\n")
candidates.append(sym)
exit_code = proc.wait()
if exit_code != 0:
print("{} failed, exit={}".format(DUMPBIN, exit_code))
return exit_code
tmpfile.file.close()
# Run the symbols through undname to get their undecorated name
# so we can filter on something readable.
with open(args.output, "w") as def_fp:
# track dupes
taken = set()
# Header for the def file.
def_fp.write("LIBRARY " + args.target + "\n")
def_fp.write("EXPORTS\n")
if args.bitness == "64":
def_fp.write("\t??1OpDef@tensorflow@@UEAA@XZ\n")
else:
def_fp.write("\t??1OpDef@tensorflow@@UAE@XZ\n")
# Each symbols returned by undname matches the same position in candidates.
# We compare on undname but use the decorated name from candidates.
dupes = 0
proc = subprocess.Popen([UNDNAME, tmpfile.name], stdout=subprocess.PIPE)
for idx, line in enumerate(codecs.getreader("utf-8")(proc.stdout)):
decorated = candidates[idx]
if decorated in taken:
# Symbol is already in output, done.
dupes += 1
continue
if not INCLUDEPRE_RE.search(line):
if EXCLUDE_RE.search(line):
continue
if not INCLUDE_RE.search(line):
continue
if "deleting destructor" in line:
# Some of the symbols convered by INCLUDEPRE_RE export deleting
# destructor symbols, which is a bad idea.
# So we filter out such symbols here.
continue
if DATA_EXCLUDE_RE.search(line):
def_fp.write("\t" + decorated + "\n")
else:
def_fp.write("\t" + decorated + " DATA\n")
taken.add(decorated)
exit_code = proc.wait()
if exit_code != 0:
print("{} failed, exit={}".format(UNDNAME, exit_code))
return exit_code
os.unlink(tmpfile.name)
print("symbols={}, taken={}, dupes={}"
.format(len(candidates), len(taken), dupes))
return 0
if __name__ == "__main__":
sys.exit(main())
| 38.546961 | 81 | 0.611151 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import codecs
import os
import re
import subprocess
import sys
import tempfile
UNDNAME = "undname.exe"
DUMPBIN = "dumpbin.exe"
EXCLUDE_RE = re.compile(r"RTTI|deleting destructor|::internal::|Internal|"
r"python_op_gen_internal|grappler")
INCLUDEPRE_RE = re.compile(r"google::protobuf::internal::ExplicitlyConstructed|"
r"tensorflow::internal::LogMessage|"
r"tensorflow::internal::LogString|"
r"tensorflow::internal::CheckOpMessageBuilder|"
r"tensorflow::internal::PickUnusedPortOrDie|"
r"tensorflow::internal::ValidateDevice|"
r"tensorflow::ops::internal::Enter|"
r"tensorflow::strings::internal::AppendPieces|"
r"tensorflow::strings::internal::CatPieces|"
r"tensorflow::errors::Internal|"
r"tensorflow::Tensor::CopyFromInternal|"
r"tensorflow::kernel_factory::"
r"OpKernelRegistrar::InitInternal|"
r"tensorflow::io::internal::JoinPathImpl")
INCLUDE_RE = re.compile(r"^(TF_\w*)$|"
r"^(TFE_\w*)$|"
r"tensorflow::|"
r"functor::|"
r"\?nsync_|"
r"stream_executor::")
DATA_EXCLUDE_RE = re.compile(r"[)(]|"
r"vftable|"
r"vbtable|"
r"vcall|"
r"RTTI|"
r"protobuf::internal::ExplicitlyConstructed")
def get_args():
filename_list = lambda x: x.split(";")
parser = argparse.ArgumentParser()
parser.add_argument("--input", type=filename_list,
help="paths to input libraries separated by semicolons",
required=True)
parser.add_argument("--output", help="output deffile", required=True)
parser.add_argument("--target", help="name of the target", required=True)
parser.add_argument("--bitness", help="build target bitness", required=True)
args = parser.parse_args()
return args
def main():
args = get_args()
candidates = []
tmpfile = tempfile.NamedTemporaryFile(mode="w", delete=False)
for lib_path in args.input:
proc = subprocess.Popen([DUMPBIN, "/nologo", "/linkermember:1", lib_path],
stdout=subprocess.PIPE)
for line in codecs.getreader("utf-8")(proc.stdout):
cols = line.split()
if len(cols) < 2:
continue
sym = cols[1]
tmpfile.file.write(sym + "\n")
candidates.append(sym)
exit_code = proc.wait()
if exit_code != 0:
print("{} failed, exit={}".format(DUMPBIN, exit_code))
return exit_code
tmpfile.file.close()
with open(args.output, "w") as def_fp:
taken = set()
def_fp.write("LIBRARY " + args.target + "\n")
def_fp.write("EXPORTS\n")
if args.bitness == "64":
def_fp.write("\t??1OpDef@tensorflow@@UEAA@XZ\n")
else:
def_fp.write("\t??1OpDef@tensorflow@@UAE@XZ\n")
dupes = 0
proc = subprocess.Popen([UNDNAME, tmpfile.name], stdout=subprocess.PIPE)
for idx, line in enumerate(codecs.getreader("utf-8")(proc.stdout)):
decorated = candidates[idx]
if decorated in taken:
dupes += 1
continue
if not INCLUDEPRE_RE.search(line):
if EXCLUDE_RE.search(line):
continue
if not INCLUDE_RE.search(line):
continue
if "deleting destructor" in line:
continue
if DATA_EXCLUDE_RE.search(line):
def_fp.write("\t" + decorated + "\n")
else:
def_fp.write("\t" + decorated + " DATA\n")
taken.add(decorated)
exit_code = proc.wait()
if exit_code != 0:
print("{} failed, exit={}".format(UNDNAME, exit_code))
return exit_code
os.unlink(tmpfile.name)
print("symbols={}, taken={}, dupes={}"
.format(len(candidates), len(taken), dupes))
return 0
if __name__ == "__main__":
sys.exit(main())
| true | true |
f71b5fa9abf8cdc0cf3fabe615159d23770b9aaa | 4,790 | py | Python | flanker/mime/message/headers/encodedword.py | skshetry/flanker | 63d1cdf927777f49f97e8d7f01e105a3b0d25cd2 | [
"Apache-2.0"
] | 929 | 2015-01-01T11:14:21.000Z | 2022-03-28T23:47:40.000Z | flanker/mime/message/headers/encodedword.py | skshetry/flanker | 63d1cdf927777f49f97e8d7f01e105a3b0d25cd2 | [
"Apache-2.0"
] | 141 | 2015-01-10T19:02:03.000Z | 2021-07-26T18:04:14.000Z | flanker/mime/message/headers/encodedword.py | skshetry/flanker | 63d1cdf927777f49f97e8d7f01e105a3b0d25cd2 | [
"Apache-2.0"
] | 179 | 2015-01-01T18:42:46.000Z | 2022-02-16T21:57:14.000Z | # coding:utf-8
import logging
from base64 import b64encode
import regex as re
import six
from flanker import _email
from flanker.mime.message import charsets, errors
_log = logging.getLogger(__name__)
_RE_FOLDING_WHITE_SPACES = re.compile(r"(?:\n\r?|\r\n?)")
# This spec refers to http://tools.ietf.org/html/rfc2047
_RE_ENCODED_WORD = re.compile(r'''(?P<encodedWord>
=\? # literal =?
(?P<charset>[^?]*?) # non-greedy up to the next ? is the charset
\? # literal ?
(?P<encoding>[qb]) # either a "q" or a "b", case insensitive
\? # literal ?
(?P<encoded>.*?) # non-greedy up to the next ?= is the encoded string
\?= # literal ?=
)''', re.VERBOSE | re.IGNORECASE | re.MULTILINE)
def unfold(value):
"""
Unfolding is accomplished by simply removing any CRLF
that is immediately followed by WSP. Each header field should be
treated in its unfolded form for further syntactic and semantic
evaluation.
"""
return _RE_FOLDING_WHITE_SPACES.sub('', value)
def decode(header):
return mime_to_unicode(header)
def mime_to_unicode(header):
"""
Takes a header value and returns a fully decoded unicode string.
It differs from standard Python's mail.header.decode_header() because:
- it is higher level, i.e. returns a unicode string instead of
an array of tuples
- it accepts Unicode and non-ASCII strings as well
>>> header_to_unicode("=?UTF-8?B?UmVbMl06INCX0LXQvNC70Y/QutC4?=")
u"Земляки"
>>> header_to_unicode("hello")
u"Hello"
"""
# Only string header values need to be converted.
if not isinstance(header, six.string_types):
return header
try:
header = unfold(header)
decoded = [] # decoded parts
while header:
match = _RE_ENCODED_WORD.search(header)
if not match:
# Append the remainder of the string to the list of chunks.
decoded.append((header, 'ascii'))
break
start = match.start()
if start != 0:
# decodes unencoded ascii part to unicode
value = header[0:start]
if value.strip():
decoded.append((value, 'ascii'))
# decode a header =?...?= of encoding
charset, value = _decode_part(match.group('charset').lower(),
match.group('encoding').lower(),
match.group('encoded'))
if decoded and decoded[-1][1] == charset:
decoded[-1] = (decoded[-1][0]+value, charset)
else:
decoded.append((value, charset))
header = header[match.end():]
return u"".join(charsets.convert_to_unicode(c, v) for v, c in decoded)
except Exception:
try:
logged_header = header
if isinstance(logged_header, six.text_type):
logged_header = logged_header.encode('utf-8')
# encode header as utf-8 so all characters can be base64 encoded
logged_header = b64encode(logged_header)
_log.warning(
u"HEADER-DECODE-FAIL: ({0}) - b64encoded".format(
logged_header))
except Exception:
_log.exception("Failed to log exception")
return header
def _decode_part(charset, encoding, value):
"""
Attempts to decode part, understands
'q' - quoted encoding
'b' - base64 mime encoding
Returns (charset, decoded-string)
"""
if encoding == 'q':
return charset, _decode_quoted_printable(value)
if encoding == 'b':
# Postel's law: add missing padding
paderr = len(value) % 4
if paderr:
value += '==='[:4 - paderr]
return charset, _email.decode_base64(value)
if not encoding:
return charset, value
raise errors.DecodingError('Unknown encoding: %s' % encoding)
def _decode_quoted_printable(qp):
if six.PY2:
return _email.decode_quoted_printable(str(qp))
buf = bytearray()
size = len(qp)
i = 0
while i < size:
ch = qp[i]
i += 1
if ch == '_':
buf.append(ord(' '))
continue
if ch != '=':
buf.append(ord(ch))
continue
# If there is no enough characters left, then treat them as is.
if size - i < 2:
buf.append(ord(ch))
continue
try:
codepoint = int(qp[i:i + 2], 16)
except ValueError:
buf.append(ord(ch))
continue
buf.append(codepoint)
i += 2
return six.binary_type(buf)
| 30.125786 | 80 | 0.56618 |
import logging
from base64 import b64encode
import regex as re
import six
from flanker import _email
from flanker.mime.message import charsets, errors
_log = logging.getLogger(__name__)
_RE_FOLDING_WHITE_SPACES = re.compile(r"(?:\n\r?|\r\n?)")
_RE_ENCODED_WORD = re.compile(r'''(?P<encodedWord>
=\? # literal =?
(?P<charset>[^?]*?) # non-greedy up to the next ? is the charset
\? # literal ?
(?P<encoding>[qb]) # either a "q" or a "b", case insensitive
\? # literal ?
(?P<encoded>.*?) # non-greedy up to the next ?= is the encoded string
\?= # literal ?=
)''', re.VERBOSE | re.IGNORECASE | re.MULTILINE)
def unfold(value):
return _RE_FOLDING_WHITE_SPACES.sub('', value)
def decode(header):
return mime_to_unicode(header)
def mime_to_unicode(header):
if not isinstance(header, six.string_types):
return header
try:
header = unfold(header)
decoded = []
while header:
match = _RE_ENCODED_WORD.search(header)
if not match:
decoded.append((header, 'ascii'))
break
start = match.start()
if start != 0:
value = header[0:start]
if value.strip():
decoded.append((value, 'ascii'))
charset, value = _decode_part(match.group('charset').lower(),
match.group('encoding').lower(),
match.group('encoded'))
if decoded and decoded[-1][1] == charset:
decoded[-1] = (decoded[-1][0]+value, charset)
else:
decoded.append((value, charset))
header = header[match.end():]
return u"".join(charsets.convert_to_unicode(c, v) for v, c in decoded)
except Exception:
try:
logged_header = header
if isinstance(logged_header, six.text_type):
logged_header = logged_header.encode('utf-8')
logged_header = b64encode(logged_header)
_log.warning(
u"HEADER-DECODE-FAIL: ({0}) - b64encoded".format(
logged_header))
except Exception:
_log.exception("Failed to log exception")
return header
def _decode_part(charset, encoding, value):
if encoding == 'q':
return charset, _decode_quoted_printable(value)
if encoding == 'b':
paderr = len(value) % 4
if paderr:
value += '==='[:4 - paderr]
return charset, _email.decode_base64(value)
if not encoding:
return charset, value
raise errors.DecodingError('Unknown encoding: %s' % encoding)
def _decode_quoted_printable(qp):
if six.PY2:
return _email.decode_quoted_printable(str(qp))
buf = bytearray()
size = len(qp)
i = 0
while i < size:
ch = qp[i]
i += 1
if ch == '_':
buf.append(ord(' '))
continue
if ch != '=':
buf.append(ord(ch))
continue
# If there is no enough characters left, then treat them as is.
if size - i < 2:
buf.append(ord(ch))
continue
try:
codepoint = int(qp[i:i + 2], 16)
except ValueError:
buf.append(ord(ch))
continue
buf.append(codepoint)
i += 2
return six.binary_type(buf)
| true | true |
f71b5fdd3e686df0976041498cd2acf2ea0dd77c | 352 | py | Python | Exercicios/PythonExercicios/ex001 - 010/ex005.py | sggrilo/Curso-em-Video-Python | a0e6f3d80d89eb8709345a38e207d81a77891192 | [
"MIT"
] | null | null | null | Exercicios/PythonExercicios/ex001 - 010/ex005.py | sggrilo/Curso-em-Video-Python | a0e6f3d80d89eb8709345a38e207d81a77891192 | [
"MIT"
] | null | null | null | Exercicios/PythonExercicios/ex001 - 010/ex005.py | sggrilo/Curso-em-Video-Python | a0e6f3d80d89eb8709345a38e207d81a77891192 | [
"MIT"
] | null | null | null | # ANTECESSOR E SUCESSOR — Faça um programa que leia um número
# inteiro e mostre na tela o seu antecessor e o seu sucessor.
n = int(input('Digite um número inteiro: '))
a = n - 1
s = n + 1
print('O antecessor de \033[4;33m{}\033[m equivale a \033[4;31m{}\033[m. '.format(n, a), end='')
print('Seu sucessor equivale a \033[4;32m{}\033[m.'.format(s))
| 32 | 96 | 0.664773 |
n = int(input('Digite um número inteiro: '))
a = n - 1
s = n + 1
print('O antecessor de \033[4;33m{}\033[m equivale a \033[4;31m{}\033[m. '.format(n, a), end='')
print('Seu sucessor equivale a \033[4;32m{}\033[m.'.format(s))
| true | true |
f71b5fe3a6cd69858a329449b4f2842d872d3cb0 | 27,953 | py | Python | canvasapi/user.py | onomou/canvasapi | 94d269e8e771bcf03fd57e235190aced3b5af87a | [
"MIT"
] | null | null | null | canvasapi/user.py | onomou/canvasapi | 94d269e8e771bcf03fd57e235190aced3b5af87a | [
"MIT"
] | null | null | null | canvasapi/user.py | onomou/canvasapi | 94d269e8e771bcf03fd57e235190aced3b5af87a | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function, unicode_literals
from six import python_2_unicode_compatible, string_types
import warnings
from canvasapi.calendar_event import CalendarEvent
from canvasapi.canvas_object import CanvasObject
from canvasapi.communication_channel import CommunicationChannel
from canvasapi.folder import Folder
from canvasapi.paginated_list import PaginatedList
from canvasapi.upload import Uploader
from canvasapi.util import combine_kwargs, obj_or_id
@python_2_unicode_compatible
class User(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def get_profile(self, **kwargs):
"""
Retrieve this user's profile.
:calls: `GET /api/v1/users/:user_id/profile \
<https://canvas.instructure.com/doc/api/users.html#method.profile.settings>`_
:rtype: dict
"""
response = self._requester.request(
'GET',
'users/{}/profile'.format(self.id)
)
return response.json()
def get_page_views(self, **kwargs):
"""
Retrieve this user's page views.
:calls: `GET /api/v1/users/:user_id/page_views \
<https://canvas.instructure.com/doc/api/users.html#method.page_views.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.course.PageView`
"""
from canvasapi.page_view import PageView
return PaginatedList(
PageView,
self._requester,
'GET',
'users/{}/page_views'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_courses(self, **kwargs):
"""
Retrieve all courses this user is enrolled in.
:calls: `GET /api/v1/users/:user_id/courses \
<https://canvas.instructure.com/doc/api/courses.html#method.courses.user_index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.course.Course`
"""
from canvasapi.course import Course
return PaginatedList(
Course,
self._requester,
'GET',
'users/{}/courses'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_missing_submissions(self):
"""
Retrieve all past-due assignments for which the student does not
have a submission.
:calls: `GET /api/v1/users/:user_id/missing_submissions \
<https://canvas.instructure.com/doc/api/users.html#method.users.missing_submissions>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.assignment.Assignment`
"""
from canvasapi.assignment import Assignment
return PaginatedList(
Assignment,
self._requester,
'GET',
'users/{}/missing_submissions'.format(self.id)
)
def update_settings(self, **kwargs):
"""
Update this user's settings.
:calls: `PUT /api/v1/users/:id/settings \
<https://canvas.instructure.com/doc/api/users.html#method.users.settings>`_
:rtype: dict
"""
response = self._requester.request(
'PUT',
'users/{}/settings'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return response.json()
def get_color(self, asset_string):
"""
Return the custom colors that have been saved by this user for a given context.
The `asset_string` parameter should be in the format 'context_id', for example 'course_42'.
:calls: `GET /api/v1/users/:id/colors/:asset_string \
<https://canvas.instructure.com/doc/api/users.html#method.users.get_custom_color>`_
:param asset_string: The asset to retrieve the color from.
:type asset_string: str
:rtype: dict
"""
response = self._requester.request(
'GET',
'users/{}/colors/{}'.format(self.id, asset_string)
)
return response.json()
def get_colors(self):
"""
Return all custom colors that have been saved by this user.
:calls: `GET /api/v1/users/:id/colors \
<https://canvas.instructure.com/doc/api/users.html#method.users.get_custom_colors>`_
:rtype: dict
"""
response = self._requester.request(
'GET',
'users/{}/colors'.format(self.id)
)
return response.json()
def update_color(self, asset_string, hexcode):
"""
Update a custom color for this user for a given context.
This allows colors for the calendar and elsewhere to be customized on a user basis.
The `asset_string` parameter should be in the format 'context_id', for example 'course_42'.
The `hexcode` parameter need not include the '#'.
:calls: `PUT /api/v1/users/:id/colors/:asset_string \
<https://canvas.instructure.com/doc/api/users.html#method.users.set_custom_color>`_
:param asset_string: The asset to modify the color for.
:type asset_string: str
:param hexcode: The hexcode of the color to use.
:type hexcode: str
:rtype: dict
"""
response = self._requester.request(
'PUT',
'users/{}/colors/{}'.format(self.id, asset_string),
hexcode=hexcode
)
return response.json()
def edit(self, **kwargs):
"""
Modify this user's information.
:calls: `PUT /api/v1/users/:id \
<https://canvas.instructure.com/doc/api/users.html#method.users.update>`_
:rtype: :class:`canvasapi.user.User`
"""
response = self._requester.request(
'PUT',
'users/{}'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
super(User, self).set_attributes(response.json())
return self
def merge_into(self, destination_user):
"""
Merge this user into another user.
:calls: `PUT /api/v1/users/:id/merge_into/:destination_user_id \
<https://canvas.instructure.com/doc/api/users.html#method.users.merge_into>`_
:param destination_user: The object or ID of the user to merge into.
:type destination_user: :class:`canvasapi.user.User` or int
:rtype: :class:`canvasapi.user.User`
"""
dest_user_id = obj_or_id(destination_user, 'destination_user', (User, ))
response = self._requester.request(
'PUT',
'users/{}/merge_into/{}'.format(self.id, dest_user_id),
)
super(User, self).set_attributes(response.json())
return self
def get_avatars(self):
"""
Retrieve the possible user avatar options that can be set with the user update endpoint.
:calls: `GET /api/v1/users/:user_id/avatars \
<https://canvas.instructure.com/doc/api/users.html#method.profile.profile_pics>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.avatar.Avatar`
"""
from canvasapi.avatar import Avatar
return PaginatedList(
Avatar,
self._requester,
'GET',
'users/{}/avatars'.format(self.id)
)
def get_assignments(self, course, **kwargs):
"""
Return the list of assignments for this user if the current
user (the API key owner) has rights to view. See List assignments for valid arguments.
:calls: `GET /api/v1/users/:user_id/courses/:course_id/assignments \
<https://canvas.instructure.com/doc/api/assignments.html#method.assignments_api.user_index>`_
:param course: The object or ID of the course to retrieve.
:type course: :class:`canvasapi.course.Course` or int
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.assignment.Assignment`
"""
from canvasapi.assignment import Assignment
from canvasapi.course import Course
course_id = obj_or_id(course, "course", (Course,))
return PaginatedList(
Assignment,
self._requester,
'GET',
'users/{}/courses/{}/assignments'.format(self.id, course_id),
_kwargs=combine_kwargs(**kwargs)
)
def get_enrollments(self, **kwargs):
"""
List all of the enrollments for this user.
:calls: `GET /api/v1/users/:user_id/enrollments \
<https://canvas.instructure.com/doc/api/enrollments.html#method.enrollments_api.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.enrollment.Enrollment`
"""
from canvasapi.enrollment import Enrollment
return PaginatedList(
Enrollment,
self._requester,
'GET',
'users/{}/enrollments'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def upload(self, file, **kwargs):
"""
Upload a file for a user.
NOTE: You *must* have authenticated with this user's API key to
upload on their behalf no matter what permissions the issuer of the
request has.
:calls: `POST /api/v1/users/:user_id/files \
<https://canvas.instructure.com/doc/api/users.html#method.users.create_file>`_
:param file: The file or path of the file to upload.
:type file: file or str
:returns: True if the file uploaded successfully, False otherwise, \
and the JSON response from the API.
:rtype: tuple
"""
return Uploader(
self._requester,
'users/{}/files'.format(self.id),
file,
**kwargs
).start()
def list_calendar_events_for_user(self, **kwargs):
"""
List calendar events that the current user can view or manage.
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_calendar_events_for_user` instead.
:calls: `GET /api/v1/users/:user_id/calendar_events \
<https://canvas.instructure.com/doc/api/calendar_events.html#method.calendar_events_api.user_index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.calendar_event.CalendarEvent`
"""
warnings.warn(
"`list_calendar_events_for_user`"
" is being deprecated and will be removed in a future version."
" Use `get_calendar_events_for_user` instead",
DeprecationWarning
)
return self.get_calendar_events_for_user(**kwargs)
def get_calendar_events_for_user(self, **kwargs):
"""
List calendar events that the current user can view or manage.
:calls: `GET /api/v1/users/:user_id/calendar_events \
<https://canvas.instructure.com/doc/api/calendar_events.html#method.calendar_events_api.user_index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.calendar_event.CalendarEvent`
"""
return PaginatedList(
CalendarEvent,
self._requester,
'GET',
'users/{}/calendar_events'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def list_communication_channels(self, **kwargs):
"""
List communication channels for the specified user, sorted by
position.
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_communication_channels` instead.
:calls: `GET /api/v1/users/:user_id/communication_channels \
<https://canvas.instructure.com/doc/api/communication_channels.html#method.communication_channels.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.communication_channel.CommunicationChannel`
"""
warnings.warn(
"`list_communication_channels`"
" is being deprecated and will be removed in a future version."
" Use `get_communication_channels` instead",
DeprecationWarning
)
return self.get_communication_channels(**kwargs)
def get_communication_channels(self, **kwargs):
"""
List communication channels for the specified user, sorted by
position.
:calls: `GET /api/v1/users/:user_id/communication_channels \
<https://canvas.instructure.com/doc/api/communication_channels.html#method.communication_channels.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.communication_channel.CommunicationChannel`
"""
return PaginatedList(
CommunicationChannel,
self._requester,
'GET',
'users/{}/communication_channels'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def create_communication_channel(self, **kwargs):
"""
Create a communication channel for this user
:calls: `POST /api/v1/users/:user_id/communication_channels \
<https://canvas.instructure.com/doc/api/communication_channels.html#method.communication_channels.create>`_
:rtype: :class:`canvasapi.communication_channel.CommunicationChannel`
"""
response = self._requester.request(
'POST',
'users/{}/communication_channels'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return CommunicationChannel(self._requester, response.json())
def list_files(self, **kwargs):
"""
Returns the paginated list of files for the user.
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_files` instead.
:calls: `GET /api/v1/users/:user_id/files \
<https://canvas.instructure.com/doc/api/files.html#method.files.api_index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.file.File`
"""
warnings.warn(
"`list_files` is being deprecated and will be removed in a future "
"version. Use `get_files` instead",
DeprecationWarning
)
return self.get_files(**kwargs)
def get_files(self, **kwargs):
"""
Returns the paginated list of files for the user.
:calls: `GET /api/v1/users/:user_id/files \
<https://canvas.instructure.com/doc/api/files.html#method.files.api_index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.file.File`
"""
from canvasapi.file import File
return PaginatedList(
File,
self._requester,
'GET',
'users/{}/files'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_file(self, file, **kwargs):
"""
Return the standard attachment json object for a file.
:calls: `GET /api/v1/users/:user_id/files/:id \
<https://canvas.instructure.com/doc/api/files.html#method.files.api_show>`_
:param file: The object or ID of the file to retrieve.
:type file: :class:`canvasapi.file.File` or int
:rtype: :class:`canvasapi.file.File`
"""
from canvasapi.file import File
file_id = obj_or_id(file, "file", (File,))
response = self._requester.request(
'GET',
'users/{}/files/{}'.format(self.id, file_id),
_kwargs=combine_kwargs(**kwargs)
)
return File(self._requester, response.json())
def get_folder(self, folder):
"""
Returns the details for a user's folder
:calls: `GET /api/v1/users/:user_id/folders/:id \
<https://canvas.instructure.com/doc/api/files.html#method.folders.show>`_
:param folder: The object or ID of the folder to retrieve.
:type folder: :class:`canvasapi.folder.Folder` or int
:rtype: :class:`canvasapi.folder.Folder`
"""
from canvasapi.folder import Folder
folder_id = obj_or_id(folder, "folder", (Folder,))
response = self._requester.request(
'GET',
'users/{}/folders/{}'.format(self.id, folder_id)
)
return Folder(self._requester, response.json())
def list_folders(self, **kwargs):
"""
Returns the paginated list of all folders for the given user. This will be returned as a
flat list containing all subfolders as well.
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_folders` instead.
:calls: `GET /api/v1/users/:user_id/folders \
<https://canvas.instructure.com/doc/api/files.html#method.folders.list_all_folders>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.folder.Folder`
"""
warnings.warn(
"`list_folders` is being deprecated and will be removed in a "
"future version. Use `get_folders` instead.",
DeprecationWarning
)
return self.get_folders(**kwargs)
def get_folders(self, **kwargs):
"""
Returns the paginated list of all folders for the given user. This will be returned as a
flat list containing all subfolders as well.
:calls: `GET /api/v1/users/:user_id/folders \
<https://canvas.instructure.com/doc/api/files.html#method.folders.list_all_folders>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.folder.Folder`
"""
return PaginatedList(
Folder,
self._requester,
'GET',
'users/{}/folders'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def create_folder(self, name, **kwargs):
"""
Creates a folder in this user.
:calls: `POST /api/v1/users/:user_id/folders \
<https://canvas.instructure.com/doc/api/files.html#method.folders.create>`_
:param name: The name of the folder.
:type name: str
:rtype: :class:`canvasapi.folder.Folder`
"""
response = self._requester.request(
'POST',
'users/{}/folders'.format(self.id),
name=name,
_kwargs=combine_kwargs(**kwargs)
)
return Folder(self._requester, response.json())
def list_user_logins(self, **kwargs):
"""
Given a user ID, return that user's logins for the given account.
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_user_logins` instead.
:calls: `GET /api/v1/users/:user_id/logins \
<https://canvas.instructure.com/doc/api/logins.html#method.pseudonyms.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.login.Login`
"""
warnings.warn(
"`list_user_logins` is being deprecated and will be removed in a future version."
" Use `get_user_logins` instead",
DeprecationWarning
)
return self. get_user_logins(**kwargs)
def get_user_logins(self, **kwargs):
"""
Given a user ID, return that user's logins for the given account.
:calls: `GET /api/v1/users/:user_id/logins \
<https://canvas.instructure.com/doc/api/logins.html#method.pseudonyms.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.login.Login`
"""
from canvasapi.login import Login
return PaginatedList(
Login,
self._requester,
'GET',
'users/{}/logins'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def list_observees(self, **kwargs):
"""
List the users that the given user is observing
.. warning::
.. deprecated:: 0.10.0
Use :func:`canvasapi.user.User.get_observees` instead.
:calls: `GET /api/v1/users/:user_id/observees \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.user.User`
"""
warnings.warn(
"`list_observees` is being deprecated and will be removed in a "
"future version. Use `get_observees` instead",
DeprecationWarning
)
return self.get_observees(**kwargs)
def get_observees(self, **kwargs):
"""
List the users that the given user is observing
:calls: `GET /api/v1/users/:user_id/observees \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.user.User`
"""
return PaginatedList(
User,
self._requester,
'GET',
'users/{}/observees'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def add_observee_with_credentials(self, **kwargs):
"""
Register the given user to observe another user, given the observee's credentials.
:calls: `POST /api/v1/users/:user_id/observees \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.create>`_
:rtype: :class:`canvasapi.user.User`
"""
response = self._requester.request(
'POST',
'users/{}/observees'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return User(self._requester, response.json())
def show_observee(self, observee_id):
"""
Gets information about an observed user.
:calls: `GET /api/v1/users/:user_id/observees/:observee_id \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.show>`_
:param observee_id: The login id for the user to observe.
:type observee_id: int
:rtype: :class: `canvasapi.user.User`
"""
response = self._requester.request(
'GET',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def add_observee(self, observee_id):
"""
Registers a user as being observed by the given user.
:calls: `PUT /api/v1/users/:user_id/observees/:observee_id \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.update>`_
:param observee_id: The login id for the user to observe.
:type observee_id: int
:rtype: :class: `canvasapi.user.User`
"""
response = self._requester.request(
'PUT',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def remove_observee(self, observee_id):
"""
Unregisters a user as being observed by the given user.
:calls: `DELETE /api/v1/users/:user_id/observees/:observee_id \
<https://canvas.instructure.com/doc/api/user_observees.html#method.user_observees.destroy>`_
:param observee_id: The login id for the user to observe.
:type observee_id: int
:rtype: :class: `canvasapi.user.User`
"""
response = self._requester.request(
'DELETE',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def create_content_migration(self, migration_type, **kwargs):
"""
Create a content migration.
:calls: `POST /api/v1/users/:user_id/content_migrations \
<https://canvas.instructure.com/doc/api/content_migrations.html#method.content_migrations.create>`_
:param migration_type: The migrator type to use in this migration
:type migration_type: str or :class:`canvasapi.content_migration.Migrator`
:rtype: :class:`canvasapi.content_migration.ContentMigration`
"""
from canvasapi.content_migration import ContentMigration, Migrator
if isinstance(migration_type, Migrator):
kwargs['migration_type'] = migration_type.type
elif isinstance(migration_type, string_types):
kwargs['migration_type'] = migration_type
else:
raise TypeError('Parameter migration_type must be of type Migrator or str')
response = self._requester.request(
'POST',
'users/{}/content_migrations'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
response_json = response.json()
response_json.update({'user_id': self.id})
return ContentMigration(self._requester, response_json)
def get_content_migration(self, content_migration, **kwargs):
"""
Retrive a content migration by its ID
:calls: `GET /api/v1/users/:user_id/content_migrations/:id \
<https://canvas.instructure.com/doc/api/content_migrations.html#method.content_migrations.show>`_
:param content_migration: The object or ID of the content migration to retrieve.
:type content_migration: int, str or :class:`canvasapi.content_migration.ContentMigration`
:rtype: :class:`canvasapi.content_migration.ContentMigration`
"""
from canvasapi.content_migration import ContentMigration
migration_id = obj_or_id(content_migration, "content_migration", (ContentMigration,))
response = self._requester.request(
'GET',
'users/{}/content_migrations/{}'.format(self.id, migration_id),
_kwargs=combine_kwargs(**kwargs)
)
response_json = response.json()
response_json.update({'user_id': self.id})
return ContentMigration(self._requester, response_json)
def get_content_migrations(self, **kwargs):
"""
List content migrations that the current account can view or manage.
:calls: `GET /api/v1/users/:user_id/content_migrations/ \
<https://canvas.instructure.com/doc/api/content_migrations.html#method.content_migrations.index>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.content_migration.ContentMigration`
"""
from canvasapi.content_migration import ContentMigration
return PaginatedList(
ContentMigration,
self._requester,
'GET',
'users/{}/content_migrations'.format(self.id),
{'user_id': self.id},
_kwargs=combine_kwargs(**kwargs)
)
def get_migration_systems(self, **kwargs):
"""
Return a list of migration systems.
:calls: `GET /api/v1/users/:user_id/content_migrations/migrators \
<https://canvas.instructure.com/doc/api/content_migrations.html#method.content_migrations.available_migrators>`_
:rtype: :class:`canvasapi.paginated_list.PaginatedList` of
:class:`canvasapi.content_migration.Migrator`
"""
from canvasapi.content_migration import Migrator
return PaginatedList(
Migrator,
self._requester,
'GET',
'users/{}/content_migrations/migrators'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
@python_2_unicode_compatible
class UserDisplay(CanvasObject):
def __str__(self):
return "{}".format(self.display_name)
| 34.595297 | 120 | 0.615676 | from __future__ import absolute_import, division, print_function, unicode_literals
from six import python_2_unicode_compatible, string_types
import warnings
from canvasapi.calendar_event import CalendarEvent
from canvasapi.canvas_object import CanvasObject
from canvasapi.communication_channel import CommunicationChannel
from canvasapi.folder import Folder
from canvasapi.paginated_list import PaginatedList
from canvasapi.upload import Uploader
from canvasapi.util import combine_kwargs, obj_or_id
@python_2_unicode_compatible
class User(CanvasObject):
def __str__(self):
return "{} ({})".format(self.name, self.id)
def get_profile(self, **kwargs):
response = self._requester.request(
'GET',
'users/{}/profile'.format(self.id)
)
return response.json()
def get_page_views(self, **kwargs):
from canvasapi.page_view import PageView
return PaginatedList(
PageView,
self._requester,
'GET',
'users/{}/page_views'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_courses(self, **kwargs):
from canvasapi.course import Course
return PaginatedList(
Course,
self._requester,
'GET',
'users/{}/courses'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_missing_submissions(self):
from canvasapi.assignment import Assignment
return PaginatedList(
Assignment,
self._requester,
'GET',
'users/{}/missing_submissions'.format(self.id)
)
def update_settings(self, **kwargs):
response = self._requester.request(
'PUT',
'users/{}/settings'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return response.json()
def get_color(self, asset_string):
response = self._requester.request(
'GET',
'users/{}/colors/{}'.format(self.id, asset_string)
)
return response.json()
def get_colors(self):
response = self._requester.request(
'GET',
'users/{}/colors'.format(self.id)
)
return response.json()
def update_color(self, asset_string, hexcode):
response = self._requester.request(
'PUT',
'users/{}/colors/{}'.format(self.id, asset_string),
hexcode=hexcode
)
return response.json()
def edit(self, **kwargs):
response = self._requester.request(
'PUT',
'users/{}'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
super(User, self).set_attributes(response.json())
return self
def merge_into(self, destination_user):
dest_user_id = obj_or_id(destination_user, 'destination_user', (User, ))
response = self._requester.request(
'PUT',
'users/{}/merge_into/{}'.format(self.id, dest_user_id),
)
super(User, self).set_attributes(response.json())
return self
def get_avatars(self):
from canvasapi.avatar import Avatar
return PaginatedList(
Avatar,
self._requester,
'GET',
'users/{}/avatars'.format(self.id)
)
def get_assignments(self, course, **kwargs):
from canvasapi.assignment import Assignment
from canvasapi.course import Course
course_id = obj_or_id(course, "course", (Course,))
return PaginatedList(
Assignment,
self._requester,
'GET',
'users/{}/courses/{}/assignments'.format(self.id, course_id),
_kwargs=combine_kwargs(**kwargs)
)
def get_enrollments(self, **kwargs):
from canvasapi.enrollment import Enrollment
return PaginatedList(
Enrollment,
self._requester,
'GET',
'users/{}/enrollments'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def upload(self, file, **kwargs):
return Uploader(
self._requester,
'users/{}/files'.format(self.id),
file,
**kwargs
).start()
def list_calendar_events_for_user(self, **kwargs):
warnings.warn(
"`list_calendar_events_for_user`"
" is being deprecated and will be removed in a future version."
" Use `get_calendar_events_for_user` instead",
DeprecationWarning
)
return self.get_calendar_events_for_user(**kwargs)
def get_calendar_events_for_user(self, **kwargs):
return PaginatedList(
CalendarEvent,
self._requester,
'GET',
'users/{}/calendar_events'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def list_communication_channels(self, **kwargs):
warnings.warn(
"`list_communication_channels`"
" is being deprecated and will be removed in a future version."
" Use `get_communication_channels` instead",
DeprecationWarning
)
return self.get_communication_channels(**kwargs)
def get_communication_channels(self, **kwargs):
return PaginatedList(
CommunicationChannel,
self._requester,
'GET',
'users/{}/communication_channels'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def create_communication_channel(self, **kwargs):
response = self._requester.request(
'POST',
'users/{}/communication_channels'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return CommunicationChannel(self._requester, response.json())
def list_files(self, **kwargs):
warnings.warn(
"`list_files` is being deprecated and will be removed in a future "
"version. Use `get_files` instead",
DeprecationWarning
)
return self.get_files(**kwargs)
def get_files(self, **kwargs):
from canvasapi.file import File
return PaginatedList(
File,
self._requester,
'GET',
'users/{}/files'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def get_file(self, file, **kwargs):
from canvasapi.file import File
file_id = obj_or_id(file, "file", (File,))
response = self._requester.request(
'GET',
'users/{}/files/{}'.format(self.id, file_id),
_kwargs=combine_kwargs(**kwargs)
)
return File(self._requester, response.json())
def get_folder(self, folder):
from canvasapi.folder import Folder
folder_id = obj_or_id(folder, "folder", (Folder,))
response = self._requester.request(
'GET',
'users/{}/folders/{}'.format(self.id, folder_id)
)
return Folder(self._requester, response.json())
def list_folders(self, **kwargs):
warnings.warn(
"`list_folders` is being deprecated and will be removed in a "
"future version. Use `get_folders` instead.",
DeprecationWarning
)
return self.get_folders(**kwargs)
def get_folders(self, **kwargs):
return PaginatedList(
Folder,
self._requester,
'GET',
'users/{}/folders'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def create_folder(self, name, **kwargs):
response = self._requester.request(
'POST',
'users/{}/folders'.format(self.id),
name=name,
_kwargs=combine_kwargs(**kwargs)
)
return Folder(self._requester, response.json())
def list_user_logins(self, **kwargs):
warnings.warn(
"`list_user_logins` is being deprecated and will be removed in a future version."
" Use `get_user_logins` instead",
DeprecationWarning
)
return self. get_user_logins(**kwargs)
def get_user_logins(self, **kwargs):
from canvasapi.login import Login
return PaginatedList(
Login,
self._requester,
'GET',
'users/{}/logins'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def list_observees(self, **kwargs):
warnings.warn(
"`list_observees` is being deprecated and will be removed in a "
"future version. Use `get_observees` instead",
DeprecationWarning
)
return self.get_observees(**kwargs)
def get_observees(self, **kwargs):
return PaginatedList(
User,
self._requester,
'GET',
'users/{}/observees'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
def add_observee_with_credentials(self, **kwargs):
response = self._requester.request(
'POST',
'users/{}/observees'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
return User(self._requester, response.json())
def show_observee(self, observee_id):
response = self._requester.request(
'GET',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def add_observee(self, observee_id):
response = self._requester.request(
'PUT',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def remove_observee(self, observee_id):
response = self._requester.request(
'DELETE',
'users/{}/observees/{}'.format(self.id, observee_id)
)
return User(self._requester, response.json())
def create_content_migration(self, migration_type, **kwargs):
from canvasapi.content_migration import ContentMigration, Migrator
if isinstance(migration_type, Migrator):
kwargs['migration_type'] = migration_type.type
elif isinstance(migration_type, string_types):
kwargs['migration_type'] = migration_type
else:
raise TypeError('Parameter migration_type must be of type Migrator or str')
response = self._requester.request(
'POST',
'users/{}/content_migrations'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
response_json = response.json()
response_json.update({'user_id': self.id})
return ContentMigration(self._requester, response_json)
def get_content_migration(self, content_migration, **kwargs):
from canvasapi.content_migration import ContentMigration
migration_id = obj_or_id(content_migration, "content_migration", (ContentMigration,))
response = self._requester.request(
'GET',
'users/{}/content_migrations/{}'.format(self.id, migration_id),
_kwargs=combine_kwargs(**kwargs)
)
response_json = response.json()
response_json.update({'user_id': self.id})
return ContentMigration(self._requester, response_json)
def get_content_migrations(self, **kwargs):
from canvasapi.content_migration import ContentMigration
return PaginatedList(
ContentMigration,
self._requester,
'GET',
'users/{}/content_migrations'.format(self.id),
{'user_id': self.id},
_kwargs=combine_kwargs(**kwargs)
)
def get_migration_systems(self, **kwargs):
from canvasapi.content_migration import Migrator
return PaginatedList(
Migrator,
self._requester,
'GET',
'users/{}/content_migrations/migrators'.format(self.id),
_kwargs=combine_kwargs(**kwargs)
)
@python_2_unicode_compatible
class UserDisplay(CanvasObject):
def __str__(self):
return "{}".format(self.display_name)
| true | true |
f71b604290c4284cdb29c7ba708ed37267f359af | 3,054 | py | Python | copyright_updater/logger_factory.py | swasun/copyright-updater | 750ced32ee9738e4d65189bc0e917e0581a59668 | [
"MIT"
] | null | null | null | copyright_updater/logger_factory.py | swasun/copyright-updater | 750ced32ee9738e4d65189bc0e917e0581a59668 | [
"MIT"
] | null | null | null | copyright_updater/logger_factory.py | swasun/copyright-updater | 750ced32ee9738e4d65189bc0e917e0581a59668 | [
"MIT"
] | null | null | null | #####################################################################################
# MIT License #
# #
# Copyright (C) 2018 Charly Lamothe #
# #
# This file is part of copyright-updater. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
import logging
from logging.handlers import RotatingFileHandler
import os
import errno
class LoggerFactory:
@staticmethod
def create(path, module_name):
# Create logger
logger = logging.getLogger(module_name)
logger.setLevel(logging.DEBUG)
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
# Create file handler
fh = RotatingFileHandler(path + os.sep + module_name + '.log', maxBytes=1000000, backupCount=5)
fh.setLevel(logging.DEBUG)
# Create formatter
formatter = logging.Formatter('%(asctime)s - %(filename)s:%(lineno)s - %(name)s - %(levelname)s - %(message)s')
# Add formatter to handler
fh.setFormatter(formatter)
# Add fh to logger
logger.addHandler(fh)
return logger | 51.762712 | 119 | 0.47053 | true | true | |
f71b60c66400900beffb67846939db70bfb9249f | 4,381 | py | Python | avax/webdav/tests/benchmarks.py | eavatar/avax.webdav | e4d4915fd5af8878ba88e3641e624e64033ece96 | [
"MIT"
] | null | null | null | avax/webdav/tests/benchmarks.py | eavatar/avax.webdav | e4d4915fd5af8878ba88e3641e624e64033ece96 | [
"MIT"
] | null | null | null | avax/webdav/tests/benchmarks.py | eavatar/avax.webdav | e4d4915fd5af8878ba88e3641e624e64033ece96 | [
"MIT"
] | null | null | null | # -*- coding: iso-8859-1 -*-
# (c) 2009-2014 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Benchmark suite for WsgiDAV.
This test suite uses davclient to generate WebDAV requests.
A first collection of ideas
===========================
- The result is printable HTML, copy/pastable
- It also contains date, environment info (Hardware, package versions, ...)
- The suite can be run stand-alone against a running WsgiDAV server, just like
litmus.
- It uses `davclient` and generates an HTML file.
- There should be detailed results as well as a few summarizing numbers:
('Total time', 'Byte reads per second', 'Byte write per second', or something
like this), so one can compare benchmarks at a glance.
- Optional parameters allow to run only a single test
- Parameter allows to pass configuration infos that are dumped with the result:
benchEnviron = {
"comment": "Test with caching enabled",
"server_os": "Ubuntu 9.01",
"server_cpu": "Intel 3GHz",
"server_ram": "2GB",
"wsgidav_version": "0.4.b1"
"network_bandwidth": "100MBit",
>> these can be automatically set?:
"client_os": "Windows XP",
"client_cpu": "AMD 5000",
"date": now()
}
- Allow to print profiling info (from WsgiDAV server and from becnhmark client!)
- The result file could also contain the results of test suites ('PASSED'),
so we could use it as documentation for tests on different platforms/setups.
Questions
=========
- is lxml really faster?
- compare this to mod_dav's performance
Test cases
==========
- PUT 1 x 10 MB
- PUT 100 x 1 kB
- GET 1 x 10 MB
- GET 100 x 1 kB
- 100 x PROPFIND depth 0
- 1 x PROPFIND depth infinity
- COPY: big file, many small files, big tree
- MOVE: big file, many small files, big tree
- DELETE: big file, many small files, big tree
- LOCK
- UNLOCK
- Check if locked
- PROPPATCH
- PROPFIND: depth 0, many small files
depth infinity
- run litmus in a timed script
- Simulate typical Windows Client request sequences:
- dir browsing
- file reading
- file editing
- http://groups.google.com/group/paste-users/t/b2afc88a86caade1?hl=en
use httperf
http://www.hpl.hp.com/research/linux/httperf/httperf-man-0.9.txt
and openwebload
http://openwebload.sourceforge.net/index.html
- makeTree(roofolderName="/bench", folderCount=10, subfolderCount=10, fileCount=10, fileSize=1024)
Big tree with 100 folders and 1000 files
bench/
folder1/
..
folder10/
subfolder10-1/
..
subfolder10-10/
file10-10-1.txt -> 1k
"""
import logging
_benchmarks = [#"proppatch_many",
#"proppatch_big",
#"proppatch_deep",
"test_scripted",
]
def _real_run_bench(bench, opts):
if bench == "*":
for bench in _benchmarks:
run_bench(bench, opts)
return
assert bench in _benchmarks
if bench == "test_scripted":
from avax.webdav.tests import test_scripted
test_scripted.main()
else:
raise ValueError()
def run_bench(bench, opts):
profile_benchmarks = opts["profile_benchmarks"]
if bench in profile_benchmarks:
# http://docs.python.org/library/profile.html#module-cProfile
import cProfile, pstats, StringIO
prof = cProfile.Profile()
prof = prof.runctx("_real_run_bench(bench, opts)", globals(), locals())
stream = StringIO.StringIO()
stats = pstats.Stats(prof, stream=stream)
# stats.sort_stats("time") # Or cumulative
stats.sort_stats("cumulative") # Or time
stats.print_stats(80) # 80 = how many to print
# The rest is optional.
# stats.print_callees()
# stats.print_callers()
logging.warning("Profile data for '%s':\n%s" % (bench, stream.getvalue()))
else:
_real_run_bench(bench, opts)
def bench_all(opts):
run_bench("*", opts)
def main():
opts = {"num": 10,
"profile_benchmarks": ["*"],
}
bench_all(opts)
if __name__ == "__main__":
main()
| 31.070922 | 99 | 0.625428 |
import logging
_benchmarks = [
"test_scripted",
]
def _real_run_bench(bench, opts):
if bench == "*":
for bench in _benchmarks:
run_bench(bench, opts)
return
assert bench in _benchmarks
if bench == "test_scripted":
from avax.webdav.tests import test_scripted
test_scripted.main()
else:
raise ValueError()
def run_bench(bench, opts):
profile_benchmarks = opts["profile_benchmarks"]
if bench in profile_benchmarks:
Profile, pstats, StringIO
prof = cProfile.Profile()
prof = prof.runctx("_real_run_bench(bench, opts)", globals(), locals())
stream = StringIO.StringIO()
stats = pstats.Stats(prof, stream=stream)
ort_stats("cumulative")
stats.print_stats(80)
logging.warning("Profile data for '%s':\n%s" % (bench, stream.getvalue()))
else:
_real_run_bench(bench, opts)
def bench_all(opts):
run_bench("*", opts)
def main():
opts = {"num": 10,
"profile_benchmarks": ["*"],
}
bench_all(opts)
if __name__ == "__main__":
main()
| true | true |
f71b6170ec1ea5471b4314a0e09ef42e3e38daff | 1,001 | py | Python | project/urls.py | tgavankar/PlaydohSlideSync | 5718d661e78d361a0dcda908b63c736bab886bb4 | [
"BSD-3-Clause"
] | null | null | null | project/urls.py | tgavankar/PlaydohSlideSync | 5718d661e78d361a0dcda908b63c736bab886bb4 | [
"BSD-3-Clause"
] | null | null | null | project/urls.py | tgavankar/PlaydohSlideSync | 5718d661e78d361a0dcda908b63c736bab886bb4 | [
"BSD-3-Clause"
] | null | null | null | from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from .examples import urls
from funfactory.monkeypatches import patch
patch()
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
(r'', include(urls)),
# Generate a robots.txt
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow' ,
mimetype="text/plain"
)
)
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
## In DEBUG mode, serve media files through Django.
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
| 27.805556 | 88 | 0.685315 | from django.conf import settings
from django.conf.urls.defaults import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from .examples import urls
from funfactory.monkeypatches import patch
patch()
urlpatterns = patterns('',
(r'', include(urls)),
(r'^robots\.txt$',
lambda r: HttpResponse(
"User-agent: *\n%s: /" % 'Allow' if settings.ENGAGE_ROBOTS else 'Disallow' ,
mimetype="text/plain"
)
)
)
_urlpatterns()
| true | true |
f71b63178ebdc11ae83bec5b2f2f47ff8b336dd6 | 1,011 | py | Python | src/sellers/migrations/0004_alter_seller_logo_url.py | evis-market/web-interface-backend | f8930ff1c009ad18e522ab29680b4bcd50a6020e | [
"MIT"
] | 2 | 2021-08-30T22:58:32.000Z | 2021-12-12T10:47:52.000Z | src/sellers/migrations/0004_alter_seller_logo_url.py | evis-market/web-interface-backend | f8930ff1c009ad18e522ab29680b4bcd50a6020e | [
"MIT"
] | null | null | null | src/sellers/migrations/0004_alter_seller_logo_url.py | evis-market/web-interface-backend | f8930ff1c009ad18e522ab29680b4bcd50a6020e | [
"MIT"
] | 1 | 2021-08-22T19:12:44.000Z | 2021-08-22T19:12:44.000Z | # Generated by Django 3.2.7 on 2021-10-27 07:05
import django.core.validators
from django.db import migrations, models
from django.db.transaction import atomic
from sellers.models import Seller
"""
Note: Migrations includes data migration that set to null logo_url field for currently existing records.
Logo_url field would be change to FileField type in this migration
"""
def clear_seller_logo_url(apps, schema_editor):
    """Null out every seller's logo_url before the field type changes.

    Saves each row individually (inside one transaction) so model-level
    save behavior still runs for every record.
    """
    with atomic():
        for record in Seller.objects.all():
            record.logo_url = None
            record.save()
class Migration(migrations.Migration):

    # Runs after the previous sellers schema migration.
    dependencies = [
        ('sellers', '0003_auto_20211012_1929'),
    ]

    operations = [
        # Data migration: clear existing logo URLs first.  Reverse is a
        # no-op because the old string values cannot be reconstructed.
        migrations.RunPython(clear_seller_logo_url, migrations.RunPython.noop),
        # Schema migration: logo_url becomes an optional FileField.
        migrations.AlterField(
            model_name='seller',
            name='logo_url',
            field=models.FileField(blank=True, help_text='Logo', max_length=1000, null=True, upload_to='', verbose_name='Logo'),
        ),
    ]
| 26.605263 | 128 | 0.68546 |
import django.core.validators
from django.db import migrations, models
from django.db.transaction import atomic
from sellers.models import Seller
def clear_seller_logo_url(apps, schema_editor):
with atomic():
for seller in Seller.objects.all():
seller.logo_url = None
seller.save()
class Migration(migrations.Migration):
dependencies = [
('sellers', '0003_auto_20211012_1929'),
]
operations = [
migrations.RunPython(clear_seller_logo_url, migrations.RunPython.noop),
migrations.AlterField(
model_name='seller',
name='logo_url',
field=models.FileField(blank=True, help_text='Logo', max_length=1000, null=True, upload_to='', verbose_name='Logo'),
),
]
| true | true |
f71b632bb314545ed7732ce47684f88d027b19e7 | 78 | py | Python | xpring/proto/__init__.py | mvadari/xpring-py | b837420127d1c1e5051ed305ed4f19fe9910a4f6 | [
"0BSD"
] | 6 | 2019-12-11T00:54:56.000Z | 2021-03-11T19:44:44.000Z | xpring/proto/__init__.py | mvadari/xpring-py | b837420127d1c1e5051ed305ed4f19fe9910a4f6 | [
"0BSD"
] | null | null | null | xpring/proto/__init__.py | mvadari/xpring-py | b837420127d1c1e5051ed305ed4f19fe9910a4f6 | [
"0BSD"
] | 9 | 2020-02-28T18:40:46.000Z | 2022-02-28T23:01:09.000Z | # The rest of this package, but not this __init__.py, is generated by protoc.
| 39 | 77 | 0.75641 | true | true | |
f71b6354cd7ddb3ba58cc906feac3f6233ca894c | 537 | py | Python | ImgVidProcessing/Exercise1/solution1.py | SystemNinja/MyPythonPrograms | 6bdebb5017994c3431aea769319f702075fff9b9 | [
"MIT"
] | null | null | null | ImgVidProcessing/Exercise1/solution1.py | SystemNinja/MyPythonPrograms | 6bdebb5017994c3431aea769319f702075fff9b9 | [
"MIT"
] | null | null | null | ImgVidProcessing/Exercise1/solution1.py | SystemNinja/MyPythonPrograms | 6bdebb5017994c3431aea769319f702075fff9b9 | [
"MIT"
] | null | null | null | """
This solution implements glob library in order to 'automatize' task.
Instead of manually processing each image
Reference: https://pymotw.com/2/glob/
"""
import cv2
import glob2
images=glob2.glob("*.jpg")
#images=glob2.glob("Exercise1\*.jpg")
for image in images:
img=cv2.imread(image, 0)
re=cv2.resize(img,(100,100))
cv2.imshow("Resized image", re)
cv2.waitKey(500)
cv2.destroyAllWindows()
cv2.imwrite(image+"_resized.jpg", re)
#cv2.imwrite("Exercise1\\"+image+"_resized.jpg", re) | 25.571429 | 69 | 0.670391 |
import cv2
import glob2
images=glob2.glob("*.jpg")
for image in images:
img=cv2.imread(image, 0)
re=cv2.resize(img,(100,100))
cv2.imshow("Resized image", re)
cv2.waitKey(500)
cv2.destroyAllWindows()
cv2.imwrite(image+"_resized.jpg", re)
| true | true |
f71b63892ebbad403e4916b665b53156a244c0fa | 87 | py | Python | Python/100Excersises/.history/51 to 75/69/69_20201119121845.py | magusikrak/NAMI-TERM-I-GroupWork | f0a9a5f219ccbec024eb5316361db3fca46e171c | [
"MIT"
] | null | null | null | Python/100Excersises/.history/51 to 75/69/69_20201119121845.py | magusikrak/NAMI-TERM-I-GroupWork | f0a9a5f219ccbec024eb5316361db3fca46e171c | [
"MIT"
] | 1 | 2021-07-24T03:18:30.000Z | 2021-07-24T12:45:07.000Z | Python/100Excersises/.history/51 to 75/69/69_20201119121845.py | magusikrak/NAMI-TERM-I-GroupWork | f0a9a5f219ccbec024eb5316361db3fca46e171c | [
"MIT"
] | null | null | null | import requests
sugam = requests.get("http://www.pythonhow.com")
print(rp.text[:100])
| 17.4 | 48 | 0.724138 | import requests
sugam = requests.get("http://www.pythonhow.com")
print(rp.text[:100])
| true | true |
f71b64529d9237153dd6f12a58b0280dfcb69bfe | 454 | py | Python | .history/ClassFiles/WorkingWithExternalFiles/FileHandling_20210107190119.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | .history/ClassFiles/WorkingWithExternalFiles/FileHandling_20210107190119.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | .history/ClassFiles/WorkingWithExternalFiles/FileHandling_20210107190119.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | """ Opening and Reading Files
Syntax to open file.
f = open("Myfile.txt) # assigned to the variable f.
f = open("Myfile.txt","rt") # if in the same directory.
f = open("c:\\MyFolders\Myfile.txt") # if hot in the same directory.
"""
f = open("Quotes.txt")
# print(f.readable())
# print(f.read())
# f.close()
# print(f.readable())
print(f.read(11))
print(f.readlines())
for quote in f:
print(quote)
print("HI")
| 15.655172 | 69 | 0.594714 | f = open("Quotes.txt")
print(f.read(11))
print(f.readlines())
for quote in f:
print(quote)
print("HI")
| true | true |
f71b64ca7afadced29875edb91d99ac16e7d6ba0 | 11,148 | py | Python | src/main/app-resources/notebook/libexec/helpers.py | ec-better/ewf-ethz-03-01-01 | 5ca616e5c25bbba29013a7de248af4b69757921b | [
"Apache-2.0"
] | 1 | 2021-09-23T02:20:11.000Z | 2021-09-23T02:20:11.000Z | src/main/app-resources/notebook/libexec/helpers.py | ec-better/ewf-ethz-03-01-01 | 5ca616e5c25bbba29013a7de248af4b69757921b | [
"Apache-2.0"
] | null | null | null | src/main/app-resources/notebook/libexec/helpers.py | ec-better/ewf-ethz-03-01-01 | 5ca616e5c25bbba29013a7de248af4b69757921b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import numpy as np
import cv2
import re
from shapely import wkt
from shapely.geometry import box, Polygon
import pandas as pd
import geopandas as gpd
from osgeo import gdal, gdalnumeric, osr, ogr
def ensure_dir(file_path):
    """Create the directory portion of ``file_path`` if it does not exist.

    Fixes vs. the original:
    * ``exist_ok=True`` avoids the check-then-create race of the old
      ``os.path.exists`` guard (another process could create the directory
      between the check and ``makedirs``).
    * A bare filename has an empty dirname; the old code then called
      ``os.makedirs("")`` which raises.  Skip creation in that case.
    """
    directory = os.path.dirname(file_path)
    if directory:
        os.makedirs(directory, exist_ok=True)
def getResolution(demFolder, return_full_paths=False):
    """List DEM rasters found directly in ``demFolder``.

    Files are expected to be named ``DEM_<resolution>.tif``.  Returns the
    full paths (reverse string-sorted) when ``return_full_paths`` is True,
    otherwise the integer resolutions parsed from the file names (in
    directory-listing order).
    """
    entries = [name for name in os.listdir(demFolder)
               if os.path.isfile(os.path.join(demFolder, name))]
    dem_names = [name for name in entries
                 if name.startswith('DEM_') and name.endswith('.tif')]
    if return_full_paths:
        paths = [demFolder + '/' + name for name in dem_names]
        paths.sort(reverse=True)
        return paths
    return [int(name[4:-4]) for name in dem_names]
def readGDAL2numpy(rasterPath, return_geoInformation=False):
    """Load a raster into a numpy array via GDAL.

    Float no-data cells are replaced with NaN.  When
    ``return_geoInformation`` is True, also returns the dataset's
    geotransform and projection WKT.
    """
    import sys  # Fix: ``sys`` was used below but never imported at module level.

    try:
        ds = gdal.Open(rasterPath)
    except RuntimeError:
        print('Unable to open input file')
        sys.exit(1)
    if ds is None:
        # Fix: without gdal.UseExceptions(), gdal.Open signals failure by
        # returning None; the original then crashed later with
        # AttributeError instead of reporting the open failure.
        print('Unable to open input file')
        sys.exit(1)

    data = gdalnumeric.LoadFile(rasterPath, False)
    noDataVal = ds.GetRasterBand(1).GetNoDataValue()
    try:
        if data.dtype in ['float16', 'float32', 'float64'] and noDataVal is not None:
            data[data == noDataVal] = np.nan
    except Exception:
        # Masking can fail (e.g. unexpected array layout); keep raw data.
        print("Issue in no data value")

    if not return_geoInformation:
        return data
    geoTransform = ds.GetGeoTransform()
    projection = ds.GetProjection()
    return data, geoTransform, projection
def writeNumpyArr2Geotiff(outputPath, data, geoTransform=None, projection=None,
                          GDAL_dtype=gdal.GDT_Byte, noDataValue=None):
    """Write a 2-D numpy array to a single-band GeoTIFF.

    NOTE(review): NaNs are replaced with ``noDataValue`` *in place*, so the
    caller's array is modified.
    """
    nscn, npix = data.shape
    # Cheap None check first; only scan for NaNs when a fill value exists.
    if noDataValue is not None and np.isnan(data).any():
        data[np.isnan(data)] = noDataValue

    ds_new = gdal.GetDriverByName('GTiff').Create(outputPath, npix, nscn, 1, GDAL_dtype)
    if geoTransform is not None:  # was ``!= None``
        ds_new.SetGeoTransform(geoTransform)
    if projection is not None:
        ds_new.SetProjection(projection)

    outBand = ds_new.GetRasterBand(1)
    outBand.WriteArray(data)
    if noDataValue is not None:
        ds_new.GetRasterBand(1).SetNoDataValue(noDataValue)

    # Flush and drop the references so GDAL finishes writing the file.
    ds_new.FlushCache()
    ds_new = None
    outBand = None
def writeNumpyArr2Saga(outputPath, data, geoTransform=None, projection=None,
                       GDAL_dtype=gdal.GDT_Byte, noDataValue=None):
    """Write a 2-D numpy array to a SAGA grid file.

    NOTE(review): ``geoTransform`` is accepted but never applied (the SAGA
    driver path only sets the projection) -- confirm whether that is
    intentional.  NaNs are replaced with ``noDataValue`` in place.
    """
    nscn, npix = data.shape
    if noDataValue is not None and np.isnan(data).any():
        data[np.isnan(data)] = noDataValue

    ds_new = gdal.GetDriverByName('SAGA').Create(outputPath, npix, nscn, 1, GDAL_dtype)
    outBand = ds_new.GetRasterBand(1)
    outBand.WriteArray(data)
    if noDataValue is not None:  # was ``!= None``
        ds_new.GetRasterBand(1).SetNoDataValue(noDataValue)
    if projection is not None:
        ds_new.SetProjection(projection)

    # Flush and drop the references so GDAL finishes writing the file.
    ds_new.FlushCache()
    ds_new = None
    outBand = None
def wkt2bbox(wkt_input):
    """Return the axis-aligned bounding box of a WKT geometry.

    The result is a closed ring of ``[x, y]`` corner coordinates
    (5 points, first == last), as produced by shapely's ``box``.
    """
    geometry = wkt.loads(wkt_input)
    bounding_box = box(*geometry.bounds)  # bounds == (minx, miny, maxx, maxy)
    return [[x, y] for x, y in bounding_box.exterior.coords]
def wkt2shp(wkt_input, target_epsg, dst_file, bbox=False):
    """Write a WKT geometry (or its bounding box) to an ESRI shapefile.

    :param wkt_input: WKT string of the geometry
    :param target_epsg: EPSG code used as the shapefile's CRS
    :param dst_file: output path (parent directories are created)
    :param bbox: when True, write the geometry's bounding box instead
    """
    ensure_dir(dst_file)
    geometry = Polygon(wkt2bbox(wkt_input)) if bbox else wkt.loads(wkt_input)
    frame = gpd.GeoDataFrame(
        pd.DataFrame(['p1'], columns=['geom']),
        crs={'init': 'epsg:' + str(target_epsg)},
        geometry=[geometry],
    )
    frame.to_file(dst_file)
def rescaleDEM(image, noData=None, maxVal=255):
    """Linearly rescale elevations into the uint8 range [1, maxVal].

    Cells equal to ``noData`` are masked to NaN and excluded from the
    min/max computation.  Output value 0 is reserved (the ramp starts at 1)
    so it can serve as a no-data marker in the result.
    """
    # Fix: the original used ``if noData:`` which silently skipped masking
    # when the no-data value was 0.
    if noData is not None:
        image = np.float32(image)
        image[image == noData] = np.nan
    minElev = np.nanmin(image)
    maxElev = np.nanmax(image)
    rescaled = (((image - minElev) / (maxElev - minElev)) * (maxVal - 1)) + 1
    return np.uint8(rescaled)
def joinStrArg(str1, str2, str3=None):
    """Join two (or three, when ``str3`` is given) values into one
    space-separated string."""
    parts = [str(str1), str(str2)]
    if str3 is not None:
        parts.append(str(str3))
    return ' '.join(parts)
def wkt2EPSG(wkt, epsg='/usr/local/share/proj/epsg', forceProj4=False):
    '''
    Transform a WKT string to an EPSG code

    Arguments
    ---------
    wkt: WKT definition
    epsg: the proj.4 epsg file (defaults to '/usr/local/share/proj/epsg')
    forceProj4: whether to perform brute force proj4 epsg file check (last resort)

    Returns: EPSG code string, a WKT/proj4 string for special cases, or None
    '''
    p_in = osr.SpatialReference()
    s = p_in.ImportFromWkt(wkt)
    if s == 5:  # invalid WKT
        return None
    if p_in.IsLocal() == 1:  # this is a local definition
        return p_in.ExportToWkt()
    if p_in.IsGeographic() == 1:  # geographic srs
        cstype = 'GEOGCS'
    else:  # projected srs
        cstype = 'PROJCS'
    an = p_in.GetAuthorityName(cstype)
    ac = p_in.GetAuthorityCode(cstype)
    if an is not None and ac is not None:  # authority known: return directly
        return '%s:%s' % (an, ac)

    # Brute force: grok the proj epsg definition file for the proj4 string.
    p_out = p_in.ExportToProj4()
    if not p_out:
        return None
    if forceProj4 is True:
        return p_out
    code = None
    # Fix: the original opened the file and never closed it; a ``with``
    # block releases the handle even when a match is found early.
    with open(epsg) as f:
        for line in f:
            if line.find(p_out) != -1:
                m = re.search(r'<(\d+)>', line)
                if m:
                    code = m.group(1)
                    break
    if code:  # match
        return 'EPSG:%s' % code
    return None  # no match
def getCornerCoordinates(gdal_dataSet, target_srs = False):
    """
    Return the four corner coordinates of a raster.

    :param gdal_dataSet: /path/to/file OR gdal dataset
    :param target_srs: False for output coordinates in same coordinate system OR 'wgs84' for lat long values OR custom osr.SpatialReference() object
    :return: list of corner coordinates

    --0--------3--
    |           |
    |           |  <--- Index of coordinates returned in list
    |           |
    --1--------2--
    """
    # Accept either a filesystem path or an already-open GDAL dataset.
    if type(gdal_dataSet) is str:
        gdal_dataSet = gdal.Open(gdal_dataSet)

    gt=gdal_dataSet.GetGeoTransform() # gt = [ulx, xres, xskew, uly, yskew, yres]
    cols = gdal_dataSet.RasterXSize
    rows = gdal_dataSet.RasterYSize

    def GetExtent(gt,cols,rows):
        ''' Return list of corner coordinates from a geotransform
        @type gt: C{tuple/list}
        @param gt: geotransform
        @type cols: C{int}
        @param cols: number of columns in the dataset
        @type rows: C{int}
        @param rows: number of rows in the dataset
        @rtype: C{[float,...,float]}
        @return: coordinates of each corner
        '''
        ext=[]
        xarr=[0,cols]
        yarr=[0,rows]

        # Reversing yarr after each column visit yields the corner order
        # 0,1,2,3 shown in the diagram in the outer docstring.
        for px in xarr:
            for py in yarr:
                # Full affine geotransform: origin + pixel offsets + skew.
                x=gt[0]+(px*gt[1])+(py*gt[2])
                y=gt[3]+(px*gt[4])+(py*gt[5])
                ext.append([x,y])
                #print(x,y)
            yarr.reverse()
        return ext

    def ReprojectCoords(coords,src_srs,tgt_srs):
        ''' Reproject a list of x,y coordinates.
        @type geom: C{tuple/list}
        @param geom: List of [[x,y],...[x,y]] coordinates
        @type src_srs: C{osr.SpatialReference}
        @param src_srs: OSR SpatialReference object
        @type tgt_srs: C{osr.SpatialReference}
        @param tgt_srs: OSR SpatialReference object
        @rtype: C{tuple/list}
        @return: List of transformed [[x,y],...[x,y]] coordinates
        '''
        trans_coords=[]
        transform = osr.CoordinateTransformation( src_srs, tgt_srs)
        for x,y in coords:
            # NOTE(review): with GDAL >= 3 the axis order of TransformPoint
            # follows the CRS authority definition (lat/long for EPSG:4326)
            # -- confirm the expected x/y order for this deployment.
            x,y,z = transform.TransformPoint(x,y)
            trans_coords.append([x,y])
        return trans_coords

    ext = GetExtent(gt,cols,rows)

    src_srs=osr.SpatialReference()
    src_srs.ImportFromWkt(gdal_dataSet.GetProjection())

    # target_srs may be False (no reprojection), the string 'wgs84', or a
    # caller-supplied osr.SpatialReference object.
    if target_srs == False:
        return ext
    elif target_srs == 'wgs84':
        #target_srs = src_srs.CloneGeogCS()
        #
        target_srs=osr.SpatialReference()
        target_srs.ImportFromEPSG(4326)

    return ReprojectCoords(ext,src_srs,target_srs)
def resizeToDEM(imPath, sizeDEM=None, geoTransform=None, projection=None, noData=None):
    """Resize the raster at ``imPath`` in place to match the DEM's shape.

    Uses bicubic interpolation and rewrites the file as UInt16 with the
    geo-referencing supplied by the caller.  No-op when the sizes already
    match or ``sizeDEM`` is None.
    """
    imDS = gdal.Open(imPath, gdal.GA_ReadOnly)
    imPix = imDS.RasterXSize
    imScn = imDS.RasterYSize

    if sizeDEM is not None:
        # Fix: unpack only after the None check.  The original unpacked
        # ``sizeDEM`` unconditionally, raising TypeError for the default.
        nscn, npix = sizeDEM
        if nscn != imScn or npix != imPix:
            print("Size Mismatch")
            image = imDS.ReadAsArray()
            if noData is not None:
                # Mask no-data cells so interpolation does not smear them.
                image = np.float32(image)
                image[image == noData] = np.nan
            imNew = cv2.resize(image, (npix, nscn), interpolation=cv2.INTER_CUBIC)
            writeNumpyArr2Geotiff(imPath, imNew, geoTransform=geoTransform,
                                  projection=projection,
                                  GDAL_dtype=gdal.GDT_UInt16, noDataValue=noData)
def map_uint16_to_uint8(img, lower_bound=None, upper_bound=None):
    '''
    Map a 16-bit image through a lookup table to convert it to 8-bit.

    Values at/below ``lower_bound`` map to 0, values at/above
    ``upper_bound`` map to 255, with a linear ramp in between.  Missing
    bounds default to the image min/max.
    '''
    # Fix: test for None *before* the range comparison.  The original
    # evaluated ``0 <= None`` first, which raises TypeError in Python 3
    # whenever the default (None) bounds were used.
    if lower_bound is not None and not (0 <= lower_bound < 2 ** 16):
        raise ValueError(
            '"lower_bound" must be in the range [0, 65535]')
    if upper_bound is not None and not (0 <= upper_bound < 2 ** 16):
        raise ValueError(
            '"upper_bound" must be in the range [0, 65535]')
    if lower_bound is None:
        lower_bound = np.min(img)
    if upper_bound is None:
        upper_bound = np.max(img)
    if lower_bound >= upper_bound:
        raise ValueError(
            '"lower_bound" must be smaller than "upper_bound"')

    # Lookup table over the full uint16 range: zeros below, ramp between,
    # 255 above.  Total length is exactly 2**16.
    lut = np.concatenate([
        np.zeros(lower_bound, dtype=np.uint16),
        np.linspace(0, 255, upper_bound - lower_bound).astype(np.uint16),
        np.ones(2 ** 16 - upper_bound, dtype=np.uint16) * 255
    ])
    return lut[img].astype(np.uint8)
def closeCV(mask, kernelSize=11):
    """Apply morphological closing (dilation then erosion) to ``mask``
    using a square all-ones kernel of side ``kernelSize``."""
    structuring_element = np.ones((kernelSize, kernelSize), np.uint8)
    return cv2.morphologyEx(mask, cv2.MORPH_CLOSE, structuring_element)
def newGeoTransform(geoTransform, maskBounds):
    """Shift a GDAL geotransform's origin by the (xMin, yMin) pixel offsets
    in ``maskBounds``; pixel sizes and skew terms are unchanged."""
    origin_x = geoTransform[0] + maskBounds['xMin'] * geoTransform[1]
    origin_y = geoTransform[3] + maskBounds['yMin'] * geoTransform[5]
    return (origin_x, geoTransform[1], geoTransform[2],
            origin_y, geoTransform[4], geoTransform[5])
def shrinkGeoTransform(geoTransform, factor):
    """Divide the pixel-size terms of a GDAL geotransform by ``factor``
    (origin and skew terms are unchanged)."""
    scaled = list(geoTransform)
    scaled[1] = scaled[1] / factor
    scaled[5] = scaled[5] / factor
    return tuple(scaled)
| 33.884498 | 157 | 0.591137 |
import os
import numpy as np
import cv2
import re
from shapely import wkt
from shapely.geometry import box, Polygon
import pandas as pd
import geopandas as gpd
from osgeo import gdal, gdalnumeric, osr, ogr
def ensure_dir(file_path):
directory = os.path.dirname(file_path)
if not os.path.exists(directory):
os.makedirs(directory)
def getResolution(demFolder, return_full_paths = False):
rasterFilePaths = [f for f in os.listdir(demFolder) if os.path.isfile(os.path.join(demFolder, f))]
if return_full_paths:
rasterFilePaths = [demFolder + '/' + f for f in rasterFilePaths if f[:4] == 'DEM_' and f[-4:] == '.tif']
rasterFilePaths.sort(reverse=True)
else:
rasterFilePaths = [int(f[4:-4]) for f in rasterFilePaths if f[:4] == 'DEM_' and f[-4:] == '.tif']
return rasterFilePaths
def readGDAL2numpy(rasterPath, return_geoInformation = False):
try:
ds = gdal.Open(rasterPath)
except RuntimeError:
print('Unable to open input file')
sys.exit(1)
data = gdalnumeric.LoadFile(rasterPath, False)
noDataVal = ds.GetRasterBand(1).GetNoDataValue()
try:
if data.dtype in ['float16', 'float32', 'float64'] and noDataVal is not None:
data[data == noDataVal] = np.NaN
except:
print("Issue in no data value")
if return_geoInformation == False:
return data
else:
geoTransform = ds.GetGeoTransform()
projection = ds.GetProjection()
return data, geoTransform, projection
def writeNumpyArr2Geotiff(outputPath, data, geoTransform = None, projection = None, GDAL_dtype = gdal.GDT_Byte, noDataValue = None):
nscn, npix = data.shape
if np.isnan(data).any() and noDataValue is not None:
data[np.isnan(data)] = noDataValue
ds_new = gdal.GetDriverByName('GTiff').Create(outputPath, npix, nscn, 1, GDAL_dtype)
if geoTransform != None:
ds_new.SetGeoTransform(geoTransform)
if projection != None:
ds_new.SetProjection(projection)
outBand = ds_new.GetRasterBand(1)
outBand.WriteArray(data)
if noDataValue != None:
ds_new.GetRasterBand(1).SetNoDataValue(noDataValue)
ds_new.FlushCache()
ds_new = None
outBand = None
def writeNumpyArr2Saga(outputPath, data, geoTransform = None, projection = None, GDAL_dtype = gdal.GDT_Byte, noDataValue = None):
nscn, npix = data.shape
if np.isnan(data).any() and noDataValue is not None:
data[np.isnan(data)] = noDataValue
ds_new = gdal.GetDriverByName('SAGA').Create(outputPath, npix, nscn, 1, GDAL_dtype)
outBand = ds_new.GetRasterBand(1)
outBand.WriteArray(data)
if noDataValue != None:
ds_new.GetRasterBand(1).SetNoDataValue(noDataValue)
if projection != None:
ds_new.SetProjection(projection)
ds_new.FlushCache()
ds_new = None
outBand = None
def wkt2bbox(wkt_input):
wkt_geometry = wkt.loads(wkt_input)
minx, miny, maxx, maxy = wkt_geometry.bounds
b = box(minx, miny, maxx, maxy)
bbox_tuple = list(b.exterior.coords)
bbox = []
for point in bbox_tuple:
bbox.append([point[0],point[1]])
return bbox
def wkt2shp(wkt_input, target_epsg, dst_file, bbox=False):
ensure_dir(dst_file)
if bbox:
polygon = Polygon(wkt2bbox(wkt_input))
else:
polygon = wkt.loads(wkt_input)
gpd.GeoDataFrame(pd.DataFrame(['p1'], columns = ['geom']),
crs = {'init':'epsg:' + str(target_epsg)},
geometry = [polygon]).to_file(dst_file)
def rescaleDEM(image, noData = None, maxVal = 255):
if noData:
image = np.float32(image)
image[image == noData] = np.nan
minElev = np.nanmin(image)
maxElev = np.nanmax(image)
rescaled = ( ((image - minElev)/(maxElev- minElev)) * (maxVal - 1) ) + 1
return np.uint8(rescaled)
def joinStrArg(str1, str2, str3 = None):
if str3 is not None:
return str(str1) + ' ' + str(str2) + ' ' + str(str3)
else:
return str(str1) + ' ' + str(str2)
def wkt2EPSG(wkt, epsg='/usr/local/share/proj/epsg', forceProj4=False):
code = None
p_in = osr.SpatialReference()
s = p_in.ImportFromWkt(wkt)
if s == 5:
return None
if p_in.IsLocal() == 1:
return p_in.ExportToWkt()
if p_in.IsGeographic() == 1:
cstype = 'GEOGCS'
else:
cstype = 'PROJCS'
an = p_in.GetAuthorityName(cstype)
ac = p_in.GetAuthorityCode(cstype)
if an is not None and ac is not None:
return '%s:%s' % \
(p_in.GetAuthorityName(cstype), p_in.GetAuthorityCode(cstype))
else:
p_out = p_in.ExportToProj4()
if p_out:
if forceProj4 is True:
return p_out
f = open(epsg)
for line in f:
if line.find(p_out) != -1:
m = re.search('<(\\d+)>', line)
if m:
code = m.group(1)
break
if code:
return 'EPSG:%s' % code
else:
return None
else:
return None
def getCornerCoordinates(gdal_dataSet, target_srs = False):
if type(gdal_dataSet) is str:
gdal_dataSet = gdal.Open(gdal_dataSet)
gt=gdal_dataSet.GetGeoTransform()
cols = gdal_dataSet.RasterXSize
rows = gdal_dataSet.RasterYSize
def GetExtent(gt,cols,rows):
ext=[]
xarr=[0,cols]
yarr=[0,rows]
for px in xarr:
for py in yarr:
x=gt[0]+(px*gt[1])+(py*gt[2])
y=gt[3]+(px*gt[4])+(py*gt[5])
ext.append([x,y])
yarr.reverse()
return ext
def ReprojectCoords(coords,src_srs,tgt_srs):
trans_coords=[]
transform = osr.CoordinateTransformation( src_srs, tgt_srs)
for x,y in coords:
x,y,z = transform.TransformPoint(x,y)
trans_coords.append([x,y])
return trans_coords
ext = GetExtent(gt,cols,rows)
src_srs=osr.SpatialReference()
src_srs.ImportFromWkt(gdal_dataSet.GetProjection())
if target_srs == False:
return ext
elif target_srs == 'wgs84':
target_srs=osr.SpatialReference()
target_srs.ImportFromEPSG(4326)
return ReprojectCoords(ext,src_srs,target_srs)
def resizeToDEM(imPath, sizeDEM = None, geoTransform = None, projection = None, noData = None):
imDS = gdal.Open(imPath, gdal.GA_ReadOnly)
imPix = imDS.RasterXSize
imScn = imDS.RasterYSize
nscn, npix = sizeDEM
if sizeDEM is not None:
if nscn != imScn or npix != imPix:
print("Size Mismatch")
image = imDS.ReadAsArray()
if noData is not None:
image = np.float32(image)
image[image == noData] = np.nan
imNew = cv2.resize(image, (npix, nscn), interpolation=cv2.INTER_CUBIC)
writeNumpyArr2Geotiff(imPath, imNew, geoTransform = geoTransform, projection = projection, GDAL_dtype = gdal.GDT_UInt16, noDataValue = noData)
def map_uint16_to_uint8(img, lower_bound=None, upper_bound=None):
if not(0 <= lower_bound < 2**16) and lower_bound is not None:
raise ValueError(
'"lower_bound" must be in the range [0, 65535]')
if not(0 <= upper_bound < 2**16) and upper_bound is not None:
raise ValueError(
'"upper_bound" must be in the range [0, 65535]')
if lower_bound is None:
lower_bound = np.min(img)
if upper_bound is None:
upper_bound = np.max(img)
if lower_bound >= upper_bound:
raise ValueError(
'"lower_bound" must be smaller than "upper_bound"')
lut = np.concatenate([
np.zeros(lower_bound, dtype=np.uint16),
np.linspace(0, 255, upper_bound - lower_bound).astype(np.uint16),
np.ones(2**16 - upper_bound, dtype=np.uint16) * 255
])
return lut[img].astype(np.uint8)
def closeCV(mask, kernelSize = 11):
kernel = np.ones((kernelSize, kernelSize),np.uint8)
return cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
def newGeoTransform(geoTransform, maskBounds):
newGeoTransform = (geoTransform[0]+ maskBounds['xMin'] * geoTransform[1],
geoTransform[1],
geoTransform[2],
geoTransform[3] + maskBounds['yMin'] * geoTransform[5],
geoTransform[4],
geoTransform[5])
return newGeoTransform
def shrinkGeoTransform(geoTransform, factor):
newGeoTransform = (geoTransform[0],
geoTransform[1] / factor,
geoTransform[2],
geoTransform[3],
geoTransform[4],
geoTransform[5] / factor)
return newGeoTransform
| true | true |
f71b651c18866a5ae438540b4c87f225edab7b8a | 21,388 | py | Python | gym_let_mpc/let_mpc.py | eivindeb/gym-letMPC | 7041aa56a25aa9a1c749088f2b370c910d21fe75 | [
"MIT"
] | 6 | 2020-12-04T18:15:29.000Z | 2022-02-26T11:01:31.000Z | gym_let_mpc/let_mpc.py | eivindeb/gym-letMPC | 7041aa56a25aa9a1c749088f2b370c910d21fe75 | [
"MIT"
] | null | null | null | gym_let_mpc/let_mpc.py | eivindeb/gym-letMPC | 7041aa56a25aa9a1c749088f2b370c910d21fe75 | [
"MIT"
] | 5 | 2021-03-08T06:00:27.000Z | 2021-11-22T08:14:12.000Z | import gym
from gym.utils import seeding
import numpy as np
import json
from gym_let_mpc.simulator import ControlSystem
from gym_let_mpc.controllers import ETMPC, AHMPC
import collections.abc
import matplotlib.pyplot as plt
from gym_let_mpc.utils import str_replace_whole_words
import copy
class LetMPCEnv(gym.Env):
    def __init__(self, config_path):
        """Build the environment from a JSON configuration file.

        :param config_path: (str) path to a JSON file with "plant", "mpc",
            "lqr" and "environment" sections.
        """
        with open(config_path) as file_object:
            config = json.load(file_object)
        # The MPC/LQR models may be declared as aliases of another section's
        # model (or of its parameters only); resolve the aliases with deep
        # copies so later in-place edits do not leak between sections.
        if config["mpc"]["model"] == "plant":
            config["mpc"]["model"] = copy.deepcopy(config["plant"]["model"])
        elif config["mpc"]["model"].get("parameters", None) == "plant":
            config["mpc"]["model"]["parameters"] = copy.deepcopy(config["plant"]["model"]["parameters"])
        if config["lqr"]["model"] == "plant":
            config["lqr"]["model"] = copy.deepcopy(config["plant"]["model"])
        elif config["lqr"]["model"] == "mpc":
            config["lqr"]["model"] = copy.deepcopy(config["mpc"]["model"])
        elif config["lqr"]["model"].get("parameters", None) == "plant":
            config["lqr"]["model"]["parameters"] = copy.deepcopy(config["plant"]["model"]["parameters"])
        elif config["lqr"]["model"].get("parameters", None) == "mpc":
            config["lqr"]["model"]["parameters"] = copy.deepcopy(config["mpc"]["model"]["parameters"])
        self.config = config
        # Required environment settings.
        assert "max_steps" in self.config["environment"]
        self.max_steps = self.config["environment"]["max_steps"]
        assert "randomize" in self.config["environment"]
        assert "state" in self.config["environment"]["randomize"] and "reference" in self.config["environment"]["randomize"]
        assert "render" in self.config["environment"]
        # ETMPC: single binary "compute MPC this step" action.
        # AHMPC: single continuous prediction-horizon action in [1, 50]
        # (rounded to an integer in step()).
        if config["mpc"]["type"] == "ETMPC":
            assert len(config["environment"]["action"]["variables"]) == 1 and \
                   config["environment"]["action"]["variables"][0]["name"] == "mpc_compute"
            controller = ETMPC(config["mpc"], config["lqr"])
            self.action_space = gym.spaces.Discrete(2)
        elif config["mpc"]["type"] == "AHMPC":
            assert len(config["environment"]["action"]["variables"]) == 1 and \
                   config["environment"]["action"]["variables"][0]["name"] == "mpc_horizon"
            controller = AHMPC(config["mpc"])
            self.action_space = gym.spaces.Box(low=np.array([1]), high=np.array([50]), dtype=np.float32)
        else:
            raise ValueError
        self.control_system = ControlSystem(config["plant"], controller=controller)
        self.history = None
        self.steps_count = None
        self.np_random = None
        self.min_constraint_delta = 0.25  # TODO: how and where to set
        # Build observation-space bounds: one entry per (variable, transform)
        # pair, with configured limits mapped through the transform.
        obs_high = []
        obs_low = []
        for obs_var in self.config["environment"]["observation"]["variables"]:
            for var_transform in obs_var.get("transform", ["none"]):
                for lim_i, lim in enumerate(obs_var.get("limits", [None, None])):
                    if lim is None:
                        # Unbounded in this direction.
                        if lim_i == 0:
                            obs_low.append(-np.finfo(np.float32).max)
                        else:
                            obs_high.append(np.finfo(np.float32).max)
                    else:
                        if var_transform == "none":
                            if lim_i == 0:
                                obs_low.append(lim)
                            else:
                                obs_high.append(lim)
                        elif var_transform == "absolute":
                            # abs() maps any lower limit to 0.
                            if lim_i == 0:
                                obs_low.append(0)
                            else:
                                obs_high.append(lim)
                        elif var_transform == "square":
                            # squaring maps any lower limit to 0.
                            if lim_i == 0:
                                obs_low.append(0)
                            else:
                                obs_high.append(lim ** 2)
                        else:
                            raise NotImplementedError
        self.observation_space = gym.spaces.Box(low=np.array(obs_low, dtype=np.float32),
                                                high=np.array(obs_high, dtype=np.float32),
                                                dtype=np.float32)
        self.value_function_is_set = False
        self.viewer = None
def seed(self, seed=None):
"""
Seed the random number generator of the control system.
:param seed: (int) seed for random state
"""
self.np_random, seed = gym.utils.seeding.np_random(seed)
self.control_system.seed(seed)
return [seed]
    def reset(self, state=None, reference=None, constraint=None, model=None, process_noise=None, tvp=None):
        """
        Reset state of environment. Note that the simulator is reset, the MPC solution is computed and the first
        MPC action is applied to the plant.

        :param state: (dict) initial conditions (value) for state name (key).
        :param reference: (dict) reference value (value) for reference name (key).
        :param constraint: (dict) constraint values (value) for constraint names (key).
        :param model: (dict) dictionary of dictionary where first key is model that it applies to ["plant", "mpc", "lqr"],
        first value is dictionary of model parameters where second value is the specified model parameter value.
        :param process_noise: (dict) process noise values (value) as ndarray for state name (key). The process noise at
        each time step loops through the provided array.
        :param tvp: (dict) values of time-varying parameters. New values are generated if values arent specified
        for all time steps elapsed.
        :return: ([float]) observation vector
        """
        def update_dict_recursively(d, u):
            # Merge mapping ``u`` into ``d`` in place, recursing into nested
            # dicts so partial overrides keep untouched sibling keys.
            for k, v in u.items():
                if isinstance(v, collections.abc.Mapping):
                    d[k] = update_dict_recursively(d.get(k, {}), v)
                else:
                    d[k] = v
            return d
        # Sample random initial conditions, then let caller-supplied values
        # override the sampled ones.  Empty dicts become None so the control
        # system falls back to its own defaults.
        sampled_state = self.sample_state()
        sampled_reference = self.sample_reference()
        sampled_constraint = self.sample_constraints()
        sampled_model = self.sample_model()
        if state is not None:
            sampled_state.update(state)
        elif len(sampled_state) == 0:
            sampled_state = None
        if reference is not None:
            sampled_reference.update(reference)
        elif len(sampled_reference) == 0:
            sampled_reference = None
        if constraint is not None:
            sampled_constraint.update(constraint)
        elif len(sampled_constraint) == 0:
            sampled_constraint = None
        if model is not None:
            sampled_model = update_dict_recursively(sampled_model, model)
        elif len(sampled_model) == 0:
            sampled_model = None
        self.control_system.reset(state=sampled_state, reference=sampled_reference, constraint=sampled_constraint,
                                  model=sampled_model, process_noise=process_noise, tvp=tvp)
        # For event-triggered MPC, compute and apply the first MPC action so
        # the observation reflects an initialized controller.
        if self.config["mpc"]["type"] == "ETMPC":
            self.control_system.step(action=np.array([1]))
        obs = self.get_observation()
        self.history = {"obs": [obs], "actions": [], "rewards": []}
        self.steps_count = 0
        return obs
    def step(self, action):
        """Apply one RL action, advance the control system and compute reward.

        :param action: (ndarray) raw action vector; mapped to named action
            variables via the environment config.
        :return: (obs, reward, done, info) gym step tuple.
        """
        a_dict = {a_props["name"]: action[a_i]
                  for a_i, a_props in enumerate(self.config["environment"]["action"]["variables"])}
        # NOTE(review): "mpc_horizon" is read unconditionally here, but ETMPC
        # configurations name their only action "mpc_compute" (see __init__),
        # which would raise KeyError; the commented-out alternative appears to
        # be the ETMPC path -- confirm intended usage.
        self.control_system.step(np.round(a_dict["mpc_horizon"]).astype(np.int32))#np.atleast_1d(int(a_dict["mpc_compute"])))
        self.history["actions"].append(a_dict)
        self.steps_count += 1
        info = {}
        obs = self.get_observation()
        done = False
        # Episode terminates on step budget or (optionally) on violating a
        # configured constraint (positive distance == violation).
        if self.steps_count >= self.max_steps:
            done = True
            info["termination"] = "steps"
        elif len(self.config["environment"].get("end_on_constraint_violation", [])) > 0:
            for c_name, c_d in self.control_system.get_constraint_distances().items():
                if c_name.split("-")[1] in self.config["environment"]["end_on_constraint_violation"] and c_d > 0:
                    done = True
                    info["termination"] = "constraint"
                    break
        rew = self.get_reward(done=done)
        # Optional per-component reward breakdown for logging.
        for category, v in self.config["environment"].get("info", {}).items():
            if category == "reward":
                for rew_name, rew_expr in v.items():
                    info["reward/{}".format(rew_name)] = self.get_reward(rew_expr, done=done)
            else:
                raise NotImplementedError
        if self.value_function_is_set:
            # Collect MPC value-function training data: terminal predicted
            # state, horizon length and the stage-cost part of the objective
            # (total objective minus the discounted terminal value estimate).
            step_vf_data = {"mpc_state": self.control_system.get_state_vector(self.control_system.history["state"][-2]),
                            "mpc_next_state": self.control_system.controller.mpc_state_preds[:, -1, -1]}
            step_vf_data["mpc_n_horizon"] = self.control_system.controller.history["mpc_horizon"][-1]
            info["mpc_value_fn"] = (self.control_system.controller.value_function.eval([step_vf_data["mpc_next_state"].reshape(1, -1)])[0][0, 0]).astype(np.float64)
            step_vf_data["mpc_rewards"] = self.control_system.controller.mpc.opt_f_num.toarray()[0, 0] - \
                                          self.config["mpc"]["objective"].get("discount_factor") ** (step_vf_data["mpc_n_horizon"] + 1) * info["mpc_value_fn"]
            # Total solver process time (all t_proc_* entries).
            info["mpc_computation_time"] = sum([v for k, v in self.control_system.controller.mpc.solver_stats.items() if k.startswith("t_proc")])
            info["data"] = step_vf_data
            info["mpc_avg_stage_cost"] = step_vf_data["mpc_rewards"] / step_vf_data["mpc_n_horizon"]
        # Expose the applied action values (as plain floats) in info.
        info.update({k: v.astype(np.float64) if hasattr(v, "dtype") else v for k, v in a_dict.items()})
        self.history["obs"].append(obs)
        self.history["rewards"].append(rew)
        return obs, rew, done, info
    def render(self, mode='human', save_path=None): # TODO: add env renders
        """Render the control-system history plus any configured env-level plots.

        On the first call a matplotlib figure is created with one extra shared-x
        axis per enabled entry in config["environment"]["render"]; later calls
        redraw only the environment axes.

        :param mode: gym render mode (unused beyond API compatibility).
        :param save_path: if given, save the figure there as PNG and close it
            instead of showing it on screen.
        """
        figure, axes = None, None
        if self.viewer is None:
            # First render: build the figure, reserving one trailing axis per
            # enabled environment plot (filled from the bottom up).
            env_plots = [plot_name for plot_name, make_plot in self.config["environment"]["render"].items() if make_plot]
            if len(env_plots) > 0:
                figure, axes = plt.subplots(self.control_system.render_n_axes + len(env_plots), sharex=True,
                                            figsize=(9, 16))
                self.viewer = self.control_system.render(figure=figure, axes=axes, return_viewer=True)
                for i, plot in enumerate(env_plots):
                    self.viewer["axes"][plot] = axes[-(i + 1)]
            else:
                self.viewer = self.control_system.render(figure=figure, axes=axes, return_viewer=True)
        for plot_name, make_plot in self.config["environment"]["render"].items():
            if make_plot:
                # NOTE(review): clear() two lines below wipes this ylabel right
                # away -- confirm whether the label is meant to survive.
                self.viewer["axes"][plot_name].set_ylabel("-".join(plot_name.split("_")[1:]))
                x_data = np.array(range(self.steps_count)) * self.control_system.config["params"]["t_step"]
                self.viewer["axes"][plot_name].clear()
                if plot_name == "plot_action":
                    # One step-style line per configured action variable.
                    for a_var in self.config["environment"]["action"]["variables"]:
                        y_data = [step_a[a_var["name"]] for step_a in self.history["actions"]]
                        self.viewer["axes"][plot_name].plot(x_data, y_data, label=a_var["name"], drawstyle="steps")
                elif plot_name == "plot_reward":
                    # Reward curve, plus the episode's cumulative reward as text
                    # just past the last sample.
                    self.viewer["axes"][plot_name].plot(x_data, self.history["rewards"], label="reward")
                    self.viewer["axes"][plot_name].text(max(x_data) + self.control_system.config["params"]["t_step"],
                                                        self.history["rewards"][-1],
                                                        "{:.3f}".format(np.sum(self.history["rewards"])))
                else:
                    raise ValueError
        for axis in self.viewer["axes"].values():
            axis.legend()
        if save_path is not None:
            self.viewer["figure"].savefig(save_path, bbox_inches="tight", format="png")
            plt.close(self.viewer["figure"])
        else:
            self.viewer["figure"].show()
def get_observation(self):
obs = []
for var in self.config["environment"]["observation"]["variables"]:
var_val = self._get_variable_value(var)
for transform in var.get("transform", ["none"]):
if transform == "none":
obs.append(var_val)
elif transform == "absolute":
obs.append(abs(var_val))
elif transform == "square":
obs.append(var_val ** 2)
else:
raise ValueError
return np.array(obs)
def get_reward(self, rew_expr=None, done=False):
if rew_expr is None:
rew_expr = self.config["environment"]["reward"]["expression"]
rew_expr = str_replace_whole_words(rew_expr, "done", int(done))
for var in sorted(self.config["environment"]["reward"]["variables"], key=lambda x: len(x), reverse=True):
var_val = self._get_variable_value(var)
if isinstance(var_val, list) or isinstance(var_val, np.ndarray): # TODO: needs to be better way to do this
var_val = var_val[0]
rew_expr = str_replace_whole_words(rew_expr, var["name"], var_val)
return eval(rew_expr)
    def _get_variable_value(self, var):
        """Resolve one configured variable description to its current value.

        *var* is a config dict with "type", "name" and (for some types) a
        "value_type" key selecting a sub-variant.  The result is unwrapped
        from 1-element arrays and clipped to var["limits"] when present.

        :raises ValueError: for unknown "type" / "value_type" combinations.
        """
        if var["type"] == "state":
            val = self.control_system.current_state[var["name"]]
        elif var["type"] == "input":
            # Absolute input value, or the change from the previous input.
            if var.get("value_type", "absolute") == "absolute":
                val = self.control_system.controller.current_input[var["name"]]
            elif var.get("value_type") == "delta":
                val = self.control_system.controller.history["inputs"][-2][var["name"]] - \
                      self.control_system.controller.current_input[var["name"]]
            else:
                raise ValueError
        elif var["type"] == "reference":
            val = self.control_system.controller.current_reference[var["name"]]
        elif var["type"] == "tvp":
            # Time-varying parameter evaluated at the current step index.
            val = self.control_system.tvps[var["name"]].get_values(self.steps_count)
        elif var["type"] == "error":
            # NaN errors (e.g. before the first measurement) are coerced to 0.
            val = self.control_system.controller.history["errors"][-1][var["name"]]
            if np.isnan(val):
                val = 0
        elif var["type"] == "epsilon":
            val = self.control_system.controller.history["epsilons"][-1][var["name"]]
            if np.isnan(val):
                val = 0
        elif var["type"] == "constraint":
            if var.get("value_type") == "distance":
                # Signed distance to the constraint (positive == violated,
                # matching the termination check in step()).
                val = self.control_system.get_constraint_distances((var["name"],))[var["name"]]
            else:
                raise ValueError
        elif var["type"] == "action":
            # Action as chosen by the agent, or as executed by the controller.
            if var.get("value_type", "agent") == "agent":
                val = self.history["actions"][-1][var["name"]]
            elif var.get("value_type") == "controller":
                val = self.control_system.controller.history[var["name"]][-1]
            else:
                raise ValueError
        elif var["type"] == "time":
            # Steps since the MPC was last recomputed, absolute or as a
            # fraction of the MPC horizon.
            if var.get("value_type") == "fraction":
                val = self.control_system.controller.steps_since_mpc_computation / self.control_system.controller.mpc.n_horizon
            elif var.get("value_type") == "absolute":
                val = self.control_system.controller.steps_since_mpc_computation
            else:
                raise ValueError
        elif var["type"] == "parameter":
            if var["value_type"] in ["plant", "mpc", "lqr"]:
                val = self.config[var["value_type"]]["model"]["parameters"][var["name"]]
            else:
                raise ValueError
        else:
            raise ValueError
        if isinstance(val, np.ndarray):
            val = val[0]
        if "limits" in var:
            val = np.clip(val, var["limits"][0], var["limits"][1])
        return val
def sample_constraints(self):
constraints = {}
for c_name, c_props in self.config["environment"].get("randomize", {}).get("constraints", {}).items():
constraint_val = getattr(self.np_random, c_props["type"])(**c_props["kw"])
if c_name.split("-")[1] in [k.split("-")[1] for k in constraints.keys()]:
other_bound_type = "u" if c_name.split("-")[2] == "l" else "l"
other_bound_val = constraints[c_name[:-1] + other_bound_type]
if other_bound_type == "u":
constraint_val = min(other_bound_val - self.min_constraint_delta, constraint_val)
else:
constraint_val = max(other_bound_val + self.min_constraint_delta, constraint_val)
constraints[c_name] = constraint_val
return constraints
def sample_state(self):
state = {}
for s_name, s_props in self.config["environment"].get("randomize", {}).get("state", {}).items():
state[s_name] = getattr(self.np_random, s_props["type"])(**s_props["kw"])
return state
def sample_reference(self):
reference = {}
for r_name, r_props in self.config["environment"].get("randomize", {}).get("reference", {}).items():
reference[r_name] = getattr(self.np_random, r_props["type"])(**r_props["kw"])
return reference
def sample_model(self):
model = {}
for s_name, s_props in self.config["environment"].get("randomize", {}).get("model", {}).get("states", {}).items():
model["states"] = {s_name: {}}
for component_name, component_props in s_props.items():
model["states"][s_name][component_name] = \
{comp_v_name: getattr(self.np_random, v_prop["type"])(**v_prop["kw"])
for comp_v_name, v_prop in component_props.items()}
model = {dest: model for dest in self.config["environment"].get("randomize", {}).get("model", {}).get("apply", [])}
return model
    def stop(self):
        """No-op teardown hook; presumably kept for interface compatibility."""
        pass
    def create_dataset(self, n_scenarios):
        """Pre-generate randomized episode scenarios for reproducible runs.

        Each scenario bundles a sampled initial state, reference, constraints,
        model perturbation, per-step process noise and time-varying-parameter
        traces, i.e. everything reset() needs to replay the episode exactly.

        :param n_scenarios: number of scenarios to generate.
        :return: list of scenario dicts (usable as kwargs for reset()).
        """
        dataset = []
        self.reset()
        for i in range(n_scenarios):
            # One noise sample per step.  NOTE(review): the comprehension
            # variable shadows the outer loop variable `i` (harmless here).
            process_noise = np.array([self.control_system._get_process_noise() for i in range(self.max_steps)])
            ep_dict = {"state": self.sample_state(), "reference": self.sample_reference(),
                       "constraint": self.sample_constraints(), "model": self.sample_model(),
                       "process_noise": {}, "tvp": {}}
            # Noise columns follow state order, but only states declaring a
            # noise term "W" consume a column.
            s_i = 0
            for s_name, s_props in self.config["plant"]["model"]["states"].items():
                if "W" in s_props:
                    ep_dict["process_noise"][s_name] = process_noise[:, s_i]
                    s_i += 1
            for tvp_name, tvp_obj in self.control_system.tvps.items():
                tvp_obj.generate_values(self.max_steps)
                ep_dict["tvp"][tvp_name] = tvp_obj.values
            dataset.append(ep_dict)
            # Reset between scenarios so each one is sampled from a fresh episode.
            self.reset()
        return dataset
    def set_value_function(self, input_ph, output_ph, tf_session):
        """Attach a value-function graph to the MPC controller.

        :param input_ph: input placeholder fed with the (terminal) state.
        :param output_ph: output tensor holding the value estimate.
        :param tf_session: session used to evaluate the graph.
        """
        self.control_system.controller.set_value_function(input_ph, output_ph, tf_session)
        self.value_function_is_set = True
def set_learning_status(self, status):
if self.value_function_is_set:
self.control_system.controller.value_function.set_enabled(status)
if __name__ == "__main__": # TODO: constraints on pendulum and end episode if constraints violated
    # Ad-hoc smoke test: run one episode from a pickled scenario with a fixed
    # MPC horizon, then print per-component reward sums and elapsed CPU time.
    env = LetMPCEnv("configs/cart_pendulum_horizon.json")
    env.seed(0)
    """
    from tensorflow_casadi import TensorFlowEvaluator, MLP
    import tensorflow as tf
    a = tf.placeholder(shape=(None, 4), dtype=tf.float32)
    mlp = MLP(a)
    sess = tf.Session()
    val_fun = TensorFlowEvaluator([mlp.input_ph], [mlp.output], sess)
    env.set_value_function(mlp.input_ph, mlp.output, sess)
    """
    import pickle
    with open("../../lmpc-horizon/datasets/cart_pendulum_10.pkl", "rb") as f:
        test_set = pickle.load(f)
    rews = {}
    for i in range(1):
        import time
        obs = env.reset(**test_set[5])
        done = False
        t_before = time.process_time()
        horizon = 10
        while not done:
            t_step = time.process_time()
            # NOTE(review): "and False" disables the horizon toggling below;
            # presumably left in from an experiment -- confirm before removal.
            if env.steps_count % 1 == 0 and False:
                horizon = 25 if horizon == 50 else 50
            obs, rew, done, info = env.step([horizon])#[np.random.randint(1, 10)])
            # Accumulate the per-component reward diagnostics reported in info.
            for rew_comp, v in info.items():
                if rew_comp.startswith("reward/"):
                    if rew_comp not in rews:
                        rews[rew_comp] = []
                    rews[rew_comp].append(v)
            # Dump solver stats for unusually slow steps (> 1 s of CPU time).
            if time.process_time() - t_step > 1:
                print(env.control_system.controller.mpc.solver_stats)
                print(env.steps_count)
    for k, v in rews.items():
        print("{}: {}".format(k, sum(v)))
    print("Elapsed time {}".format(time.process_time() - t_before))
    env.render()
| 47.423503 | 164 | 0.565738 | import gym
from gym.utils import seeding
import numpy as np
import json
from gym_let_mpc.simulator import ControlSystem
from gym_let_mpc.controllers import ETMPC, AHMPC
import collections.abc
import matplotlib.pyplot as plt
from gym_let_mpc.utils import str_replace_whole_words
import copy
class LetMPCEnv(gym.Env):
def __init__(self, config_path):
with open(config_path) as file_object:
config = json.load(file_object)
if config["mpc"]["model"] == "plant":
config["mpc"]["model"] = copy.deepcopy(config["plant"]["model"])
elif config["mpc"]["model"].get("parameters", None) == "plant":
config["mpc"]["model"]["parameters"] = copy.deepcopy(config["plant"]["model"]["parameters"])
if config["lqr"]["model"] == "plant":
config["lqr"]["model"] = copy.deepcopy(config["plant"]["model"])
elif config["lqr"]["model"] == "mpc":
config["lqr"]["model"] = copy.deepcopy(config["mpc"]["model"])
elif config["lqr"]["model"].get("parameters", None) == "plant":
config["lqr"]["model"]["parameters"] = copy.deepcopy(config["plant"]["model"]["parameters"])
elif config["lqr"]["model"].get("parameters", None) == "mpc":
config["lqr"]["model"]["parameters"] = copy.deepcopy(config["mpc"]["model"]["parameters"])
self.config = config
assert "max_steps" in self.config["environment"]
self.max_steps = self.config["environment"]["max_steps"]
assert "randomize" in self.config["environment"]
assert "state" in self.config["environment"]["randomize"] and "reference" in self.config["environment"]["randomize"]
assert "render" in self.config["environment"]
if config["mpc"]["type"] == "ETMPC":
assert len(config["environment"]["action"]["variables"]) == 1 and \
config["environment"]["action"]["variables"][0]["name"] == "mpc_compute"
controller = ETMPC(config["mpc"], config["lqr"])
self.action_space = gym.spaces.Discrete(2)
elif config["mpc"]["type"] == "AHMPC":
assert len(config["environment"]["action"]["variables"]) == 1 and \
config["environment"]["action"]["variables"][0]["name"] == "mpc_horizon"
controller = AHMPC(config["mpc"])
self.action_space = gym.spaces.Box(low=np.array([1]), high=np.array([50]), dtype=np.float32)
else:
raise ValueError
self.control_system = ControlSystem(config["plant"], controller=controller)
self.history = None
self.steps_count = None
self.np_random = None
self.min_constraint_delta = 0.25
obs_high = []
obs_low = []
for obs_var in self.config["environment"]["observation"]["variables"]:
for var_transform in obs_var.get("transform", ["none"]):
for lim_i, lim in enumerate(obs_var.get("limits", [None, None])):
if lim is None:
if lim_i == 0:
obs_low.append(-np.finfo(np.float32).max)
else:
obs_high.append(np.finfo(np.float32).max)
else:
if var_transform == "none":
if lim_i == 0:
obs_low.append(lim)
else:
obs_high.append(lim)
elif var_transform == "absolute":
if lim_i == 0:
obs_low.append(0)
else:
obs_high.append(lim)
elif var_transform == "square":
if lim_i == 0:
obs_low.append(0)
else:
obs_high.append(lim ** 2)
else:
raise NotImplementedError
self.observation_space = gym.spaces.Box(low=np.array(obs_low, dtype=np.float32),
high=np.array(obs_high, dtype=np.float32),
dtype=np.float32)
self.value_function_is_set = False
self.viewer = None
def seed(self, seed=None):
self.np_random, seed = gym.utils.seeding.np_random(seed)
self.control_system.seed(seed)
return [seed]
def reset(self, state=None, reference=None, constraint=None, model=None, process_noise=None, tvp=None):
def update_dict_recursively(d, u):
for k, v in u.items():
if isinstance(v, collections.abc.Mapping):
d[k] = update_dict_recursively(d.get(k, {}), v)
else:
d[k] = v
return d
sampled_state = self.sample_state()
sampled_reference = self.sample_reference()
sampled_constraint = self.sample_constraints()
sampled_model = self.sample_model()
if state is not None:
sampled_state.update(state)
elif len(sampled_state) == 0:
sampled_state = None
if reference is not None:
sampled_reference.update(reference)
elif len(sampled_reference) == 0:
sampled_reference = None
if constraint is not None:
sampled_constraint.update(constraint)
elif len(sampled_constraint) == 0:
sampled_constraint = None
if model is not None:
sampled_model = update_dict_recursively(sampled_model, model)
elif len(sampled_model) == 0:
sampled_model = None
self.control_system.reset(state=sampled_state, reference=sampled_reference, constraint=sampled_constraint,
model=sampled_model, process_noise=process_noise, tvp=tvp)
if self.config["mpc"]["type"] == "ETMPC":
self.control_system.step(action=np.array([1]))
obs = self.get_observation()
self.history = {"obs": [obs], "actions": [], "rewards": []}
self.steps_count = 0
return obs
def step(self, action):
a_dict = {a_props["name"]: action[a_i]
for a_i, a_props in enumerate(self.config["environment"]["action"]["variables"])}
self.control_system.step(np.round(a_dict["mpc_horizon"]).astype(np.int32))
self.history["actions"].append(a_dict)
self.steps_count += 1
info = {}
obs = self.get_observation()
done = False
if self.steps_count >= self.max_steps:
done = True
info["termination"] = "steps"
elif len(self.config["environment"].get("end_on_constraint_violation", [])) > 0:
for c_name, c_d in self.control_system.get_constraint_distances().items():
if c_name.split("-")[1] in self.config["environment"]["end_on_constraint_violation"] and c_d > 0:
done = True
info["termination"] = "constraint"
break
rew = self.get_reward(done=done)
for category, v in self.config["environment"].get("info", {}).items():
if category == "reward":
for rew_name, rew_expr in v.items():
info["reward/{}".format(rew_name)] = self.get_reward(rew_expr, done=done)
else:
raise NotImplementedError
if self.value_function_is_set:
step_vf_data = {"mpc_state": self.control_system.get_state_vector(self.control_system.history["state"][-2]),
"mpc_next_state": self.control_system.controller.mpc_state_preds[:, -1, -1]}
step_vf_data["mpc_n_horizon"] = self.control_system.controller.history["mpc_horizon"][-1]
info["mpc_value_fn"] = (self.control_system.controller.value_function.eval([step_vf_data["mpc_next_state"].reshape(1, -1)])[0][0, 0]).astype(np.float64)
step_vf_data["mpc_rewards"] = self.control_system.controller.mpc.opt_f_num.toarray()[0, 0] - \
self.config["mpc"]["objective"].get("discount_factor") ** (step_vf_data["mpc_n_horizon"] + 1) * info["mpc_value_fn"]
info["mpc_computation_time"] = sum([v for k, v in self.control_system.controller.mpc.solver_stats.items() if k.startswith("t_proc")])
info["data"] = step_vf_data
info["mpc_avg_stage_cost"] = step_vf_data["mpc_rewards"] / step_vf_data["mpc_n_horizon"]
info.update({k: v.astype(np.float64) if hasattr(v, "dtype") else v for k, v in a_dict.items()})
self.history["obs"].append(obs)
self.history["rewards"].append(rew)
return obs, rew, done, info
def render(self, mode='human', save_path=None):
figure, axes = None, None
if self.viewer is None:
env_plots = [plot_name for plot_name, make_plot in self.config["environment"]["render"].items() if make_plot]
if len(env_plots) > 0:
figure, axes = plt.subplots(self.control_system.render_n_axes + len(env_plots), sharex=True,
figsize=(9, 16))
self.viewer = self.control_system.render(figure=figure, axes=axes, return_viewer=True)
for i, plot in enumerate(env_plots):
self.viewer["axes"][plot] = axes[-(i + 1)]
else:
self.viewer = self.control_system.render(figure=figure, axes=axes, return_viewer=True)
for plot_name, make_plot in self.config["environment"]["render"].items():
if make_plot:
self.viewer["axes"][plot_name].set_ylabel("-".join(plot_name.split("_")[1:]))
x_data = np.array(range(self.steps_count)) * self.control_system.config["params"]["t_step"]
self.viewer["axes"][plot_name].clear()
if plot_name == "plot_action":
for a_var in self.config["environment"]["action"]["variables"]:
y_data = [step_a[a_var["name"]] for step_a in self.history["actions"]]
self.viewer["axes"][plot_name].plot(x_data, y_data, label=a_var["name"], drawstyle="steps")
elif plot_name == "plot_reward":
self.viewer["axes"][plot_name].plot(x_data, self.history["rewards"], label="reward")
self.viewer["axes"][plot_name].text(max(x_data) + self.control_system.config["params"]["t_step"],
self.history["rewards"][-1],
"{:.3f}".format(np.sum(self.history["rewards"])))
else:
raise ValueError
for axis in self.viewer["axes"].values():
axis.legend()
if save_path is not None:
self.viewer["figure"].savefig(save_path, bbox_inches="tight", format="png")
plt.close(self.viewer["figure"])
else:
self.viewer["figure"].show()
def get_observation(self):
obs = []
for var in self.config["environment"]["observation"]["variables"]:
var_val = self._get_variable_value(var)
for transform in var.get("transform", ["none"]):
if transform == "none":
obs.append(var_val)
elif transform == "absolute":
obs.append(abs(var_val))
elif transform == "square":
obs.append(var_val ** 2)
else:
raise ValueError
return np.array(obs)
def get_reward(self, rew_expr=None, done=False):
if rew_expr is None:
rew_expr = self.config["environment"]["reward"]["expression"]
rew_expr = str_replace_whole_words(rew_expr, "done", int(done))
for var in sorted(self.config["environment"]["reward"]["variables"], key=lambda x: len(x), reverse=True):
var_val = self._get_variable_value(var)
if isinstance(var_val, list) or isinstance(var_val, np.ndarray):
var_val = var_val[0]
rew_expr = str_replace_whole_words(rew_expr, var["name"], var_val)
return eval(rew_expr)
def _get_variable_value(self, var):
if var["type"] == "state":
val = self.control_system.current_state[var["name"]]
elif var["type"] == "input":
if var.get("value_type", "absolute") == "absolute":
val = self.control_system.controller.current_input[var["name"]]
elif var.get("value_type") == "delta":
val = self.control_system.controller.history["inputs"][-2][var["name"]] - \
self.control_system.controller.current_input[var["name"]]
else:
raise ValueError
elif var["type"] == "reference":
val = self.control_system.controller.current_reference[var["name"]]
elif var["type"] == "tvp":
val = self.control_system.tvps[var["name"]].get_values(self.steps_count)
elif var["type"] == "error":
val = self.control_system.controller.history["errors"][-1][var["name"]]
if np.isnan(val):
val = 0
elif var["type"] == "epsilon":
val = self.control_system.controller.history["epsilons"][-1][var["name"]]
if np.isnan(val):
val = 0
elif var["type"] == "constraint":
if var.get("value_type") == "distance":
val = self.control_system.get_constraint_distances((var["name"],))[var["name"]]
else:
raise ValueError
elif var["type"] == "action":
if var.get("value_type", "agent") == "agent":
val = self.history["actions"][-1][var["name"]]
elif var.get("value_type") == "controller":
val = self.control_system.controller.history[var["name"]][-1]
else:
raise ValueError
elif var["type"] == "time":
if var.get("value_type") == "fraction":
val = self.control_system.controller.steps_since_mpc_computation / self.control_system.controller.mpc.n_horizon
elif var.get("value_type") == "absolute":
val = self.control_system.controller.steps_since_mpc_computation
else:
raise ValueError
elif var["type"] == "parameter":
if var["value_type"] in ["plant", "mpc", "lqr"]:
val = self.config[var["value_type"]]["model"]["parameters"][var["name"]]
else:
raise ValueError
else:
raise ValueError
if isinstance(val, np.ndarray):
val = val[0]
if "limits" in var:
val = np.clip(val, var["limits"][0], var["limits"][1])
return val
def sample_constraints(self):
constraints = {}
for c_name, c_props in self.config["environment"].get("randomize", {}).get("constraints", {}).items():
constraint_val = getattr(self.np_random, c_props["type"])(**c_props["kw"])
if c_name.split("-")[1] in [k.split("-")[1] for k in constraints.keys()]:
other_bound_type = "u" if c_name.split("-")[2] == "l" else "l"
other_bound_val = constraints[c_name[:-1] + other_bound_type]
if other_bound_type == "u":
constraint_val = min(other_bound_val - self.min_constraint_delta, constraint_val)
else:
constraint_val = max(other_bound_val + self.min_constraint_delta, constraint_val)
constraints[c_name] = constraint_val
return constraints
def sample_state(self):
state = {}
for s_name, s_props in self.config["environment"].get("randomize", {}).get("state", {}).items():
state[s_name] = getattr(self.np_random, s_props["type"])(**s_props["kw"])
return state
def sample_reference(self):
reference = {}
for r_name, r_props in self.config["environment"].get("randomize", {}).get("reference", {}).items():
reference[r_name] = getattr(self.np_random, r_props["type"])(**r_props["kw"])
return reference
def sample_model(self):
model = {}
for s_name, s_props in self.config["environment"].get("randomize", {}).get("model", {}).get("states", {}).items():
model["states"] = {s_name: {}}
for component_name, component_props in s_props.items():
model["states"][s_name][component_name] = \
{comp_v_name: getattr(self.np_random, v_prop["type"])(**v_prop["kw"])
for comp_v_name, v_prop in component_props.items()}
model = {dest: model for dest in self.config["environment"].get("randomize", {}).get("model", {}).get("apply", [])}
return model
def stop(self):
pass
def create_dataset(self, n_scenarios):
dataset = []
self.reset()
for i in range(n_scenarios):
process_noise = np.array([self.control_system._get_process_noise() for i in range(self.max_steps)])
ep_dict = {"state": self.sample_state(), "reference": self.sample_reference(),
"constraint": self.sample_constraints(), "model": self.sample_model(),
"process_noise": {}, "tvp": {}}
s_i = 0
for s_name, s_props in self.config["plant"]["model"]["states"].items():
if "W" in s_props:
ep_dict["process_noise"][s_name] = process_noise[:, s_i]
s_i += 1
for tvp_name, tvp_obj in self.control_system.tvps.items():
tvp_obj.generate_values(self.max_steps)
ep_dict["tvp"][tvp_name] = tvp_obj.values
dataset.append(ep_dict)
self.reset()
return dataset
def set_value_function(self, input_ph, output_ph, tf_session):
self.control_system.controller.set_value_function(input_ph, output_ph, tf_session)
self.value_function_is_set = True
def set_learning_status(self, status):
if self.value_function_is_set:
self.control_system.controller.value_function.set_enabled(status)
if __name__ == "__main__":
env = LetMPCEnv("configs/cart_pendulum_horizon.json")
env.seed(0)
import pickle
with open("../../lmpc-horizon/datasets/cart_pendulum_10.pkl", "rb") as f:
test_set = pickle.load(f)
rews = {}
for i in range(1):
import time
obs = env.reset(**test_set[5])
done = False
t_before = time.process_time()
horizon = 10
while not done:
t_step = time.process_time()
if env.steps_count % 1 == 0 and False:
horizon = 25 if horizon == 50 else 50
obs, rew, done, info = env.step([horizon])
for rew_comp, v in info.items():
if rew_comp.startswith("reward/"):
if rew_comp not in rews:
rews[rew_comp] = []
rews[rew_comp].append(v)
if time.process_time() - t_step > 1:
print(env.control_system.controller.mpc.solver_stats)
print(env.steps_count)
for k, v in rews.items():
print("{}: {}".format(k, sum(v)))
print("Elapsed time {}".format(time.process_time() - t_before))
env.render()
| true | true |
f71b65b3b003148f57d2ed310d5f76f0d067c474 | 933 | py | Python | violas_client/canoser/bool_t.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | null | null | null | violas_client/canoser/bool_t.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | null | null | null | violas_client/canoser/bool_t.py | violas-core/violas-client | e8798f7d081ac218b78b81fd7eb2f8da92631a16 | [
"MIT"
] | 1 | 2022-01-05T06:49:42.000Z | 2022-01-05T06:49:42.000Z | from violas_client.canoser.base import Base
class BoolT(Base):
@classmethod
def encode(self, value):
if value:
return b'\1'
else:
return b'\0'
@classmethod
def decode_bytes(self, value):
if value == b'\0':
return False
elif value == b'\1':
return True
else:
raise TypeError("bool should be 0 or 1.")
@classmethod
def decode(self, cursor):
value = cursor.read_bytes(1)
return self.decode_bytes(value)
@classmethod
def from_value(cls, value):
if value:
return True
return False
@classmethod
def check_value(self, value):
if not isinstance(value, bool):
raise TypeError('value {} is not bool'.format(value))
@classmethod
def to_json_serializable(cls, value):
return value
| 23.923077 | 66 | 0.543408 | from violas_client.canoser.base import Base
class BoolT(Base):
@classmethod
def encode(self, value):
if value:
return b'\1'
else:
return b'\0'
@classmethod
def decode_bytes(self, value):
if value == b'\0':
return False
elif value == b'\1':
return True
else:
raise TypeError("bool should be 0 or 1.")
@classmethod
def decode(self, cursor):
value = cursor.read_bytes(1)
return self.decode_bytes(value)
@classmethod
def from_value(cls, value):
if value:
return True
return False
@classmethod
def check_value(self, value):
if not isinstance(value, bool):
raise TypeError('value {} is not bool'.format(value))
@classmethod
def to_json_serializable(cls, value):
return value
| true | true |
f71b6618acab7a74ff8f4e811e451717d08dc511 | 1,097 | py | Python | 4.conditionals/challenge3_rouillonh.py | rouillonh/ChallengePython | 7e7d9b69f60394fd1f00a6a4aa32f97de95b1b92 | [
"MIT"
] | null | null | null | 4.conditionals/challenge3_rouillonh.py | rouillonh/ChallengePython | 7e7d9b69f60394fd1f00a6a4aa32f97de95b1b92 | [
"MIT"
] | null | null | null | 4.conditionals/challenge3_rouillonh.py | rouillonh/ChallengePython | 7e7d9b69f60394fd1f00a6a4aa32f97de95b1b92 | [
"MIT"
] | null | null | null | print("\tWelcome to the Voter Registration App")
#Pedimos nombre y edad para asi registrar su voto
name = input("\nPlease enter your name: ").title()
age = int(input("Please enter your age: "))
partidos = ['Republican','Democratic','Independent','Libertarian','Green']
#Si es mayor de edad, podrá votar
if age >= 18:
#Dependiendo del partido que escoja, se imprimirá un mensaje
print("\nCongratulations ",name,"! You are old enough to register to vote.")
print("\nHere is a list of political parties to join.")
for i in partidos:
print("-",i)
p = input("\nWhat party would you like to join: ").capitalize()
if p in 'Republican,Democratic':
print("Congratulations ",name,"! You have joined the ",p," party!")
print("That is a major party!")
elif p == 'Independent':
print("Congratulations ",name,"! You have joined the ",p," party!")
print("You are an independent person!")
else:
print("That is not a major party")
#Si no lo es, no podrá hacerlo
elif age < 18:
print("You are not old enough to register to vote.")
| 43.88 | 80 | 0.65907 | print("\tWelcome to the Voter Registration App")
name = input("\nPlease enter your name: ").title()
age = int(input("Please enter your age: "))
partidos = ['Republican','Democratic','Independent','Libertarian','Green']
if age >= 18:
print("\nCongratulations ",name,"! You are old enough to register to vote.")
print("\nHere is a list of political parties to join.")
for i in partidos:
print("-",i)
p = input("\nWhat party would you like to join: ").capitalize()
if p in 'Republican,Democratic':
print("Congratulations ",name,"! You have joined the ",p," party!")
print("That is a major party!")
elif p == 'Independent':
print("Congratulations ",name,"! You have joined the ",p," party!")
print("You are an independent person!")
else:
print("That is not a major party")
elif age < 18:
print("You are not old enough to register to vote.")
| true | true |
f71b6745cb39d3ccd6a45e1c0ecd693cdffb6acf | 2,559 | py | Python | etc/mtrace/parse_mtrace.py | diamantopoulos/memluv | f3a283d65f07b19d48589e02ac484563e12e22e8 | [
"Apache-2.0"
] | 9 | 2015-12-16T08:05:06.000Z | 2022-02-25T08:29:30.000Z | etc/mtrace/parse_mtrace.py | diamantopoulos/memluv | f3a283d65f07b19d48589e02ac484563e12e22e8 | [
"Apache-2.0"
] | 1 | 2022-02-26T07:40:23.000Z | 2022-03-15T03:27:59.000Z | etc/mtrace/parse_mtrace.py | diamantopoulos/memluv | f3a283d65f07b19d48589e02ac484563e12e22e8 | [
"Apache-2.0"
] | null | null | null | """
Parsing a mtrace log file and append to timeline-footprint format
"""
from numpy import *
import numpy as np
import glob
import os
import linecache
import csv
def ValidHeapFile(fpath):
    """Return True when *fpath* exists and contains more than the header line.

    A heap log with no heap activity holds only its header (1038 bytes), so
    anything longer than one line indicates real allocation events.
    """
    header_lines = 1
    with open(fpath) as f:
        line_count = sum(1 for _ in f)
    return os.path.isfile(fpath) and line_count > header_lines
print ("INFO: --------------------- \nINFO: Parsing mtrace logs \nINFO: ---------------------")
mtrace_files = glob.glob("/tmp/mtrace*.txt")
mtraces=len(mtrace_files)
print ("INFO: Total mtrace logs found:", mtraces)
colours=['b','g','r','c','m','y','k']
# Total experiment duration; heap events are spread evenly across it.
elapsed_time=208000
total_bytes_allocated=0
index=0
fout = open("/tmp/mtrace.out",'w')
lines_parsed=0
event_time=0
# Heap-log parsing: walk every mtrace log and emit one line per heap event
# in "index time footprint" format.
for cur_mtrace in sorted(mtrace_files):
    if ValidHeapFile(cur_mtrace):
        fin = open(cur_mtrace,'r')
        total_lines = len(fin.readlines())
        # Time per event, excluding the 3 header/footer lines of the log.
        tic=elapsed_time/(total_lines-3)
        print ("total_lines = ", total_lines, "tic = ", tic)
        fin.close()
        fin = open(cur_mtrace,'r')
        for line in fin:
            line = line.rstrip().split(' ')
            # Skip the 2 header lines and the trailing line of each log.
            if lines_parsed>=2 and lines_parsed<total_lines-1:
                sign = line[2]
                if sign == '+':
                    # Allocation: field 4 holds the hex size of the chunk.
                    cur_bytes = line[4]
                    cur_bytes_dec = int(cur_bytes, 16)
                    total_bytes_allocated = total_bytes_allocated + cur_bytes_dec
                elif sign == '-':
                    # NOTE(review): a free subtracts the size of the *last
                    # allocation* (cur_bytes_dec), and a '-' before any '+'
                    # raises NameError -- confirm this approximation is OK.
                    total_bytes_allocated = total_bytes_allocated - cur_bytes_dec
                else:
                    print ("ERROR: Unknown sign", sign, "Aborting...")
                    # NOTE(review): the bare name `__exit__` is a NameError at
                    # runtime, not an abort -- probably meant sys.exit().
                    __exit__
                event_time=event_time+tic
                fout.write(str(index)+" "+str(event_time)+" "+str(total_bytes_allocated)+"\n")
                index=index+1
            else:
                print ("WARNING: Ignoring this line", line)
            lines_parsed=lines_parsed+1
    else:
        print ("INFO: Current mtrace path :", cur_mtrace, "-> Skipping empty file")
fin.close()
fout.close()
| 34.581081 | 126 | 0.569754 | from numpy import *
import numpy as np
import glob
import os
import linecache
import csv
def ValidHeapFile(fpath):
header_lines=1
with open(fpath) as f:
lines = len(list(f))
return True if os.path.isfile(fpath) and lines > header_lines else False
print ("INFO: --------------------- \nINFO: Parsing mtrace logs \nINFO: ---------------------")
mtrace_files = glob.glob("/tmp/mtrace*.txt")
mtraces=len(mtrace_files)
print ("INFO: Total mtrace logs found:", mtraces)
colours=['b','g','r','c','m','y','k']
elapsed_time=208000
total_bytes_allocated=0
index=0
fout = open("/tmp/mtrace.out",'w')
lines_parsed=0
event_time=0
for cur_mtrace in sorted(mtrace_files):
if ValidHeapFile(cur_mtrace):
fin = open(cur_mtrace,'r')
total_lines = len(fin.readlines())
tic=elapsed_time/(total_lines-3)
print ("total_lines = ", total_lines, "tic = ", tic)
fin.close()
fin = open(cur_mtrace,'r')
for line in fin:
line = line.rstrip().split(' ')
if lines_parsed>=2 and lines_parsed<total_lines-1:
sign = line[2]
if sign == '+':
cur_bytes = line[4]
cur_bytes_dec = int(cur_bytes, 16)
total_bytes_allocated = total_bytes_allocated + cur_bytes_dec
elif sign == '-':
total_bytes_allocated = total_bytes_allocated - cur_bytes_dec
else:
print ("ERROR: Unknown sign", sign, "Aborting...")
__exit__
event_time=event_time+tic
fout.write(str(index)+" "+str(event_time)+" "+str(total_bytes_allocated)+"\n")
index=index+1
else:
print ("WARNING: Ignoring this line", line)
lines_parsed=lines_parsed+1
else:
print ("INFO: Current mtrace path :", cur_mtrace, "-> Skipping empty file")
fin.close()
fout.close()
| true | true |
f71b675d58f0489d8b6561c581bfe700396f87fb | 965 | py | Python | python/day12-2.py | Aerdan/adventcode-2020 | 83120aa8c7fc9d1f2d34780610401e3c6d4f583b | [
"BSD-1-Clause"
] | null | null | null | python/day12-2.py | Aerdan/adventcode-2020 | 83120aa8c7fc9d1f2d34780610401e3c6d4f583b | [
"BSD-1-Clause"
] | null | null | null | python/day12-2.py | Aerdan/adventcode-2020 | 83120aa8c7fc9d1f2d34780610401e3c6d4f583b | [
"BSD-1-Clause"
] | null | null | null | #!/usr/bin/env python3
# Advent of Code 2020, day 12 part 2: waypoint navigation.
#
# The ship chases a waypoint stored *relative* to the ship:
#   N/E/S/W move the waypoint, L/R rotate it about the ship, and
#   F moves the ship toward the waypoint `dist` times.
# The answer is the ship's final Manhattan distance from the origin.
from math import sin, cos, radians


def _rotate(px, py, deg):
    """Rotate point (px, py) counter-clockwise by `deg` degrees about (0, 0).

    Returns integer coordinates; the puzzle input only uses multiples of
    90 degrees, so rounding the trig result back to int is exact.
    """
    rad = radians(deg)
    return (round(px * cos(rad) - py * sin(rad)),
            round(py * cos(rad) + px * sin(rad)))


# One instruction per line, e.g. "F10", "R90".
data = []
with open('input12.txt') as f:
    for line in f:
        data.append(line.strip())

# (x, y): waypoint position, relative to the ship.  (sx, sy): ship position.
# (The unused part-1 leftovers `d`/`c` have been dropped.)
x, y = 10, 1
sx, sy = 0, 0

for line in data:
    insn = line[0]
    dist = int(line[1:])
    if insn == 'F':
        # Moving toward a fixed waypoint `dist` times is a single
        # multiplication (the original looped `dist` times for the
        # same integer result).
        sx += x * dist
        sy += y * dist
    elif insn == 'N':
        y += dist
    elif insn == 'E':
        x += dist
    elif insn == 'S':
        y -= dist
    elif insn == 'W':
        x -= dist
    elif insn == 'L':
        x, y = _rotate(x, y, dist)
    elif insn == 'R':
        # Clockwise by `dist` == counter-clockwise by 360 - dist.
        x, y = _rotate(x, y, 360 - dist)

# Manhattan distance of the ship from the origin.
md = abs(sx) + abs(sy)
print(sx, sy, md)
| 19.693878 | 42 | 0.449741 |
# Advent of Code 2020, day 12 part 2 (duplicate, comment-stripped copy):
# waypoint navigation; prints the ship's final Manhattan distance.
from math import sin, cos, radians
# One instruction per line, e.g. "F10", "R90".
data = []
with open('input12.txt') as f:
    for line in f:
        data.append(line.strip())
# (x, y): waypoint position relative to the ship; (sx, sy): ship position.
x, y = 10, 1
sx, sy = 0, 0
# NOTE(review): `d` and `c` are unused leftovers from the part-1 solution.
d = 'E'
c = 'NESW'
for line in data:
    insn = line[0]
    dist = int(line[1:])
    if insn == 'F':
        # Move the ship toward the (unchanged) waypoint `dist` times.
        for i in range(dist):
            sx += x
            sy += y
    elif insn == 'N':
        y += dist
    elif insn == 'E':
        x += dist
    elif insn == 'S':
        y -= dist
    elif insn == 'W':
        x -= dist
    elif insn == 'L':
        # Rotate the waypoint counter-clockwise about the ship; input uses
        # multiples of 90 degrees, so round() restores exact integers.
        dist = radians(dist)
        nx = x * cos(dist) - y * sin(dist)
        ny = y * cos(dist) + x * sin(dist)
        x = round(nx)
        y = round(ny)
    elif insn == 'R':
        # Clockwise by `dist` == counter-clockwise by 360 - dist.
        dist = radians(360 - dist)
        nx = x * cos(dist) - y * sin(dist)
        ny = y * cos(dist) + x * sin(dist)
        x = round(nx)
        y = round(ny)
# Manhattan distance of the ship from the origin.
md = abs(sx) + abs(sy)
print(sx, sy, md)
| true | true |
f71b693c8f73a9ec5102fb39ced2b8f6a4ea8b4b | 511 | py | Python | tcfcli/cmds/local/libs/local/debug_context.py | tencentyun/scfcli | ef15508ad34a851cf0d2750dfaa5202f6a600887 | [
"Apache-2.0"
] | 103 | 2019-06-11T06:09:56.000Z | 2021-12-18T22:48:59.000Z | tcfcli/cmds/local/libs/local/debug_context.py | TencentCloud/Serverless-cli | 57f98b24cfd10712770a4806212cfb69d981a11a | [
"Apache-2.0"
] | 8 | 2019-07-12T12:08:40.000Z | 2020-10-20T07:18:17.000Z | tcfcli/cmds/local/libs/local/debug_context.py | TencentCloud/Serverless-cli | 57f98b24cfd10712770a4806212cfb69d981a11a | [
"Apache-2.0"
] | 49 | 2019-06-11T06:26:05.000Z | 2020-02-19T08:13:36.000Z | # -*- coding: utf-8 -*-
import os
class DebugContext(object):
    """Settings for attaching a debugger to a locally invoked function.

    The context is truthy exactly when a debug port has been supplied;
    in that case output buffering is disabled via PYTHONUNBUFFERED so
    debugger/log output appears immediately.
    """

    def __init__(self, debug_port=None, debugger_path=None, debug_args=None):
        self.debug_port = debug_port
        self.debugger_path = debugger_path
        self.debug_args = debug_args
        if debug_port:
            # Debugging requested: force unbuffered output.
            os.environ["PYTHONUNBUFFERED"] = "1"

    def __bool__(self):
        # Truthiness mirrors "was a debug port configured?".
        return bool(self.debug_port)

    def __nonzero__(self):
        # Python 2 protocol name; delegate to the py3 implementation.
        return self.__bool__()
| 21.291667 | 48 | 0.579256 |
import os
class DebugContext(object):
    """Settings for attaching a debugger to a locally invoked function
    (duplicate copy).

    Truthy exactly when a debug port was supplied.
    """
    def __init__(self,
                 debug_port=None,
                 debugger_path=None,
                 debug_args=None):
        # Port the debugger will attach to; None disables debugging.
        self.debug_port = debug_port
        # Host path of the debugger binary, if any.
        self.debugger_path = debugger_path
        # Extra command-line arguments forwarded to the debugger.
        self.debug_args = debug_args
        if self.debug_port:
            # Disable output buffering so debugger/log output is immediate.
            os.environ["PYTHONUNBUFFERED"] = "1"
    def __bool__(self):
        # Truthiness == "debugging requested".
        return bool(self.debug_port)
    def __nonzero__(self):
        # Python 2 protocol name; delegate to __bool__.
        return self.__bool__()
| true | true |
f71b6aed9afed4cf56533fb2127e350f2b0dc11b | 289 | py | Python | tests/integration/test_notes.py | mhk001/python-alerta-client | 6e02f8a2245cef223df3048d445921e1ba90ad1c | [
"Apache-2.0"
] | 20 | 2017-04-14T08:05:48.000Z | 2022-01-11T06:26:17.000Z | tests/integration/test_notes.py | mhk001/python-alerta-client | 6e02f8a2245cef223df3048d445921e1ba90ad1c | [
"Apache-2.0"
] | 99 | 2016-09-30T20:53:05.000Z | 2022-03-14T10:00:59.000Z | tests/integration/test_notes.py | mhk001/python-alerta-client | 6e02f8a2245cef223df3048d445921e1ba90ad1c | [
"Apache-2.0"
] | 33 | 2016-10-04T20:44:58.000Z | 2022-03-04T21:35:49.000Z | import unittest
from alertaclient.api import Client
class AlertTestCase(unittest.TestCase):
    """Integration tests for alerta note endpoints (placeholder)."""
    def setUp(self):
        # Client for the integration-test server; 'api' presumably resolves
        # inside the test compose network -- TODO confirm.
        self.client = Client(endpoint='http://api:8080', key='demo-key')
    def test_notes(self):
        # add tests here when /notes endpoints are created
        pass
| 20.642857 | 72 | 0.681661 | import unittest
from alertaclient.api import Client
class AlertTestCase(unittest.TestCase):
    """Integration tests for alerta note endpoints (duplicate,
    comment-stripped copy)."""
    def setUp(self):
        # Client for the integration-test server; 'api' presumably resolves
        # inside the test compose network -- TODO confirm.
        self.client = Client(endpoint='http://api:8080', key='demo-key')
    def test_notes(self):
        # Placeholder until /notes endpoints exist.
        pass
| true | true |
f71b6b5b67e80a03f5062113889382389fc8dc72 | 29,281 | py | Python | resource/pypi/cryptography-1.7.1/tests/hazmat/primitives/fixtures_rsa.py | hipnusleo/Laserjet | f53e0b740f48f2feb0c0bb285ec6728b313b4ccc | [
"Apache-2.0"
] | null | null | null | resource/pypi/cryptography-1.7.1/tests/hazmat/primitives/fixtures_rsa.py | hipnusleo/Laserjet | f53e0b740f48f2feb0c0bb285ec6728b313b4ccc | [
"Apache-2.0"
] | null | null | null | resource/pypi/cryptography-1.7.1/tests/hazmat/primitives/fixtures_rsa.py | hipnusleo/Laserjet | f53e0b740f48f2feb0c0bb285ec6728b313b4ccc | [
"Apache-2.0"
] | null | null | null | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateNumbers, RSAPublicNumbers
)
# 512-bit RSA private-key test fixture.  The hex integers are the CRT
# components of one key pair: p, q (primes), d (private exponent),
# dmp1 = d mod (p-1), dmq1 = d mod (q-1), iqmp = q^-1 mod p, plus the
# public numbers (e, n = p*q).  Do not edit the vectors by hand.
RSA_KEY_512 = RSAPrivateNumbers(
    p=int(
        "d57846898d5c0de249c08467586cb458fa9bc417cdf297f73cfc52281b787cd9", 16
    ),
    q=int(
        "d10f71229e87e010eb363db6a85fd07df72d985b73c42786191f2ce9134afb2d", 16
    ),
    d=int(
        "272869352cacf9c866c4e107acc95d4c608ca91460a93d28588d51cfccc07f449"
        "18bbe7660f9f16adc2b4ed36ca310ef3d63b79bd447456e3505736a45a6ed21", 16
    ),
    dmp1=int(
        "addff2ec7564c6b64bc670d250b6f24b0b8db6b2810099813b7e7658cecf5c39", 16
    ),
    dmq1=int(
        "463ae9c6b77aedcac1397781e50e4afc060d4b216dc2778494ebe42a6850c81", 16
    ),
    iqmp=int(
        "54deef8548f65cad1d411527a32dcb8e712d3e128e4e0ff118663fae82a758f4", 16
    ),
    public_numbers=RSAPublicNumbers(
        e=65537,
        n=int(
            "ae5411f963c50e3267fafcf76381c8b1e5f7b741fdb2a544bcf48bd607b10c991"
            "90caeb8011dc22cf83d921da55ec32bd05cac3ee02ca5e1dbef93952850b525",
            16
        ),
    )
)
RSA_KEY_512_ALT = RSAPrivateNumbers(
p=int(
"febe19c29a0b50fefa4f7b1832f84df1caf9be8242da25c9d689e18226e67ce5",
16),
q=int(
"eb616c639dd999feda26517e1c77b6878f363fe828c4e6670ec1787f28b1e731",
16),
d=int(
"80edecfde704a806445a4cc782b85d3f36f17558f385654ea767f006470fdfcbda5e2"
"206839289d3f419b4e4fb8e1acee1b4fb9c591f69b64ec83937f5829241", 16),
dmp1=int(
"7f4fa06e2a3077a54691cc5216bf13ad40a4b9fa3dd0ea4bca259487484baea5",
16),
dmq1=int(
"35eaa70d5a8711c352ed1c15ab27b0e3f46614d575214535ae279b166597fac1",
16),
iqmp=int(
"cc1f272de6846851ec80cb89a02dbac78f44b47bc08f53b67b4651a3acde8b19",
16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"ea397388b999ef0f7e7416fa000367efd9a0ba0deddd3f8160d1c36d62267f210"
"fbd9c97abeb6654450ff03e7601b8caa6c6f4cba18f0b52c179d17e8f258ad5",
16),
)
)
RSA_KEY_522 = RSAPrivateNumbers(
p=int(
"1a8aab9a069f92b52fdf05824f2846223dc27adfc806716a247a77d4c36885e4bf",
16),
q=int(
"19e8d620d177ec54cdb733bb1915e72ef644b1202b889ceb524613efa49c07eb4f",
16),
d=int(
"10b8a7c0a92c1ae2d678097d69db3bfa966b541fb857468291d48d1b52397ea2bac0d"
"4370c159015c7219e3806a01bbafaffdd46f86e3da1e2d1fe80a0369ccd745", 16),
dmp1=int(
"3eb6277f66e6e2dcf89f1b8529431f730839dbd9a3e49555159bc8470eee886e5",
16),
dmq1=int(
"184b4d74aa54c361e51eb23fee4eae5e4786b37b11b6e0447af9c0b9c4e4953c5b",
16),
iqmp=int(
"f80e9ab4fa7b35d0d232ef51c4736d1f2dcf2c7b1dd8716211b1bf1337e74f8ae",
16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"2afaea0e0bb6fca037da7d190b5270a6c665bc18e7a456f7e69beaac4433db748"
"ba99acdd14697e453bca596eb35b47f2d48f1f85ef08ce5109dad557a9cf85ebf"
"1", 16),
),
)
RSA_KEY_599 = RSAPrivateNumbers(
p=int(
"cf95d20be0c7af69f4b3d909f65d858c26d1a7ef34da8e3977f4fa230580e58814b54"
"24be99", 16),
q=int(
"6052be4b28debd4265fe12ace5aa4a0c4eb8d63ff8853c66824b35622161eb48a3bc8"
"c3ada5", 16),
d=int(
"69d9adc465e61585d3142d7cc8dd30605e8d1cbbf31009bc2cd5538dc40528d5d68ee"
"fe6a42d23674b6ec76e192351bf368c8968f0392110bf1c2825dbcff071270b80adcc"
"fa1d19d00a1", 16),
dmp1=int(
"a86d10edde456687fba968b1f298d2e07226adb1221b2a466a93f3d83280f0bb46c20"
"2b6811", 16),
dmq1=int(
"40d570e08611e6b1da94b95d46f8e7fe80be48f7a5ff8838375b08039514a399b11c2"
"80735", 16),
iqmp=int(
"cd051cb0ea68b88765c041262ace2ec4db11dab14afd192742e34d5da3328637fabdf"
"bae26e", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"4e1b470fe00642426f3808e74c959632dd67855a4c503c5b7876ccf4dc7f6a1a4"
"9107b90d26daf0a7879a6858218345fbc6e59f01cd095ca5647c27c25265e6c47"
"4fea89537191c7073d9d", 16),
)
)
RSA_KEY_745 = RSAPrivateNumbers(
p=int(
"1c5a0cfe9a86debd19eca33ba961f15bc598aa7983a545ce775b933afc89eb51bcf90"
"836257fdd060d4b383240241d", 16
),
q=int(
"fb2634f657f82ee6b70553382c4e2ed26b947c97ce2f0016f1b282cf2998184ad0527"
"a9eead826dd95fe06b57a025", 16
),
d=int(
"402f30f976bc07d15ff0779abff127b20a8b6b1d0024cc2ad8b6762d38f174f81e792"
"3b49d80bdbdd80d9675cbc7b2793ec199a0430eb5c84604dacfdb29259ae6a1a44676"
"22f0b23d4cb0f5cb1db4b8173c8d9d3e57a74dbd200d2141", 16),
dmp1=int(
"e5e95b7751a6649f199be21bef7a51c9e49821d945b6fc5f538b4a670d8762c375b00"
"8e70f31d52b3ea2bd14c3101", 16),
dmq1=int(
"12b85d5843645f72990fcf8d2f58408b34b3a3b9d9078dd527fceb5d2fb7839008092"
"dd4aca2a1fb00542801dcef5", 16),
iqmp=int(
"5672740d947f621fc7969e3a44ec26736f3f819863d330e63e9409e139d20753551ac"
"c16544dd2bdadb9dee917440", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"1bd085f92237774d34013b477ceebbb2f2feca71118db9b7429341477947e7b1d"
"04e8c43ede3c52bb25781af58d4ff81289f301eac62dc3bcd7dafd7a4d5304e9f"
"308e766952fbf2b62373e66611fa53189987dbef9f7243dcbbeb25831", 16),
)
)
RSA_KEY_768 = RSAPrivateNumbers(
p=int(
"f80c0061b607f93206b68e208906498d68c6e396faf457150cf975c8f849848465869"
"7ecd402313397088044c4c2071b", 16),
q=int(
"e5b5dbecc93c6d306fc14e6aa9737f9be2728bc1a326a8713d2849b34c1cb54c63468"
"3a68abb1d345dbf15a3c492cf55", 16),
d=int(
"d44601442255ffa331212c60385b5e898555c75c0272632ff42d57c4b16ca97dbca9f"
"d6d99cd2c9fd298df155ed5141b4be06c651934076133331d4564d73faed7ce98e283"
"2f7ce3949bc183be7e7ca34f6dd04a9098b6c73649394b0a76c541", 16),
dmp1=int(
"a5763406fa0b65929661ce7b2b8c73220e43a5ebbfe99ff15ddf464fd238105ad4f2a"
"c83818518d70627d8908703bb03", 16),
dmq1=int(
"cb467a9ef899a39a685aecd4d0ad27b0bfdc53b68075363c373d8eb2bed8eccaf3533"
"42f4db735a9e087b7539c21ba9d", 16),
iqmp=int(
"5fe86bd3aee0c4d09ef11e0530a78a4534c9b833422813b5c934a450c8e564d8097a0"
"6fd74f1ebe2d5573782093f587a", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"de92f1eb5f4abf426b6cac9dd1e9bf57132a4988b4ed3f8aecc15e251028bd6df"
"46eb97c711624af7db15e6430894d1b640c13929329241ee094f5a4fe1a20bc9b"
"75232320a72bc567207ec54d6b48dccb19737cf63acc1021abb337f19130f7",
16),
)
)
RSA_KEY_1024 = RSAPrivateNumbers(
p=int(
"ea4d9d9a1a068be44b9a5f8f6de0512b2c5ba1fb804a4655babba688e6e890b347c1a"
"7426685a929337f513ae4256f0b7e5022d642237f960c5b24b96bee8e51", 16),
q=int(
"cffb33e400d6f08b410d69deb18a85cf0ed88fcca9f32d6f2f66c62143d49aff92c11"
"4de937d4f1f62d4635ee89af99ce86d38a2b05310f3857c7b5d586ac8f9", 16),
d=int(
"3d12d46d04ce942fb99be7bf30587b8cd3e21d75a2720e7bda1b867f1d418d91d8b9f"
"e1c00181fdde94f2faf33b4e6f800a1b3ae3b972ccb6d5079dcb6c794070ac8306d59"
"c00b58b7a9a81122a6b055832de7c72334a07494d8e7c9fbeed2cc37e011d9e6bfc6e"
"9bcddbef7f0f5771d9cf82cd4b268c97ec684575c24b6c881", 16),
dmp1=int(
"470f2b11257b7ec9ca34136f487f939e6861920ad8a9ae132a02e74af5dceaa5b4c98"
"2949ccb44b67e2bcad2f58674db237fe250e0d62b47b28fa1dfaa603b41", 16),
dmq1=int(
"c616e8317d6b3ae8272973709b80e8397256697ff14ea03389de454f619f99915a617"
"45319fefbe154ec1d49441a772c2f63f7d15c478199afc60469bfd0d561", 16),
iqmp=int(
"d15e7c9ad357dfcd5dbdc8427680daf1006761bcfba93a7f86589ad88832a8d564b1c"
"d4291a658c96fbaea7ca588795820902d85caebd49c2d731e3fe0243130", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"be5aac07456d990133ebce69c06b48845b972ab1ad9f134bc5683c6b5489b5119"
"ede07be3bed0e355d48e0dfab1e4fb5187adf42d7d3fb0401c082acb8481bf17f"
"0e871f8877be04c3a1197d40aa260e2e0c48ed3fd2b93dc3fc0867591f67f3cd6"
"0a77adee1d68a8c3730a5702485f6ac9ede7f0fd2918e037ee4cc1fc1b4c9",
16),
)
)
RSA_KEY_1025 = RSAPrivateNumbers(
p=int(
"18e9bfb7071725da04d31c103fa3563648c69def43a204989214eb57b0c8b299f9ef3"
"5dda79a62d8d67fd2a9b69fbd8d0490aa2edc1e111a2b8eb7c737bb691a5", 16),
q=int(
"d8eccaeeb95815f3079d13685f3f72ca2bf2550b349518049421375df88ca9bbb4ba8"
"cb0e3502203c9eeae174112509153445d251313e4711a102818c66fcbb7", 16),
d=int(
"fe9ac54910b8b1bc948a03511c54cab206a1d36d50d591124109a48abb7480977ccb0"
"47b4d4f1ce7b0805df2d4fa3fe425f49b78535a11f4b87a4eba0638b3340c23d4e6b2"
"1ecebe9d5364ea6ead2d47b27836019e6ecb407000a50dc95a8614c9d0031a6e3a524"
"d2345cfb76e15c1f69d5ba35bdfb6ec63bcb115a757ef79d9", 16),
dmp1=int(
"18537e81006a68ea76d590cc88e73bd26bc38d09c977959748e5265c0ce21c0b5fd26"
"53d975f97ef759b809f791487a8fff1264bf561627fb4527a3f0bbb72c85", 16),
dmq1=int(
"c807eac5a1f1e1239f04b04dd16eff9a00565127a91046fa89e1eb5d6301cace85447"
"4d1f47b0332bd35b4214b66e9166953241538f761f30d969272ee214f17", 16),
iqmp=int(
"133aa74dd41fe70fa244f07d0c4091a22f8c8f0134fe6aea9ec8b55383b758fefe358"
"2beec36eca91715eee7d21931f24fa9e97e8e3a50f9cd0f731574a5eafcc", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"151c44fed756370fb2d4a0e6ec7dcac84068ca459b6aaf22daf902dca72c77563"
"bf276fe3523f38f5ddaf3ea9aa88486a9d8760ff732489075862bee0e599de5c5"
"f509b4519f4f446521bad15cd279a498fe1e89107ce0d237e3103d7c5eb801666"
"42e2924b152aebff97b71fdd2d68ebb45034cc784e2e822ff6d1edf98af3f3",
16),
)
)
RSA_KEY_1026 = RSAPrivateNumbers(
p=int(
"1fcbfb8719c5bdb5fe3eb0937c76bb096e750b9442dfe31d6a877a13aed2a6a4e9f79"
"40f815f1c307dd6bc2b4b207bb6fe5be3a15bd2875a957492ce197cdedb1", 16),
q=int(
"1f704a0f6b8966dd52582fdc08227dd3dbaeaa781918b41144b692711091b4ca4eb62"
"985c3513853828ce8739001dfba9a9a7f1a23cbcaf74280be925e2e7b50d", 16),
d=int(
"c67975e35a1d0d0b3ebfca736262cf91990cb31cf4ac473c0c816f3bc2720bcba2475"
"e8d0de8535d257816c0fc53afc1b597eada8b229069d6ef2792fc23f59ffb4dc6c3d9"
"0a3c462082025a4cba7561296dd3d8870c4440d779406f00879afe2c681e7f5ee055e"
"ff829e6e55883ec20830c72300762e6e3a333d94b4dbe4501", 16),
dmp1=int(
"314730ca7066c55d086a9fbdf3670ef7cef816b9efea8b514b882ae9d647217cf41d7"
"e9989269dc9893d02e315cb81f058c49043c2cac47adea58bdf5e20e841", 16),
dmq1=int(
"1da28a9d687ff7cfeebc2439240de7505a8796376968c8ec723a2b669af8ce53d9c88"
"af18540bd78b2da429014923fa435f22697ac60812d7ca9c17a557f394cd", 16),
iqmp=int(
"727947b57b8a36acd85180522f1b381bce5fdbd962743b3b14af98a36771a80f58ddd"
"62675d72a5935190da9ddc6fd6d6d5e9e9f805a2e92ab8d56b820493cdf", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"3e7a5e6483e55eb8b723f9c46732d21b0af9e06a4a1099962d67a35ee3f62e312"
"9cfae6ab0446da18e26f33e1d753bc1cc03585c100cf0ab5ef056695706fc8b0c"
"9c710cd73fe6e5beda70f515a96fabd3cc5ac49efcb2594b220ff3b603fcd927f"
"6a0838ef04bf52f3ed9eab801f09e5aed1613ddeb946ed0fbb02060b3a36fd",
16),
)
)
RSA_KEY_1027 = RSAPrivateNumbers(
p=int(
"30135e54cfb072c3d3eaf2000f3ed92ceafc85efc867b9d4bf5612f2978c432040093"
"4829f741c0f002b54af2a4433ff872b6321ef00ff1e72cba4e0ced937c7d", 16),
q=int(
"1d01a8aead6f86b78c875f18edd74214e06535d65da054aeb8e1851d6f3319b4fb6d8"
"6b01e07d19f8261a1ded7dc08116345509ab9790e3f13e65c037e5bb7e27", 16),
d=int(
"21cf4477df79561c7818731da9b9c88cd793f1b4b8e175bd0bfb9c0941a4dc648ecf1"
"6d96b35166c9ea116f4c2eb33ce1c231e641a37c25e54c17027bdec08ddafcb83642e"
"795a0dd133155ccc5eed03b6e745930d9ac7cfe91f9045149f33295af03a2198c660f"
"08d8150d13ce0e2eb02f21ac75d63b55822f77bd5be8d07619", 16),
dmp1=int(
"173fb695931e845179511c18b546b265cb79b517c135902377281bdf9f34205e1f399"
"4603ad63e9f6e7885ea73a929f03fa0d6bed943051ce76cddde2d89d434d", 16),
dmq1=int(
"10956b387b2621327da0c3c8ffea2af8be967ee25163222746c28115a406e632a7f12"
"5a9397224f1fa5c116cd3a313e5c508d31db2deb83b6e082d213e33f7fcf", 16),
iqmp=int(
"234f833949f2c0d797bc6a0e906331e17394fa8fbc8449395766d3a8d222cf6167c48"
"8e7fe1fe9721d3e3b699a595c8e6f063d92bd840dbc84d763b2b37002109", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"57281707d7f9b1369c117911758980e32c05b133ac52c225bcf68b79157ff47ea"
"0a5ae9f579ef1fd7e42937f921eb3123c4a045cc47a2159fbbf904783e654954c"
"42294c30a95c15db7c7b91f136244e548f62474b137087346c5522e54f226f49d"
"6c93bc58cb39972e41bde452bb3ae9d60eb93e5e1ce91d222138d9890c7d0b",
16),
)
)
RSA_KEY_1028 = RSAPrivateNumbers(
p=int(
"359d17378fae8e9160097daee78a206bd52efe1b757c12a6da8026cc4fc4bb2620f12"
"b8254f4db6aed8228be8ee3e5a27ec7d31048602f01edb00befd209e8c75", 16),
q=int(
"33a2e70b93d397c46e63b273dcd3dcfa64291342a6ce896e1ec8f1c0edc44106550f3"
"c06e7d3ca6ea29eccf3f6ab5ac6235c265313d6ea8e8767e6a343f616581", 16),
d=int(
"880640088d331aa5c0f4cf2887809a420a2bc086e671e6ffe4e47a8c80792c038a314"
"9a8e45ef9a72816ab45b36e3af6800351067a6b2751843d4232413146bb575491463a"
"8addd06ce3d1bcf7028ec6c5d938c545a20f0a40214b5c574ca7e840062b2b5f8ed49"
"4b144bb2113677c4b10519177fee1d4f5fb8a1c159b0b47c01", 16),
dmp1=int(
"75f8c52dad2c1cea26b8bba63236ee4059489e3d2db766136098bcc6b67fde8f77cd3"
"640035107bfb1ffc6480983cfb84fe0c3be008424ebc968a7db7e01f005", 16),
dmq1=int(
"3893c59469e4ede5cd0e6ff9837ca023ba9b46ff40c60ccf1bec10f7d38db5b1ba817"
"6c41a3f750ec4203b711455aca06d1e0adffc5cffa42bb92c7cb77a6c01", 16),
iqmp=int(
"ad32aafae3c962ac25459856dc8ef1f733c3df697eced29773677f435d186cf759d1a"
"5563dd421ec47b4d7e7f12f29647c615166d9c43fc49001b29089344f65", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"ad0696bef71597eb3a88e135d83c596930cac73868fbd7e6b2d64f34eea5c28cc"
"e3510c68073954d3ba4deb38643e7a820a4cf06e75f7f82eca545d412bd637819"
"45c28d406e95a6cced5ae924a8bfa4f3def3e0250d91246c269ec40c89c93a85a"
"cd3770ba4d2e774732f43abe94394de43fb57f93ca25f7a59d75d400a3eff5",
16),
)
)
RSA_KEY_1029 = RSAPrivateNumbers(
p=int(
"66f33e513c0b6b6adbf041d037d9b1f0ebf8de52812a3ac397a963d3f71ba64b3ad04"
"e4d4b5e377e6fa22febcac292c907dc8dcfe64c807fd9a7e3a698850d983", 16),
q=int(
"3b47a89a19022461dcc2d3c05b501ee76955e8ce3cf821beb4afa85a21a26fd7203db"
"deb8941f1c60ada39fd6799f6c07eb8554113f1020460ec40e93cd5f6b21", 16),
d=int(
"280c42af8b1c719821f2f6e2bf5f3dd53c81b1f3e1e7cc4fce6e2f830132da0665bde"
"bc1e307106b112b52ad5754867dddd028116cf4471bc14a58696b99524b1ad8f05b31"
"cf47256e54ab4399b6a073b2c0452441438dfddf47f3334c13c5ec86ece4d33409056"
"139328fafa992fb5f5156f25f9b21d3e1c37f156d963d97e41", 16),
dmp1=int(
"198c7402a4ec10944c50ab8488d7b5991c767e75eb2817bd427dff10335ae141fa2e8"
"7c016dc22d975cac229b9ffdf7d943ddfd3a04b8bf82e83c3b32c5698b11", 16),
dmq1=int(
"15fd30c7687b68ef7c2a30cdeb913ec56c4757c218cf9a04d995470797ee5f3a17558"
"fbb6d00af245d2631d893b382da48a72bc8a613024289895952ab245b0c1", 16),
iqmp=int(
"4f8fde17e84557a3f4e242d889e898545ab55a1a8e075c9bb0220173ccffe84659abe"
"a235104f82e32750309389d4a52af57dbb6e48d831917b6efeb190176570", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"17d6e0a09aa5b2d003e51f43b9c37ffde74688f5e3b709fd02ef375cb6b8d15e2"
"99a9f74981c3eeaaf947d5c2d64a1a80f5c5108a49a715c3f7be95a016b8d3300"
"965ead4a4df76e642d761526803e9434d4ec61b10cb50526d4dcaef02593085de"
"d8c331c1b27b200a45628403065efcb2c0a0ca1f75d648d40a007fbfbf2cae3",
16),
)
)
RSA_KEY_1030 = RSAPrivateNumbers(
p=int(
"6f4ac8a8172ef1154cf7f80b5e91de723c35a4c512860bfdbafcc3b994a2384bf7796"
"3a2dd0480c7e04d5d418629651a0de8979add6f47b23da14c27a682b69c9", 16),
q=int(
"65a9f83e07dea5b633e036a9dccfb32c46bf53c81040a19c574c3680838fc6d28bde9"
"55c0ff18b30481d4ab52a9f5e9f835459b1348bbb563ad90b15a682fadb3", 16),
d=int(
"290db707b3e1a96445ae8ea93af55a9f211a54ebe52995c2eb28085d1e3f09c986e73"
"a00010c8e4785786eaaa5c85b98444bd93b585d0c24363ccc22c482e150a3fd900176"
"86968e4fa20423ae72823b0049defceccb39bb34aa4ef64e6b14463b76d6a871c859e"
"37285455b94b8e1527d1525b1682ac6f7c8fd79d576c55318c1", 16),
dmp1=int(
"23f7fa84010225dea98297032dac5d45745a2e07976605681acfe87e0920a8ab3caf5"
"9d9602f3d63dc0584f75161fd8fff20c626c21c5e02a85282276a74628a9", 16),
dmq1=int(
"18ebb657765464a8aa44bf019a882b72a2110a77934c54915f70e6375088b10331982"
"962bce1c7edd8ef9d3d95aa2566d2a99da6ebab890b95375919408d00f33", 16),
iqmp=int(
"3d59d208743c74054151002d77dcdfc55af3d41357e89af88d7eef2767be54c290255"
"9258d85cf2a1083c035a33e65a1ca46dc8b706847c1c6434cef7b71a9dae", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"2c326574320818a6a8cb6b3328e2d6c1ba2a3f09b6eb2bc543c03ab18eb5efdaa"
"8fcdbb6b4e12168304f587999f9d96a421fc80cb933a490df85d25883e6a88750"
"d6bd8b3d4117251eee8f45e70e6daac7dbbd92a9103c623a09355cf00e3f16168"
"e38b9c4cb5b368deabbed8df466bc6835eaba959bc1c2f4ec32a09840becc8b",
16),
)
)
RSA_KEY_1031 = RSAPrivateNumbers(
p=int(
"c0958c08e50137db989fb7cc93abf1984543e2f955d4f43fb2967f40105e79274c852"
"293fa06ce63ca8436155e475ed6d1f73fea4c8e2516cc79153e3dc83e897", 16),
q=int(
"78cae354ea5d6862e5d71d20273b7cddb8cdfab25478fe865180676b04250685c4d03"
"30c216574f7876a7b12dfe69f1661d3b0cea6c2c0dcfb84050f817afc28d", 16),
d=int(
"1d55cc02b17a5d25bfb39f2bc58389004d0d7255051507f75ef347cdf5519d1a00f4b"
"d235ce4171bfab7bdb7a6dcfae1cf41433fb7da5923cc84f15a675c0b83492c95dd99"
"a9fc157aea352ffdcbb5d59dbc3662171d5838d69f130678ee27841a79ef64f679ce9"
"3821fa69c03f502244c04b737edad8967def8022a144feaab29", 16),
dmp1=int(
"5b1c2504ec3a984f86b4414342b5bcf59a0754f13adf25b2a0edbc43f5ba8c3cc061d"
"80b03e5866d059968f0d10a98deaeb4f7830436d76b22cf41f2914e13eff", 16),
dmq1=int(
"6c361e1819691ab5d67fb2a8f65c958d301cdf24d90617c68ec7005edfb4a7b638cde"
"79d4b61cfba5c86e8c0ccf296bc7f611cb8d4ae0e072a0f68552ec2d5995", 16),
iqmp=int(
"b7d61945fdc8b92e075b15554bab507fa8a18edd0a18da373ec6c766c71eece61136a"
"84b90b6d01741d40458bfad17a9bee9d4a8ed2f6e270782dc3bf5d58b56e", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"5adebaa926ea11fb635879487fdd53dcfbb391a11ac7279bb3b4877c9b811370a"
"9f73da0690581691626d8a7cf5d972cced9c2091ccf999024b23b4e6dc6d99f80"
"a454737dec0caffaebe4a3fac250ed02079267c8f39620b5ae3e125ca35338522"
"dc9353ecac19cb2fe3b9e3a9291619dbb1ea3a7c388e9ee6469fbf5fb22892b",
16),
)
)
RSA_KEY_1536 = RSAPrivateNumbers(
p=int(
"f1a65fa4e2aa6e7e2b560251e8a4cd65b625ad9f04f6571785782d1c213d91c961637"
"0c572f2783caf2899f7fb690cf99a0184257fbd4b071b212c88fb348279a5387e61f1"
"17e9c62980c45ea863fa9292087c0f66ecdcde6443d5a37268bf71", 16),
q=int(
"e54c2cbc3839b1da6ae6fea45038d986d6f523a3ae76051ba20583aab711ea5965cf5"
"3cf54128cc9573f7460bba0fd6758a57aaf240c391790fb38ab473d83ef735510c53d"
"1d10c31782e8fd7da42615e33565745c30a5e6ceb2a3ae0666cc35", 16),
d=int(
"7bcad87e23da2cb2a8c328883fabce06e1f8e9b776c8bf253ad9884e6200e3bd9bd3b"
"a2cbe87d3854527bf005ba5d878c5b0fa20cfb0a2a42884ae95ca12bf7304285e9214"
"5e992f7006c7c0ae839ad550da495b143bec0f4806c7f44caed45f3ccc6dc44cfaf30"
"7abdb757e3d28e41c2d21366835c0a41e50a95af490ac03af061d2feb36ac0afb87be"
"a13fb0f0c5a410727ebedb286c77f9469473fae27ef2c836da6071ef7efc1647f1233"
"4009a89eecb09a8287abc8c2afd1ddd9a1b0641", 16),
dmp1=int(
"a845366cd6f9df1f34861bef7594ed025aa83a12759e245f58adaa9bdff9c3befb760"
"75d3701e90038e888eec9bf092df63400152cb25fc07effc6c74c45f0654ccbde15cd"
"90dd5504298a946fa5cf22a956072da27a6602e6c6e5c97f2db9c1", 16),
dmq1=int(
"28b0c1e78cdac03310717992d321a3888830ec6829978c048156152d805b4f8919c61"
"70b5dd204e5ddf3c6c53bc6aff15d0bd09faff7f351b94abb9db980b31f150a6d7573"
"08eb66938f89a5225cb4dd817a824c89e7a0293b58fc2eefb7e259", 16),
iqmp=int(
"6c1536c0e16e42a094b6caaf50231ba81916871497d73dcbbbd4bdeb9e60cae0413b3"
"8143b5d680275b29ed7769fe5577e4f9b3647ddb064941120914526d64d80016d2eb7"
"dc362da7c569623157f3d7cff8347f11494bf5c048d77e28d3f515", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"d871bb2d27672e54fc62c4680148cbdf848438da804e2c48b5a9c9f9daf6cc6e8"
"ea7d2296f25064537a9a542aef3dd449ea75774238d4da02c353d1bee70013dcc"
"c248ceef4050160705c188043c8559bf6dbfb6c4bb382eda4e9547575a8227d5b"
"3c0a7088391364cf9f018d8bea053b226ec65e8cdbeaf48a071d0074860a734b1"
"cb7d2146d43014b20776dea42f7853a54690e6cbbf3331a9f43763cfe2a51c329"
"3bea3b2eebec0d8e43eb317a443afe541107d886e5243c096091543ae65", 16),
)
)
RSA_KEY_2048 = RSAPrivateNumbers(
p=int(
"e14202e58c5f7446648d75e5dc465781f661f6b73000c080368afcfb21377f4ef19da"
"845d4ef9bc6b151f6d9f34629103f2e57615f9ba0a3a2fbb035069e1d63b4bb0e78ad"
"dad1ec3c6f87e25c877a1c4c1972098e09158ef7b9bc163852a18d44a70b7b31a03dc"
"2614fd9ab7bf002cba79054544af3bfbdb6aed06c7b24e6ab", 16),
q=int(
"dbe2bea1ff92599bd19f9d045d6ce62250c05cfeac5117f3cf3e626cb696e3d886379"
"557d5a57b7476f9cf886accfd40508a805fe3b45a78e1a8a125e516cda91640ee6398"
"ec5a39d3e6b177ef12ab00d07907a17640e4ca454fd8487da3c4ffa0d5c2a5edb1221"
"1c8e33c7ee9fa6753771fd111ec04b8317f86693eb2928c89", 16),
d=int(
"aef17f80f2653bc30539f26dd4c82ed6abc1d1b53bc0abcdbee47e9a8ab433abde865"
"9fcfae1244d22de6ad333c95aee7d47f30b6815065ac3322744d3ea75058002cd1b29"
"3141ee2a6dc682342432707080071bd2131d6262cab07871c28aa5238b87173fb78c3"
"7f9c7bcd18c12e8971bb77fd9fa3e0792fec18d8d9bed0b03ba02b263606f24dbace1"
"c8263ce2802a769a090e993fd49abc50c3d3c78c29bee2de0c98055d2f102f1c5684b"
"8dddee611d5205392d8e8dd61a15bf44680972a87f040a611a149271eeb2573f8bf6f"
"627dfa70e77def2ee6584914fa0290e041349ea0999cdff3e493365885b906cbcf195"
"843345809a85098cca90fea014a21", 16),
dmp1=int(
"9ba56522ffcfa5244eae805c87cc0303461f82be29691b9a7c15a5a050df6c143c575"
"7c288d3d7ab7f32c782e9d9fcddc10a604e6425c0e5d0e46069035d95a923646d276d"
"d9d95b8696fa29ab0de18e53f6f119310f8dd9efca62f0679291166fed8cbd5f18fe1"
"3a5f1ead1d71d8c90f40382818c18c8d069be793dbc094f69", 16),
dmq1=int(
"a8d4a0aaa2212ccc875796a81353da1fdf00d46676c88d2b96a4bfcdd924622d8e607"
"f3ac1c01dda7ebfb0a97dd7875c2a7b2db6728fb827b89c519f5716fb3228f4121647"
"04b30253c17de2289e9cce3343baa82eb404f789e094a094577a9b0c5314f1725fdf5"
"8e87611ad20da331bd30b8aebc7dc97d0e9a9ba8579772c9", 16),
iqmp=int(
"17bd5ef638c49440d1853acb3fa63a5aca28cb7f94ed350db7001c8445da8943866a7"
"0936e1ee2716c98b484e357cc054d82fbbd98d42f880695d38a1dd4eb096f629b9417"
"aca47e6de5da9f34e60e8a0ffd7e35be74deeef67298d94b3e0db73fc4b7a4cb360c8"
"9d2117a0bfd9434d37dc7c027d6b01e5295c875015510917d", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"c17afc7e77474caa5aa83036158a3ffbf7b5216851ba2230e5d6abfcc1c6cfef5"
"9e923ea1330bc593b73802ab608a6e4a3306523a3116ba5aa3966145174e13b6c"
"49e9b78062e449d72efb10fd49e91fa08b96d051e782e9f5abc5b5a6f7984827a"
"db8e73da00f22b2efdcdb76eab46edad98ed65662743fdc6c0e336a5d0cdbaa7d"
"c29e53635e24c87a5b2c4215968063cdeb68a972babbc1e3cff00fb9a80e372a4"
"d0c2c920d1e8cee333ce470dc2e8145adb05bf29aee1d24f141e8cc784989c587"
"fc6fbacd979f3f2163c1d7299b365bc72ffe2848e967aed1e48dcc515b3a50ed4"
"de04fd053846ca10a223b10cc841cc80fdebee44f3114c13e886af583", 16),
)
)
RSA_KEY_2048_ALT = RSAPrivateNumbers(
d=int(
"7522768467449591813737881904131688860626637897199391200040629"
"8641018746450502628484395471408986929218353894683769457466923"
"3079369551423094451013669595729568593462009746342148367797495"
"5529909313614750246672441810743580455199636293179539903480635"
"3091286716112931976896334411287175213124504134181121011488550"
"5290054443979198998564749640800633368957384058700741073997703"
"8877364695937023906368630297588990131009278072614118207348356"
"4640244134189285070202534488517371577359510236833464698189075"
"5160693085297816063285814039518178249628112908466649245545732"
"5791532385553960363601827996980725025898649392004494256400884"
"092073"
),
dmp1=int(
"5847872614112935747739644055317429405973942336206460017493394"
"9737607778799766591021036792892472774720417920838206576785118"
"8889624058962939702950175807073343659386156232294197300491647"
"1029508414050591959344812347424476498076532682798598325230069"
"0925827594762920534235575029199380552228825468180187156871965"
"973"
),
dmq1=int(
"2949536259161239302081155875068405238857801001054083407704879"
"8210876832264504685327766351157044892283801611558399025326793"
"4131638001934454489864437565651739832511702151461257267169691"
"6611992398459006200708626815153304591390855807749769768978152"
"9854112656599931724820610358669306523835327459478374630794532"
"167"
),
iqmp=int(
"7331180989818931535458916053540252830484856703208982675535284"
"4613815808798190559315018094080936347757336989616401164752221"
"8101156529898067044923499386460167055405998646366011838018441"
"3678947694258190172377716154009305082091341215866326061721180"
"3836418654472188816187630316821692982783286322262994892003058"
"782"
),
p=int(
"1460007723851883695617573533155574746587863843382715314919865"
"2434108956187429726002840717317310431378483921058946835896252"
"7109559207437158778332364464259678946305487699031865937075508"
"8616612925453842458055546540240601585731206561647892336916583"
"0023641764106581040198845259766246869529221084602380669333021"
"0819"
),
q=int(
"1433897765867889178402883410610177836503402597775250087462018"
"4617952933433119527945447840336616357136736935069377619782227"
"2822380830300262175671282877680573202309319960687756231128996"
"9764855320953993690199846269451095044922353809602378616938811"
"7513900906279873343591486841303392490561500301994171338761080"
"4439"
),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"209350181338107812610165420955871971489973659392253291327"
"839812910252466502190690572476688311285621239204212139711"
"207388949164851984253143698667018532039612470954223918242"
"145976986600705122576087630525229796950722166468064721258"
"490916138706756006902066136471049807637157890128560592039"
"941717275079733754782848729566190631725183735944031456237"
"089928120178187552521649483240599003240074352860189285952"
"078970127554801074176375499583703254849309993132931268013"
"715070507278514207864914944621214574162116786377990456375"
"964817771730371110612100247262908550409785456157505694419"
"00451152778245269283276012328748538414051025541"
)
)
)
| 48.478477 | 80 | 0.765787 |
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateNumbers, RSAPublicNumbers
)
RSA_KEY_512 = RSAPrivateNumbers(
p=int(
"d57846898d5c0de249c08467586cb458fa9bc417cdf297f73cfc52281b787cd9", 16
),
q=int(
"d10f71229e87e010eb363db6a85fd07df72d985b73c42786191f2ce9134afb2d", 16
),
d=int(
"272869352cacf9c866c4e107acc95d4c608ca91460a93d28588d51cfccc07f449"
"18bbe7660f9f16adc2b4ed36ca310ef3d63b79bd447456e3505736a45a6ed21", 16
),
dmp1=int(
"addff2ec7564c6b64bc670d250b6f24b0b8db6b2810099813b7e7658cecf5c39", 16
),
dmq1=int(
"463ae9c6b77aedcac1397781e50e4afc060d4b216dc2778494ebe42a6850c81", 16
),
iqmp=int(
"54deef8548f65cad1d411527a32dcb8e712d3e128e4e0ff118663fae82a758f4", 16
),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"ae5411f963c50e3267fafcf76381c8b1e5f7b741fdb2a544bcf48bd607b10c991"
"90caeb8011dc22cf83d921da55ec32bd05cac3ee02ca5e1dbef93952850b525",
16
),
)
)
RSA_KEY_512_ALT = RSAPrivateNumbers(
p=int(
"febe19c29a0b50fefa4f7b1832f84df1caf9be8242da25c9d689e18226e67ce5",
16),
q=int(
"eb616c639dd999feda26517e1c77b6878f363fe828c4e6670ec1787f28b1e731",
16),
d=int(
"80edecfde704a806445a4cc782b85d3f36f17558f385654ea767f006470fdfcbda5e2"
"206839289d3f419b4e4fb8e1acee1b4fb9c591f69b64ec83937f5829241", 16),
dmp1=int(
"7f4fa06e2a3077a54691cc5216bf13ad40a4b9fa3dd0ea4bca259487484baea5",
16),
dmq1=int(
"35eaa70d5a8711c352ed1c15ab27b0e3f46614d575214535ae279b166597fac1",
16),
iqmp=int(
"cc1f272de6846851ec80cb89a02dbac78f44b47bc08f53b67b4651a3acde8b19",
16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"ea397388b999ef0f7e7416fa000367efd9a0ba0deddd3f8160d1c36d62267f210"
"fbd9c97abeb6654450ff03e7601b8caa6c6f4cba18f0b52c179d17e8f258ad5",
16),
)
)
RSA_KEY_522 = RSAPrivateNumbers(
p=int(
"1a8aab9a069f92b52fdf05824f2846223dc27adfc806716a247a77d4c36885e4bf",
16),
q=int(
"19e8d620d177ec54cdb733bb1915e72ef644b1202b889ceb524613efa49c07eb4f",
16),
d=int(
"10b8a7c0a92c1ae2d678097d69db3bfa966b541fb857468291d48d1b52397ea2bac0d"
"4370c159015c7219e3806a01bbafaffdd46f86e3da1e2d1fe80a0369ccd745", 16),
dmp1=int(
"3eb6277f66e6e2dcf89f1b8529431f730839dbd9a3e49555159bc8470eee886e5",
16),
dmq1=int(
"184b4d74aa54c361e51eb23fee4eae5e4786b37b11b6e0447af9c0b9c4e4953c5b",
16),
iqmp=int(
"f80e9ab4fa7b35d0d232ef51c4736d1f2dcf2c7b1dd8716211b1bf1337e74f8ae",
16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"2afaea0e0bb6fca037da7d190b5270a6c665bc18e7a456f7e69beaac4433db748"
"ba99acdd14697e453bca596eb35b47f2d48f1f85ef08ce5109dad557a9cf85ebf"
"1", 16),
),
)
RSA_KEY_599 = RSAPrivateNumbers(
p=int(
"cf95d20be0c7af69f4b3d909f65d858c26d1a7ef34da8e3977f4fa230580e58814b54"
"24be99", 16),
q=int(
"6052be4b28debd4265fe12ace5aa4a0c4eb8d63ff8853c66824b35622161eb48a3bc8"
"c3ada5", 16),
d=int(
"69d9adc465e61585d3142d7cc8dd30605e8d1cbbf31009bc2cd5538dc40528d5d68ee"
"fe6a42d23674b6ec76e192351bf368c8968f0392110bf1c2825dbcff071270b80adcc"
"fa1d19d00a1", 16),
dmp1=int(
"a86d10edde456687fba968b1f298d2e07226adb1221b2a466a93f3d83280f0bb46c20"
"2b6811", 16),
dmq1=int(
"40d570e08611e6b1da94b95d46f8e7fe80be48f7a5ff8838375b08039514a399b11c2"
"80735", 16),
iqmp=int(
"cd051cb0ea68b88765c041262ace2ec4db11dab14afd192742e34d5da3328637fabdf"
"bae26e", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"4e1b470fe00642426f3808e74c959632dd67855a4c503c5b7876ccf4dc7f6a1a4"
"9107b90d26daf0a7879a6858218345fbc6e59f01cd095ca5647c27c25265e6c47"
"4fea89537191c7073d9d", 16),
)
)
RSA_KEY_745 = RSAPrivateNumbers(
p=int(
"1c5a0cfe9a86debd19eca33ba961f15bc598aa7983a545ce775b933afc89eb51bcf90"
"836257fdd060d4b383240241d", 16
),
q=int(
"fb2634f657f82ee6b70553382c4e2ed26b947c97ce2f0016f1b282cf2998184ad0527"
"a9eead826dd95fe06b57a025", 16
),
d=int(
"402f30f976bc07d15ff0779abff127b20a8b6b1d0024cc2ad8b6762d38f174f81e792"
"3b49d80bdbdd80d9675cbc7b2793ec199a0430eb5c84604dacfdb29259ae6a1a44676"
"22f0b23d4cb0f5cb1db4b8173c8d9d3e57a74dbd200d2141", 16),
dmp1=int(
"e5e95b7751a6649f199be21bef7a51c9e49821d945b6fc5f538b4a670d8762c375b00"
"8e70f31d52b3ea2bd14c3101", 16),
dmq1=int(
"12b85d5843645f72990fcf8d2f58408b34b3a3b9d9078dd527fceb5d2fb7839008092"
"dd4aca2a1fb00542801dcef5", 16),
iqmp=int(
"5672740d947f621fc7969e3a44ec26736f3f819863d330e63e9409e139d20753551ac"
"c16544dd2bdadb9dee917440", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"1bd085f92237774d34013b477ceebbb2f2feca71118db9b7429341477947e7b1d"
"04e8c43ede3c52bb25781af58d4ff81289f301eac62dc3bcd7dafd7a4d5304e9f"
"308e766952fbf2b62373e66611fa53189987dbef9f7243dcbbeb25831", 16),
)
)
RSA_KEY_768 = RSAPrivateNumbers(
p=int(
"f80c0061b607f93206b68e208906498d68c6e396faf457150cf975c8f849848465869"
"7ecd402313397088044c4c2071b", 16),
q=int(
"e5b5dbecc93c6d306fc14e6aa9737f9be2728bc1a326a8713d2849b34c1cb54c63468"
"3a68abb1d345dbf15a3c492cf55", 16),
d=int(
"d44601442255ffa331212c60385b5e898555c75c0272632ff42d57c4b16ca97dbca9f"
"d6d99cd2c9fd298df155ed5141b4be06c651934076133331d4564d73faed7ce98e283"
"2f7ce3949bc183be7e7ca34f6dd04a9098b6c73649394b0a76c541", 16),
dmp1=int(
"a5763406fa0b65929661ce7b2b8c73220e43a5ebbfe99ff15ddf464fd238105ad4f2a"
"c83818518d70627d8908703bb03", 16),
dmq1=int(
"cb467a9ef899a39a685aecd4d0ad27b0bfdc53b68075363c373d8eb2bed8eccaf3533"
"42f4db735a9e087b7539c21ba9d", 16),
iqmp=int(
"5fe86bd3aee0c4d09ef11e0530a78a4534c9b833422813b5c934a450c8e564d8097a0"
"6fd74f1ebe2d5573782093f587a", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"de92f1eb5f4abf426b6cac9dd1e9bf57132a4988b4ed3f8aecc15e251028bd6df"
"46eb97c711624af7db15e6430894d1b640c13929329241ee094f5a4fe1a20bc9b"
"75232320a72bc567207ec54d6b48dccb19737cf63acc1021abb337f19130f7",
16),
)
)
RSA_KEY_1024 = RSAPrivateNumbers(
p=int(
"ea4d9d9a1a068be44b9a5f8f6de0512b2c5ba1fb804a4655babba688e6e890b347c1a"
"7426685a929337f513ae4256f0b7e5022d642237f960c5b24b96bee8e51", 16),
q=int(
"cffb33e400d6f08b410d69deb18a85cf0ed88fcca9f32d6f2f66c62143d49aff92c11"
"4de937d4f1f62d4635ee89af99ce86d38a2b05310f3857c7b5d586ac8f9", 16),
d=int(
"3d12d46d04ce942fb99be7bf30587b8cd3e21d75a2720e7bda1b867f1d418d91d8b9f"
"e1c00181fdde94f2faf33b4e6f800a1b3ae3b972ccb6d5079dcb6c794070ac8306d59"
"c00b58b7a9a81122a6b055832de7c72334a07494d8e7c9fbeed2cc37e011d9e6bfc6e"
"9bcddbef7f0f5771d9cf82cd4b268c97ec684575c24b6c881", 16),
dmp1=int(
"470f2b11257b7ec9ca34136f487f939e6861920ad8a9ae132a02e74af5dceaa5b4c98"
"2949ccb44b67e2bcad2f58674db237fe250e0d62b47b28fa1dfaa603b41", 16),
dmq1=int(
"c616e8317d6b3ae8272973709b80e8397256697ff14ea03389de454f619f99915a617"
"45319fefbe154ec1d49441a772c2f63f7d15c478199afc60469bfd0d561", 16),
iqmp=int(
"d15e7c9ad357dfcd5dbdc8427680daf1006761bcfba93a7f86589ad88832a8d564b1c"
"d4291a658c96fbaea7ca588795820902d85caebd49c2d731e3fe0243130", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"be5aac07456d990133ebce69c06b48845b972ab1ad9f134bc5683c6b5489b5119"
"ede07be3bed0e355d48e0dfab1e4fb5187adf42d7d3fb0401c082acb8481bf17f"
"0e871f8877be04c3a1197d40aa260e2e0c48ed3fd2b93dc3fc0867591f67f3cd6"
"0a77adee1d68a8c3730a5702485f6ac9ede7f0fd2918e037ee4cc1fc1b4c9",
16),
)
)
RSA_KEY_1025 = RSAPrivateNumbers(
p=int(
"18e9bfb7071725da04d31c103fa3563648c69def43a204989214eb57b0c8b299f9ef3"
"5dda79a62d8d67fd2a9b69fbd8d0490aa2edc1e111a2b8eb7c737bb691a5", 16),
q=int(
"d8eccaeeb95815f3079d13685f3f72ca2bf2550b349518049421375df88ca9bbb4ba8"
"cb0e3502203c9eeae174112509153445d251313e4711a102818c66fcbb7", 16),
d=int(
"fe9ac54910b8b1bc948a03511c54cab206a1d36d50d591124109a48abb7480977ccb0"
"47b4d4f1ce7b0805df2d4fa3fe425f49b78535a11f4b87a4eba0638b3340c23d4e6b2"
"1ecebe9d5364ea6ead2d47b27836019e6ecb407000a50dc95a8614c9d0031a6e3a524"
"d2345cfb76e15c1f69d5ba35bdfb6ec63bcb115a757ef79d9", 16),
dmp1=int(
"18537e81006a68ea76d590cc88e73bd26bc38d09c977959748e5265c0ce21c0b5fd26"
"53d975f97ef759b809f791487a8fff1264bf561627fb4527a3f0bbb72c85", 16),
dmq1=int(
"c807eac5a1f1e1239f04b04dd16eff9a00565127a91046fa89e1eb5d6301cace85447"
"4d1f47b0332bd35b4214b66e9166953241538f761f30d969272ee214f17", 16),
iqmp=int(
"133aa74dd41fe70fa244f07d0c4091a22f8c8f0134fe6aea9ec8b55383b758fefe358"
"2beec36eca91715eee7d21931f24fa9e97e8e3a50f9cd0f731574a5eafcc", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"151c44fed756370fb2d4a0e6ec7dcac84068ca459b6aaf22daf902dca72c77563"
"bf276fe3523f38f5ddaf3ea9aa88486a9d8760ff732489075862bee0e599de5c5"
"f509b4519f4f446521bad15cd279a498fe1e89107ce0d237e3103d7c5eb801666"
"42e2924b152aebff97b71fdd2d68ebb45034cc784e2e822ff6d1edf98af3f3",
16),
)
)
RSA_KEY_1026 = RSAPrivateNumbers(
p=int(
"1fcbfb8719c5bdb5fe3eb0937c76bb096e750b9442dfe31d6a877a13aed2a6a4e9f79"
"40f815f1c307dd6bc2b4b207bb6fe5be3a15bd2875a957492ce197cdedb1", 16),
q=int(
"1f704a0f6b8966dd52582fdc08227dd3dbaeaa781918b41144b692711091b4ca4eb62"
"985c3513853828ce8739001dfba9a9a7f1a23cbcaf74280be925e2e7b50d", 16),
d=int(
"c67975e35a1d0d0b3ebfca736262cf91990cb31cf4ac473c0c816f3bc2720bcba2475"
"e8d0de8535d257816c0fc53afc1b597eada8b229069d6ef2792fc23f59ffb4dc6c3d9"
"0a3c462082025a4cba7561296dd3d8870c4440d779406f00879afe2c681e7f5ee055e"
"ff829e6e55883ec20830c72300762e6e3a333d94b4dbe4501", 16),
dmp1=int(
"314730ca7066c55d086a9fbdf3670ef7cef816b9efea8b514b882ae9d647217cf41d7"
"e9989269dc9893d02e315cb81f058c49043c2cac47adea58bdf5e20e841", 16),
dmq1=int(
"1da28a9d687ff7cfeebc2439240de7505a8796376968c8ec723a2b669af8ce53d9c88"
"af18540bd78b2da429014923fa435f22697ac60812d7ca9c17a557f394cd", 16),
iqmp=int(
"727947b57b8a36acd85180522f1b381bce5fdbd962743b3b14af98a36771a80f58ddd"
"62675d72a5935190da9ddc6fd6d6d5e9e9f805a2e92ab8d56b820493cdf", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"3e7a5e6483e55eb8b723f9c46732d21b0af9e06a4a1099962d67a35ee3f62e312"
"9cfae6ab0446da18e26f33e1d753bc1cc03585c100cf0ab5ef056695706fc8b0c"
"9c710cd73fe6e5beda70f515a96fabd3cc5ac49efcb2594b220ff3b603fcd927f"
"6a0838ef04bf52f3ed9eab801f09e5aed1613ddeb946ed0fbb02060b3a36fd",
16),
)
)
RSA_KEY_1027 = RSAPrivateNumbers(
p=int(
"30135e54cfb072c3d3eaf2000f3ed92ceafc85efc867b9d4bf5612f2978c432040093"
"4829f741c0f002b54af2a4433ff872b6321ef00ff1e72cba4e0ced937c7d", 16),
q=int(
"1d01a8aead6f86b78c875f18edd74214e06535d65da054aeb8e1851d6f3319b4fb6d8"
"6b01e07d19f8261a1ded7dc08116345509ab9790e3f13e65c037e5bb7e27", 16),
d=int(
"21cf4477df79561c7818731da9b9c88cd793f1b4b8e175bd0bfb9c0941a4dc648ecf1"
"6d96b35166c9ea116f4c2eb33ce1c231e641a37c25e54c17027bdec08ddafcb83642e"
"795a0dd133155ccc5eed03b6e745930d9ac7cfe91f9045149f33295af03a2198c660f"
"08d8150d13ce0e2eb02f21ac75d63b55822f77bd5be8d07619", 16),
dmp1=int(
"173fb695931e845179511c18b546b265cb79b517c135902377281bdf9f34205e1f399"
"4603ad63e9f6e7885ea73a929f03fa0d6bed943051ce76cddde2d89d434d", 16),
dmq1=int(
"10956b387b2621327da0c3c8ffea2af8be967ee25163222746c28115a406e632a7f12"
"5a9397224f1fa5c116cd3a313e5c508d31db2deb83b6e082d213e33f7fcf", 16),
iqmp=int(
"234f833949f2c0d797bc6a0e906331e17394fa8fbc8449395766d3a8d222cf6167c48"
"8e7fe1fe9721d3e3b699a595c8e6f063d92bd840dbc84d763b2b37002109", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"57281707d7f9b1369c117911758980e32c05b133ac52c225bcf68b79157ff47ea"
"0a5ae9f579ef1fd7e42937f921eb3123c4a045cc47a2159fbbf904783e654954c"
"42294c30a95c15db7c7b91f136244e548f62474b137087346c5522e54f226f49d"
"6c93bc58cb39972e41bde452bb3ae9d60eb93e5e1ce91d222138d9890c7d0b",
16),
)
)
RSA_KEY_1028 = RSAPrivateNumbers(
p=int(
"359d17378fae8e9160097daee78a206bd52efe1b757c12a6da8026cc4fc4bb2620f12"
"b8254f4db6aed8228be8ee3e5a27ec7d31048602f01edb00befd209e8c75", 16),
q=int(
"33a2e70b93d397c46e63b273dcd3dcfa64291342a6ce896e1ec8f1c0edc44106550f3"
"c06e7d3ca6ea29eccf3f6ab5ac6235c265313d6ea8e8767e6a343f616581", 16),
d=int(
"880640088d331aa5c0f4cf2887809a420a2bc086e671e6ffe4e47a8c80792c038a314"
"9a8e45ef9a72816ab45b36e3af6800351067a6b2751843d4232413146bb575491463a"
"8addd06ce3d1bcf7028ec6c5d938c545a20f0a40214b5c574ca7e840062b2b5f8ed49"
"4b144bb2113677c4b10519177fee1d4f5fb8a1c159b0b47c01", 16),
dmp1=int(
"75f8c52dad2c1cea26b8bba63236ee4059489e3d2db766136098bcc6b67fde8f77cd3"
"640035107bfb1ffc6480983cfb84fe0c3be008424ebc968a7db7e01f005", 16),
dmq1=int(
"3893c59469e4ede5cd0e6ff9837ca023ba9b46ff40c60ccf1bec10f7d38db5b1ba817"
"6c41a3f750ec4203b711455aca06d1e0adffc5cffa42bb92c7cb77a6c01", 16),
iqmp=int(
"ad32aafae3c962ac25459856dc8ef1f733c3df697eced29773677f435d186cf759d1a"
"5563dd421ec47b4d7e7f12f29647c615166d9c43fc49001b29089344f65", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"ad0696bef71597eb3a88e135d83c596930cac73868fbd7e6b2d64f34eea5c28cc"
"e3510c68073954d3ba4deb38643e7a820a4cf06e75f7f82eca545d412bd637819"
"45c28d406e95a6cced5ae924a8bfa4f3def3e0250d91246c269ec40c89c93a85a"
"cd3770ba4d2e774732f43abe94394de43fb57f93ca25f7a59d75d400a3eff5",
16),
)
)
RSA_KEY_1029 = RSAPrivateNumbers(
p=int(
"66f33e513c0b6b6adbf041d037d9b1f0ebf8de52812a3ac397a963d3f71ba64b3ad04"
"e4d4b5e377e6fa22febcac292c907dc8dcfe64c807fd9a7e3a698850d983", 16),
q=int(
"3b47a89a19022461dcc2d3c05b501ee76955e8ce3cf821beb4afa85a21a26fd7203db"
"deb8941f1c60ada39fd6799f6c07eb8554113f1020460ec40e93cd5f6b21", 16),
d=int(
"280c42af8b1c719821f2f6e2bf5f3dd53c81b1f3e1e7cc4fce6e2f830132da0665bde"
"bc1e307106b112b52ad5754867dddd028116cf4471bc14a58696b99524b1ad8f05b31"
"cf47256e54ab4399b6a073b2c0452441438dfddf47f3334c13c5ec86ece4d33409056"
"139328fafa992fb5f5156f25f9b21d3e1c37f156d963d97e41", 16),
dmp1=int(
"198c7402a4ec10944c50ab8488d7b5991c767e75eb2817bd427dff10335ae141fa2e8"
"7c016dc22d975cac229b9ffdf7d943ddfd3a04b8bf82e83c3b32c5698b11", 16),
dmq1=int(
"15fd30c7687b68ef7c2a30cdeb913ec56c4757c218cf9a04d995470797ee5f3a17558"
"fbb6d00af245d2631d893b382da48a72bc8a613024289895952ab245b0c1", 16),
iqmp=int(
"4f8fde17e84557a3f4e242d889e898545ab55a1a8e075c9bb0220173ccffe84659abe"
"a235104f82e32750309389d4a52af57dbb6e48d831917b6efeb190176570", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"17d6e0a09aa5b2d003e51f43b9c37ffde74688f5e3b709fd02ef375cb6b8d15e2"
"99a9f74981c3eeaaf947d5c2d64a1a80f5c5108a49a715c3f7be95a016b8d3300"
"965ead4a4df76e642d761526803e9434d4ec61b10cb50526d4dcaef02593085de"
"d8c331c1b27b200a45628403065efcb2c0a0ca1f75d648d40a007fbfbf2cae3",
16),
)
)
RSA_KEY_1030 = RSAPrivateNumbers(
p=int(
"6f4ac8a8172ef1154cf7f80b5e91de723c35a4c512860bfdbafcc3b994a2384bf7796"
"3a2dd0480c7e04d5d418629651a0de8979add6f47b23da14c27a682b69c9", 16),
q=int(
"65a9f83e07dea5b633e036a9dccfb32c46bf53c81040a19c574c3680838fc6d28bde9"
"55c0ff18b30481d4ab52a9f5e9f835459b1348bbb563ad90b15a682fadb3", 16),
d=int(
"290db707b3e1a96445ae8ea93af55a9f211a54ebe52995c2eb28085d1e3f09c986e73"
"a00010c8e4785786eaaa5c85b98444bd93b585d0c24363ccc22c482e150a3fd900176"
"86968e4fa20423ae72823b0049defceccb39bb34aa4ef64e6b14463b76d6a871c859e"
"37285455b94b8e1527d1525b1682ac6f7c8fd79d576c55318c1", 16),
dmp1=int(
"23f7fa84010225dea98297032dac5d45745a2e07976605681acfe87e0920a8ab3caf5"
"9d9602f3d63dc0584f75161fd8fff20c626c21c5e02a85282276a74628a9", 16),
dmq1=int(
"18ebb657765464a8aa44bf019a882b72a2110a77934c54915f70e6375088b10331982"
"962bce1c7edd8ef9d3d95aa2566d2a99da6ebab890b95375919408d00f33", 16),
iqmp=int(
"3d59d208743c74054151002d77dcdfc55af3d41357e89af88d7eef2767be54c290255"
"9258d85cf2a1083c035a33e65a1ca46dc8b706847c1c6434cef7b71a9dae", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"2c326574320818a6a8cb6b3328e2d6c1ba2a3f09b6eb2bc543c03ab18eb5efdaa"
"8fcdbb6b4e12168304f587999f9d96a421fc80cb933a490df85d25883e6a88750"
"d6bd8b3d4117251eee8f45e70e6daac7dbbd92a9103c623a09355cf00e3f16168"
"e38b9c4cb5b368deabbed8df466bc6835eaba959bc1c2f4ec32a09840becc8b",
16),
)
)
RSA_KEY_1031 = RSAPrivateNumbers(
p=int(
"c0958c08e50137db989fb7cc93abf1984543e2f955d4f43fb2967f40105e79274c852"
"293fa06ce63ca8436155e475ed6d1f73fea4c8e2516cc79153e3dc83e897", 16),
q=int(
"78cae354ea5d6862e5d71d20273b7cddb8cdfab25478fe865180676b04250685c4d03"
"30c216574f7876a7b12dfe69f1661d3b0cea6c2c0dcfb84050f817afc28d", 16),
d=int(
"1d55cc02b17a5d25bfb39f2bc58389004d0d7255051507f75ef347cdf5519d1a00f4b"
"d235ce4171bfab7bdb7a6dcfae1cf41433fb7da5923cc84f15a675c0b83492c95dd99"
"a9fc157aea352ffdcbb5d59dbc3662171d5838d69f130678ee27841a79ef64f679ce9"
"3821fa69c03f502244c04b737edad8967def8022a144feaab29", 16),
dmp1=int(
"5b1c2504ec3a984f86b4414342b5bcf59a0754f13adf25b2a0edbc43f5ba8c3cc061d"
"80b03e5866d059968f0d10a98deaeb4f7830436d76b22cf41f2914e13eff", 16),
dmq1=int(
"6c361e1819691ab5d67fb2a8f65c958d301cdf24d90617c68ec7005edfb4a7b638cde"
"79d4b61cfba5c86e8c0ccf296bc7f611cb8d4ae0e072a0f68552ec2d5995", 16),
iqmp=int(
"b7d61945fdc8b92e075b15554bab507fa8a18edd0a18da373ec6c766c71eece61136a"
"84b90b6d01741d40458bfad17a9bee9d4a8ed2f6e270782dc3bf5d58b56e", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"5adebaa926ea11fb635879487fdd53dcfbb391a11ac7279bb3b4877c9b811370a"
"9f73da0690581691626d8a7cf5d972cced9c2091ccf999024b23b4e6dc6d99f80"
"a454737dec0caffaebe4a3fac250ed02079267c8f39620b5ae3e125ca35338522"
"dc9353ecac19cb2fe3b9e3a9291619dbb1ea3a7c388e9ee6469fbf5fb22892b",
16),
)
)
RSA_KEY_1536 = RSAPrivateNumbers(
p=int(
"f1a65fa4e2aa6e7e2b560251e8a4cd65b625ad9f04f6571785782d1c213d91c961637"
"0c572f2783caf2899f7fb690cf99a0184257fbd4b071b212c88fb348279a5387e61f1"
"17e9c62980c45ea863fa9292087c0f66ecdcde6443d5a37268bf71", 16),
q=int(
"e54c2cbc3839b1da6ae6fea45038d986d6f523a3ae76051ba20583aab711ea5965cf5"
"3cf54128cc9573f7460bba0fd6758a57aaf240c391790fb38ab473d83ef735510c53d"
"1d10c31782e8fd7da42615e33565745c30a5e6ceb2a3ae0666cc35", 16),
d=int(
"7bcad87e23da2cb2a8c328883fabce06e1f8e9b776c8bf253ad9884e6200e3bd9bd3b"
"a2cbe87d3854527bf005ba5d878c5b0fa20cfb0a2a42884ae95ca12bf7304285e9214"
"5e992f7006c7c0ae839ad550da495b143bec0f4806c7f44caed45f3ccc6dc44cfaf30"
"7abdb757e3d28e41c2d21366835c0a41e50a95af490ac03af061d2feb36ac0afb87be"
"a13fb0f0c5a410727ebedb286c77f9469473fae27ef2c836da6071ef7efc1647f1233"
"4009a89eecb09a8287abc8c2afd1ddd9a1b0641", 16),
dmp1=int(
"a845366cd6f9df1f34861bef7594ed025aa83a12759e245f58adaa9bdff9c3befb760"
"75d3701e90038e888eec9bf092df63400152cb25fc07effc6c74c45f0654ccbde15cd"
"90dd5504298a946fa5cf22a956072da27a6602e6c6e5c97f2db9c1", 16),
dmq1=int(
"28b0c1e78cdac03310717992d321a3888830ec6829978c048156152d805b4f8919c61"
"70b5dd204e5ddf3c6c53bc6aff15d0bd09faff7f351b94abb9db980b31f150a6d7573"
"08eb66938f89a5225cb4dd817a824c89e7a0293b58fc2eefb7e259", 16),
iqmp=int(
"6c1536c0e16e42a094b6caaf50231ba81916871497d73dcbbbd4bdeb9e60cae0413b3"
"8143b5d680275b29ed7769fe5577e4f9b3647ddb064941120914526d64d80016d2eb7"
"dc362da7c569623157f3d7cff8347f11494bf5c048d77e28d3f515", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"d871bb2d27672e54fc62c4680148cbdf848438da804e2c48b5a9c9f9daf6cc6e8"
"ea7d2296f25064537a9a542aef3dd449ea75774238d4da02c353d1bee70013dcc"
"c248ceef4050160705c188043c8559bf6dbfb6c4bb382eda4e9547575a8227d5b"
"3c0a7088391364cf9f018d8bea053b226ec65e8cdbeaf48a071d0074860a734b1"
"cb7d2146d43014b20776dea42f7853a54690e6cbbf3331a9f43763cfe2a51c329"
"3bea3b2eebec0d8e43eb317a443afe541107d886e5243c096091543ae65", 16),
)
)
RSA_KEY_2048 = RSAPrivateNumbers(
p=int(
"e14202e58c5f7446648d75e5dc465781f661f6b73000c080368afcfb21377f4ef19da"
"845d4ef9bc6b151f6d9f34629103f2e57615f9ba0a3a2fbb035069e1d63b4bb0e78ad"
"dad1ec3c6f87e25c877a1c4c1972098e09158ef7b9bc163852a18d44a70b7b31a03dc"
"2614fd9ab7bf002cba79054544af3bfbdb6aed06c7b24e6ab", 16),
q=int(
"dbe2bea1ff92599bd19f9d045d6ce62250c05cfeac5117f3cf3e626cb696e3d886379"
"557d5a57b7476f9cf886accfd40508a805fe3b45a78e1a8a125e516cda91640ee6398"
"ec5a39d3e6b177ef12ab00d07907a17640e4ca454fd8487da3c4ffa0d5c2a5edb1221"
"1c8e33c7ee9fa6753771fd111ec04b8317f86693eb2928c89", 16),
d=int(
"aef17f80f2653bc30539f26dd4c82ed6abc1d1b53bc0abcdbee47e9a8ab433abde865"
"9fcfae1244d22de6ad333c95aee7d47f30b6815065ac3322744d3ea75058002cd1b29"
"3141ee2a6dc682342432707080071bd2131d6262cab07871c28aa5238b87173fb78c3"
"7f9c7bcd18c12e8971bb77fd9fa3e0792fec18d8d9bed0b03ba02b263606f24dbace1"
"c8263ce2802a769a090e993fd49abc50c3d3c78c29bee2de0c98055d2f102f1c5684b"
"8dddee611d5205392d8e8dd61a15bf44680972a87f040a611a149271eeb2573f8bf6f"
"627dfa70e77def2ee6584914fa0290e041349ea0999cdff3e493365885b906cbcf195"
"843345809a85098cca90fea014a21", 16),
dmp1=int(
"9ba56522ffcfa5244eae805c87cc0303461f82be29691b9a7c15a5a050df6c143c575"
"7c288d3d7ab7f32c782e9d9fcddc10a604e6425c0e5d0e46069035d95a923646d276d"
"d9d95b8696fa29ab0de18e53f6f119310f8dd9efca62f0679291166fed8cbd5f18fe1"
"3a5f1ead1d71d8c90f40382818c18c8d069be793dbc094f69", 16),
dmq1=int(
"a8d4a0aaa2212ccc875796a81353da1fdf00d46676c88d2b96a4bfcdd924622d8e607"
"f3ac1c01dda7ebfb0a97dd7875c2a7b2db6728fb827b89c519f5716fb3228f4121647"
"04b30253c17de2289e9cce3343baa82eb404f789e094a094577a9b0c5314f1725fdf5"
"8e87611ad20da331bd30b8aebc7dc97d0e9a9ba8579772c9", 16),
iqmp=int(
"17bd5ef638c49440d1853acb3fa63a5aca28cb7f94ed350db7001c8445da8943866a7"
"0936e1ee2716c98b484e357cc054d82fbbd98d42f880695d38a1dd4eb096f629b9417"
"aca47e6de5da9f34e60e8a0ffd7e35be74deeef67298d94b3e0db73fc4b7a4cb360c8"
"9d2117a0bfd9434d37dc7c027d6b01e5295c875015510917d", 16),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"c17afc7e77474caa5aa83036158a3ffbf7b5216851ba2230e5d6abfcc1c6cfef5"
"9e923ea1330bc593b73802ab608a6e4a3306523a3116ba5aa3966145174e13b6c"
"49e9b78062e449d72efb10fd49e91fa08b96d051e782e9f5abc5b5a6f7984827a"
"db8e73da00f22b2efdcdb76eab46edad98ed65662743fdc6c0e336a5d0cdbaa7d"
"c29e53635e24c87a5b2c4215968063cdeb68a972babbc1e3cff00fb9a80e372a4"
"d0c2c920d1e8cee333ce470dc2e8145adb05bf29aee1d24f141e8cc784989c587"
"fc6fbacd979f3f2163c1d7299b365bc72ffe2848e967aed1e48dcc515b3a50ed4"
"de04fd053846ca10a223b10cc841cc80fdebee44f3114c13e886af583", 16),
)
)
RSA_KEY_2048_ALT = RSAPrivateNumbers(
d=int(
"7522768467449591813737881904131688860626637897199391200040629"
"8641018746450502628484395471408986929218353894683769457466923"
"3079369551423094451013669595729568593462009746342148367797495"
"5529909313614750246672441810743580455199636293179539903480635"
"3091286716112931976896334411287175213124504134181121011488550"
"5290054443979198998564749640800633368957384058700741073997703"
"8877364695937023906368630297588990131009278072614118207348356"
"4640244134189285070202534488517371577359510236833464698189075"
"5160693085297816063285814039518178249628112908466649245545732"
"5791532385553960363601827996980725025898649392004494256400884"
"092073"
),
dmp1=int(
"5847872614112935747739644055317429405973942336206460017493394"
"9737607778799766591021036792892472774720417920838206576785118"
"8889624058962939702950175807073343659386156232294197300491647"
"1029508414050591959344812347424476498076532682798598325230069"
"0925827594762920534235575029199380552228825468180187156871965"
"973"
),
dmq1=int(
"2949536259161239302081155875068405238857801001054083407704879"
"8210876832264504685327766351157044892283801611558399025326793"
"4131638001934454489864437565651739832511702151461257267169691"
"6611992398459006200708626815153304591390855807749769768978152"
"9854112656599931724820610358669306523835327459478374630794532"
"167"
),
iqmp=int(
"7331180989818931535458916053540252830484856703208982675535284"
"4613815808798190559315018094080936347757336989616401164752221"
"8101156529898067044923499386460167055405998646366011838018441"
"3678947694258190172377716154009305082091341215866326061721180"
"3836418654472188816187630316821692982783286322262994892003058"
"782"
),
p=int(
"1460007723851883695617573533155574746587863843382715314919865"
"2434108956187429726002840717317310431378483921058946835896252"
"7109559207437158778332364464259678946305487699031865937075508"
"8616612925453842458055546540240601585731206561647892336916583"
"0023641764106581040198845259766246869529221084602380669333021"
"0819"
),
q=int(
"1433897765867889178402883410610177836503402597775250087462018"
"4617952933433119527945447840336616357136736935069377619782227"
"2822380830300262175671282877680573202309319960687756231128996"
"9764855320953993690199846269451095044922353809602378616938811"
"7513900906279873343591486841303392490561500301994171338761080"
"4439"
),
public_numbers=RSAPublicNumbers(
e=65537,
n=int(
"209350181338107812610165420955871971489973659392253291327"
"839812910252466502190690572476688311285621239204212139711"
"207388949164851984253143698667018532039612470954223918242"
"145976986600705122576087630525229796950722166468064721258"
"490916138706756006902066136471049807637157890128560592039"
"941717275079733754782848729566190631725183735944031456237"
"089928120178187552521649483240599003240074352860189285952"
"078970127554801074176375499583703254849309993132931268013"
"715070507278514207864914944621214574162116786377990456375"
"964817771730371110612100247262908550409785456157505694419"
"00451152778245269283276012328748538414051025541"
)
)
)
| true | true |
f71b6b65aa6aa47c57fda3ac6483ee6b1a2be140 | 239 | py | Python | more-python-for-beginners/03 - Classes/basic_class.py | CloudBreadPaPa/c9-python-getting-started | c49580be5e7e88a480d05596a7a53c89d0be7dd3 | [
"MIT"
] | null | null | null | more-python-for-beginners/03 - Classes/basic_class.py | CloudBreadPaPa/c9-python-getting-started | c49580be5e7e88a480d05596a7a53c89d0be7dd3 | [
"MIT"
] | null | null | null | more-python-for-beginners/03 - Classes/basic_class.py | CloudBreadPaPa/c9-python-getting-started | c49580be5e7e88a480d05596a7a53c89d0be7dd3 | [
"MIT"
] | 1 | 2021-09-12T15:34:13.000Z | 2021-09-12T15:34:13.000Z | class Presenter():
def __init__(self, name):
# 생성자(Constructor)
self.name = name
def say_hello(self):
# 메서드(method)
print('Hello, ' + self.name)
presenter = Presenter('Chris')
presenter.name = 'Christopher'
presenter.say_hello() | 21.727273 | 30 | 0.698745 | class Presenter():
def __init__(self, name):
self.name = name
def say_hello(self):
print('Hello, ' + self.name)
presenter = Presenter('Chris')
presenter.name = 'Christopher'
presenter.say_hello() | true | true |
f71b6c3e8f25504c53f9b02239b585cd06f3f509 | 1,080 | py | Python | posts/views.py | hamzabell/hackernews_mvp | 54beff25f6d23f42b39a13dfe0c289768faa4c3d | [
"MIT"
] | null | null | null | posts/views.py | hamzabell/hackernews_mvp | 54beff25f6d23f42b39a13dfe0c289768faa4c3d | [
"MIT"
] | null | null | null | posts/views.py | hamzabell/hackernews_mvp | 54beff25f6d23f42b39a13dfe0c289768faa4c3d | [
"MIT"
] | null | null | null | from django.core.checks import messages
from rest_framework import generics
from rest_framework.response import Response
from posts.models import Post
from .serializers import PostSerializer, UpVoteSerializer
class PostList(generics.ListCreateAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class PostDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class UpVoteAPIView(generics.GenericAPIView):
serializer_class = UpVoteSerializer
def post(self, request, format=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
post_id = serializer.data['post_id']
post= Post.objects.filter(pk=post_id).first()
if post:
post.upvotes_count += 1
post.save()
return Response({
'message': 'Post has been sucessfully upvoted'
})
return Response({
"message": "Post does not exist"
})
| 26.341463 | 62 | 0.682407 | from django.core.checks import messages
from rest_framework import generics
from rest_framework.response import Response
from posts.models import Post
from .serializers import PostSerializer, UpVoteSerializer
class PostList(generics.ListCreateAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class PostDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Post.objects.all()
serializer_class = PostSerializer
class UpVoteAPIView(generics.GenericAPIView):
serializer_class = UpVoteSerializer
def post(self, request, format=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
post_id = serializer.data['post_id']
post= Post.objects.filter(pk=post_id).first()
if post:
post.upvotes_count += 1
post.save()
return Response({
'message': 'Post has been sucessfully upvoted'
})
return Response({
"message": "Post does not exist"
})
| true | true |
f71b6d9c04fe09d52e0af50a82d2a1e90ad0f9f1 | 8,352 | py | Python | estimagic/tests/differentiation/test_derivatives.py | vishalbelsare/estimagic | afae1be3a1566056d11962c495b67e64bc4a0822 | [
"BSD-3-Clause"
] | null | null | null | estimagic/tests/differentiation/test_derivatives.py | vishalbelsare/estimagic | afae1be3a1566056d11962c495b67e64bc4a0822 | [
"BSD-3-Clause"
] | null | null | null | estimagic/tests/differentiation/test_derivatives.py | vishalbelsare/estimagic | afae1be3a1566056d11962c495b67e64bc4a0822 | [
"BSD-3-Clause"
] | null | null | null | from functools import partial
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from pandas.testing import assert_frame_equal
from scipy.optimize._numdiff import approx_derivative
from estimagic.differentiation.derivatives import _consolidate_one_step_derivatives
from estimagic.differentiation.derivatives import _convert_evaluation_data_to_frame
from estimagic.differentiation.derivatives import (
_convert_richardson_candidates_to_frame,
)
from estimagic.differentiation.derivatives import _nan_skipping_batch_evaluator
from estimagic.differentiation.derivatives import _select_minimizer_along_axis
from estimagic.differentiation.derivatives import first_derivative
from estimagic.examples.numdiff_functions import logit_loglike
from estimagic.examples.numdiff_functions import logit_loglike_gradient
from estimagic.examples.numdiff_functions import logit_loglikeobs
from estimagic.examples.numdiff_functions import logit_loglikeobs_jacobian
from estimagic.utilities import namedtuple_from_kwargs
@pytest.fixture
def binary_choice_inputs():
    """Load the pickled logit example data stored next to this test module."""
    fixture_path = Path(__file__).resolve().parent / "binary_choice_inputs.pickle"
    return pd.read_pickle(fixture_path)
# Finite-difference methods accepted by first_derivative; reused by the
# parametrized tests below.
methods = ["forward", "backward", "central"]
@pytest.mark.parametrize("method", methods)
def test_first_derivative_jacobian(binary_choice_inputs, method):
    """Numerical jacobian of the logit loglikeobs matches the analytic one."""
    fix = binary_choice_inputs
    params = fix["params_np"]
    func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])

    result = first_derivative(
        func=func,
        method=method,
        params=params,
        n_steps=1,
        base_steps=None,
        lower_bounds=np.full(params.shape, -np.inf),
        upper_bounds=np.full(params.shape, np.inf),
        min_steps=1e-8,
        step_ratio=2.0,
        f0=func(params),
        n_cores=1,
    )

    analytic = logit_loglikeobs_jacobian(params, fix["y"], fix["x"])
    aaae(result["derivative"], analytic, decimal=6)
def test_first_derivative_jacobian_works_at_defaults(binary_choice_inputs):
    """Default settings of first_derivative reproduce the analytic jacobian."""
    fix = binary_choice_inputs
    params = fix["params_np"]
    func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])

    result = first_derivative(func=func, params=params, n_cores=1)

    analytic = logit_loglikeobs_jacobian(params, fix["y"], fix["x"])
    aaae(result["derivative"], analytic, decimal=6)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_gradient(binary_choice_inputs, method):
    """Numerical gradient of the logit loglike matches the analytic gradient."""
    fix = binary_choice_inputs
    params = fix["params_np"]
    func = partial(logit_loglike, y=fix["y"], x=fix["x"])

    result = first_derivative(
        func=func,
        method=method,
        params=params,
        n_steps=1,
        f0=func(params),
        n_cores=1,
    )

    analytic = logit_loglike_gradient(params, fix["y"], fix["x"])
    aaae(result["derivative"], analytic, decimal=4)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar(method):
    """The derivative of x ** 2 at 3.0 is 6 for every differentiation method.

    The previous version never passed ``method`` to ``first_derivative``, so
    the parametrization was dead and only the default (central) method was
    exercised. Passing it through requires a tolerance-based comparison,
    because one-sided differences carry a truncation error of order h.
    """

    def f(x):
        return x ** 2

    calculated = first_derivative(f, 3.0, method=method, n_cores=1)
    aaae(calculated["derivative"], 6.0, decimal=4)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar_with_return_func_value(method):
    """return_func_value adds the exact function value for every method.

    The previous version never passed ``method`` to ``first_derivative``, so
    the parametrization was dead. The exact dict comparison is split up:
    the function value is exact (f(3.0) == 9.0), while the derivative is
    only approximate for one-sided methods.
    """

    def f(x):
        return x ** 2

    calculated = first_derivative(
        f,
        3.0,
        method=method,
        return_func_value=True,
        return_info=False,
        n_cores=1,
    )

    # The original asserted dict equality, which also pinned the key set.
    assert set(calculated) == {"derivative", "func_value"}
    assert calculated["func_value"] == 9.0
    aaae(calculated["derivative"], 6.0, decimal=4)
def test_nan_skipping_batch_evaluator():
    """NaN arguments are skipped and mapped to all-NaN outputs of the right shape."""
    arguments = [np.nan, np.ones(2), np.array([3, 4]), np.nan, np.array([1, 2])]
    expected_results = [
        np.full(2, np.nan),
        np.ones(2),
        np.array([9, 16]),
        np.full(2, np.nan),
        np.array([1, 4]),
    ]

    results = _nan_skipping_batch_evaluator(
        func=lambda x: x ** 2,
        arguments=arguments,
        n_cores=1,
        error_handling="continue",
        batch_evaluator="joblib",
    )

    for got, want in zip(results, expected_results):
        if np.isnan(want).all():
            # NaN != NaN, so aaae cannot be used on the skipped entries.
            assert np.isnan(got).all()
        else:
            aaae(got, want)
def test_consolidate_one_step_derivatives():
    """Backward values fill in where the preferred forward estimate is NaN."""
    forward = np.ones((1, 4, 3))
    forward[:, :, 0] = np.nan  # first column unusable in the preferred method
    backward = np.zeros_like(forward)
    consolidated = _consolidate_one_step_derivatives(
        {"forward": forward, "backward": backward}, ["forward", "backward"]
    )
    aaae(consolidated, np.array([[0, 1, 1]] * 4))
@pytest.fixture()
def example_function_gradient_fixtures():
    """Provide a scalar-valued function R^3 -> R together with its gradient."""

    def func(x):
        """f:R^3 -> R"""
        a, b, c = x[0], x[1], x[2]
        # c - c keeps a (zero) dependency on the third coordinate.
        return np.sin(a) + np.cos(b) + c - c

    def gradient(x):
        """Gradient(f)(x):R^3 -> R^3"""
        a, b, c = x[0], x[1], x[2]
        return np.array([np.cos(a), -np.sin(b), c - c])

    return {"func": func, "func_prime": gradient}
@pytest.fixture()
def example_function_jacobian_fixtures():
    """Provide a vector-valued function R^3 -> R^2 together with its Jacobian."""

    def func(x):
        """f:R^3 -> R^2"""
        a, b, c = x[0], x[1], x[2]
        return np.array([np.sin(a) + np.cos(b), np.exp(c)])

    def jacobian(x):
        """Jacobian(f)(x):R^3 -> R^(2x3)"""
        a, b, c = x[0], x[1], x[2]
        return np.array([[np.cos(a), -np.sin(b), 0], [0, 0, np.exp(c)]])

    return {"func": func, "func_prime": jacobian}
def test_first_derivative_gradient_richardson(example_function_gradient_fixtures):
    """Richardson-extrapolated gradient agrees with scipy and the closed form."""
    func = example_function_gradient_fixtures["func"]
    analytical = example_function_gradient_fixtures["func_prime"](np.ones(3))
    scipy_grad = approx_derivative(func, np.ones(3))
    ours = first_derivative(func, np.ones(3), n_steps=3, method="central", n_cores=1)
    aaae(scipy_grad, ours["derivative"])
    aaae(analytical, ours["derivative"])
def test_first_derivative_jacobian_richardson(example_function_jacobian_fixtures):
    """Richardson-extrapolated Jacobian agrees with scipy and the closed form."""
    func = example_function_jacobian_fixtures["func"]
    analytical = example_function_jacobian_fixtures["func_prime"](np.ones(3))
    scipy_jac = approx_derivative(func, np.ones(3))
    ours = first_derivative(func, np.ones(3), n_steps=3, method="central", n_cores=1)
    aaae(scipy_jac, ours["derivative"])
    aaae(analytical, ours["derivative"])
def test_convert_evaluation_data_to_frame():
    """Step and evaluation namedtuples are flattened into a tidy long frame."""
    steps_arr = np.arange(4).reshape(2, 2)
    evals_arr = steps_arr.reshape(2, 1, 2)
    steps = namedtuple_from_kwargs(pos=steps_arr, neg=-steps_arr)
    evals = namedtuple_from_kwargs(pos=evals_arr, neg=-evals_arr)
    rows = [
        [1, 0, 0, 0, 0, 0],
        [1, 0, 1, 0, 1, 1],
        [1, 1, 0, 0, 2, 2],
        [1, 1, 1, 0, 3, 3],
        [-1, 0, 0, 0, 0, 0],
        [-1, 0, 1, 0, 1, -1],
        [-1, 1, 0, 0, 2, -2],
        [-1, 1, 1, 0, 3, -3],
    ]
    expected = pd.DataFrame(
        rows, columns=["sign", "step_number", "dim_x", "dim_f", "step", "eval"]
    )
    got = _convert_evaluation_data_to_frame(steps, evals)
    assert_frame_equal(expected, got.reset_index(), check_dtype=False)
def test__convert_richardson_candidates_to_frame():
    """Richardson candidate dicts are reshaped into an indexed tidy frame."""
    jac = {
        "forward1": np.array([[0, 1], [2, 3]]),
        "forward2": np.array([[0.5, 1], [2, 3]]),
    }
    err = {
        "forward1": np.array([[0, 0], [0, 1]]),
        "forward2": np.array([[1, 0], [0, 0]]),
    }
    rows = [
        ["forward", 1, 0, 0, 0, 0],
        ["forward", 1, 1, 0, 1, 0],
        ["forward", 1, 0, 1, 2, 0],
        ["forward", 1, 1, 1, 3, 1],
        ["forward", 2, 0, 0, 0.5, 1],
        ["forward", 2, 1, 0, 1, 0],
        ["forward", 2, 0, 1, 2, 0],
        ["forward", 2, 1, 1, 3, 0],
    ]
    expected = pd.DataFrame(
        rows, columns=["method", "num_term", "dim_x", "dim_f", "der", "err"]
    ).set_index(["method", "num_term", "dim_x", "dim_f"])
    got = _convert_richardson_candidates_to_frame(jac, err)
    assert_frame_equal(got, expected, check_dtype=False)
def test__select_minimizer_along_axis():
    """For every entry the derivative with the smaller error estimate wins."""
    candidates = np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]])
    errors = np.array([[[0, 1], [1, 0]], [[1, 0], [0, 1]]])
    got = _select_minimizer_along_axis(candidates, errors)
    aaae((np.array([[0, 5], [6, 3]]), np.array([[0, 0], [0, 0]])), got)
| 32.498054 | 88 | 0.639727 | from functools import partial
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal as aaae
from pandas.testing import assert_frame_equal
from scipy.optimize._numdiff import approx_derivative
from estimagic.differentiation.derivatives import _consolidate_one_step_derivatives
from estimagic.differentiation.derivatives import _convert_evaluation_data_to_frame
from estimagic.differentiation.derivatives import (
_convert_richardson_candidates_to_frame,
)
from estimagic.differentiation.derivatives import _nan_skipping_batch_evaluator
from estimagic.differentiation.derivatives import _select_minimizer_along_axis
from estimagic.differentiation.derivatives import first_derivative
from estimagic.examples.numdiff_functions import logit_loglike
from estimagic.examples.numdiff_functions import logit_loglike_gradient
from estimagic.examples.numdiff_functions import logit_loglikeobs
from estimagic.examples.numdiff_functions import logit_loglikeobs_jacobian
from estimagic.utilities import namedtuple_from_kwargs
@pytest.fixture
def binary_choice_inputs():
fix_path = Path(__file__).resolve().parent / "binary_choice_inputs.pickle"
inputs = pd.read_pickle(fix_path)
return inputs
methods = ["forward", "backward", "central"]
@pytest.mark.parametrize("method", methods)
def test_first_derivative_jacobian(binary_choice_inputs, method):
fix = binary_choice_inputs
func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])
calculated = first_derivative(
func=func,
method=method,
params=fix["params_np"],
n_steps=1,
base_steps=None,
lower_bounds=np.full(fix["params_np"].shape, -np.inf),
upper_bounds=np.full(fix["params_np"].shape, np.inf),
min_steps=1e-8,
step_ratio=2.0,
f0=func(fix["params_np"]),
n_cores=1,
)
expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=6)
def test_first_derivative_jacobian_works_at_defaults(binary_choice_inputs):
fix = binary_choice_inputs
func = partial(logit_loglikeobs, y=fix["y"], x=fix["x"])
calculated = first_derivative(func=func, params=fix["params_np"], n_cores=1)
expected = logit_loglikeobs_jacobian(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=6)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_gradient(binary_choice_inputs, method):
fix = binary_choice_inputs
func = partial(logit_loglike, y=fix["y"], x=fix["x"])
calculated = first_derivative(
func=func,
method=method,
params=fix["params_np"],
n_steps=1,
f0=func(fix["params_np"]),
n_cores=1,
)
expected = logit_loglike_gradient(fix["params_np"], fix["y"], fix["x"])
aaae(calculated["derivative"], expected, decimal=4)
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar(method):
def f(x):
return x ** 2
calculated = first_derivative(f, 3.0, n_cores=1)
expected = 6.0
assert calculated["derivative"] == expected
@pytest.mark.parametrize("method", methods)
def test_first_derivative_scalar_with_return_func_value(method):
def f(x):
return x ** 2
calculated = first_derivative(
f, 3.0, return_func_value=True, return_info=False, n_cores=1
)
expected = {"derivative": 6.0, "func_value": 9.0}
assert calculated == expected
def test_nan_skipping_batch_evaluator():
arglist = [np.nan, np.ones(2), np.array([3, 4]), np.nan, np.array([1, 2])]
expected = [
np.full(2, np.nan),
np.ones(2),
np.array([9, 16]),
np.full(2, np.nan),
np.array([1, 4]),
]
calculated = _nan_skipping_batch_evaluator(
func=lambda x: x ** 2,
arguments=arglist,
n_cores=1,
error_handling="continue",
batch_evaluator="joblib",
)
for arr_calc, arr_exp in zip(calculated, expected):
if np.isnan(arr_exp).all():
assert np.isnan(arr_calc).all()
else:
aaae(arr_calc, arr_exp)
def test_consolidate_one_step_derivatives():
forward = np.ones((1, 4, 3))
forward[:, :, 0] = np.nan
backward = np.zeros_like(forward)
calculated = _consolidate_one_step_derivatives(
{"forward": forward, "backward": backward}, ["forward", "backward"]
)
expected = np.array([[0, 1, 1]] * 4)
aaae(calculated, expected)
@pytest.fixture()
def example_function_gradient_fixtures():
def f(x):
x1, x2, x3 = x[0], x[1], x[2]
y1 = np.sin(x1) + np.cos(x2) + x3 - x3
return y1
def fprime(x):
x1, x2, x3 = x[0], x[1], x[2]
grad = np.array([np.cos(x1), -np.sin(x2), x3 - x3])
return grad
return {"func": f, "func_prime": fprime}
@pytest.fixture()
def example_function_jacobian_fixtures():
def f(x):
x1, x2, x3 = x[0], x[1], x[2]
y1, y2 = np.sin(x1) + np.cos(x2), np.exp(x3)
return np.array([y1, y2])
def fprime(x):
x1, x2, x3 = x[0], x[1], x[2]
jac = np.array([[np.cos(x1), -np.sin(x2), 0], [0, 0, np.exp(x3)]])
return jac
return {"func": f, "func_prime": fprime}
def test_first_derivative_gradient_richardson(example_function_gradient_fixtures):
f = example_function_gradient_fixtures["func"]
fprime = example_function_gradient_fixtures["func_prime"]
true_fprime = fprime(np.ones(3))
scipy_fprime = approx_derivative(f, np.ones(3))
our_fprime = first_derivative(f, np.ones(3), n_steps=3, method="central", n_cores=1)
aaae(scipy_fprime, our_fprime["derivative"])
aaae(true_fprime, our_fprime["derivative"])
def test_first_derivative_jacobian_richardson(example_function_jacobian_fixtures):
f = example_function_jacobian_fixtures["func"]
fprime = example_function_jacobian_fixtures["func_prime"]
true_fprime = fprime(np.ones(3))
scipy_fprime = approx_derivative(f, np.ones(3))
our_fprime = first_derivative(f, np.ones(3), n_steps=3, method="central", n_cores=1)
aaae(scipy_fprime, our_fprime["derivative"])
aaae(true_fprime, our_fprime["derivative"])
def test_convert_evaluation_data_to_frame():
arr = np.arange(4).reshape(2, 2)
arr2 = arr.reshape(2, 1, 2)
steps = namedtuple_from_kwargs(pos=arr, neg=-arr)
evals = namedtuple_from_kwargs(pos=arr2, neg=-arr2)
expected = [
[1, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 1, 1],
[1, 1, 0, 0, 2, 2],
[1, 1, 1, 0, 3, 3],
[-1, 0, 0, 0, 0, 0],
[-1, 0, 1, 0, 1, -1],
[-1, 1, 0, 0, 2, -2],
[-1, 1, 1, 0, 3, -3],
]
expected = pd.DataFrame(
expected, columns=["sign", "step_number", "dim_x", "dim_f", "step", "eval"]
)
got = _convert_evaluation_data_to_frame(steps, evals)
assert_frame_equal(expected, got.reset_index(), check_dtype=False)
def test__convert_richardson_candidates_to_frame():
jac = {
"forward1": np.array([[0, 1], [2, 3]]),
"forward2": np.array([[0.5, 1], [2, 3]]),
}
err = {
"forward1": np.array([[0, 0], [0, 1]]),
"forward2": np.array([[1, 0], [0, 0]]),
}
expected = [
["forward", 1, 0, 0, 0, 0],
["forward", 1, 1, 0, 1, 0],
["forward", 1, 0, 1, 2, 0],
["forward", 1, 1, 1, 3, 1],
["forward", 2, 0, 0, 0.5, 1],
["forward", 2, 1, 0, 1, 0],
["forward", 2, 0, 1, 2, 0],
["forward", 2, 1, 1, 3, 0],
]
expected = pd.DataFrame(
expected, columns=["method", "num_term", "dim_x", "dim_f", "der", "err"]
)
expected = expected.set_index(["method", "num_term", "dim_x", "dim_f"])
got = _convert_richardson_candidates_to_frame(jac, err)
assert_frame_equal(got, expected, check_dtype=False)
def test__select_minimizer_along_axis():
der = np.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]])
err = np.array([[[0, 1], [1, 0]], [[1, 0], [0, 1]]])
expected = (np.array([[0, 5], [6, 3]]), np.array([[0, 0], [0, 0]]))
got = _select_minimizer_along_axis(der, err)
aaae(expected, got)
| true | true |
f71b6dab14627d4699e80c6acbce3ef420b0a543 | 35 | py | Python | ciphey/basemods/Searchers/__init__.py | paramint/ciphey | 26195dfe1f216c3d43d07b50279b64eb026f0c13 | [
"MIT"
] | 1 | 2021-05-30T19:55:00.000Z | 2021-05-30T19:55:00.000Z | ciphey/basemods/Searchers/__init__.py | usama7628674/Ciphey | e18801c506e93e7e9377d0bbc6870ecd84ae2f61 | [
"MIT"
] | 4 | 2020-11-13T19:01:56.000Z | 2022-02-10T02:14:00.000Z | ciphey/basemods/Searchers/__init__.py | usama7628674/Ciphey | e18801c506e93e7e9377d0bbc6870ecd84ae2f61 | [
"MIT"
] | null | null | null | from . import ausearch, perfection
| 17.5 | 34 | 0.8 | from . import ausearch, perfection
| true | true |
f71b6e4295ed13a2ac4d43cdf95ee46cabd50a60 | 18,334 | py | Python | python/helpers/pycharm/teamcity/pytest_plugin.py | janchochol/intellij-community | fce543ac6018b411e519fe01ddc71a8c1bbd138b | [
"Apache-2.0"
] | null | null | null | python/helpers/pycharm/teamcity/pytest_plugin.py | janchochol/intellij-community | fce543ac6018b411e519fe01ddc71a8c1bbd138b | [
"Apache-2.0"
] | null | null | null | python/helpers/pycharm/teamcity/pytest_plugin.py | janchochol/intellij-community | fce543ac6018b411e519fe01ddc71a8c1bbd138b | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
"""
Aaron Buchanan
Nov. 2012
Plug-in for py.test for reporting to TeamCity server
Report results to TeamCity during test execution for immediate reporting
when using TeamCity.
This should be installed as a py.test plugin and will be automatically enabled by running
tests under TeamCity build.
"""
import os
import pprint
import sys
import re
import traceback
from datetime import timedelta
from teamcity.messages import TeamcityServiceMessages
from teamcity.common import convert_error_to_string, dump_test_stderr, dump_test_stdout
from teamcity import is_running_under_teamcity
from teamcity import diff_tools
diff_tools.patch_unittest_diff()
def unformat_pytest_explanation(s):
    """
    Undo _pytest.assertion.util.format_explanation

    Turns every literal backslash-n escape back into a real newline.
    """
    return "\n".join(s.split("\\n"))
def fetch_diff_error_from_message(err_message, swap_diff):
    """Try to extract an expected/actual diff from a pytest assertion message.

    Returns a ``diff_tools.EqualsAssertionError`` when the message contains a
    single ``==`` comparison, otherwise ``None``.  ``swap_diff`` exchanges the
    expected and actual sides of the reported diff.
    """
    lines = err_message.split("\n")
    diff_error_message = None
    if err_message.startswith("AssertionError: assert"):
        # Single-line form: everything after the prefix is the comparison.
        line_with_diff = lines[0][len("AssertionError: assert "):]
    elif len(lines) > 1:
        # Multi-line form: line 0 is the message, line 1 the assert expression.
        diff_error_message = lines[0]
        line_with_diff = lines[1][len("assert "):]
    else:
        line_with_diff = None
    if not (line_with_diff and line_with_diff.count("==") == 1):
        return None
    sides = [side.strip() for side in line_with_diff.split("==")]
    sides = [s[1:-1] if s.startswith("'") or s.startswith('"') else s for s in sides]
    # Pytest cuts too long lines, no need to check is_too_big
    expected, actual = sides[1], sides[0]
    if swap_diff:
        expected, actual = actual, expected
    return diff_tools.EqualsAssertionError(
        unformat_pytest_explanation(expected),
        unformat_pytest_explanation(actual),
        diff_error_message,
    )
def _is_bool_supported():
"""
Type "bool" is not supported before 2.9
"""
try:
from pytest import __version__
from distutils import version
return version.LooseVersion(str(__version__)) >= version.LooseVersion("2.9")
except ImportError:
return False
def pytest_addoption(parser):
    """Register the --teamcity/--no-teamcity flags and related ini settings."""
    group = parser.getgroup("terminal reporting", "reporting", after="general")
    group._addoption(
        '--teamcity',
        action="count",
        dest="teamcity",
        default=0,
        help="force output of JetBrains TeamCity service messages",
    )
    group._addoption(
        '--no-teamcity',
        action="count",
        dest="no_teamcity",
        default=0,
        help="disable output of JetBrains TeamCity service messages",
    )
    kwargs = {"help": "skip output of passed tests for JetBrains TeamCity service messages"}
    if _is_bool_supported():
        # "type" for ini options only exists on pytest >= 2.9.
        kwargs["type"] = "bool"
    parser.addini("skippassedoutput", **kwargs)
    parser.addini("swapdiff", **kwargs)
def pytest_configure(config):
    """Install the TeamCity reporter when running under TeamCity (or when forced).

    Explicit command-line flags win over the environment autodetection.
    """
    if config.option.no_teamcity >= 1:
        enabled = False
    elif config.option.teamcity >= 1:
        enabled = True
    else:
        enabled = is_running_under_teamcity()
    if not enabled:
        return
    capture_enabled = getattr(config.option, 'capture', 'fd') != 'no'
    coverage_controller = _get_coverage_controller(config)
    skip_passed = bool(config.getini('skippassedoutput'))
    config.option.verbose = 2  # don't truncate assert explanations
    config._teamcityReporting = EchoTeamCityMessages(
        capture_enabled,
        coverage_controller,
        skip_passed,
        bool(config.getini('swapdiff')),
    )
    config.pluginmanager.register(config._teamcityReporting)
def pytest_unconfigure(config):
    """Remove and unregister the reporter installed by pytest_configure, if any."""
    reporter = getattr(config, '_teamcityReporting', None)
    if not reporter:
        return
    del config._teamcityReporting
    config.pluginmanager.unregister(reporter)
def _get_coverage_controller(config):
cov_plugin = config.pluginmanager.getplugin('_cov')
if not cov_plugin:
return None
return cov_plugin.cov_controller
class EchoTeamCityMessages(object):
    """pytest plugin object that reports test progress as TeamCity service messages.

    One instance is registered by ``pytest_configure`` and receives the standard
    pytest reporting hooks.  Test node ids are normalised into dotted TeamCity
    test names, captured stdout/stderr is forwarded per test, and (when the
    pytest-cov plugin is active) coverage statistics are published at the end
    of the run.
    """

    def __init__(self, output_capture_enabled, coverage_controller, skip_passed_output, swap_diff):
        """Store configuration flags and create the service-message writer.

        :param output_capture_enabled: True when pytest captures stdout/stderr itself.
        :param coverage_controller: pytest-cov controller or None.
        :param skip_passed_output: suppress output reporting for passed tests.
        :param swap_diff: exchange expected/actual sides in reported diffs.
        """
        self.coverage_controller = coverage_controller
        self.output_capture_enabled = output_capture_enabled
        self.skip_passed_output = skip_passed_output
        self.teamcity = TeamcityServiceMessages()
        # Ids of tests for which testStarted has been emitted but not yet testFinished.
        self.test_start_reported_mark = set()
        self.max_reported_output_size = 1 * 1024 * 1024
        self.reported_output_chunk_size = 50000
        self.swap_diff = swap_diff

    def get_id_from_location(self, location):
        """Return a special TeamCity id for pylint/PEP8 pseudo-tests, else None."""
        if type(location) is not tuple or len(location) != 3 or not hasattr(location[2], "startswith"):
            return None

        def convert_file_to_id(filename):
            # Strip .py/.pyc and turn the path into a dotted module-like id.
            filename = re.sub(r"\.pyc?$", "", filename)
            return filename.replace(os.sep, ".").replace("/", ".")

        # NOTE(review): this helper appears unused within the class.
        def add_prefix_to_filename_id(filename_id, prefix):
            dot_location = filename_id.rfind('.')
            if dot_location <= 0 or dot_location >= len(filename_id) - 1:
                return None
            return filename_id[:dot_location + 1] + prefix + filename_id[dot_location + 1:]

        pylint_prefix = '[pylint] '
        if location[2].startswith(pylint_prefix):
            id_from_file = convert_file_to_id(location[2][len(pylint_prefix):])
            return id_from_file + ".Pylint"
        if location[2] == "PEP8-check":
            id_from_file = convert_file_to_id(location[0])
            return id_from_file + ".PEP8"
        return None

    def format_test_id(self, nodeid, location):
        """Convert a pytest node id into a dotted TeamCity test id.

        ``path/test_mod.py::TestCls::test_fn[param]`` becomes
        ``path.test_mod.TestCls.test_fn(param)``.
        """
        id_from_location = self.get_id_from_location(location)
        if id_from_location is not None:
            return id_from_location
        test_id = nodeid
        if test_id:
            if test_id.find("::") < 0:
                test_id += "::top_level"
        else:
            test_id = "top_level"
        first_bracket = test_id.find("[")
        if first_bracket > 0:
            # [] -> (), make it look like nose parameterized tests
            params = "(" + test_id[first_bracket + 1:]
            if params.endswith("]"):
                params = params[:-1] + ")"
            test_id = test_id[:first_bracket]
            if test_id.endswith("::"):
                test_id = test_id[:-2]
        else:
            params = ""
        test_id = test_id.replace("::()::", "::")
        test_id = re.sub(r"\.pyc?::", r"::", test_id)
        test_id = test_id.replace(".", "_").replace(os.sep, ".").replace("/", ".").replace('::', '.')
        if params:
            params = params.replace(".", "_")
            test_id += params
        return test_id

    def format_location(self, location):
        """Render a (file, line, name) location tuple as 'file:line (name)'."""
        if type(location) is tuple and len(location) == 3:
            return "%s:%s (%s)" % (str(location[0]), str(location[1]), str(location[2]))
        return str(location)

    def pytest_collection_finish(self, session):
        """Report the total number of collected tests to TeamCity."""
        self.teamcity.testCount(len(session.items))

    def pytest_runtest_logstart(self, nodeid, location):
        """Emit testStarted as soon as pytest begins running a test."""
        # test name fetched from location passed as metainfo to PyCharm
        # it will be used to run specific test
        # See IDEA-176950, PY-31836
        test_name = location[2]
        if test_name:
            test_name = str(test_name).split(".")[-1]
        self.ensure_test_start_reported(self.format_test_id(nodeid, location), test_name)

    def ensure_test_start_reported(self, test_id, metainfo=None):
        """Emit testStarted exactly once per test id (idempotent)."""
        if test_id not in self.test_start_reported_mark:
            # When pytest captures output itself we forward it explicitly,
            # so TeamCity must not capture it a second time.
            if self.output_capture_enabled:
                capture_standard_output = "false"
            else:
                capture_standard_output = "true"
            self.teamcity.testStarted(test_id, flowId=test_id, captureStandardOutput=capture_standard_output, metainfo=metainfo)
            self.test_start_reported_mark.add(test_id)

    def report_has_output(self, report):
        """Return True when the report holds stdout/stderr for its own phase."""
        for (secname, data) in report.sections:
            if report.when in secname and ('stdout' in secname or 'stderr' in secname):
                return True
        return False

    def report_test_output(self, report, test_id):
        """Forward captured stdout/stderr sections of the report to TeamCity."""
        for (secname, data) in report.sections:
            # https://github.com/JetBrains/teamcity-messages/issues/112
            # CollectReport didn't have 'when' property, but now it has.
            # But we still need output on 'collect' state
            if hasattr(report, "when") and report.when not in secname and report.when != 'collect':
                continue
            if not data:
                continue

            if 'stdout' in secname:
                dump_test_stdout(self.teamcity, test_id, test_id, data)

            elif 'stderr' in secname:
                dump_test_stderr(self.teamcity, test_id, test_id, data)

    def report_test_finished(self, test_id, duration=None):
        """Emit testFinished and clear the started-mark for this id."""
        self.teamcity.testFinished(test_id, testDuration=duration, flowId=test_id)
        self.test_start_reported_mark.remove(test_id)

    def report_test_failure(self, test_id, report, message=None, report_output=True):
        """Report a failed test, extracting an expected/actual diff when possible.

        The diff is looked up first in a serialized ``EqualsAssertionError``
        embedded in the crash message, then parsed from a plain AssertionError
        message, and finally taken from the locally stored exception.
        """
        if hasattr(report, 'duration'):
            duration = timedelta(seconds=report.duration)
        else:
            duration = None

        if message is None:
            message = self.format_location(report.location)

        self.ensure_test_start_reported(test_id)
        if report_output:
            self.report_test_output(report, test_id)

        diff_error = None
        try:
            err_message = str(report.longrepr.reprcrash.message)

            diff_name = diff_tools.EqualsAssertionError.__name__
            # There is a string like "foo.bar.DiffError: [serialized_data]"
            if diff_name in err_message:
                serialized_data = err_message[err_message.index(diff_name) + len(diff_name) + 1:]
                diff_error = diff_tools.deserialize_error(serialized_data)

            # AssertionError is patched in py.test, we can try to fetch diff from it
            # In general case message starts with "AssertionError: ", but can also starts with "assert" for top-level
            # function. To support both cases we unify them
            if err_message.startswith("assert"):
                err_message = "AssertionError: " + err_message
            if err_message.startswith("AssertionError:"):
                diff_error = fetch_diff_error_from_message(err_message, self.swap_diff)
        except Exception:
            pass

        if not diff_error:
            from .jb_local_exc_store import get_exception
            diff_error = get_exception()

        if diff_error:
            # Cut everything after postfix: it is internal view of DiffError
            strace = str(report.longrepr)
            data_postfix = "_ _ _ _ _"
            if data_postfix in strace:
                strace = strace[0:strace.index(data_postfix)]
            self.teamcity.testFailed(test_id, diff_error.msg if diff_error.msg else message, strace,
                                     flowId=test_id,
                                     comparison_failure=diff_error
                                     )
        else:
            self.teamcity.testFailed(test_id, message, str(report.longrepr), flowId=test_id)

        self.report_test_finished(test_id, duration)

    def report_test_skip(self, test_id, report):
        """Report a skipped test as ignored, with the skip reason."""
        if type(report.longrepr) is tuple and len(report.longrepr) == 3:
            # longrepr is (file, line, reason) for skips raised via pytest.skip.
            reason = report.longrepr[2]
        else:
            reason = str(report.longrepr)

        if hasattr(report, 'duration'):
            duration = timedelta(seconds=report.duration)
        else:
            duration = None

        self.ensure_test_start_reported(test_id)
        self.report_test_output(report, test_id)
        self.teamcity.testIgnored(test_id, reason, flowId=test_id)
        self.report_test_finished(test_id, duration)

    def pytest_assertrepr_compare(self, config, op, left, right):
        """Render ==/!= comparisons via pprint so diffs are parseable later."""
        if op in ('==', '!='):
            return ['{0} {1} {2}'.format(pprint.pformat(left), op, pprint.pformat(right))]

    def pytest_runtest_logreport(self, report):
        """Dispatch a phase report (setup/call/teardown) to the right reporter.

        :type report: _pytest.runner.TestReport
        """
        test_id = self.format_test_id(report.nodeid, report.location)
        duration = timedelta(seconds=report.duration)

        if report.passed:
            # Do not report passed setup/teardown if no output
            if report.when == 'call':
                self.ensure_test_start_reported(test_id)
                if not self.skip_passed_output:
                    self.report_test_output(report, test_id)
                self.report_test_finished(test_id, duration)
            else:
                if self.report_has_output(report) and not self.skip_passed_output:
                    block_name = "test " + report.when
                    self.teamcity.blockOpened(block_name, flowId=test_id)
                    self.report_test_output(report, test_id)
                    self.teamcity.blockClosed(block_name, flowId=test_id)
        elif report.failed:
            if report.when == 'call':
                self.report_test_failure(test_id, report)
            elif report.when == 'setup':
                if self.report_has_output(report):
                    self.teamcity.blockOpened("test setup", flowId=test_id)
                    self.report_test_output(report, test_id)
                    self.teamcity.blockClosed("test setup", flowId=test_id)

                self.report_test_failure(test_id, report, message="test setup failed", report_output=False)
            elif report.when == 'teardown':
                # Report failed teardown as a separate test as original test is already finished
                self.report_test_failure(test_id + "_teardown", report)
        elif report.skipped:
            self.report_test_skip(test_id, report)

    def pytest_collectreport(self, report):
        """Report collection errors/skips as pseudo-tests with a _collect suffix."""
        test_id = self.format_test_id(report.nodeid, report.location) + "_collect"

        if report.failed:
            self.report_test_failure(test_id, report)
        elif report.skipped:
            self.report_test_skip(test_id, report)

    def pytest_terminal_summary(self):
        """At the end of the run, publish coverage statistics if pytest-cov is active."""
        if self.coverage_controller is not None:
            try:
                self._report_coverage()
            except Exception:
                tb = traceback.format_exc()
                self.teamcity.customMessage("Coverage statistics reporting failed", "ERROR", errorDetails=tb)

    def _report_coverage(self):
        """Compute totals from coverage.py and emit TeamCity build statistics.

        Wraps coverage.py's reporter machinery behind small shim classes so
        the same code works across several coverage.py versions (pre- and
        post-5.0 reporter APIs).
        """
        from coverage.misc import NotPython
        from coverage.results import Numbers

        class _Reporter(object):
            # Shim: provides find_file_reporters() on both old (Reporter class)
            # and new (get_analysis_to_report) coverage.py APIs.
            def __init__(self, coverage, config):
                try:
                    from coverage.report import Reporter
                except ImportError:
                    # Support for coverage >= 5.0.1.
                    from coverage.report import get_analysis_to_report

                    class Reporter(object):
                        def __init__(self, coverage, config):
                            self.coverage = coverage
                            self.config = config
                            self._file_reporters = []

                        def find_file_reporters(self, morfs):
                            return [fr for fr, _ in get_analysis_to_report(self.coverage, morfs)]

                self._reporter = Reporter(coverage, config)

            def find_file_reporters(self, morfs):
                self.file_reporters = self._reporter.find_file_reporters(morfs)

            def __getattr__(self, name):
                return getattr(self._reporter, name)

        class _CoverageReporter(_Reporter):
            # Walks every measured file, sums line/branch counters, and reports
            # analysis failures as individual TeamCity tests.
            def __init__(self, coverage, config, messages):
                super(_CoverageReporter, self).__init__(coverage, config)
                if hasattr(coverage, 'data'):
                    self.branches = coverage.data.has_arcs()
                else:
                    self.branches = coverage.get_data().has_arcs()
                self.messages = messages

            def report(self, morfs, outfile=None):
                if hasattr(self, 'find_code_units'):
                    self.find_code_units(morfs)
                else:
                    self.find_file_reporters(morfs)

                total = Numbers()

                if hasattr(self, 'code_units'):
                    units = self.code_units
                else:
                    units = self.file_reporters

                for cu in units:
                    try:
                        analysis = self.coverage._analyze(cu)
                        nums = analysis.numbers
                        total += nums
                    except KeyboardInterrupt:
                        raise
                    except Exception:
                        if self.config.ignore_errors:
                            continue

                        err = sys.exc_info()
                        typ, msg = err[:2]
                        if typ is NotPython and not cu.should_be_python():
                            continue

                        test_id = cu.name
                        details = convert_error_to_string(err)

                        self.messages.testStarted(test_id, flowId=test_id)
                        self.messages.testFailed(test_id, message="Coverage analysis failed", details=details, flowId=test_id)
                        self.messages.testFinished(test_id, flowId=test_id)

                if total.n_files > 0:
                    covered = total.n_executed
                    total_statements = total.n_statements
                    if self.branches:
                        covered += total.n_executed_branches
                        total_statements += total.n_branches

                    self.messages.buildStatisticLinesCovered(covered)
                    self.messages.buildStatisticTotalLines(total_statements)
                    self.messages.buildStatisticLinesUncovered(total_statements - covered)

        reporter = _CoverageReporter(
            self.coverage_controller.cov,
            self.coverage_controller.cov.config,
            self.teamcity,
        )
        reporter.report(None)
| 38.761099 | 128 | 0.608432 |
import os
import pprint
import sys
import re
import traceback
from datetime import timedelta
from teamcity.messages import TeamcityServiceMessages
from teamcity.common import convert_error_to_string, dump_test_stderr, dump_test_stdout
from teamcity import is_running_under_teamcity
from teamcity import diff_tools
diff_tools.patch_unittest_diff()
def unformat_pytest_explanation(s):
return s.replace("\\n", "\n")
def fetch_diff_error_from_message(err_message, swap_diff):
line_with_diff = None
diff_error_message = None
lines = err_message.split("\n")
if err_message.startswith("AssertionError: assert"):
line_with_diff = lines[0][len("AssertionError: assert "):]
elif len(err_message.split("\n")) > 1:
err_line = lines[1]
line_with_diff = err_line[len("assert "):]
diff_error_message = lines[0]
if line_with_diff and line_with_diff.count("==") == 1:
parts = [x.strip() for x in line_with_diff.split("==")]
parts = [s[1:-1] if s.startswith("'") or s.startswith('"') else s for s in parts]
# Pytest cuts too long lines, no need to check is_too_big
expected, actual = parts[1], parts[0]
if swap_diff:
expected, actual = actual, expected
expected = unformat_pytest_explanation(expected)
actual = unformat_pytest_explanation(actual)
return diff_tools.EqualsAssertionError(expected, actual, diff_error_message)
else:
return None
def _is_bool_supported():
try:
from pytest import __version__
from distutils import version
return version.LooseVersion(str(__version__)) >= version.LooseVersion("2.9")
except ImportError:
return False
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting", "reporting", after="general")
group._addoption('--teamcity', action="count",
dest="teamcity", default=0, help="force output of JetBrains TeamCity service messages")
group._addoption('--no-teamcity', action="count",
dest="no_teamcity", default=0, help="disable output of JetBrains TeamCity service messages")
kwargs = {"help": "skip output of passed tests for JetBrains TeamCity service messages"}
if _is_bool_supported():
kwargs.update({"type": "bool"})
parser.addini("skippassedoutput", **kwargs)
parser.addini("swapdiff", **kwargs)
def pytest_configure(config):
if config.option.no_teamcity >= 1:
enabled = False
elif config.option.teamcity >= 1:
enabled = True
else:
enabled = is_running_under_teamcity()
if enabled:
output_capture_enabled = getattr(config.option, 'capture', 'fd') != 'no'
coverage_controller = _get_coverage_controller(config)
skip_passed_output = bool(config.getini('skippassedoutput'))
config.option.verbose = 2 # don't truncate assert explanations
config._teamcityReporting = EchoTeamCityMessages(
output_capture_enabled,
coverage_controller,
skip_passed_output,
bool(config.getini('swapdiff'))
)
config.pluginmanager.register(config._teamcityReporting)
def pytest_unconfigure(config):
teamcity_reporting = getattr(config, '_teamcityReporting', None)
if teamcity_reporting:
del config._teamcityReporting
config.pluginmanager.unregister(teamcity_reporting)
def _get_coverage_controller(config):
cov_plugin = config.pluginmanager.getplugin('_cov')
if not cov_plugin:
return None
return cov_plugin.cov_controller
class EchoTeamCityMessages(object):
def __init__(self, output_capture_enabled, coverage_controller, skip_passed_output, swap_diff):
self.coverage_controller = coverage_controller
self.output_capture_enabled = output_capture_enabled
self.skip_passed_output = skip_passed_output
self.teamcity = TeamcityServiceMessages()
self.test_start_reported_mark = set()
self.max_reported_output_size = 1 * 1024 * 1024
self.reported_output_chunk_size = 50000
self.swap_diff = swap_diff
def get_id_from_location(self, location):
if type(location) is not tuple or len(location) != 3 or not hasattr(location[2], "startswith"):
return None
def convert_file_to_id(filename):
filename = re.sub(r"\.pyc?$", "", filename)
return filename.replace(os.sep, ".").replace("/", ".")
def add_prefix_to_filename_id(filename_id, prefix):
dot_location = filename_id.rfind('.')
if dot_location <= 0 or dot_location >= len(filename_id) - 1:
return None
return filename_id[:dot_location + 1] + prefix + filename_id[dot_location + 1:]
pylint_prefix = '[pylint] '
if location[2].startswith(pylint_prefix):
id_from_file = convert_file_to_id(location[2][len(pylint_prefix):])
return id_from_file + ".Pylint"
if location[2] == "PEP8-check":
id_from_file = convert_file_to_id(location[0])
return id_from_file + ".PEP8"
return None
def format_test_id(self, nodeid, location):
id_from_location = self.get_id_from_location(location)
if id_from_location is not None:
return id_from_location
test_id = nodeid
if test_id:
if test_id.find("::") < 0:
test_id += "::top_level"
else:
test_id = "top_level"
first_bracket = test_id.find("[")
if first_bracket > 0:
# [] -> (), make it look like nose parameterized tests
params = "(" + test_id[first_bracket + 1:]
if params.endswith("]"):
params = params[:-1] + ")"
test_id = test_id[:first_bracket]
if test_id.endswith("::"):
test_id = test_id[:-2]
else:
params = ""
test_id = test_id.replace("::()::", "::")
test_id = re.sub(r"\.pyc?::", r"::", test_id)
test_id = test_id.replace(".", "_").replace(os.sep, ".").replace("/", ".").replace('::', '.')
if params:
params = params.replace(".", "_")
test_id += params
return test_id
def format_location(self, location):
if type(location) is tuple and len(location) == 3:
return "%s:%s (%s)" % (str(location[0]), str(location[1]), str(location[2]))
return str(location)
def pytest_collection_finish(self, session):
self.teamcity.testCount(len(session.items))
def pytest_runtest_logstart(self, nodeid, location):
# test name fetched from location passed as metainfo to PyCharm
# it will be used to run specific test
# See IDEA-176950, PY-31836
test_name = location[2]
if test_name:
test_name = str(test_name).split(".")[-1]
self.ensure_test_start_reported(self.format_test_id(nodeid, location), test_name)
def ensure_test_start_reported(self, test_id, metainfo=None):
if test_id not in self.test_start_reported_mark:
if self.output_capture_enabled:
capture_standard_output = "false"
else:
capture_standard_output = "true"
self.teamcity.testStarted(test_id, flowId=test_id, captureStandardOutput=capture_standard_output, metainfo=metainfo)
self.test_start_reported_mark.add(test_id)
def report_has_output(self, report):
for (secname, data) in report.sections:
if report.when in secname and ('stdout' in secname or 'stderr' in secname):
return True
return False
    def report_test_output(self, report, test_id):
        """Forward captured stdout/stderr sections of *report* to TeamCity."""
        for (secname, data) in report.sections:
            # https://github.com/JetBrains/teamcity-messages/issues/112
            # CollectReport didn't have 'when' property, but now it has.
            # But we still need output on 'collect' state
            if hasattr(report, "when") and report.when not in secname and report.when != 'collect':
                continue
            if not data:
                continue
            if 'stdout' in secname:
                dump_test_stdout(self.teamcity, test_id, test_id, data)
            elif 'stderr' in secname:
                dump_test_stderr(self.teamcity, test_id, test_id, data)
    def report_test_finished(self, test_id, duration=None):
        """Emit testFinished and forget the "started" mark for *test_id*."""
        self.teamcity.testFinished(test_id, testDuration=duration, flowId=test_id)
        self.test_start_reported_mark.remove(test_id)
    def report_test_failure(self, test_id, report, message=None, report_output=True):
        """Report *report* as a failed test, attaching a comparison diff when one
        can be recovered from the assertion message (or from the locally stored
        exception), so IDEs can open a proper diff viewer.
        """
        if hasattr(report, 'duration'):
            duration = timedelta(seconds=report.duration)
        else:
            duration = None
        if message is None:
            message = self.format_location(report.location)
        self.ensure_test_start_reported(test_id)
        if report_output:
            self.report_test_output(report, test_id)
        diff_error = None
        try:
            err_message = str(report.longrepr.reprcrash.message)
            diff_name = diff_tools.EqualsAssertionError.__name__
            # There is a string like "foo.bar.DiffError: [serialized_data]"
            if diff_name in err_message:
                serialized_data = err_message[err_message.index(diff_name) + len(diff_name) + 1:]
                diff_error = diff_tools.deserialize_error(serialized_data)
            # AssertionError is patched in py.test, we can try to fetch diff from it
            # In general case message starts with "AssertionError: ", but can also starts with "assert" for top-level
            # function. To support both cases we unify them
            if err_message.startswith("assert"):
                err_message = "AssertionError: " + err_message
            if err_message.startswith("AssertionError:"):
                diff_error = fetch_diff_error_from_message(err_message, self.swap_diff)
        except Exception:
            # Diff extraction is best-effort; never fail the report itself.
            pass
        if not diff_error:
            from .jb_local_exc_store import get_exception
            diff_error = get_exception()
        if diff_error:
            # Cut everything after postfix: it is internal view of DiffError
            strace = str(report.longrepr)
            data_postfix = "_ _ _ _ _"
            if data_postfix in strace:
                strace = strace[0:strace.index(data_postfix)]
            self.teamcity.testFailed(test_id, diff_error.msg if diff_error.msg else message, strace,
                                     flowId=test_id,
                                     comparison_failure=diff_error
                                     )
        else:
            self.teamcity.testFailed(test_id, message, str(report.longrepr), flowId=test_id)
        self.report_test_finished(test_id, duration)
    def report_test_skip(self, test_id, report):
        """Report *report* as an ignored test, forwarding the skip reason."""
        if type(report.longrepr) is tuple and len(report.longrepr) == 3:
            # Skip longrepr is (file, line, reason); keep only the reason.
            reason = report.longrepr[2]
        else:
            reason = str(report.longrepr)
        if hasattr(report, 'duration'):
            duration = timedelta(seconds=report.duration)
        else:
            duration = None
        self.ensure_test_start_reported(test_id)
        self.report_test_output(report, test_id)
        self.teamcity.testIgnored(test_id, reason, flowId=test_id)
        self.report_test_finished(test_id, duration)
def pytest_assertrepr_compare(self, config, op, left, right):
if op in ('==', '!='):
return ['{0} {1} {2}'.format(pprint.pformat(left), op, pprint.pformat(right))]
    def pytest_runtest_logreport(self, report):
        """Translate one pytest phase report (setup/call/teardown) into TeamCity events."""
        test_id = self.format_test_id(report.nodeid, report.location)
        duration = timedelta(seconds=report.duration)
        if report.passed:
            # Do not report passed setup/teardown if no output
            if report.when == 'call':
                self.ensure_test_start_reported(test_id)
                if not self.skip_passed_output:
                    self.report_test_output(report, test_id)
                self.report_test_finished(test_id, duration)
            else:
                # Passed setup/teardown: only surface its captured output.
                if self.report_has_output(report) and not self.skip_passed_output:
                    block_name = "test " + report.when
                    self.teamcity.blockOpened(block_name, flowId=test_id)
                    self.report_test_output(report, test_id)
                    self.teamcity.blockClosed(block_name, flowId=test_id)
        elif report.failed:
            if report.when == 'call':
                self.report_test_failure(test_id, report)
            elif report.when == 'setup':
                if self.report_has_output(report):
                    self.teamcity.blockOpened("test setup", flowId=test_id)
                    self.report_test_output(report, test_id)
                    self.teamcity.blockClosed("test setup", flowId=test_id)
                self.report_test_failure(test_id, report, message="test setup failed", report_output=False)
            elif report.when == 'teardown':
                # Report failed teardown as a separate test as original test is already finished
                self.report_test_failure(test_id + "_teardown", report)
        elif report.skipped:
            self.report_test_skip(test_id, report)
    def pytest_collectreport(self, report):
        """Report collection-time failures/skips as synthetic "<id>_collect" tests."""
        test_id = self.format_test_id(report.nodeid, report.location) + "_collect"
        if report.failed:
            self.report_test_failure(test_id, report)
        elif report.skipped:
            self.report_test_skip(test_id, report)
    def pytest_terminal_summary(self):
        """After the run, push coverage statistics (when coverage was enabled)."""
        if self.coverage_controller is not None:
            try:
                self._report_coverage()
            except Exception:
                # Coverage reporting must never break the build output itself.
                tb = traceback.format_exc()
                self.teamcity.customMessage("Coverage statistics reporting failed", "ERROR", errorDetails=tb)
    def _report_coverage(self):
        """Aggregate coverage numbers and push them as TeamCity build statistics.

        Wraps coverage.py's reporter behind shims so that both the pre-5.0
        ``coverage.report.Reporter`` API and the >= 5.0.1
        ``get_analysis_to_report`` API work.
        """
        from coverage.misc import NotPython
        from coverage.results import Numbers
        class _Reporter(object):
            # Compatibility facade: exposes find_file_reporters() regardless
            # of the installed coverage.py version.
            def __init__(self, coverage, config):
                try:
                    from coverage.report import Reporter
                except ImportError:
                    # Support for coverage >= 5.0.1.
                    from coverage.report import get_analysis_to_report
                    class Reporter(object):
                        def __init__(self, coverage, config):
                            self.coverage = coverage
                            self.config = config
                            self._file_reporters = []
                        def find_file_reporters(self, morfs):
                            return [fr for fr, _ in get_analysis_to_report(self.coverage, morfs)]
                self._reporter = Reporter(coverage, config)
            def find_file_reporters(self, morfs):
                self.file_reporters = self._reporter.find_file_reporters(morfs)
            def __getattr__(self, name):
                # Delegate everything else to the wrapped reporter.
                return getattr(self._reporter, name)
        class _CoverageReporter(_Reporter):
            def __init__(self, coverage, config, messages):
                super(_CoverageReporter, self).__init__(coverage, config)
                # coverage < 5 exposes .data; newer versions use get_data().
                if hasattr(coverage, 'data'):
                    self.branches = coverage.data.has_arcs()
                else:
                    self.branches = coverage.get_data().has_arcs()
                self.messages = messages
            def report(self, morfs, outfile=None):
                # Older coverage versions call these "code units".
                if hasattr(self, 'find_code_units'):
                    self.find_code_units(morfs)
                else:
                    self.find_file_reporters(morfs)
                total = Numbers()
                if hasattr(self, 'code_units'):
                    units = self.code_units
                else:
                    units = self.file_reporters
                for cu in units:
                    try:
                        analysis = self.coverage._analyze(cu)
                        nums = analysis.numbers
                        total += nums
                    except KeyboardInterrupt:
                        raise
                    except Exception:
                        if self.config.ignore_errors:
                            continue
                        err = sys.exc_info()
                        typ, msg = err[:2]
                        if typ is NotPython and not cu.should_be_python():
                            continue
                        # Surface the analysis failure as a failed pseudo-test.
                        test_id = cu.name
                        details = convert_error_to_string(err)
                        self.messages.testStarted(test_id, flowId=test_id)
                        self.messages.testFailed(test_id, message="Coverage analysis failed", details=details, flowId=test_id)
                        self.messages.testFinished(test_id, flowId=test_id)
                if total.n_files > 0:
                    covered = total.n_executed
                    total_statements = total.n_statements
                    # With branch coverage enabled, count branches too.
                    if self.branches:
                        covered += total.n_executed_branches
                        total_statements += total.n_branches
                    self.messages.buildStatisticLinesCovered(covered)
                    self.messages.buildStatisticTotalLines(total_statements)
                    self.messages.buildStatisticLinesUncovered(total_statements - covered)
        reporter = _CoverageReporter(
            self.coverage_controller.cov,
            self.coverage_controller.cov.config,
            self.teamcity,
        )
        reporter.report(None)
| true | true |
f71b6ec1ac6a3e138fec3e28c7e2f2eda3b7aa07 | 2,948 | py | Python | mayan/apps/mayan_statistics/views.py | sophiawa/Mayan-EDMS | 42f20576d0c690b645a60bf53c5169cda4264231 | [
"Apache-2.0"
] | null | null | null | mayan/apps/mayan_statistics/views.py | sophiawa/Mayan-EDMS | 42f20576d0c690b645a60bf53c5169cda4264231 | [
"Apache-2.0"
] | 10 | 2021-03-19T23:48:12.000Z | 2022-03-12T00:41:49.000Z | mayan/apps/mayan_statistics/views.py | sophiawa/Mayan-EDMS | 42f20576d0c690b645a60bf53c5169cda4264231 | [
"Apache-2.0"
] | 1 | 2020-12-17T02:35:09.000Z | 2020-12-17T02:35:09.000Z | from django.contrib import messages
from django.http import Http404
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.generics import ConfirmView, SimpleView, SingleObjectListView
from .classes import Statistic, StatisticNamespace
from .permissions import permission_statistics_view
from .tasks import task_execute_statistic
class NamespaceListView(SingleObjectListView):
    """List every registered statistics namespace."""
    extra_context = {
        'hide_link': True,
        'title': _('Statistics namespaces'),
    }
    template_name = 'appearance/generic_list.html'
    view_permission = permission_statistics_view
    def get_source_queryset(self):
        # Namespaces are registered in code, not stored in the database.
        return StatisticNamespace.get_all()
class NamespaceDetailView(SingleObjectListView):
    """List the statistics belonging to a single namespace."""
    view_permission = permission_statistics_view
    def get_extra_context(self):
        return {
            'hide_link': True,
            'object': self.get_namespace(),
            'title': _('Namespace details for: %s') % self.get_namespace(),
        }
    def get_namespace(self):
        # Resolved from the URL's slug kwarg.
        return StatisticNamespace.get(slug=self.kwargs['slug'])
    def get_source_queryset(self):
        return self.get_namespace().statistics
class StatisticDetailView(SimpleView):
    """Render the chart (or a "no data" notice) for a single statistic."""
    view_permission = permission_statistics_view

    def get_extra_context(self):
        obj = self.get_object()
        return {
            'chart_data': obj.get_chart_data(),
            'namespace': obj.namespace,
            'navigation_object_list': ('namespace', 'object'),
            # True when the statistic has produced no series yet.
            'no_data': not obj.get_results_data()['series'],
            'object': obj,
            'title': _('Results for: %s') % obj,
        }

    def get_object(self):
        try:
            # Keyword form for consistency with StatisticQueueView.get_object().
            return Statistic.get(slug=self.kwargs['slug'])
        except KeyError:
            raise Http404(_('Statistic "%s" not found.') % self.kwargs['slug'])

    def get_template_names(self):
        # Each renderer (line chart, etc.) supplies its own template.
        return (self.get_object().renderer.template_name,)
class StatisticQueueView(ConfirmView):
    """Confirmation view that queues a statistic for an off-schedule update."""
    view_permission = permission_statistics_view

    def get_extra_context(self):
        obj = self.get_object()
        return {
            'namespace': obj.namespace,
            'object': obj,
            # Translators: This text is asking users if they want to queue
            # (to send to the queue) a statistic for it to be update ahead
            # of schedule
            'title': _(
                'Queue statistic "%s" to be updated?'
            ) % obj,
        }

    def get_object(self):
        try:
            return Statistic.get(slug=self.kwargs['slug'])
        except KeyError:
            raise Http404(_('Statistic "%s" not found.') % self.kwargs['slug'])

    def view_action(self):
        # Resolve once; the original called get_object() a second time just
        # to build the success message.
        statistic = self.get_object()
        task_execute_statistic.delay(slug=statistic.slug)
        messages.success(
            message=_(
                'Statistic "%s" queued successfully for update.'
            ) % statistic.label, request=self.request
        )
| 31.031579 | 84 | 0.636364 | from django.contrib import messages
from django.http import Http404
from django.utils.translation import ugettext_lazy as _
from mayan.apps.common.generics import ConfirmView, SimpleView, SingleObjectListView
from .classes import Statistic, StatisticNamespace
from .permissions import permission_statistics_view
from .tasks import task_execute_statistic
class NamespaceListView(SingleObjectListView):
    """List every registered statistics namespace."""
    extra_context = {
        'hide_link': True,
        'title': _('Statistics namespaces'),
    }
    template_name = 'appearance/generic_list.html'
    view_permission = permission_statistics_view
    def get_source_queryset(self):
        # Namespaces are registered in code, not stored in the database.
        return StatisticNamespace.get_all()
class NamespaceDetailView(SingleObjectListView):
    """List the statistics belonging to a single namespace."""
    view_permission = permission_statistics_view
    def get_extra_context(self):
        return {
            'hide_link': True,
            'object': self.get_namespace(),
            'title': _('Namespace details for: %s') % self.get_namespace(),
        }
    def get_namespace(self):
        # Resolved from the URL's slug kwarg.
        return StatisticNamespace.get(slug=self.kwargs['slug'])
    def get_source_queryset(self):
        return self.get_namespace().statistics
class StatisticDetailView(SimpleView):
    """Render the chart (or a "no data" notice) for a single statistic."""
    view_permission = permission_statistics_view
    def get_extra_context(self):
        obj = self.get_object()
        return {
            'chart_data': obj.get_chart_data(),
            'namespace': obj.namespace,
            'navigation_object_list': ('namespace', 'object'),
            # True when the statistic has produced no series yet.
            'no_data': not obj.get_results_data()['series'],
            'object': obj,
            'title': _('Results for: %s') % obj,
        }
    def get_object(self):
        try:
            # NOTE(review): positional call here vs. keyword
            # Statistic.get(slug=...) in StatisticQueueView — confirm both
            # forms are accepted and consider unifying.
            return Statistic.get(self.kwargs['slug'])
        except KeyError:
            raise Http404(_('Statistic "%s" not found.') % self.kwargs['slug'])
    def get_template_names(self):
        # Each renderer supplies its own template.
        return (self.get_object().renderer.template_name,)
class StatisticQueueView(ConfirmView):
    """Confirmation view that queues a statistic for an off-schedule update."""
    view_permission = permission_statistics_view
    def get_extra_context(self):
        obj = self.get_object()
        return {
            'namespace': obj.namespace,
            'object': obj,
            'title': _(
                'Queue statistic "%s" to be updated?'
            ) % obj,
        }
    def get_object(self):
        try:
            return Statistic.get(slug=self.kwargs['slug'])
        except KeyError:
            raise Http404(_('Statistic "%s" not found.') % self.kwargs['slug'])
    def view_action(self):
        # Queue the Celery-style task, then flash a success message.
        task_execute_statistic.delay(slug=self.get_object().slug)
        messages.success(
            message=_(
                'Statistic "%s" queued successfully for update.'
            ) % self.get_object().label, request=self.request
        )
| true | true |
f71b6ef7a59d7d39b2bcee735e05d0bb4fe7d665 | 2,447 | py | Python | display_recognized_faces.py | theTechie/face-recognition | 4236405914971fa971eb8dab7f31022f154ac10b | [
"MIT"
] | null | null | null | display_recognized_faces.py | theTechie/face-recognition | 4236405914971fa971eb8dab7f31022f154ac10b | [
"MIT"
] | null | null | null | display_recognized_faces.py | theTechie/face-recognition | 4236405914971fa971eb8dab7f31022f154ac10b | [
"MIT"
] | null | null | null | import face_recognition
from PIL import Image, ImageDraw
from pathlib import Path
import recognize_face
known_path = Path("data/sample-2/jpeg/picked/known")
known_images = list(known_path.glob('*.jpeg'))
known_face_encodings = []
known_face_names = []
known_faces = [recognize_face.image_to_known_face(str(image_path), image_path.stem) for image_path in known_images]
print('I just learned to recognize %d persons... \n' % len(known_images))
unknown_path = Path("data/sample-4/unknown")
unknown_images = list(unknown_path.glob('**/*.jpeg'))
print('I am starting to identify %d unknown persons; lets see how many i know !! \n' % len(unknown_images))
output_path = Path("data/sample-4/output")
for image_to_identify in unknown_images:
unknown_image = face_recognition.load_image_file(str(image_to_identify))
# face_locations = face_recognition.face_locations(unknown_image)
# face_encodings = face_recognition.face_encodings(unknown_image, face_locations)
detected_faces = recognize_face.recognize(known_faces, unknown_image)
# Convert the image to a PIL-format image so that we can draw on top of it with the Pillow library
# See http://pillow.readthedocs.io/ for more about PIL/Pillow
pil_image = Image.fromarray(unknown_image)
# Create a Pillow ImageDraw Draw instance to draw with
draw = ImageDraw.Draw(pil_image)
known_color = (0, 255, 0)
unknown_color = (255, 0, 0)
# Loop through each face found in the unknown image
for name, (top, right, bottom, left), distance in detected_faces:
# Draw a box around the face using the Pillow module
if name == 'Unknown':
color = unknown_color
else:
color = known_color
draw.rectangle(((left, top), (right, bottom)), outline=color)
# Draw a label with a name below the face
label = name + ' - ' + str("{0:.2f}".format(distance))
text_width, text_height = draw.textsize(label)
draw.rectangle(((left, bottom - text_height - 10), (right, bottom)), fill=color, outline=color)
draw.text((left + 6, bottom - text_height - 5), label, fill=(255, 0, 0, 255))
# Display the resulting image
# pil_image.show()
# Remove the drawing library from memory as per the Pillow docs
del draw
# You can also save a copy of the new image to disk if you want by uncommenting this line
pil_image.save(output_path/image_to_identify.name)
| 38.84127 | 115 | 0.707805 | import face_recognition
from PIL import Image, ImageDraw
from pathlib import Path
import recognize_face
known_path = Path("data/sample-2/jpeg/picked/known")
known_images = list(known_path.glob('*.jpeg'))
known_face_encodings = []
known_face_names = []
known_faces = [recognize_face.image_to_known_face(str(image_path), image_path.stem) for image_path in known_images]
print('I just learned to recognize %d persons... \n' % len(known_images))
unknown_path = Path("data/sample-4/unknown")
unknown_images = list(unknown_path.glob('**/*.jpeg'))
print('I am starting to identify %d unknown persons; lets see how many i know !! \n' % len(unknown_images))
output_path = Path("data/sample-4/output")
for image_to_identify in unknown_images:
unknown_image = face_recognition.load_image_file(str(image_to_identify))
detected_faces = recognize_face.recognize(known_faces, unknown_image)
pil_image = Image.fromarray(unknown_image)
draw = ImageDraw.Draw(pil_image)
known_color = (0, 255, 0)
unknown_color = (255, 0, 0)
for name, (top, right, bottom, left), distance in detected_faces:
if name == 'Unknown':
color = unknown_color
else:
color = known_color
draw.rectangle(((left, top), (right, bottom)), outline=color)
label = name + ' - ' + str("{0:.2f}".format(distance))
text_width, text_height = draw.textsize(label)
draw.rectangle(((left, bottom - text_height - 10), (right, bottom)), fill=color, outline=color)
draw.text((left + 6, bottom - text_height - 5), label, fill=(255, 0, 0, 255))
del draw
pil_image.save(output_path/image_to_identify.name)
| true | true |
f71b6fe68084b084c7f741b11c1012ffaf12dd0a | 3,230 | py | Python | srv/fluffi/data/fluffiweb/app/utils/ftp.py | sears-s/fluffi | 5f2f6d019041a6268199b69bf2f34487b18b84fe | [
"MIT"
] | 96 | 2019-09-19T10:28:05.000Z | 2022-02-28T11:53:06.000Z | srv/fluffi/data/fluffiweb/app/utils/ftp.py | sears-s/fluffi | 5f2f6d019041a6268199b69bf2f34487b18b84fe | [
"MIT"
] | 123 | 2019-11-19T09:47:14.000Z | 2021-10-19T03:10:51.000Z | srv/fluffi/data/fluffiweb/app/utils/ftp.py | sears-s/fluffi | 5f2f6d019041a6268199b69bf2f34487b18b84fe | [
"MIT"
] | 23 | 2019-11-11T06:04:56.000Z | 2022-02-11T15:35:26.000Z | # Copyright 2017-2020 Siemens AG
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including without
# limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# Author(s): Michael Kraus, Junes Najah, Fabian Russwurm, Thomas Riedmaier
from ftplib import FTP
class FTPConnector:
    """Small helper around :class:`ftplib.FTP`.

    Every operation opens a fresh connection to ``ftpURL``, performs an
    anonymous login, does its work and closes the connection again.
    """

    def __init__(self, ftpURL):
        # Host name only; connections are opened lazily by each operation.
        self.ftpURL = ftpURL
        self.ftpClient = FTP()

    def getListOfFilesOnFTPServer(self, path):
        """Return an iterable of (name, name) tuples for entries below *path*."""
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        self.ftpClient.cwd(path)
        ls = self.ftpClient.nlst()
        self.ftpClient.quit()
        # (value, label) pairs, e.g. for web-form choice fields.
        return zip(ls, ls)

    def getListOfArchitecturesOnFTPServer(self, path, group):
        """Like getListOfFilesOnFTPServer, but prefixes each entry with "<group>-"."""
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        self.ftpClient.cwd(path)
        ls = [group + "-" + entry for entry in self.ftpClient.nlst()]
        self.ftpClient.quit()
        return zip(ls, ls)

    def saveTargetFileOnFTPServer(self, targetFileData, name):
        """Upload *targetFileData* (bytes) as /SUT/<name without extension>.zip.

        ftplib's storbinary needs a readable file object; the payload is
        spooled to a temporary file first (behaviour kept from the original
        workaround — an io.BytesIO would avoid the disk round-trip).
        """
        path = 'tmp.zip'
        with open(path, 'wb') as target:
            target.write(targetFileData)
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        try:
            with open(path, 'rb') as f:
                self.ftpClient.storbinary(
                    "STOR /SUT/" + name.split('.', 1)[0] + ".zip", f)
        finally:
            self.ftpClient.quit()

    def saveArchivedProjectOnFTPServer(self, fileName):
        """Upload *fileName* into /archive.

        Returns True on success, False when the local file cannot be opened.
        (The original checked ``if myFile:`` after open(), but open() raises
        instead of returning a falsy value, so that fallback was unreachable
        and the file/connection were never closed on error.)
        """
        try:
            myFile = open(fileName, 'rb')
        except OSError:
            print("Error: File not found")
            return False
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        try:
            self.ftpClient.storbinary("STOR /archive/" + fileName, myFile)
        finally:
            self.ftpClient.quit()
            myFile.close()
        return True
| 35.108696 | 107 | 0.653251 |
from ftplib import FTP
class FTPConnector:
    """Small helper around :class:`ftplib.FTP`; one connection per operation."""
    def __init__(self, ftpURL):
        # Host name only; connections are opened lazily by each operation.
        self.ftpURL = ftpURL
        self.ftpClient = FTP()
    def getListOfFilesOnFTPServer(self, path):
        """Return (name, name) tuples for the entries below *path*."""
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        self.ftpClient.cwd(path)
        ls = []
        ls = self.ftpClient.nlst()
        tupelsOfLS = zip(ls, ls)
        self.ftpClient.quit()
        return tupelsOfLS
    def getListOfArchitecturesOnFTPServer(self, path, group):
        """Return (name, name) tuples with every entry prefixed "<group>-"."""
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        self.ftpClient.cwd(path)
        ls = []
        ls = self.ftpClient.nlst()
        for i, w in enumerate(ls):
            ls[i] = group + "-" + w
        tupelsOfLS = zip(ls, ls)
        self.ftpClient.quit()
        return tupelsOfLS
    def saveTargetFileOnFTPServer(self, targetFileData, name):
        """Spool *targetFileData* to tmp.zip, then upload it as /SUT/<stem>.zip."""
        path = 'tmp.zip'
        target = open(path, 'wb')
        target.write(targetFileData)
        target.close()
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        # NOTE(review): f is never closed; consider a `with` block.
        f = open('tmp.zip', 'rb')
        self.ftpClient.storbinary("STOR /SUT/" + name.split('.', 1)[0] + ".zip", f)
        self.ftpClient.quit()
    def saveArchivedProjectOnFTPServer(self, fileName):
        """Upload *fileName* into /archive; returns True on success."""
        self.ftpClient.connect(self.ftpURL)
        self.ftpClient.login()
        myFile = open(fileName, 'rb')
        # NOTE(review): open() raises on a missing file, so the fallback
        # below (`return False`) is unreachable as written.
        if myFile:
            self.ftpClient.storbinary("STOR /archive/" + fileName, myFile)
            self.ftpClient.quit()
            myFile.close()
            return True
        print("Error: File not found")
        self.ftpClient.quit()
        myFile.close()
        return False
| true | true |
f71b701cb0a9f7edf9be18a1b9115d0dbedac0c4 | 17,383 | py | Python | examples/language_model/bert/run_glue.py | weiwei1115/PaddleNLP | dd98f7f8b25b41d39228ba8a958b11a6212709a3 | [
"Apache-2.0"
] | 1 | 2021-02-24T14:03:55.000Z | 2021-02-24T14:03:55.000Z | examples/language_model/bert/run_glue.py | weiwei1115/PaddleNLP | dd98f7f8b25b41d39228ba8a958b11a6212709a3 | [
"Apache-2.0"
] | null | null | null | examples/language_model/bert/run_glue.py | weiwei1115/PaddleNLP | dd98f7f8b25b41d39228ba8a958b11a6212709a3 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import logging
import os
import sys
import random
import time
import math
import distutils.util
from functools import partial
import numpy as np
import paddle
from paddle.io import DataLoader
from paddle.metric import Metric, Accuracy, Precision, Recall
from paddlenlp.datasets import GlueCoLA, GlueSST2, GlueMRPC, GlueSTSB, GlueQQP, GlueMNLI, GlueQNLI, GlueRTE
from paddlenlp.data import Stack, Tuple, Pad
from paddlenlp.transformers import BertForSequenceClassification, BertTokenizer
from paddlenlp.transformers import ElectraForSequenceClassification, ElectraTokenizer
from paddlenlp.transformers import ErnieForSequenceClassification, ErnieTokenizer
from paddlenlp.transformers import LinearDecayWithWarmup
from paddlenlp.metrics import AccuracyAndF1, Mcc, PearsonAndSpearman
# Script-wide logging configuration.
FORMAT = '%(asctime)s-%(levelname)s: %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
logger = logging.getLogger(__name__)
# Maps GLUE task name -> (dataset class, metric class).
TASK_CLASSES = {
    "cola": (GlueCoLA, Mcc),
    "sst-2": (GlueSST2, Accuracy),
    "mrpc": (GlueMRPC, AccuracyAndF1),
    "sts-b": (GlueSTSB, PearsonAndSpearman),
    "qqp": (GlueQQP, AccuracyAndF1),
    "mnli": (GlueMNLI, Accuracy),
    "qnli": (GlueQNLI, Accuracy),
    "rte": (GlueRTE, Accuracy),
}
# Maps model type -> (sequence-classification model class, tokenizer class).
MODEL_CLASSES = {
    "bert": (BertForSequenceClassification, BertTokenizer),
    "ernie": (ErnieForSequenceClassification, ErnieTokenizer)
}
def parse_args():
    """Parse the command-line arguments for GLUE fine-tuning."""
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        "--task_name",
        default=None,
        type=str,
        required=True,
        help="The name of the task to train selected in the list: " +
        ", ".join(TASK_CLASSES.keys()), )
    parser.add_argument(
        "--model_type",
        default=None,
        type=str,
        required=True,
        help="Model type selected in the list: " +
        ", ".join(MODEL_CLASSES.keys()), )
    parser.add_argument(
        "--model_name_or_path",
        default=None,
        type=str,
        required=True,
        help="Path to pre-trained model or shortcut name selected in the list: "
        + ", ".join(
            sum([
                list(classes[-1].pretrained_init_configuration.keys())
                for classes in MODEL_CLASSES.values()
            ], [])), )
    parser.add_argument(
        "--output_dir",
        default=None,
        type=str,
        required=True,
        help="The output directory where the model predictions and checkpoints will be written.",
    )
    parser.add_argument(
        "--max_seq_length",
        default=128,
        type=int,
        help="The maximum total input sequence length after tokenization. Sequences longer "
        "than this will be truncated, sequences shorter will be padded.", )
    parser.add_argument(
        "--learning_rate",
        default=1e-4,
        type=float,
        help="The initial learning rate for Adam.")
    parser.add_argument(
        "--num_train_epochs",
        default=3,
        type=int,
        help="Total number of training epochs to perform.", )
    parser.add_argument(
        "--logging_steps",
        type=int,
        default=100,
        help="Log every X updates steps.")
    parser.add_argument(
        "--save_steps",
        type=int,
        default=100,
        help="Save checkpoint every X updates steps.")
    parser.add_argument(
        "--batch_size",
        default=32,
        type=int,
        help="Batch size per GPU/CPU for training.", )
    parser.add_argument(
        "--weight_decay",
        default=0.0,
        type=float,
        help="Weight decay if we apply some.")
    parser.add_argument(
        "--warmup_steps",
        default=0,
        type=int,
        help="Linear warmup over warmup_steps. If > 0: Override warmup_proportion"
    )
    parser.add_argument(
        "--warmup_proportion",
        default=0.,
        type=float,
        help="Linear warmup proportion over total steps.")
    parser.add_argument(
        "--adam_epsilon",
        default=1e-6,
        type=float,
        help="Epsilon for Adam optimizer.")
    parser.add_argument(
        "--max_steps",
        default=-1,
        type=int,
        help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
    )
    parser.add_argument(
        "--seed", default=42, type=int, help="random seed for initialization")
    parser.add_argument(
        "--n_cards",
        default=1,
        type=int,
        help="Number cards for the training, only support multi cards in the gpu."
    )
    parser.add_argument(
        "--select_device",
        type=str,
        default="gpu",
        help="Device for selecting for the training.")
    parser.add_argument(
        "--use_amp",
        type=distutils.util.strtobool,
        default=False,
        help="Enable mixed precision training.")
    parser.add_argument(
        "--scale_loss",
        type=float,
        default=2**15,
        help="The value of scale_loss for fp16.")
    args = parser.parse_args()
    return args
def set_seed(args):
    """Seed the Python, NumPy and Paddle RNGs from ``args.seed``.

    The same data seed is used on every process so that data shuffling is
    consistent across shards. (Per-process op seeds for dropout could be
    obtained via ``paddle.seed(args.seed + paddle.distributed.get_rank())``.)
    """
    seed = args.seed
    for seeder in (random.seed, np.random.seed, paddle.seed):
        seeder(seed)
def evaluate(model, loss_fct, metric, data_loader):
    """Run evaluation over *data_loader* and log loss plus metric results.

    Puts the model in eval mode for the loop and back into train mode at the
    end. NOTE: the logged loss is from the last batch only, not an average.
    """
    model.eval()
    metric.reset()
    for batch in data_loader:
        input_ids, segment_ids, labels = batch
        logits = model(input_ids, segment_ids)
        loss = loss_fct(logits, labels)
        correct = metric.compute(logits, labels)
        metric.update(correct)
    res = metric.accumulate()
    # Each metric class accumulates a different result shape; log accordingly.
    if isinstance(metric, AccuracyAndF1):
        logger.info(
            "eval loss: %f, acc: %s, precision: %s, recall: %s, f1: %s, acc and f1: %s."
            % (loss.numpy(), res[0], res[1], res[2], res[3], res[4]))
    elif isinstance(metric, Mcc):
        logger.info("eval loss: %f, mcc: %s." % (loss.numpy(), res[0]))
    elif isinstance(metric, PearsonAndSpearman):
        logger.info(
            "eval loss: %f, pearson: %s, spearman: %s, pearson and spearman: %s."
            % (loss.numpy(), res[0], res[1], res[2]))
    else:
        logger.info("eval loss: %f, acc: %s." % (loss.numpy(), res))
    model.train()
def convert_example(example,
                    tokenizer,
                    label_list,
                    max_seq_length=512,
                    is_test=False):
    """Convert a GLUE example into model features.

    Returns ``(input_ids, segment_ids, valid_length, label)`` for training
    data, or the first three items when ``is_test`` is True. ``label_list``
    of None marks a regression task (float label).
    """
    def _truncate_seqs(seqs, max_seq_length):
        # Truncate in place so the special tokens fit into max_seq_length.
        if len(seqs) == 1:  # single sentence
            # Account for [CLS] and [SEP] with "- 2"
            seqs[0] = seqs[0][0:(max_seq_length - 2)]
        else:  # Sentence pair
            # Account for [CLS], [SEP], [SEP] with "- 3"
            tokens_a, tokens_b = seqs
            max_seq_length -= 3
            while True:  # Truncate with longest_first strategy
                total_length = len(tokens_a) + len(tokens_b)
                if total_length <= max_seq_length:
                    break
                if len(tokens_a) > len(tokens_b):
                    tokens_a.pop()
                else:
                    tokens_b.pop()
        return seqs
    def _concat_seqs(seqs, separators, seq_mask=0, separator_mask=1):
        # Interleave each sequence with its separator and build per-token
        # segment ids (0 for the first sequence, 1 for the second, ...).
        concat = sum((seq + sep for sep, seq in zip(separators, seqs)), [])
        segment_ids = sum(
            ([i] * (len(seq) + len(sep))
             for i, (sep, seq) in enumerate(zip(separators, seqs))), [])
        if isinstance(seq_mask, int):
            seq_mask = [[seq_mask] * len(seq) for seq in seqs]
        if isinstance(separator_mask, int):
            separator_mask = [[separator_mask] * len(sep) for sep in separators]
        p_mask = sum((s_mask + mask
                      for sep, seq, s_mask, mask in zip(
                          separators, seqs, seq_mask, separator_mask)), [])
        return concat, segment_ids, p_mask
    if not is_test:
        # `label_list == None` is for regression task
        label_dtype = "int64" if label_list else "float32"
        # Get the label
        label = example[-1]
        example = example[:-1]
        # Create label maps if classification task
        if label_list:
            label_map = {}
            for (i, l) in enumerate(label_list):
                label_map[l] = i
            label = label_map[label]
        label = np.array([label], dtype=label_dtype)
    # Tokenize raw text
    tokens_raw = [tokenizer(l) for l in example]
    # Truncate to the truncate_length,
    tokens_trun = _truncate_seqs(tokens_raw, max_seq_length)
    # Concate the sequences with special tokens
    tokens_trun[0] = [tokenizer.cls_token] + tokens_trun[0]
    tokens, segment_ids, _ = _concat_seqs(tokens_trun, [[tokenizer.sep_token]] *
                                          len(tokens_trun))
    # Convert the token to ids
    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    valid_length = len(input_ids)
    # The mask has 1 for real tokens and 0 for padding tokens. Only real
    # tokens are attended to.
    # input_mask = [1] * len(input_ids)
    if not is_test:
        return input_ids, segment_ids, valid_length, label
    else:
        return input_ids, segment_ids, valid_length
def do_train(args):
    """Fine-tune a pretrained sequence-classification model on a GLUE task.

    Builds the train/dev data pipelines, trains for ``args.num_train_epochs``
    epochs (or ``args.max_steps`` steps), logs progress every
    ``args.logging_steps`` steps, and evaluates + saves a checkpoint every
    ``args.save_steps`` steps.

    Args:
        args: argparse.Namespace produced by parse_args().
    """
    paddle.set_device(args.select_device)
    if paddle.distributed.get_world_size() > 1:
        paddle.distributed.init_parallel_env()

    set_seed(args)
    args.task_name = args.task_name.lower()
    dataset_class, metric_class = TASK_CLASSES[args.task_name]
    args.model_type = args.model_type.lower()
    model_class, tokenizer_class = MODEL_CLASSES[args.model_type]

    train_dataset = dataset_class.get_datasets(["train"])
    tokenizer = tokenizer_class.from_pretrained(args.model_name_or_path)

    trans_func = partial(
        convert_example,
        tokenizer=tokenizer,
        label_list=train_dataset.get_labels(),
        max_seq_length=args.max_seq_length)
    train_dataset = train_dataset.apply(trans_func, lazy=True)
    train_batch_sampler = paddle.io.DistributedBatchSampler(
        train_dataset, batch_size=args.batch_size, shuffle=True)
    # Collate: pad ids/segments to batch max length, stack length and label;
    # then drop field 2 (the valid length), which the model does not consume.
    batchify_fn = lambda samples, fn=Tuple(
        Pad(axis=0, pad_val=tokenizer.pad_token_id),  # input ids
        Pad(axis=0, pad_val=tokenizer.pad_token_id),  # segment ids
        Stack(),  # valid length (dropped below)
        Stack(dtype="int64" if train_dataset.get_labels() else "float32")  # label
    ): [data for i, data in enumerate(fn(samples)) if i != 2]
    train_data_loader = DataLoader(
        dataset=train_dataset,
        batch_sampler=train_batch_sampler,
        collate_fn=batchify_fn,
        num_workers=0,
        return_list=True)
    # MNLI evaluates on two dev splits (matched / mismatched).
    if args.task_name == "mnli":
        dev_dataset_matched, dev_dataset_mismatched = dataset_class.get_datasets(
            ["dev_matched", "dev_mismatched"])
        dev_dataset_matched = dev_dataset_matched.apply(trans_func, lazy=True)
        dev_dataset_mismatched = dev_dataset_mismatched.apply(
            trans_func, lazy=True)
        dev_batch_sampler_matched = paddle.io.BatchSampler(
            dev_dataset_matched, batch_size=args.batch_size, shuffle=False)
        dev_data_loader_matched = DataLoader(
            dataset=dev_dataset_matched,
            batch_sampler=dev_batch_sampler_matched,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)
        dev_batch_sampler_mismatched = paddle.io.BatchSampler(
            dev_dataset_mismatched, batch_size=args.batch_size, shuffle=False)
        dev_data_loader_mismatched = DataLoader(
            dataset=dev_dataset_mismatched,
            batch_sampler=dev_batch_sampler_mismatched,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)
    else:
        dev_dataset = dataset_class.get_datasets(["dev"])
        dev_dataset = dev_dataset.apply(trans_func, lazy=True)
        dev_batch_sampler = paddle.io.BatchSampler(
            dev_dataset, batch_size=args.batch_size, shuffle=False)
        dev_data_loader = DataLoader(
            dataset=dev_dataset,
            batch_sampler=dev_batch_sampler,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)

    # Regression tasks (no label list) use a single output and MSE loss.
    num_classes = 1 if train_dataset.get_labels() is None else len(
        train_dataset.get_labels())
    model = model_class.from_pretrained(
        args.model_name_or_path, num_classes=num_classes)
    if paddle.distributed.get_world_size() > 1:
        model = paddle.DataParallel(model)

    num_training_steps = args.max_steps if args.max_steps > 0 else (
        len(train_data_loader) * args.num_train_epochs)
    # warmup_steps (absolute) takes precedence over warmup_proportion.
    warmup = args.warmup_steps if args.warmup_steps > 0 else args.warmup_proportion
    lr_scheduler = LinearDecayWithWarmup(args.learning_rate, num_training_steps,
                                         warmup)
    # Exclude bias and LayerNorm parameters from weight decay.
    optimizer = paddle.optimizer.AdamW(
        learning_rate=lr_scheduler,
        beta1=0.9,
        beta2=0.999,
        epsilon=args.adam_epsilon,
        parameters=model.parameters(),
        weight_decay=args.weight_decay,
        apply_decay_param_fun=lambda x: x in [
            p.name for n, p in model.named_parameters()
            if not any(nd in n for nd in ["bias", "norm"])
        ])
    loss_fct = paddle.nn.loss.CrossEntropyLoss() if train_dataset.get_labels(
    ) else paddle.nn.loss.MSELoss()
    metric = metric_class()
    if args.use_amp:
        scaler = paddle.amp.GradScaler(init_loss_scaling=args.scale_loss)

    global_step = 0
    tic_train = time.time()
    for epoch in range(args.num_train_epochs):
        for step, batch in enumerate(train_data_loader):
            global_step += 1
            input_ids, segment_ids, labels = batch
            with paddle.amp.auto_cast(
                    args.use_amp,
                    custom_white_list=["layer_norm", "softmax", "gelu"]):
                logits = model(input_ids, segment_ids)
                loss = loss_fct(logits, labels)
            if args.use_amp:
                # Scale the loss to avoid fp16 gradient underflow.
                scaler.scale(loss).backward()
                scaler.minimize(optimizer, loss)
            else:
                loss.backward()
                optimizer.step()
            lr_scheduler.step()
            optimizer.clear_gradients()
            if global_step % args.logging_steps == 0:
                logger.info(
                    "global step %d/%d, epoch: %d, batch: %d, rank_id: %s, loss: %f, lr: %.10f, speed: %.4f step/s"
                    % (global_step, num_training_steps, epoch, step,
                       paddle.distributed.get_rank(), loss, optimizer.get_lr(),
                       args.logging_steps / (time.time() - tic_train)))
                tic_train = time.time()
            if global_step % args.save_steps == 0:
                tic_eval = time.time()
                if args.task_name == "mnli":
                    evaluate(model, loss_fct, metric, dev_data_loader_matched)
                    evaluate(model, loss_fct, metric,
                             dev_data_loader_mismatched)
                    logger.info("eval done total : %s s" %
                                (time.time() - tic_eval))
                else:
                    evaluate(model, loss_fct, metric, dev_data_loader)
                    logger.info("eval done total : %s s" %
                                (time.time() - tic_eval))
                # Only rank 0 saves checkpoints in multi-card training.
                if (not args.n_cards > 1) or paddle.distributed.get_rank() == 0:
                    output_dir = os.path.join(args.output_dir,
                                              "%s_ft_model_%d.pdparams" %
                                              (args.task_name, global_step))
                    if not os.path.exists(output_dir):
                        os.makedirs(output_dir)
                    # Unwrap DataParallel before saving the inner model.
                    model_to_save = model._layers if isinstance(
                        model, paddle.DataParallel) else model
                    model_to_save.save_pretrained(output_dir)
                    tokenizer.save_pretrained(output_dir)
def print_arguments(args):
    """Pretty-print every parsed command-line argument, sorted by name."""
    print('----------- Configuration Arguments -----------')
    arg_dict = vars(args)
    for name in sorted(arg_dict):
        print('%s: %s' % (name, arg_dict[name]))
    print('------------------------------------------------')
if __name__ == "__main__":
    args = parse_args()
    print_arguments(args)
    # Multi-GPU: spawn one training process per card; otherwise run
    # single-process training on the selected device.
    if args.n_cards > 1 and args.select_device == "gpu":
        paddle.distributed.spawn(do_train, args=(args, ), nprocs=args.n_cards)
    else:
        do_train(args)
| 38.037199 | 115 | 0.611805 |
import argparse
import logging
import os
import sys
import random
import time
import math
import distutils.util
from functools import partial
import numpy as np
import paddle
from paddle.io import DataLoader
from paddle.metric import Metric, Accuracy, Precision, Recall
from paddlenlp.datasets import GlueCoLA, GlueSST2, GlueMRPC, GlueSTSB, GlueQQP, GlueMNLI, GlueQNLI, GlueRTE
from paddlenlp.data import Stack, Tuple, Pad
from paddlenlp.transformers import BertForSequenceClassification, BertTokenizer
from paddlenlp.transformers import ElectraForSequenceClassification, ElectraTokenizer
from paddlenlp.transformers import ErnieForSequenceClassification, ErnieTokenizer
from paddlenlp.transformers import LinearDecayWithWarmup
from paddlenlp.metrics import AccuracyAndF1, Mcc, PearsonAndSpearman
FORMAT = '%(asctime)s-%(levelname)s: %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
logger = logging.getLogger(__name__)
# Maps each GLUE task name to its (dataset class, evaluation metric class).
TASK_CLASSES = {
    "cola": (GlueCoLA, Mcc),
    "sst-2": (GlueSST2, Accuracy),
    "mrpc": (GlueMRPC, AccuracyAndF1),
    "sts-b": (GlueSTSB, PearsonAndSpearman),
    "qqp": (GlueQQP, AccuracyAndF1),
    "mnli": (GlueMNLI, Accuracy),
    "qnli": (GlueQNLI, Accuracy),
    "rte": (GlueRTE, Accuracy),
}
# Maps model type string to (sequence-classification model class, tokenizer class).
MODEL_CLASSES = {
    "bert": (BertForSequenceClassification, BertTokenizer),
    "ernie": (ErnieForSequenceClassification, ErnieTokenizer)
}
def parse_args():
    """Build the argparse parser for GLUE fine-tuning and parse sys.argv.

    Returns:
        argparse.Namespace with all training hyper-parameters and paths.
    """
    parser = argparse.ArgumentParser()
    # Required task/model/output selection.
    parser.add_argument(
        "--task_name",
        default=None,
        type=str,
        required=True,
        help="The name of the task to train selected in the list: " +
        ", ".join(TASK_CLASSES.keys()), )
    parser.add_argument(
        "--model_type",
        default=None,
        type=str,
        required=True,
        help="Model type selected in the list: " +
        ", ".join(MODEL_CLASSES.keys()), )
    parser.add_argument(
        "--model_name_or_path",
        default=None,
        type=str,
        required=True,
        help="Path to pre-trained model or shortcut name selected in the list: "
        + ", ".join(
            sum([
                list(classes[-1].pretrained_init_configuration.keys())
                for classes in MODEL_CLASSES.values()
            ], [])), )
    parser.add_argument(
        "--output_dir",
        default=None,
        type=str,
        required=True,
        help="The output directory where the model predictions and checkpoints will be written.",
    )
    # Optimization hyper-parameters.
    parser.add_argument(
        "--max_seq_length",
        default=128,
        type=int,
        help="The maximum total input sequence length after tokenization. Sequences longer "
        "than this will be truncated, sequences shorter will be padded.", )
    parser.add_argument(
        "--learning_rate",
        default=1e-4,
        type=float,
        help="The initial learning rate for Adam.")
    parser.add_argument(
        "--num_train_epochs",
        default=3,
        type=int,
        help="Total number of training epochs to perform.", )
    parser.add_argument(
        "--logging_steps",
        type=int,
        default=100,
        help="Log every X updates steps.")
    parser.add_argument(
        "--save_steps",
        type=int,
        default=100,
        help="Save checkpoint every X updates steps.")
    parser.add_argument(
        "--batch_size",
        default=32,
        type=int,
        help="Batch size per GPU/CPU for training.", )
    parser.add_argument(
        "--weight_decay",
        default=0.0,
        type=float,
        help="Weight decay if we apply some.")
    parser.add_argument(
        "--warmup_steps",
        default=0,
        type=int,
        help="Linear warmup over warmup_steps. If > 0: Override warmup_proportion"
    )
    parser.add_argument(
        "--warmup_proportion",
        default=0.,
        type=float,
        help="Linear warmup proportion over total steps.")
    parser.add_argument(
        "--adam_epsilon",
        default=1e-6,
        type=float,
        help="Epsilon for Adam optimizer.")
    parser.add_argument(
        "--max_steps",
        default=-1,
        type=int,
        help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
    )
    parser.add_argument(
        "--seed", default=42, type=int, help="random seed for initialization")
    # Device / distributed / mixed-precision settings.
    parser.add_argument(
        "--n_cards",
        default=1,
        type=int,
        help="Number cards for the training, only support multi cards in the gpu."
    )
    parser.add_argument(
        "--select_device",
        type=str,
        default="gpu",
        help="Device for selecting for the training.")
    parser.add_argument(
        "--use_amp",
        type=distutils.util.strtobool,
        default=False,
        help="Enable mixed precision training.")
    parser.add_argument(
        "--scale_loss",
        type=float,
        default=2**15,
        help="The value of scale_loss for fp16.")
    args = parser.parse_args()
    return args
def set_seed(args):
    """Seed the Python, NumPy and Paddle RNGs for reproducible runs."""
    for seed_fn in (random.seed, np.random.seed, paddle.seed):
        seed_fn(args.seed)
def evaluate(model, loss_fct, metric, data_loader):
    """Evaluate `model` on `data_loader`, log loss and metric values.

    Puts the model back in train mode before returning. NOTE(review): the
    logged loss is the loss of the *last* batch only, not an average over
    the whole dataset.
    """
    model.eval()
    metric.reset()
    for batch in data_loader:
        input_ids, segment_ids, labels = batch
        logits = model(input_ids, segment_ids)
        loss = loss_fct(logits, labels)
        correct = metric.compute(logits, labels)
        metric.update(correct)
    res = metric.accumulate()
    # Each metric class accumulates a different tuple of values; choose the
    # log format that matches the metric type.
    if isinstance(metric, AccuracyAndF1):
        logger.info(
            "eval loss: %f, acc: %s, precision: %s, recall: %s, f1: %s, acc and f1: %s."
            % (loss.numpy(), res[0], res[1], res[2], res[3], res[4]))
    elif isinstance(metric, Mcc):
        logger.info("eval loss: %f, mcc: %s." % (loss.numpy(), res[0]))
    elif isinstance(metric, PearsonAndSpearman):
        logger.info(
            "eval loss: %f, pearson: %s, spearman: %s, pearson and spearman: %s."
            % (loss.numpy(), res[0], res[1], res[2]))
    else:
        logger.info("eval loss: %f, acc: %s." % (loss.numpy(), res))
    model.train()
def convert_example(example,
                    tokenizer,
                    label_list,
                    max_seq_length=512,
                    is_test=False):
    """Convert a raw GLUE example to (input_ids, segment_ids, valid_length[, label]).

    `example` holds one or two text fields, with the label appended as the
    last element when `is_test` is False. `label_list` is None for
    regression tasks (float labels).
    """

    def _truncate_seqs(seqs, max_seq_length):
        # Truncate the sequence(s) in place so that, once special tokens are
        # added, the total length fits within max_seq_length.
        if len(seqs) == 1:
            # Single sentence: account for [CLS] and [SEP] with "- 2".
            seqs[0] = seqs[0][0:(max_seq_length - 2)]
        else:
            # Sentence pair: account for [CLS], [SEP], [SEP] with "- 3".
            tokens_a, tokens_b = seqs
            max_seq_length -= 3
            while True:
                # "Longest first" strategy: pop from the longer sequence.
                total_length = len(tokens_a) + len(tokens_b)
                if total_length <= max_seq_length:
                    break
                if len(tokens_a) > len(tokens_b):
                    tokens_a.pop()
                else:
                    tokens_b.pop()
        return seqs

    def _concat_seqs(seqs, separators, seq_mask=0, separator_mask=1):
        # Concatenate sequences with separator tokens and build per-token
        # segment ids plus a p_mask (1 on separator positions by default).
        concat = sum((seq + sep for sep, seq in zip(separators, seqs)), [])
        segment_ids = sum(
            ([i] * (len(seq) + len(sep))
             for i, (sep, seq) in enumerate(zip(separators, seqs))), [])
        if isinstance(seq_mask, int):
            seq_mask = [[seq_mask] * len(seq) for seq in seqs]
        if isinstance(separator_mask, int):
            separator_mask = [[separator_mask] * len(sep) for sep in separators]
        p_mask = sum((s_mask + mask
                      for sep, seq, s_mask, mask in zip(
                          separators, seqs, seq_mask, separator_mask)), [])
        return concat, segment_ids, p_mask

    if not is_test:
        # `label_list` is None/empty for regression tasks -> float labels.
        label_dtype = "int64" if label_list else "float32"
        # The label is the last element; the remaining elements are text.
        label = example[-1]
        example = example[:-1]
        if label_list:
            # Classification: map label strings to integer class ids.
            label_map = {}
            for (i, l) in enumerate(label_list):
                label_map[l] = i
            label = label_map[label]
        label = np.array([label], dtype=label_dtype)
    # Tokenize each text field, truncate, prepend [CLS], and join the
    # fields with [SEP] tokens.
    tokens_raw = [tokenizer(l) for l in example]
    tokens_trun = _truncate_seqs(tokens_raw, max_seq_length)
    tokens_trun[0] = [tokenizer.cls_token] + tokens_trun[0]
    tokens, segment_ids, _ = _concat_seqs(tokens_trun, [[tokenizer.sep_token]] *
                                          len(tokens_trun))
    input_ids = tokenizer.convert_tokens_to_ids(tokens)
    valid_length = len(input_ids)
    if not is_test:
        return input_ids, segment_ids, valid_length, label
    else:
        return input_ids, segment_ids, valid_length
def do_train(args):
    """Fine-tune a pretrained sequence-classification model on a GLUE task.

    Builds the train/dev data pipelines, trains for ``args.num_train_epochs``
    epochs (or ``args.max_steps`` steps), logs progress every
    ``args.logging_steps`` steps, and evaluates + saves a checkpoint every
    ``args.save_steps`` steps.

    Args:
        args: argparse.Namespace produced by parse_args().
    """
    paddle.set_device(args.select_device)
    if paddle.distributed.get_world_size() > 1:
        paddle.distributed.init_parallel_env()

    set_seed(args)
    args.task_name = args.task_name.lower()
    dataset_class, metric_class = TASK_CLASSES[args.task_name]
    args.model_type = args.model_type.lower()
    model_class, tokenizer_class = MODEL_CLASSES[args.model_type]

    train_dataset = dataset_class.get_datasets(["train"])
    tokenizer = tokenizer_class.from_pretrained(args.model_name_or_path)

    trans_func = partial(
        convert_example,
        tokenizer=tokenizer,
        label_list=train_dataset.get_labels(),
        max_seq_length=args.max_seq_length)
    train_dataset = train_dataset.apply(trans_func, lazy=True)
    train_batch_sampler = paddle.io.DistributedBatchSampler(
        train_dataset, batch_size=args.batch_size, shuffle=True)
    # Collate: pad ids/segments to batch max length, stack length and label;
    # then drop field 2 (the valid length), which the model does not consume.
    batchify_fn = lambda samples, fn=Tuple(
        Pad(axis=0, pad_val=tokenizer.pad_token_id),  # input ids
        Pad(axis=0, pad_val=tokenizer.pad_token_id),  # segment ids
        Stack(),  # valid length (dropped below)
        Stack(dtype="int64" if train_dataset.get_labels() else "float32")  # label
    ): [data for i, data in enumerate(fn(samples)) if i != 2]
    train_data_loader = DataLoader(
        dataset=train_dataset,
        batch_sampler=train_batch_sampler,
        collate_fn=batchify_fn,
        num_workers=0,
        return_list=True)
    # MNLI evaluates on two dev splits (matched / mismatched).
    if args.task_name == "mnli":
        dev_dataset_matched, dev_dataset_mismatched = dataset_class.get_datasets(
            ["dev_matched", "dev_mismatched"])
        dev_dataset_matched = dev_dataset_matched.apply(trans_func, lazy=True)
        dev_dataset_mismatched = dev_dataset_mismatched.apply(
            trans_func, lazy=True)
        dev_batch_sampler_matched = paddle.io.BatchSampler(
            dev_dataset_matched, batch_size=args.batch_size, shuffle=False)
        dev_data_loader_matched = DataLoader(
            dataset=dev_dataset_matched,
            batch_sampler=dev_batch_sampler_matched,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)
        dev_batch_sampler_mismatched = paddle.io.BatchSampler(
            dev_dataset_mismatched, batch_size=args.batch_size, shuffle=False)
        dev_data_loader_mismatched = DataLoader(
            dataset=dev_dataset_mismatched,
            batch_sampler=dev_batch_sampler_mismatched,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)
    else:
        dev_dataset = dataset_class.get_datasets(["dev"])
        dev_dataset = dev_dataset.apply(trans_func, lazy=True)
        dev_batch_sampler = paddle.io.BatchSampler(
            dev_dataset, batch_size=args.batch_size, shuffle=False)
        dev_data_loader = DataLoader(
            dataset=dev_dataset,
            batch_sampler=dev_batch_sampler,
            collate_fn=batchify_fn,
            num_workers=0,
            return_list=True)

    # Regression tasks (no label list) use a single output and MSE loss.
    num_classes = 1 if train_dataset.get_labels() is None else len(
        train_dataset.get_labels())
    model = model_class.from_pretrained(
        args.model_name_or_path, num_classes=num_classes)
    if paddle.distributed.get_world_size() > 1:
        model = paddle.DataParallel(model)

    num_training_steps = args.max_steps if args.max_steps > 0 else (
        len(train_data_loader) * args.num_train_epochs)
    # warmup_steps (absolute) takes precedence over warmup_proportion.
    warmup = args.warmup_steps if args.warmup_steps > 0 else args.warmup_proportion
    lr_scheduler = LinearDecayWithWarmup(args.learning_rate, num_training_steps,
                                         warmup)
    # Exclude bias and LayerNorm parameters from weight decay.
    optimizer = paddle.optimizer.AdamW(
        learning_rate=lr_scheduler,
        beta1=0.9,
        beta2=0.999,
        epsilon=args.adam_epsilon,
        parameters=model.parameters(),
        weight_decay=args.weight_decay,
        apply_decay_param_fun=lambda x: x in [
            p.name for n, p in model.named_parameters()
            if not any(nd in n for nd in ["bias", "norm"])
        ])
    loss_fct = paddle.nn.loss.CrossEntropyLoss() if train_dataset.get_labels(
    ) else paddle.nn.loss.MSELoss()
    metric = metric_class()
    if args.use_amp:
        scaler = paddle.amp.GradScaler(init_loss_scaling=args.scale_loss)

    global_step = 0
    tic_train = time.time()
    for epoch in range(args.num_train_epochs):
        for step, batch in enumerate(train_data_loader):
            global_step += 1
            input_ids, segment_ids, labels = batch
            with paddle.amp.auto_cast(
                    args.use_amp,
                    custom_white_list=["layer_norm", "softmax", "gelu"]):
                logits = model(input_ids, segment_ids)
                loss = loss_fct(logits, labels)
            if args.use_amp:
                # Scale the loss to avoid fp16 gradient underflow.
                scaler.scale(loss).backward()
                scaler.minimize(optimizer, loss)
            else:
                loss.backward()
                optimizer.step()
            lr_scheduler.step()
            optimizer.clear_gradients()
            if global_step % args.logging_steps == 0:
                logger.info(
                    "global step %d/%d, epoch: %d, batch: %d, rank_id: %s, loss: %f, lr: %.10f, speed: %.4f step/s"
                    % (global_step, num_training_steps, epoch, step,
                       paddle.distributed.get_rank(), loss, optimizer.get_lr(),
                       args.logging_steps / (time.time() - tic_train)))
                tic_train = time.time()
            if global_step % args.save_steps == 0:
                tic_eval = time.time()
                if args.task_name == "mnli":
                    evaluate(model, loss_fct, metric, dev_data_loader_matched)
                    evaluate(model, loss_fct, metric,
                             dev_data_loader_mismatched)
                    logger.info("eval done total : %s s" %
                                (time.time() - tic_eval))
                else:
                    evaluate(model, loss_fct, metric, dev_data_loader)
                    logger.info("eval done total : %s s" %
                                (time.time() - tic_eval))
                # Only rank 0 saves checkpoints in multi-card training.
                if (not args.n_cards > 1) or paddle.distributed.get_rank() == 0:
                    output_dir = os.path.join(args.output_dir,
                                              "%s_ft_model_%d.pdparams" %
                                              (args.task_name, global_step))
                    if not os.path.exists(output_dir):
                        os.makedirs(output_dir)
                    # Unwrap DataParallel before saving the inner model.
                    model_to_save = model._layers if isinstance(
                        model, paddle.DataParallel) else model
                    model_to_save.save_pretrained(output_dir)
                    tokenizer.save_pretrained(output_dir)
def print_arguments(args):
    """Pretty-print every parsed command-line argument, sorted by name."""
    print('----------- Configuration Arguments -----------')
    arg_dict = vars(args)
    for name in sorted(arg_dict):
        print('%s: %s' % (name, arg_dict[name]))
    print('------------------------------------------------')
if __name__ == "__main__":
    args = parse_args()
    print_arguments(args)
    # Multi-GPU: spawn one training process per card; otherwise run
    # single-process training on the selected device.
    if args.n_cards > 1 and args.select_device == "gpu":
        paddle.distributed.spawn(do_train, args=(args, ), nprocs=args.n_cards)
    else:
        do_train(args)
| true | true |
f71b70742d77f2a612297f4412d6829e00b6cebd | 21,406 | py | Python | pypureclient/flasharray/FA_2_13/api/file_systems_api.py | ashahid-ps/py-pure-client | 2e3565d37b2a41db69308769f6f485d08a7c46c3 | [
"BSD-2-Clause"
] | null | null | null | pypureclient/flasharray/FA_2_13/api/file_systems_api.py | ashahid-ps/py-pure-client | 2e3565d37b2a41db69308769f6f485d08a7c46c3 | [
"BSD-2-Clause"
] | null | null | null | pypureclient/flasharray/FA_2_13/api/file_systems_api.py | ashahid-ps/py-pure-client | 2e3565d37b2a41db69308769f6f485d08a7c46c3 | [
"BSD-2-Clause"
] | null | null | null | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.13
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re
# python 2 and python 3 compatibility library
import six
from typing import List, Optional
from .. import models
class FileSystemsApi(object):
    def __init__(self, api_client):
        # The shared ApiClient instance that performs the underlying
        # HTTP requests for this API group.
        self.api_client = api_client
    def api213_file_systems_delete_with_http_info(
        self,
        authorization=None,  # type: str
        x_request_id=None,  # type: str
        ids=None,  # type: List[str]
        names=None,  # type: List[str]
        async_req=False,  # type: bool
        _return_http_data_only=False,  # type: bool
        _preload_content=True,  # type: bool
        _request_timeout=None,  # type: Optional[int]
    ):
        # type: (...) -> None
        """Delete file system

        Deletes a file system that has been destroyed and is pending eradication. Eradicated file systems cannot be recovered. File systems are destroyed using the PATCH method.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.api213_file_systems_delete_with_http_info(async_req=True)
        >>> result = thread.get()

        :param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
        :param str x_request_id: Supplied by client during request or generated by server.
        :param list[str] ids: Performs the operation on the unique resource IDs specified. Enter multiple resource IDs in comma-separated format. The `ids` and `names` parameters cannot be provided together.
        :param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
        :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
        :param bool _return_http_data_only: Returns only data field.
        :param bool _preload_content: Response is converted into objects.
        :param int _request_timeout: Total request timeout in seconds.
                                     It can also be a tuple of (connection time, read time) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers may pass a single scalar for `ids`/`names`; normalize to lists.
        if ids is not None:
            if not isinstance(ids, list):
                ids = [ids]
        if names is not None:
            if not isinstance(names, list):
                names = [names]
        # Collect every non-None local variable as a request parameter.
        params = {k: v for k, v in six.iteritems(locals()) if v is not None}
        # Convert the filter into a string.
        # NOTE: generated boilerplate — this endpoint declares no `filter`
        # or `sort` parameter, so both branches below are effectively no-ops.
        if params.get('filter'):
            params['filter'] = str(params['filter'])
        if params.get('sort'):
            params['sort'] = [str(_x) for _x in params['sort']]
        collection_formats = {}
        path_params = {}
        query_params = []
        # `ids`/`names` are serialized as comma-separated query values.
        if 'ids' in params:
            query_params.append(('ids', params['ids']))
            collection_formats['ids'] = 'csv'
        if 'names' in params:
            query_params.append(('names', params['names']))
            collection_formats['names'] = 'csv'
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']
        if 'x_request_id' in params:
            header_params['X-Request-ID'] = params['x_request_id']
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api(
            '/api/2.13/file-systems', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,
            auth_settings=auth_settings,
            async_req=async_req,
            _return_http_data_only=_return_http_data_only,
            _preload_content=_preload_content,
            _request_timeout=_request_timeout,
            collection_formats=collection_formats,
        )
    def api213_file_systems_get_with_http_info(
        self,
        authorization=None,  # type: str
        x_request_id=None,  # type: str
        continuation_token=None,  # type: str
        destroyed=None,  # type: bool
        filter=None,  # type: str
        ids=None,  # type: List[str]
        limit=None,  # type: int
        names=None,  # type: List[str]
        offset=None,  # type: int
        sort=None,  # type: List[str]
        total_item_count=None,  # type: bool
        async_req=False,  # type: bool
        _return_http_data_only=False,  # type: bool
        _preload_content=True,  # type: bool
        _request_timeout=None,  # type: Optional[int]
    ):
        # type: (...) -> models.FileSystemGetResponse
        """List file systems

        Displays a list of file systems, including those pending eradication.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.api213_file_systems_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
        :param str x_request_id: Supplied by client during request or generated by server.
        :param str continuation_token: A token used to retrieve the next page of data with some consistency guaranteed. The token is a Base64 encoded value. Set `continuation_token` to the system-generated token taken from the `x-next-token` header field of the response. A query has reached its last page when the response does not include a token. Pagination requires the `limit` and `continuation_token` query parameters.
        :param bool destroyed: If set to `true`, lists only destroyed objects that are in the eradication pending state. If set to `false`, lists only objects that are not destroyed. For destroyed objects, the time remaining is displayed in milliseconds.
        :param str filter: Narrows down the results to only the response objects that satisfy the filter criteria.
        :param list[str] ids: Performs the operation on the unique resource IDs specified. Enter multiple resource IDs in comma-separated format. The `ids` and `names` parameters cannot be provided together.
        :param int limit: Limits the size of the response to the specified number of objects on each page. To return the total number of resources, set `limit=0`. The total number of resources is returned as a `total_item_count` value. If the page size requested is larger than the system maximum limit, the server returns the maximum limit, disregarding the requested page size.
        :param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
        :param int offset: The starting position based on the results of the query in relation to the full set of response objects returned.
        :param list[str] sort: Returns the response objects in the order specified. Set `sort` to the name in the response by which to sort. Sorting can be performed on any of the names in the response, and the objects can be sorted in ascending or descending order. By default, the response objects are sorted in ascending order. To sort in descending order, append the minus sign (`-`) to the name. A single request can be sorted on multiple objects. For example, you can sort all volumes from largest to smallest volume size, and then sort volumes of the same size in ascending order by volume name. To sort on multiple names, list the names as comma-separated values.
        :param bool total_item_count: If set to `true`, the `total_item_count` matching the specified query parameters is calculated and returned in the response. If set to `false`, the `total_item_count` is `null` in the response. This may speed up queries where the `total_item_count` is large. If not specified, defaults to `false`.
        :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
        :param bool _return_http_data_only: Returns only data field.
        :param bool _preload_content: Response is converted into objects.
        :param int _request_timeout: Total request timeout in seconds.
                                     It can also be a tuple of (connection time, read time) timeouts.
        :return: FileSystemGetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers may pass a single scalar for list parameters; normalize.
        if ids is not None:
            if not isinstance(ids, list):
                ids = [ids]
        if names is not None:
            if not isinstance(names, list):
                names = [names]
        if sort is not None:
            if not isinstance(sort, list):
                sort = [sort]
        # Collect every non-None local variable as a request parameter.
        params = {k: v for k, v in six.iteritems(locals()) if v is not None}
        # Convert the filter into a string
        if params.get('filter'):
            params['filter'] = str(params['filter'])
        if params.get('sort'):
            params['sort'] = [str(_x) for _x in params['sort']]
        # Validate pagination bounds before issuing the request.
        if 'limit' in params and params['limit'] < 1:
            raise ValueError("Invalid value for parameter `limit` when calling `api213_file_systems_get`, must be a value greater than or equal to `1`")
        if 'offset' in params and params['offset'] < 0:
            raise ValueError("Invalid value for parameter `offset` when calling `api213_file_systems_get`, must be a value greater than or equal to `0`")
        collection_formats = {}
        path_params = {}
        query_params = []
        if 'continuation_token' in params:
            query_params.append(('continuation_token', params['continuation_token']))
        if 'destroyed' in params:
            query_params.append(('destroyed', params['destroyed']))
        if 'filter' in params:
            query_params.append(('filter', params['filter']))
        if 'ids' in params:
            query_params.append(('ids', params['ids']))
            collection_formats['ids'] = 'csv'
        if 'limit' in params:
            query_params.append(('limit', params['limit']))
        if 'names' in params:
            query_params.append(('names', params['names']))
            collection_formats['names'] = 'csv'
        if 'offset' in params:
            query_params.append(('offset', params['offset']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
            collection_formats['sort'] = 'csv'
        if 'total_item_count' in params:
            query_params.append(('total_item_count', params['total_item_count']))
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']
        if 'x_request_id' in params:
            header_params['X-Request-ID'] = params['x_request_id']
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api(
            '/api/2.13/file-systems', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FileSystemGetResponse',
            auth_settings=auth_settings,
            async_req=async_req,
            _return_http_data_only=_return_http_data_only,
            _preload_content=_preload_content,
            _request_timeout=_request_timeout,
            collection_formats=collection_formats,
        )
    def api213_file_systems_patch_with_http_info(
        self,
        file_system=None,  # type: models.FileSystemPatch
        authorization=None,  # type: str
        x_request_id=None,  # type: str
        ids=None,  # type: List[str]
        names=None,  # type: List[str]
        async_req=False,  # type: bool
        _return_http_data_only=False,  # type: bool
        _preload_content=True,  # type: bool
        _request_timeout=None,  # type: Optional[int]
    ):
        # type: (...) -> models.FileSystemResponse
        """Modify a file system

        Modifies a file system. You can rename, destroy, move, or recover a file system. To rename a file system, set `name` to the new name. To destroy a file system, set `destroyed=true`. To move a file system, set 'pod' to the destination pod reference. To recover a file system that has been destroyed and is pending eradication, set `destroyed=false`.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.api213_file_systems_patch_with_http_info(file_system, async_req=True)
        >>> result = thread.get()

        :param FileSystemPatch file_system: (required)
        :param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
        :param str x_request_id: Supplied by client during request or generated by server.
        :param list[str] ids: Performs the operation on the unique resource IDs specified. Enter multiple resource IDs in comma-separated format. The `ids` and `names` parameters cannot be provided together.
        :param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`.
        :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
        :param bool _return_http_data_only: Returns only data field.
        :param bool _preload_content: Response is converted into objects.
        :param int _request_timeout: Total request timeout in seconds.
                                     It can also be a tuple of (connection time, read time) timeouts.
        :return: FileSystemResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers may pass a single scalar for `ids`/`names`; normalize to lists.
        if ids is not None:
            if not isinstance(ids, list):
                ids = [ids]
        if names is not None:
            if not isinstance(names, list):
                names = [names]
        # Collect every non-None local variable as a request parameter.
        params = {k: v for k, v in six.iteritems(locals()) if v is not None}
        # Convert the filter into a string.
        # NOTE: generated boilerplate — this endpoint declares no `filter`
        # or `sort` parameter, so both branches below are effectively no-ops.
        if params.get('filter'):
            params['filter'] = str(params['filter'])
        if params.get('sort'):
            params['sort'] = [str(_x) for _x in params['sort']]
        # verify the required parameter 'file_system' is set
        if file_system is None:
            raise TypeError("Missing the required parameter `file_system` when calling `api213_file_systems_patch`")
        collection_formats = {}
        path_params = {}
        query_params = []
        if 'ids' in params:
            query_params.append(('ids', params['ids']))
            collection_formats['ids'] = 'csv'
        if 'names' in params:
            query_params.append(('names', params['names']))
            collection_formats['names'] = 'csv'
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']
        if 'x_request_id' in params:
            header_params['X-Request-ID'] = params['x_request_id']
        form_params = []
        local_var_files = {}
        body_params = None
        # The FileSystemPatch object is serialized as the request body.
        if 'file_system' in params:
            body_params = params['file_system']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(
            ['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api(
            '/api/2.13/file-systems', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FileSystemResponse',
            auth_settings=auth_settings,
            async_req=async_req,
            _return_http_data_only=_return_http_data_only,
            _preload_content=_preload_content,
            _request_timeout=_request_timeout,
            collection_formats=collection_formats,
        )
def api213_file_systems_post_with_http_info(
self,
names=None, # type: List[str]
authorization=None, # type: str
x_request_id=None, # type: str
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.FileSystemResponse
"""Create file system
Creates one or more file systems.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api213_file_systems_post_with_http_info(names, async_req=True)
>>> result = thread.get()
:param list[str] names: Performs the operation on the unique name specified. For example, `name01`. Enter multiple names in comma-separated format. (required)
:param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`)
:param str x_request_id: Supplied by client during request or generated by server.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: FileSystemResponse
If the method is called asynchronously,
returns the request thread.
"""
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
# verify the required parameter 'names' is set
if names is None:
raise TypeError("Missing the required parameter `names` when calling `api213_file_systems_post`")
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(
'/api/2.13/file-systems', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileSystemResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
| 47.568889 | 671 | 0.640989 |
from __future__ import absolute_import
import re
import six
from typing import List, Optional
from .. import models
class FileSystemsApi(object):
def __init__(self, api_client):
self.api_client = api_client
def api213_file_systems_delete_with_http_info(
self,
authorization=None,
x_request_id=None,
ids=None,
names=None,
async_req=False,
_return_http_data_only=False,
_preload_content=True,
_request_timeout=None,
):
if ids is not None:
if not isinstance(ids, list):
ids = [ids]
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
collection_formats = {}
path_params = {}
query_params = []
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = []
return self.api_client.call_api(
'/api/2.13/file-systems', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api213_file_systems_get_with_http_info(
self,
authorization=None,
x_request_id=None,
continuation_token=None,
destroyed=None,
filter=None,
ids=None,
limit=None,
names=None,
offset=None,
sort=None,
total_item_count=None,
async_req=False,
_return_http_data_only=False,
_preload_content=True,
_request_timeout=None,
):
if ids is not None:
if not isinstance(ids, list):
ids = [ids]
if names is not None:
if not isinstance(names, list):
names = [names]
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'limit' in params and params['limit'] < 1:
raise ValueError("Invalid value for parameter `limit` when calling `api213_file_systems_get`, must be a value greater than or equal to `1`")
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api213_file_systems_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'destroyed' in params:
query_params.append(('destroyed', params['destroyed']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
if 'total_item_count' in params:
query_params.append(('total_item_count', params['total_item_count']))
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = []
return self.api_client.call_api(
'/api/2.13/file-systems', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileSystemGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api213_file_systems_patch_with_http_info(
self,
file_system=None,
authorization=None,
x_request_id=None,
ids=None,
names=None,
async_req=False,
_return_http_data_only=False,
_preload_content=True,
_request_timeout=None,
):
if ids is not None:
if not isinstance(ids, list):
ids = [ids]
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if file_system is None:
raise TypeError("Missing the required parameter `file_system` when calling `api213_file_systems_patch`")
collection_formats = {}
path_params = {}
query_params = []
if 'ids' in params:
query_params.append(('ids', params['ids']))
collection_formats['ids'] = 'csv'
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
if 'file_system' in params:
body_params = params['file_system']
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = []
return self.api_client.call_api(
'/api/2.13/file-systems', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileSystemResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api213_file_systems_post_with_http_info(
self,
names=None,
authorization=None,
x_request_id=None,
async_req=False,
_return_http_data_only=False,
_preload_content=True,
_request_timeout=None,
):
if names is not None:
if not isinstance(names, list):
names = [names]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if names is None:
raise TypeError("Missing the required parameter `names` when calling `api213_file_systems_post`")
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
header_params = {}
if 'authorization' in params:
header_params['Authorization'] = params['authorization']
if 'x_request_id' in params:
header_params['X-Request-ID'] = params['x_request_id']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = []
return self.api_client.call_api(
'/api/2.13/file-systems', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileSystemResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
| true | true |
f71b714d70924c77c72ecc7b8ec3e29d445e7a15 | 1,829 | py | Python | tests/test_servicer.py | yangtt0509/sea | f62bcdff00ef71e8c5b92bd5fc5f63d41b753ce2 | [
"MIT"
] | null | null | null | tests/test_servicer.py | yangtt0509/sea | f62bcdff00ef71e8c5b92bd5fc5f63d41b753ce2 | [
"MIT"
] | null | null | null | tests/test_servicer.py | yangtt0509/sea | f62bcdff00ef71e8c5b92bd5fc5f63d41b753ce2 | [
"MIT"
] | null | null | null | import grpc
from sea.servicer import ServicerMeta, msg2dict, stream2dict
from sea import exceptions
from sea.pb2 import default_pb2
from tests.wd.protos import helloworld_pb2
def test_meta_servicer(app, logstream):
class HelloContext():
def __init__(self):
self.code = None
self.details = None
def set_code(self, code):
self.code = code
def set_details(self, details):
self.details = details
class HelloServicer(metaclass=ServicerMeta):
def return_error(self, request, context):
raise exceptions.BadRequestException('error')
def return_normal(self, request, context):
return 'Got it!'
logstream.truncate(0)
logstream.seek(0)
servicer = HelloServicer()
context = HelloContext()
ret = servicer.return_error(None, context)
assert isinstance(ret, default_pb2.Empty)
assert context.code is grpc.StatusCode.INVALID_ARGUMENT
assert context.details == 'error'
p = logstream.tell()
assert p > 0
content = logstream.getvalue()
assert 'HelloServicer.return_error' in content
ret = servicer.return_normal(None, context)
assert ret == 'Got it!'
assert logstream.tell() > p
def test_msg2dict(app):
app.name = 'v-name'
app.msg = 'v-msg'
ret = msg2dict(app, ['name', 'msg', 'tz'])
assert ret == {'name': 'v-name', 'msg': 'v-msg', 'tz': 'Asia/Shanghai'}
request = helloworld_pb2.HelloRequest(name="value")
ret = msg2dict(request)
assert ret == {"name": "value"}
def test_stream2dict():
def stream_generator():
for i in range(5):
yield helloworld_pb2.HelloRequest(name=str(i))
ret = stream2dict(stream_generator())
for i, part in enumerate(ret):
assert part == {"name": str(i)}
| 25.760563 | 75 | 0.645708 | import grpc
from sea.servicer import ServicerMeta, msg2dict, stream2dict
from sea import exceptions
from sea.pb2 import default_pb2
from tests.wd.protos import helloworld_pb2
def test_meta_servicer(app, logstream):
class HelloContext():
def __init__(self):
self.code = None
self.details = None
def set_code(self, code):
self.code = code
def set_details(self, details):
self.details = details
class HelloServicer(metaclass=ServicerMeta):
def return_error(self, request, context):
raise exceptions.BadRequestException('error')
def return_normal(self, request, context):
return 'Got it!'
logstream.truncate(0)
logstream.seek(0)
servicer = HelloServicer()
context = HelloContext()
ret = servicer.return_error(None, context)
assert isinstance(ret, default_pb2.Empty)
assert context.code is grpc.StatusCode.INVALID_ARGUMENT
assert context.details == 'error'
p = logstream.tell()
assert p > 0
content = logstream.getvalue()
assert 'HelloServicer.return_error' in content
ret = servicer.return_normal(None, context)
assert ret == 'Got it!'
assert logstream.tell() > p
def test_msg2dict(app):
app.name = 'v-name'
app.msg = 'v-msg'
ret = msg2dict(app, ['name', 'msg', 'tz'])
assert ret == {'name': 'v-name', 'msg': 'v-msg', 'tz': 'Asia/Shanghai'}
request = helloworld_pb2.HelloRequest(name="value")
ret = msg2dict(request)
assert ret == {"name": "value"}
def test_stream2dict():
def stream_generator():
for i in range(5):
yield helloworld_pb2.HelloRequest(name=str(i))
ret = stream2dict(stream_generator())
for i, part in enumerate(ret):
assert part == {"name": str(i)}
| true | true |
f71b721716046fe128e3f99bbf0b9f20f56d1f2c | 22,880 | py | Python | venv/Lib/site-packages/sklearn/linear_model/_base.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 7 | 2021-01-30T17:42:00.000Z | 2022-01-09T08:08:48.000Z | venv/Lib/site-packages/sklearn/linear_model/_base.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 25 | 2020-11-16T15:36:41.000Z | 2021-06-01T05:15:31.000Z | venv/Lib/site-packages/sklearn/linear_model/_base.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 2 | 2021-09-13T17:20:56.000Z | 2021-11-21T16:05:16.000Z | """
Generalized Linear Models.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Vincent Michel <vincent.michel@inria.fr>
# Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck
# Maryan Morel <maryan.morel@polytechnique.edu>
# Giorgio Patrini <giorgio.patrini@anu.edu.au>
# License: BSD 3 clause
from abc import ABCMeta, abstractmethod
import numbers
import warnings
import numpy as np
import scipy.sparse as sp
from scipy import linalg
from scipy import optimize
from scipy import sparse
from scipy.special import expit
from joblib import Parallel
from ..base import (BaseEstimator, ClassifierMixin, RegressorMixin,
MultiOutputMixin)
from ..utils import check_array
from ..utils.validation import FLOAT_DTYPES
from ..utils.validation import _deprecate_positional_args
from ..utils import check_random_state
from ..utils.extmath import safe_sparse_dot
from ..utils.sparsefuncs import mean_variance_axis, inplace_column_scale
from ..utils.fixes import sparse_lsqr
from ..utils._seq_dataset import ArrayDataset32, CSRDataset32
from ..utils._seq_dataset import ArrayDataset64, CSRDataset64
from ..utils.validation import check_is_fitted, _check_sample_weight
from ..utils.fixes import delayed
from ..preprocessing import normalize as f_normalize
# TODO: bayesian_ridge_regression and bayesian_regression_ard
# should be squashed into its respective objects.
SPARSE_INTERCEPT_DECAY = 0.01
# For sparse data intercept updates are scaled by this decay factor to avoid
# intercept oscillation.
def make_dataset(X, y, sample_weight, random_state=None):
"""Create ``Dataset`` abstraction for sparse and dense inputs.
This also returns the ``intercept_decay`` which is different
for sparse datasets.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data
y : array-like, shape (n_samples, )
Target values.
sample_weight : numpy array of shape (n_samples,)
The weight of each sample
random_state : int, RandomState instance or None (default)
Determines random number generation for dataset shuffling and noise.
Pass an int for reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
Returns
-------
dataset
The ``Dataset`` abstraction
intercept_decay
The intercept decay
"""
rng = check_random_state(random_state)
# seed should never be 0 in SequentialDataset64
seed = rng.randint(1, np.iinfo(np.int32).max)
if X.dtype == np.float32:
CSRData = CSRDataset32
ArrayData = ArrayDataset32
else:
CSRData = CSRDataset64
ArrayData = ArrayDataset64
if sp.issparse(X):
dataset = CSRData(X.data, X.indptr, X.indices, y, sample_weight,
seed=seed)
intercept_decay = SPARSE_INTERCEPT_DECAY
else:
X = np.ascontiguousarray(X)
dataset = ArrayData(X, y, sample_weight, seed=seed)
intercept_decay = 1.0
return dataset, intercept_decay
def _preprocess_data(X, y, fit_intercept, normalize=False, copy=True,
sample_weight=None, return_mean=False, check_input=True):
"""Center and scale data.
Centers data to have mean zero along axis 0. If fit_intercept=False or if
the X is a sparse matrix, no centering is done, but normalization can still
be applied. The function returns the statistics necessary to reconstruct
the input data, which are X_offset, y_offset, X_scale, such that the output
X = (X - X_offset) / X_scale
X_scale is the L2 norm of X - X_offset. If sample_weight is not None,
then the weighted mean of X and y is zero, and not the mean itself. If
return_mean=True, the mean, eventually weighted, is returned, independently
of whether X was centered (option used for optimization with sparse data in
coordinate_descend).
This is here because nearly all linear models will want their data to be
centered. This function also systematically makes y consistent with X.dtype
"""
if isinstance(sample_weight, numbers.Number):
sample_weight = None
if sample_weight is not None:
sample_weight = np.asarray(sample_weight)
if check_input:
X = check_array(X, copy=copy, accept_sparse=['csr', 'csc'],
dtype=FLOAT_DTYPES)
elif copy:
if sp.issparse(X):
X = X.copy()
else:
X = X.copy(order='K')
y = np.asarray(y, dtype=X.dtype)
if fit_intercept:
if sp.issparse(X):
X_offset, X_var = mean_variance_axis(X, axis=0)
if not return_mean:
X_offset[:] = X.dtype.type(0)
if normalize:
# TODO: f_normalize could be used here as well but the function
# inplace_csr_row_normalize_l2 must be changed such that it
# can return also the norms computed internally
# transform variance to norm in-place
X_var *= X.shape[0]
X_scale = np.sqrt(X_var, X_var)
del X_var
X_scale[X_scale == 0] = 1
inplace_column_scale(X, 1. / X_scale)
else:
X_scale = np.ones(X.shape[1], dtype=X.dtype)
else:
X_offset = np.average(X, axis=0, weights=sample_weight)
X -= X_offset
if normalize:
X, X_scale = f_normalize(X, axis=0, copy=False,
return_norm=True)
else:
X_scale = np.ones(X.shape[1], dtype=X.dtype)
y_offset = np.average(y, axis=0, weights=sample_weight)
y = y - y_offset
else:
X_offset = np.zeros(X.shape[1], dtype=X.dtype)
X_scale = np.ones(X.shape[1], dtype=X.dtype)
if y.ndim == 1:
y_offset = X.dtype.type(0)
else:
y_offset = np.zeros(y.shape[1], dtype=X.dtype)
return X, y, X_offset, y_offset, X_scale
# TODO: _rescale_data should be factored into _preprocess_data.
# Currently, the fact that sag implements its own way to deal with
# sample_weight makes the refactoring tricky.
def _rescale_data(X, y, sample_weight):
"""Rescale data sample-wise by square root of sample_weight.
For many linear models, this enables easy support for sample_weight.
Returns
-------
X_rescaled : {array-like, sparse matrix}
y_rescaled : {array-like, sparse matrix}
"""
n_samples = X.shape[0]
sample_weight = np.asarray(sample_weight)
if sample_weight.ndim == 0:
sample_weight = np.full(n_samples, sample_weight,
dtype=sample_weight.dtype)
sample_weight = np.sqrt(sample_weight)
sw_matrix = sparse.dia_matrix((sample_weight, 0),
shape=(n_samples, n_samples))
X = safe_sparse_dot(sw_matrix, X)
y = safe_sparse_dot(sw_matrix, y)
return X, y
class LinearModel(BaseEstimator, metaclass=ABCMeta):
"""Base class for Linear Models"""
@abstractmethod
def fit(self, X, y):
"""Fit model."""
def _decision_function(self, X):
check_is_fitted(self)
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
return safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
def predict(self, X):
"""
Predict using the linear model.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Samples.
Returns
-------
C : array, shape (n_samples,)
Returns predicted values.
"""
return self._decision_function(X)
_preprocess_data = staticmethod(_preprocess_data)
def _set_intercept(self, X_offset, y_offset, X_scale):
"""Set the intercept_
"""
if self.fit_intercept:
self.coef_ = self.coef_ / X_scale
self.intercept_ = y_offset - np.dot(X_offset, self.coef_.T)
else:
self.intercept_ = 0.
def _more_tags(self):
return {'requires_y': True}
# XXX Should this derive from LinearModel? It should be a mixin, not an ABC.
# Maybe the n_features checking can be moved to LinearModel.
class LinearClassifierMixin(ClassifierMixin):
"""Mixin for linear classifiers.
Handles prediction for sparse and dense X.
"""
def decision_function(self, X):
"""
Predict confidence scores for samples.
The confidence score for a sample is proportional to the signed
distance of that sample to the hyperplane.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Samples.
Returns
-------
array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes)
Confidence scores per (sample, class) combination. In the binary
case, confidence score for self.classes_[1] where >0 means this
class would be predicted.
"""
check_is_fitted(self)
X = check_array(X, accept_sparse='csr')
n_features = self.coef_.shape[1]
if X.shape[1] != n_features:
raise ValueError("X has %d features per sample; expecting %d"
% (X.shape[1], n_features))
scores = safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
return scores.ravel() if scores.shape[1] == 1 else scores
def predict(self, X):
"""
Predict class labels for samples in X.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Samples.
Returns
-------
C : array, shape [n_samples]
Predicted class label per sample.
"""
scores = self.decision_function(X)
if len(scores.shape) == 1:
indices = (scores > 0).astype(int)
else:
indices = scores.argmax(axis=1)
return self.classes_[indices]
def _predict_proba_lr(self, X):
"""Probability estimation for OvR logistic regression.
Positive class probabilities are computed as
1. / (1. + np.exp(-self.decision_function(X)));
multiclass is handled by normalizing that over all classes.
"""
prob = self.decision_function(X)
expit(prob, out=prob)
if prob.ndim == 1:
return np.vstack([1 - prob, prob]).T
else:
# OvR normalization, like LibLinear's predict_probability
prob /= prob.sum(axis=1).reshape((prob.shape[0], -1))
return prob
class SparseCoefMixin:
"""Mixin for converting coef_ to and from CSR format.
L1-regularizing estimators should inherit this.
"""
def densify(self):
"""
Convert coefficient matrix to dense array format.
Converts the ``coef_`` member (back) to a numpy.ndarray. This is the
default format of ``coef_`` and is required for fitting, so calling
this method is only required on models that have previously been
sparsified; otherwise, it is a no-op.
Returns
-------
self
Fitted estimator.
"""
msg = "Estimator, %(name)s, must be fitted before densifying."
check_is_fitted(self, msg=msg)
if sp.issparse(self.coef_):
self.coef_ = self.coef_.toarray()
return self
def sparsify(self):
"""
Convert coefficient matrix to sparse format.
Converts the ``coef_`` member to a scipy.sparse matrix, which for
L1-regularized models can be much more memory- and storage-efficient
than the usual numpy.ndarray representation.
The ``intercept_`` member is not converted.
Returns
-------
self
Fitted estimator.
Notes
-----
For non-sparse models, i.e. when there are not many zeros in ``coef_``,
this may actually *increase* memory usage, so use this method with
care. A rule of thumb is that the number of zero elements, which can
be computed with ``(coef_ == 0).sum()``, must be more than 50% for this
to provide significant benefits.
After calling this method, further fitting with the partial_fit
method (if any) will not work until you call densify.
"""
msg = "Estimator, %(name)s, must be fitted before sparsifying."
check_is_fitted(self, msg=msg)
self.coef_ = sp.csr_matrix(self.coef_)
return self
class LinearRegression(MultiOutputMixin, RegressorMixin, LinearModel):
"""
Ordinary least squares Linear Regression.
LinearRegression fits a linear model with coefficients w = (w1, ..., wp)
to minimize the residual sum of squares between the observed targets in
the dataset, and the targets predicted by the linear approximation.
Parameters
----------
fit_intercept : bool, default=True
Whether to calculate the intercept for this model. If set
to False, no intercept will be used in calculations
(i.e. data is expected to be centered).
normalize : bool, default=False
This parameter is ignored when ``fit_intercept`` is set to False.
If True, the regressors X will be normalized before regression by
subtracting the mean and dividing by the l2-norm.
If you wish to standardize, please use
:class:`~sklearn.preprocessing.StandardScaler` before calling ``fit``
on an estimator with ``normalize=False``.
copy_X : bool, default=True
If True, X will be copied; else, it may be overwritten.
n_jobs : int, default=None
The number of jobs to use for the computation. This will only provide
speedup for n_targets > 1 and sufficient large problems.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
positive : bool, default=False
When set to ``True``, forces the coefficients to be positive. This
option is only supported for dense arrays.
.. versionadded:: 0.24
Attributes
----------
coef_ : array of shape (n_features, ) or (n_targets, n_features)
Estimated coefficients for the linear regression problem.
If multiple targets are passed during the fit (y 2D), this
is a 2D array of shape (n_targets, n_features), while if only
one target is passed, this is a 1D array of length n_features.
rank_ : int
Rank of matrix `X`. Only available when `X` is dense.
singular_ : array of shape (min(X, y),)
Singular values of `X`. Only available when `X` is dense.
intercept_ : float or array of shape (n_targets,)
Independent term in the linear model. Set to 0.0 if
`fit_intercept = False`.
See Also
--------
Ridge : Ridge regression addresses some of the
problems of Ordinary Least Squares by imposing a penalty on the
size of the coefficients with l2 regularization.
Lasso : The Lasso is a linear model that estimates
sparse coefficients with l1 regularization.
ElasticNet : Elastic-Net is a linear regression
model trained with both l1 and l2 -norm regularization of the
coefficients.
Notes
-----
From the implementation point of view, this is just plain Ordinary
Least Squares (scipy.linalg.lstsq) or Non Negative Least Squares
(scipy.optimize.nnls) wrapped as a predictor object.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import LinearRegression
>>> X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
>>> # y = 1 * x_0 + 2 * x_1 + 3
>>> y = np.dot(X, np.array([1, 2])) + 3
>>> reg = LinearRegression().fit(X, y)
>>> reg.score(X, y)
1.0
>>> reg.coef_
array([1., 2.])
>>> reg.intercept_
3.0...
>>> reg.predict(np.array([[3, 5]]))
array([16.])
"""
@_deprecate_positional_args
def __init__(self, *, fit_intercept=True, normalize=False, copy_X=True,
n_jobs=None, positive=False):
self.fit_intercept = fit_intercept
self.normalize = normalize
self.copy_X = copy_X
self.n_jobs = n_jobs
self.positive = positive
def fit(self, X, y, sample_weight=None):
"""
Fit linear model.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Training data
y : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values. Will be cast to X's dtype if necessary
sample_weight : array-like of shape (n_samples,), default=None
Individual weights for each sample
.. versionadded:: 0.17
parameter *sample_weight* support to LinearRegression.
Returns
-------
self : returns an instance of self.
"""
n_jobs_ = self.n_jobs
accept_sparse = False if self.positive else ['csr', 'csc', 'coo']
X, y = self._validate_data(X, y, accept_sparse=accept_sparse,
y_numeric=True, multi_output=True)
if sample_weight is not None:
sample_weight = _check_sample_weight(sample_weight, X,
dtype=X.dtype)
X, y, X_offset, y_offset, X_scale = self._preprocess_data(
X, y, fit_intercept=self.fit_intercept, normalize=self.normalize,
copy=self.copy_X, sample_weight=sample_weight,
return_mean=True)
if sample_weight is not None:
# Sample weight can be implemented via a simple rescaling.
X, y = _rescale_data(X, y, sample_weight)
if self.positive:
if y.ndim < 2:
self.coef_, self._residues = optimize.nnls(X, y)
else:
# scipy.optimize.nnls cannot handle y with shape (M, K)
outs = Parallel(n_jobs=n_jobs_)(
delayed(optimize.nnls)(X, y[:, j])
for j in range(y.shape[1]))
self.coef_, self._residues = map(np.vstack, zip(*outs))
elif sp.issparse(X):
X_offset_scale = X_offset / X_scale
def matvec(b):
return X.dot(b) - b.dot(X_offset_scale)
def rmatvec(b):
return X.T.dot(b) - X_offset_scale * np.sum(b)
X_centered = sparse.linalg.LinearOperator(shape=X.shape,
matvec=matvec,
rmatvec=rmatvec)
if y.ndim < 2:
out = sparse_lsqr(X_centered, y)
self.coef_ = out[0]
self._residues = out[3]
else:
# sparse_lstsq cannot handle y with shape (M, K)
outs = Parallel(n_jobs=n_jobs_)(
delayed(sparse_lsqr)(X_centered, y[:, j].ravel())
for j in range(y.shape[1]))
self.coef_ = np.vstack([out[0] for out in outs])
self._residues = np.vstack([out[3] for out in outs])
else:
self.coef_, self._residues, self.rank_, self.singular_ = \
linalg.lstsq(X, y)
self.coef_ = self.coef_.T
if y.ndim == 1:
self.coef_ = np.ravel(self.coef_)
self._set_intercept(X_offset, y_offset, X_scale)
return self
def _pre_fit(X, y, Xy, precompute, normalize, fit_intercept, copy,
             check_input=True, sample_weight=None):
    """Aux function used at beginning of fit in linear models.

    Centers/rescales ``X`` and ``y`` through ``_preprocess_data`` and
    decides whether the Gram matrix ``X.T @ X`` (``precompute``) and the
    product ``X.T @ y`` (``Xy``) can be reused or must be (re)computed.

    Returns
    -------
    X, y, X_offset, y_offset, X_scale, precompute, Xy
        Preprocessed data, the centering/scaling terms, and the possibly
        recomputed ``precompute``/``Xy`` values.
    """
    n_samples, n_features = X.shape
    if sparse.isspmatrix(X):
        # copy is not needed here as X is not modified inplace when X is sparse
        precompute = False
        X, y, X_offset, y_offset, X_scale = _preprocess_data(
            X, y, fit_intercept=fit_intercept, normalize=normalize,
            copy=False, return_mean=True, check_input=check_input)
    else:
        # copy was done in fit if necessary
        X, y, X_offset, y_offset, X_scale = _preprocess_data(
            X, y, fit_intercept=fit_intercept, normalize=normalize, copy=copy,
            check_input=check_input, sample_weight=sample_weight)
    if sample_weight is not None:
        X, y = _rescale_data(X, y, sample_weight=sample_weight)
    # A user-provided Gram matrix is only valid for the raw X; if X was
    # centered or normalized above it has to be recomputed.
    if hasattr(precompute, '__array__') and (
            fit_intercept and not np.allclose(X_offset, np.zeros(n_features)) or
            normalize and not np.allclose(X_scale, np.ones(n_features))):
        warnings.warn("Gram matrix was provided but X was centered"
                      " to fit intercept, "
                      "or X was normalized : recomputing Gram matrix.",
                      UserWarning)
        # recompute Gram
        precompute = 'auto'
        Xy = None
    # precompute if n_samples > n_features
    if isinstance(precompute, str) and precompute == 'auto':
        precompute = (n_samples > n_features)
    if precompute is True:
        # make sure that the 'precompute' array is contiguous.
        precompute = np.empty(shape=(n_features, n_features), dtype=X.dtype,
                              order='C')
        np.dot(X.T, X, out=precompute)
    if not hasattr(precompute, '__array__'):
        Xy = None  # cannot use Xy if precompute is not Gram
    if hasattr(precompute, '__array__') and Xy is None:
        common_dtype = np.find_common_type([X.dtype, y.dtype], [])
        if y.ndim == 1:
            # Xy is 1d, make sure it is contiguous.
            Xy = np.empty(shape=n_features, dtype=common_dtype, order='C')
            np.dot(X.T, y, out=Xy)
        else:
            # Make sure that Xy is always F contiguous even if X or y are not
            # contiguous: the goal is to make it fast to extract the data for a
            # specific target.
            n_targets = y.shape[1]
            Xy = np.empty(shape=(n_features, n_targets), dtype=common_dtype,
                          order='F')
            np.dot(y.T, X, out=Xy.T)
    return X, y, X_offset, y_offset, X_scale, precompute, Xy
| 35.583204 | 79 | 0.612981 |
from abc import ABCMeta, abstractmethod
import numbers
import warnings
import numpy as np
import scipy.sparse as sp
from scipy import linalg
from scipy import optimize
from scipy import sparse
from scipy.special import expit
from joblib import Parallel
from ..base import (BaseEstimator, ClassifierMixin, RegressorMixin,
MultiOutputMixin)
from ..utils import check_array
from ..utils.validation import FLOAT_DTYPES
from ..utils.validation import _deprecate_positional_args
from ..utils import check_random_state
from ..utils.extmath import safe_sparse_dot
from ..utils.sparsefuncs import mean_variance_axis, inplace_column_scale
from ..utils.fixes import sparse_lsqr
from ..utils._seq_dataset import ArrayDataset32, CSRDataset32
from ..utils._seq_dataset import ArrayDataset64, CSRDataset64
from ..utils.validation import check_is_fitted, _check_sample_weight
from ..utils.fixes import delayed
from ..preprocessing import normalize as f_normalize
SPARSE_INTERCEPT_DECAY = 0.01
def make_dataset(X, y, sample_weight, random_state=None):
    """Build a sequential dataset wrapper for (X, y, sample_weight).

    Returns the dataset object plus the intercept decay to use with it
    (smaller for sparse input).
    """
    random_gen = check_random_state(random_state)
    dataset_seed = random_gen.randint(1, np.iinfo(np.int32).max)

    # Pick the 32- or 64-bit dataset implementations matching X's precision.
    is_float32 = (X.dtype == np.float32)
    csr_cls = CSRDataset32 if is_float32 else CSRDataset64
    dense_cls = ArrayDataset32 if is_float32 else ArrayDataset64

    if sp.issparse(X):
        data = csr_cls(X.data, X.indptr, X.indices, y, sample_weight,
                       seed=dataset_seed)
        decay = SPARSE_INTERCEPT_DECAY
    else:
        X = np.ascontiguousarray(X)
        data = dense_cls(X, y, sample_weight, seed=dataset_seed)
        decay = 1.0

    return data, decay
def _preprocess_data(X, y, fit_intercept, normalize=False, copy=True,
                     sample_weight=None, return_mean=False, check_input=True):
    """Center and optionally rescale the data before fitting.

    When ``fit_intercept`` is True, X and y are centered on their (weighted)
    mean and, if ``normalize`` is also True, the columns of X are rescaled
    by ``X_scale``.  Sparse X is never centered in place; its mean is only
    reported when ``return_mean`` is True so callers can account for it.

    Returns
    -------
    X, y, X_offset, y_offset, X_scale
    """
    # A scalar "weight" carries no per-sample information; ignore it.
    if isinstance(sample_weight, numbers.Number):
        sample_weight = None
    if sample_weight is not None:
        sample_weight = np.asarray(sample_weight)
    if check_input:
        X = check_array(X, copy=copy, accept_sparse=['csr', 'csc'],
                        dtype=FLOAT_DTYPES)
    elif copy:
        if sp.issparse(X):
            X = X.copy()
        else:
            X = X.copy(order='K')
    y = np.asarray(y, dtype=X.dtype)
    if fit_intercept:
        if sp.issparse(X):
            X_offset, X_var = mean_variance_axis(X, axis=0)
            if not return_mean:
                # Sparse X is left uncentered; report a zero offset instead.
                X_offset[:] = X.dtype.type(0)
            if normalize:
                # Column scales derived from the variances (X stays sparse).
                X_var *= X.shape[0]
                X_scale = np.sqrt(X_var, X_var)
                del X_var
                X_scale[X_scale == 0] = 1
                inplace_column_scale(X, 1. / X_scale)
            else:
                X_scale = np.ones(X.shape[1], dtype=X.dtype)
        else:
            X_offset = np.average(X, axis=0, weights=sample_weight)
            X -= X_offset
            if normalize:
                X, X_scale = f_normalize(X, axis=0, copy=False,
                                         return_norm=True)
            else:
                X_scale = np.ones(X.shape[1], dtype=X.dtype)
        y_offset = np.average(y, axis=0, weights=sample_weight)
        y = y - y_offset
    else:
        # No centering/scaling requested: return neutral offsets and scales.
        X_offset = np.zeros(X.shape[1], dtype=X.dtype)
        X_scale = np.ones(X.shape[1], dtype=X.dtype)
        if y.ndim == 1:
            y_offset = X.dtype.type(0)
        else:
            y_offset = np.zeros(y.shape[1], dtype=X.dtype)
    return X, y, X_offset, y_offset, X_scale
def _rescale_data(X, y, sample_weight):
    """Rescale the rows of X and y by the square root of the sample weights."""
    n_samples = X.shape[0]
    weights = np.asarray(sample_weight)
    if weights.ndim == 0:
        # A scalar weight is broadcast to one weight per sample.
        weights = np.full(n_samples, weights, dtype=weights.dtype)
    root_weights = np.sqrt(weights)
    scaling = sparse.dia_matrix((root_weights, 0),
                                shape=(n_samples, n_samples))
    X = safe_sparse_dot(scaling, X)
    y = safe_sparse_dot(scaling, y)
    return X, y
class LinearModel(BaseEstimator, metaclass=ABCMeta):
    """Base class for linear models."""

    @abstractmethod
    def fit(self, X, y):
        """Fit model."""
        # Fix: the comment-stripped copy left this abstract method without a
        # body, which is a SyntaxError; the docstring restores a valid body.

    def _decision_function(self, X):
        # Validate input, then compute X @ coef_.T + intercept_.
        check_is_fitted(self)
        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        return safe_sparse_dot(X, self.coef_.T,
                               dense_output=True) + self.intercept_

    def predict(self, X):
        """Predict using the linear model."""
        return self._decision_function(X)

    _preprocess_data = staticmethod(_preprocess_data)

    def _set_intercept(self, X_offset, y_offset, X_scale):
        """Set intercept_ after fitting on centered/rescaled data."""
        if self.fit_intercept:
            self.coef_ = self.coef_ / X_scale
            self.intercept_ = y_offset - np.dot(X_offset, self.coef_.T)
        else:
            self.intercept_ = 0.

    def _more_tags(self):
        return {'requires_y': True}
class LinearClassifierMixin(ClassifierMixin):
    """Mixin for linear classifiers: prediction from decision margins."""

    def decision_function(self, X):
        """Predict confidence scores for the samples in X."""
        check_is_fitted(self)
        X = check_array(X, accept_sparse='csr')
        expected = self.coef_.shape[1]
        if X.shape[1] != expected:
            raise ValueError("X has %d features per sample; expecting %d"
                             % (X.shape[1], expected))
        raw = safe_sparse_dot(X, self.coef_.T, dense_output=True)
        raw = raw + self.intercept_
        if raw.shape[1] == 1:
            return raw.ravel()
        return raw

    def predict(self, X):
        """Predict class labels for the samples in X."""
        margins = self.decision_function(X)
        if margins.ndim == 1:
            # Binary case: a positive margin selects the second class.
            winners = (margins > 0).astype(int)
        else:
            winners = margins.argmax(axis=1)
        return self.classes_[winners]

    def _predict_proba_lr(self, X):
        """Probability estimates via the logistic sigmoid of the margins."""
        scores = self.decision_function(X)
        expit(scores, out=scores)  # in-place sigmoid
        if scores.ndim == 1:
            return np.vstack([1 - scores, scores]).T
        # One-vs-rest normalisation across classes.
        scores /= scores.sum(axis=1).reshape((scores.shape[0], -1))
        return scores
class SparseCoefMixin:
    """Mixin converting ``coef_`` between dense and sparse representations."""

    def densify(self):
        """Convert the coefficient matrix to dense array format; return self."""
        not_fitted_msg = "Estimator, %(name)s, must be fitted before densifying."
        check_is_fitted(self, msg=not_fitted_msg)
        coef = self.coef_
        if sp.issparse(coef):
            self.coef_ = coef.toarray()
        return self

    def sparsify(self):
        """Convert the coefficient matrix to sparse CSR format; return self."""
        not_fitted_msg = "Estimator, %(name)s, must be fitted before sparsifying."
        check_is_fitted(self, msg=not_fitted_msg)
        self.coef_ = sp.csr_matrix(self.coef_)
        return self
class LinearRegression(MultiOutputMixin, RegressorMixin, LinearModel):
    """
    Ordinary least squares Linear Regression.

    Fits ``coef_`` (and ``intercept_``) minimizing the residual sum of
    squares between the observed targets and the linear prediction.
    """

    @_deprecate_positional_args
    def __init__(self, *, fit_intercept=True, normalize=False, copy_X=True,
                 n_jobs=None, positive=False):
        """
        Parameters
        ----------
        fit_intercept : bool, default=True
            Whether to calculate an intercept for this model.
        normalize : bool, default=False
            Whether to normalize the regressors before fitting.
        copy_X : bool, default=True
            If True, X is copied; else, it may be overwritten.
        n_jobs : int, default=None
            Number of jobs for the multi-target computations.
        positive : bool, default=False
            When True, force the coefficients to be positive (NNLS).
        """
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_X = copy_X
        self.n_jobs = n_jobs
        self.positive = positive

    def fit(self, X, y, sample_weight=None):
        """
        Fit linear model.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Training data.
        y : array-like of shape (n_samples,) or (n_samples, n_targets)
            Target values. Will be cast to X's dtype if necessary.
        sample_weight : array-like of shape (n_samples,), default=None
            Individual weights for each sample.

        Returns
        -------
        self : returns an instance of self.
        """
        n_jobs_ = self.n_jobs
        # NNLS (positive=True) only supports dense input.
        accept_sparse = False if self.positive else ['csr', 'csc', 'coo']
        X, y = self._validate_data(X, y, accept_sparse=accept_sparse,
                                   y_numeric=True, multi_output=True)
        if sample_weight is not None:
            sample_weight = _check_sample_weight(sample_weight, X,
                                                 dtype=X.dtype)
        X, y, X_offset, y_offset, X_scale = self._preprocess_data(
            X, y, fit_intercept=self.fit_intercept, normalize=self.normalize,
            copy=self.copy_X, sample_weight=sample_weight,
            return_mean=True)
        if sample_weight is not None:
            # Sample weight can be implemented via a simple rescaling.
            X, y = _rescale_data(X, y, sample_weight)
        if self.positive:
            if y.ndim < 2:
                self.coef_, self._residues = optimize.nnls(X, y)
            else:
                # scipy.optimize.nnls cannot handle y with shape (M, K)
                outs = Parallel(n_jobs=n_jobs_)(
                    delayed(optimize.nnls)(X, y[:, j])
                    for j in range(y.shape[1]))
                self.coef_, self._residues = map(np.vstack, zip(*outs))
        elif sp.issparse(X):
            # Apply the centering implicitly through a LinearOperator so the
            # sparse matrix itself is never modified.
            X_offset_scale = X_offset / X_scale

            def matvec(b):
                return X.dot(b) - b.dot(X_offset_scale)

            def rmatvec(b):
                return X.T.dot(b) - X_offset_scale * np.sum(b)

            X_centered = sparse.linalg.LinearOperator(shape=X.shape,
                                                      matvec=matvec,
                                                      rmatvec=rmatvec)
            if y.ndim < 2:
                out = sparse_lsqr(X_centered, y)
                self.coef_ = out[0]
                self._residues = out[3]
            else:
                # sparse_lstsq cannot handle y with shape (M, K)
                outs = Parallel(n_jobs=n_jobs_)(
                    delayed(sparse_lsqr)(X_centered, y[:, j].ravel())
                    for j in range(y.shape[1]))
                self.coef_ = np.vstack([out[0] for out in outs])
                self._residues = np.vstack([out[3] for out in outs])
        else:
            self.coef_, self._residues, self.rank_, self.singular_ = \
                linalg.lstsq(X, y)
            self.coef_ = self.coef_.T
        if y.ndim == 1:
            self.coef_ = np.ravel(self.coef_)
        self._set_intercept(X_offset, y_offset, X_scale)
        return self
def _pre_fit(X, y, Xy, precompute, normalize, fit_intercept, copy,
             check_input=True, sample_weight=None):
    """Aux function used at beginning of fit in linear models.

    Centers/rescales ``X`` and ``y`` through ``_preprocess_data`` and
    decides whether the Gram matrix ``X.T @ X`` (``precompute``) and the
    product ``X.T @ y`` (``Xy``) can be reused or must be (re)computed.

    Returns
    -------
    X, y, X_offset, y_offset, X_scale, precompute, Xy
    """
    n_samples, n_features = X.shape
    if sparse.isspmatrix(X):
        # copy is not needed here as X is not modified inplace when X is sparse
        precompute = False
        X, y, X_offset, y_offset, X_scale = _preprocess_data(
            X, y, fit_intercept=fit_intercept, normalize=normalize,
            copy=False, return_mean=True, check_input=check_input)
    else:
        # copy was done in fit if necessary
        X, y, X_offset, y_offset, X_scale = _preprocess_data(
            X, y, fit_intercept=fit_intercept, normalize=normalize, copy=copy,
            check_input=check_input, sample_weight=sample_weight)
    if sample_weight is not None:
        X, y = _rescale_data(X, y, sample_weight=sample_weight)
    # A user-provided Gram matrix is only valid for the raw X; if X was
    # centered or normalized above it has to be recomputed.
    if hasattr(precompute, '__array__') and (
            fit_intercept and not np.allclose(X_offset, np.zeros(n_features)) or
            normalize and not np.allclose(X_scale, np.ones(n_features))):
        warnings.warn("Gram matrix was provided but X was centered"
                      " to fit intercept, "
                      "or X was normalized : recomputing Gram matrix.",
                      UserWarning)
        # recompute Gram
        precompute = 'auto'
        Xy = None
    # precompute if n_samples > n_features
    if isinstance(precompute, str) and precompute == 'auto':
        precompute = (n_samples > n_features)
    if precompute is True:
        # make sure that the 'precompute' array is contiguous.
        precompute = np.empty(shape=(n_features, n_features), dtype=X.dtype,
                              order='C')
        np.dot(X.T, X, out=precompute)
    if not hasattr(precompute, '__array__'):
        Xy = None  # cannot use Xy if precompute is not Gram
    if hasattr(precompute, '__array__') and Xy is None:
        common_dtype = np.find_common_type([X.dtype, y.dtype], [])
        if y.ndim == 1:
            # Xy is 1d, make sure it is contiguous.
            Xy = np.empty(shape=n_features, dtype=common_dtype, order='C')
            np.dot(X.T, y, out=Xy)
        else:
            # Make sure that Xy is always F contiguous even if X or y are not
            # contiguous: the goal is to make it fast to extract the data for a
            # specific target.
            n_targets = y.shape[1]
            Xy = np.empty(shape=(n_features, n_targets), dtype=common_dtype,
                          order='F')
            np.dot(y.T, X, out=Xy.T)
    return X, y, X_offset, y_offset, X_scale, precompute, Xy
| true | true |
f71b728f6b7ee0bb3e520d6e3e1bb4a53edb161c | 595 | py | Python | website/system.py | timlyo/timlyo.github.io | fb3e3b65822351e49e3ba4ee17ba4ed5151c969a | [
"Apache-2.0"
] | 1 | 2016-01-14T13:52:25.000Z | 2016-01-14T13:52:25.000Z | website/system.py | timlyo/personalWebsite | fb3e3b65822351e49e3ba4ee17ba4ed5151c969a | [
"Apache-2.0"
] | null | null | null | website/system.py | timlyo/personalWebsite | fb3e3b65822351e49e3ba4ee17ba4ed5151c969a | [
"Apache-2.0"
] | null | null | null | import os
import psutil
COEFFICIENT = 2 ** 20
def get_other_ram() -> float:
    """Ram used by other processes (total used minus this process), in MiB.

    Annotation fixed: true division by COEFFICIENT yields a float, not int.
    """
    return get_ram_used() - get_process_ram()
def get_total_ram() -> float:
    """Total physical RAM of the system, in MiB.

    Annotation fixed: true division by COEFFICIENT yields a float, not int.
    """
    mem = psutil.virtual_memory()
    # Field 0 of psutil's virtual_memory() tuple is `total`, in bytes.
    return mem[0] / COEFFICIENT
def get_process_ram() -> float:
    """Resident memory (RSS) of the current process, in MiB.

    Annotation fixed: true division by COEFFICIENT yields a float, not int.
    """
    process = psutil.Process(os.getpid())
    # memory_info()[0] is `rss`, in bytes.
    return process.memory_info()[0] / COEFFICIENT
def get_ram_used() -> float:
    """ram used by all processes, in MiB.

    Annotation fixed: true division by COEFFICIENT yields a float, not int.
    """
    mem = psutil.virtual_memory()
    # NOTE(review): in current psutil, virtual_memory() index 4 is `free`,
    # not `used` (index 3) — confirm which field was intended here.
    return mem[4] / COEFFICIENT
def get_cpu() -> float:
    """System-wide CPU usage percentage since the previous call.

    Annotation fixed: psutil.cpu_percent() without percpu=True returns a
    single aggregate float, so the original `-> list` was wrong; runtime
    behavior is unchanged.
    """
    percentage = psutil.cpu_percent()
    return percentage
| 18.030303 | 46 | 0.697479 | import os
import psutil
COEFFICIENT = 2 ** 20
def get_other_ram() -> float:
    """Ram used by other processes (total used minus this process), in MiB."""
    return get_ram_used() - get_process_ram()
def get_total_ram() -> float:
    """Total physical RAM of the system, in MiB."""
    mem = psutil.virtual_memory()
    # Field 0 of psutil's virtual_memory() tuple is `total`, in bytes.
    return mem[0] / COEFFICIENT
def get_process_ram() -> float:
    """Resident memory (RSS) of the current process, in MiB."""
    process = psutil.Process(os.getpid())
    # memory_info()[0] is `rss`, in bytes.
    return process.memory_info()[0] / COEFFICIENT
def get_ram_used() -> float:
    """ram used by all processes, in MiB."""
    mem = psutil.virtual_memory()
    # NOTE(review): in current psutil, virtual_memory() index 4 is `free`,
    # not `used` (index 3) — confirm which field was intended here.
    return mem[4] / COEFFICIENT
def get_cpu() -> float:
    """Aggregate CPU usage percentage (cpu_percent() returns a float)."""
    percentage = psutil.cpu_percent()
    return percentage
| true | true |
f71b72b888e77e3334994d29892f03d292b9f189 | 1,820 | py | Python | libweasyl/libweasyl/configuration.py | hyena/weasyl | a43ad885eb07ae89d6639f289a5b95f3a177439c | [
"Apache-2.0"
] | 1 | 2019-02-15T04:21:48.000Z | 2019-02-15T04:21:48.000Z | libweasyl/libweasyl/configuration.py | hyena/weasyl | a43ad885eb07ae89d6639f289a5b95f3a177439c | [
"Apache-2.0"
] | 254 | 2017-12-23T19:36:43.000Z | 2020-04-14T21:46:13.000Z | libweasyl/libweasyl/configuration.py | hyena/weasyl | a43ad885eb07ae89d6639f289a5b95f3a177439c | [
"Apache-2.0"
] | 1 | 2017-12-23T18:42:16.000Z | 2017-12-23T18:42:16.000Z | """
Configuration of libweasyl.
libweasyl depends on some global state to be set up in order for e.g. database
access to work correctly. This might be nicer if python had a way of
parameterizing modules, but we can't, so this is what we have. It does mean
that only one libweasyl configuration can exist in a running python process.
"""
from libweasyl.models.media import DiskMediaItem, MediaItem
from libweasyl.models.meta import BaseQuery, _configure_dbsession
from libweasyl.staff import _init_staff
def configure_libweasyl(
        dbsession, not_found_exception, base_file_path,
        staff_config_dict, media_link_formatter_callback):
    """
    Configure libweasyl for the current application. This sets up some
    global state around libweasyl.

    This function can be called multiple times without issues; each call will
    replace the values set by the previous call.

    Parameters:
        dbsession: A SQLAlchemy ``scoped_session`` instance configured for the
            application's database usage.
        not_found_exception: An exception to be raised on the ``*_or_404``
            methods of queries.
        base_file_path: The path to where static content lives on disk.
        staff_config_dict: A dictionary of staff levels and user IDs.
        media_link_formatter_callback: A callback to format the URL for a media
            link. The callback will be called as ``callback(media_item, link)``
            and is expected to return a URL or ``None`` to use the default.
    """
    # Install the scoped session used by the model layer.
    _configure_dbsession(dbsession)
    # Exception raised by the ``*_or_404`` query helpers.
    BaseQuery._not_found_exception = staticmethod(not_found_exception)
    # Root directory for on-disk media items.
    DiskMediaItem._base_file_path = staticmethod(base_file_path)
    # Populate the staff-level -> user-id mappings.
    _init_staff(**staff_config_dict)
    # Optional override for media link URL formatting.
    MediaItem._media_link_formatter_callback = staticmethod(media_link_formatter_callback)
| 44.390244 | 90 | 0.752198 |
from libweasyl.models.media import DiskMediaItem, MediaItem
from libweasyl.models.meta import BaseQuery, _configure_dbsession
from libweasyl.staff import _init_staff
def configure_libweasyl(
        dbsession, not_found_exception, base_file_path,
        staff_config_dict, media_link_formatter_callback):
    """Configure libweasyl's global state for the current application.

    Can be called multiple times; each call replaces the values installed
    by the previous one.
    """
    # Install the scoped session used by the model layer.
    _configure_dbsession(dbsession)
    # Exception raised by the ``*_or_404`` query helpers.
    BaseQuery._not_found_exception = staticmethod(not_found_exception)
    # Root directory for on-disk media items.
    DiskMediaItem._base_file_path = staticmethod(base_file_path)
    # Populate the staff-level -> user-id mappings.
    _init_staff(**staff_config_dict)
    # Optional override for media link URL formatting.
    MediaItem._media_link_formatter_callback = staticmethod(media_link_formatter_callback)
| true | true |
f71b731df78a211a9b978d951f533de530a3905f | 3,840 | py | Python | tensorforce/core/memories/latest.py | zysilence/tensorforce | 7539e5dde66f3a93b881006f9b7f38c926ced21b | [
"Apache-2.0"
] | 2 | 2021-11-14T12:28:24.000Z | 2022-02-14T19:23:51.000Z | tensorforce/core/memories/latest.py | zysilence/tensorforce | 7539e5dde66f3a93b881006f9b7f38c926ced21b | [
"Apache-2.0"
] | null | null | null | tensorforce/core/memories/latest.py | zysilence/tensorforce | 7539e5dde66f3a93b881006f9b7f38c926ced21b | [
"Apache-2.0"
] | 3 | 2021-03-04T17:26:43.000Z | 2021-03-04T17:27:10.000Z | # Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import tensorflow as tf
from tensorforce.core.memories import Queue
class Latest(Queue):
    """
    Memory which always retrieves most recent experiences.
    """

    def __init__(self, states, internals, actions, include_next_states, capacity, scope='latest', summary_labels=None):
        """
        Latest memory.

        Args:
            states: States specifiction.
            internals: Internal states specification.
            actions: Actions specification.
            include_next_states: Include subsequent state if true.
            capacity: Memory capacity.
        """
        super(Latest, self).__init__(
            states=states,
            internals=internals,
            actions=actions,
            include_next_states=include_next_states,
            capacity=capacity,
            scope=scope,
            summary_labels=summary_labels
        )

    def tf_retrieve_timesteps(self, n):
        """Retrieve the (up to) n most recently stored timesteps."""
        # Number of timesteps available since the last episode boundary
        # (circular buffer arithmetic) — presumably; confirm against Queue.
        num_timesteps = (self.memory_index - self.episode_indices[-1] - 2) % self.capacity + 1
        n = tf.minimum(x=n, y=num_timesteps)
        indices = tf.range(
            start=(self.memory_index - n),
            limit=self.memory_index
        ) % self.capacity
        return self.retrieve_indices(indices=indices)

    def tf_retrieve_episodes(self, n):
        """Retrieve the (up to) n most recently stored episodes."""
        n = tf.minimum(x=n, y=self.episode_count)
        start = self.episode_indices[self.episode_count - n - 1] + 1
        limit = self.episode_indices[self.episode_count - 1] + 1
        # Shift the limit when the range wraps around the circular buffer.
        limit += tf.where(condition=(start < limit), x=0, y=self.capacity)
        indices = tf.range(start=start, limit=limit) % self.capacity
        return self.retrieve_indices(indices=indices)

    def tf_retrieve_sequences(self, n, sequence_length):
        """Retrieve the (up to) n most recent sequences of the given length."""
        # Remove once #128 is resolved
        tf.logging.warn("Sampling sequences is not validated yet. Use timesteps or episodes instead.")
        num_sequences = (self.memory_index - self.episode_indices[-1] - 2 - sequence_length + 1) % self.capacity + 1
        n = tf.minimum(x=n, y=num_sequences)
        indices = tf.range(
            start=(self.memory_index - n - sequence_length),  # or '- 1' implied in sequence length?
            limit=self.memory_index
        ) % self.capacity
        # sequence_indices = [tf.range(start=indices[n], limit=(indices[n] + sequence_length)) for k in range(n)]
        # sequence_indices = [indices[k: k + sequence_length] for k in tf.unstack(value=tf.range(start=0, limit=n), num=n)]
        sequence_indices = tf.expand_dims(input=tf.range(start=0, limit=n), axis=1) + tf.expand_dims(input=tf.constant(value=list(range(sequence_length))), axis=0)
        sequence_indices = tf.reshape(tensor=sequence_indices, shape=(n * sequence_length,))
        # sequence_indices = tf.concat(values=sequence_indices, axis=0)  # tf.stack !!!!!
        terminal = tf.gather(params=self.terminal_memory, indices=indices)
        sequence_indices = tf.boolean_mask(tensor=sequence_indices, mask=tf.logical_not(x=terminal))
        return self.retrieve_indices(indices=sequence_indices)
| 45.176471 | 163 | 0.663542 |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import tensorflow as tf
from tensorforce.core.memories import Queue
class Latest(Queue):
    """
    Memory which always retrieves most recent experiences.
    """

    def __init__(self, states, internals, actions, include_next_states, capacity, scope='latest', summary_labels=None):
        """
        Latest memory.

        Args:
            states: States specification.
            internals: Internal states specification.
            actions: Actions specification.
            include_next_states: Include subsequent state if true.
            capacity: Memory capacity.
        """
        super(Latest, self).__init__(
            states=states,
            internals=internals,
            actions=actions,
            include_next_states=include_next_states,
            capacity=capacity,
            scope=scope,
            summary_labels=summary_labels
        )

    def tf_retrieve_timesteps(self, n):
        """Retrieve the (up to) n most recently stored timesteps."""
        num_timesteps = (self.memory_index - self.episode_indices[-1] - 2) % self.capacity + 1
        n = tf.minimum(x=n, y=num_timesteps)
        indices = tf.range(
            start=(self.memory_index - n),
            limit=self.memory_index
        ) % self.capacity
        return self.retrieve_indices(indices=indices)

    def tf_retrieve_episodes(self, n):
        """Retrieve the (up to) n most recently stored episodes."""
        n = tf.minimum(x=n, y=self.episode_count)
        start = self.episode_indices[self.episode_count - n - 1] + 1
        limit = self.episode_indices[self.episode_count - 1] + 1
        # Shift the limit when the range wraps around the circular buffer.
        limit += tf.where(condition=(start < limit), x=0, y=self.capacity)
        indices = tf.range(start=start, limit=limit) % self.capacity
        return self.retrieve_indices(indices=indices)

    def tf_retrieve_sequences(self, n, sequence_length):
        """Retrieve the (up to) n most recent sequences of the given length."""
        # Fix: this line had been garbled to `ing.warn(...)`; restored to
        # `tf.logging.warn` as in the original source.
        tf.logging.warn("Sampling sequences is not validated yet. Use timesteps or episodes instead.")
        num_sequences = (self.memory_index - self.episode_indices[-1] - 2 - sequence_length + 1) % self.capacity + 1
        n = tf.minimum(x=n, y=num_sequences)
        indices = tf.range(
            start=(self.memory_index - n - sequence_length),
            limit=self.memory_index
        ) % self.capacity
        sequence_indices = tf.expand_dims(input=tf.range(start=0, limit=n), axis=1) + tf.expand_dims(input=tf.constant(value=list(range(sequence_length))), axis=0)
        sequence_indices = tf.reshape(tensor=sequence_indices, shape=(n * sequence_length,))
        # Fix: this assignment had been garbled to `l = tf.gather(...)`,
        # leaving `terminal` (used on the next line) undefined.
        terminal = tf.gather(params=self.terminal_memory, indices=indices)
        sequence_indices = tf.boolean_mask(tensor=sequence_indices, mask=tf.logical_not(x=terminal))
        return self.retrieve_indices(indices=sequence_indices)
| true | true |
f71b73477d2f539f36cc389b2a439621a3f79453 | 18,562 | py | Python | osa/scripts/provprocess.py | gae-ucm/LSTOSA | d44df4dc1daa87f57d95272014f05908d2c9a211 | [
"BSD-3-Clause"
] | 2 | 2022-02-21T17:45:38.000Z | 2022-03-25T11:48:52.000Z | osa/scripts/provprocess.py | gae-ucm/LSTOSA | d44df4dc1daa87f57d95272014f05908d2c9a211 | [
"BSD-3-Clause"
] | 79 | 2021-12-02T10:37:42.000Z | 2022-03-29T23:56:44.000Z | osa/scripts/provprocess.py | cta-observatory/lstosa | dd7a3a4967f265217929a1271c3f9be559a122ac | [
"BSD-3-Clause"
] | 1 | 2021-11-25T09:56:12.000Z | 2021-11-25T09:56:12.000Z | #!/usr/bin/env python
"""Provenance post processing script for OSA pipeline."""
import copy
import logging
import shutil
import sys
from pathlib import Path, PurePath
import yaml
from osa.configs import options
from osa.configs.config import cfg
from osa.provenance.capture import get_activity_id, get_file_hash
from osa.provenance.io import provdoc2graph, provdoc2json, provlist2provdoc, read_prov
from osa.provenance.utils import get_log_config
from osa.utils.cliopts import provprocessparsing
from osa.utils.logging import myLogger
__all__ = ["copy_used_file", "parse_lines_log", "parse_lines_run", "produce_provenance"]
log = myLogger(logging.getLogger())
provconfig = yaml.safe_load(get_log_config())
LOG_FILENAME = provconfig["handlers"]["provHandler"]["filename"]
PROV_PREFIX = provconfig["PREFIX"]
PATH_DL1 = cfg.get("LST1", "DL1_DIR")
PATH_DL2 = cfg.get("LST1", "DL2_DIR")
def copy_used_file(src, outdir):
    """
    Copy file used in process.

    Parameters
    ----------
    src
    outdir
    """
    if not Path(src).is_file():
        log.warning(f"{src} file cannot be accessed")

    source_hash = get_file_hash(src, buffer="content")
    target_name = PurePath(src).name
    target = Path(outdir) / target_name
    existing_hash = ""

    # An identically named file is already there: hash it and pick a new name.
    if target.exists():
        existing_hash = get_file_hash(str(target), buffer="content")
        target_name = target_name + "_"
        target = Path(outdir) / target_name

    # Copy only when the content differs from what is already in place.
    if source_hash != existing_hash:
        try:
            shutil.copyfile(src, str(target))
            log.info(f"copying {target}")
        except Exception as ex:
            log.warning(f"could not copy {src} file into {target}: {ex}")
def parse_lines_log(filter_cut, calib_runs, run_number):
    """
    Filter content in log file to produce a run/process wise session log.

    Parameters
    ----------
    filter_cut
    calib_runs
    run_number

    Returns
    -------
    filtered
    """
    selected = []
    cut = filter_cut if filter_cut else "all"
    activity_cuts = {
        "calibration": ["drs4_pedestal", "calibrate_charge"],
        "r0_to_dl1": ["r0_to_dl1", "dl1ab"],
        "dl1_to_dl2": ["dl1_datacheck", "dl1_to_dl2"],
    }
    activity_cuts["all"] = (
        activity_cuts["calibration"]
        + activity_cuts["r0_to_dl1"]
        + activity_cuts["dl1_to_dl2"]
    )

    with open(LOG_FILENAME, "r") as f:
        for line in f:
            parts = line.split(PROV_PREFIX)
            if len(parts) != 3:
                log.warning(
                    f"format {PROV_PREFIX} mismatch in log file {LOG_FILENAME}\n{line}"
                )
                continue
            head, stamp, payload = parts
            record = yaml.safe_load(payload)
            session_id = record.get("session_id", False)
            tag_activity, tag_run = record.get("session_tag", "0:0").split(":")

            # Keep entries for this run (or its calibration runs) whose
            # activity matches the requested filter.
            keep = tag_run in [run_number, calib_runs]
            if tag_activity not in activity_cuts[cut]:
                keep = False
            # A session-start record for this run is always kept...
            if session_id and (tag_run in [run_number, calib_runs]):
                keep = True
            # ...and, for a full session, rewritten so the merged session
            # starts at calibration with run-wise identifiers.
            if session_id and cut == "all" and not selected:
                record["session_id"] = f"{options.date}{run_number}"
                record["name"] = run_number
                record["observation_run"] = run_number
                line = f"{head}{PROV_PREFIX}{stamp}{PROV_PREFIX}{record}\n"
            # Any further session start would open a parallel session: drop it.
            if session_id and selected:
                keep = False

            if keep:
                selected.append(line)

    return selected
def parse_lines_run(filter_step, prov_lines, out):
    """
    Process provenance info to reduce session at run/process wise scope.

    Collapses sub-run records into run-wise collections, drops duplicated
    entities, relabels activities with a single run-wise activity id, and
    copies the files used by the step into ``out``.

    Parameters
    ----------
    filter_step
        Name of the processing activity to keep (e.g. ``r0_to_dl1``).
    prov_lines
        Parsed provenance records (dicts) of a session log.
    out
        Folder where used files are copied.

    Returns
    -------
    working_lines
    """
    size = 0
    container = {}
    working_lines = []
    r0filepath_str = ""
    dl1filepath_str = ""
    dl2filepath_str = ""
    mufilepath_str = ""
    ckfilepath_str = ""
    id_activity_run = ""
    end_time_line = ""
    osa_config_copied = False

    for line in prov_lines:
        # get info
        remove = False
        endTime = line.get("endTime", "")
        session_id = line.get("session_id", "")
        activity_id = line.get("activity_id", "")
        filepath = line.get("filepath", "")
        used_role = line.get("used_role", "")
        generated_role = line.get("generated_role", "")
        parameters = line.get("parameters", "")
        name = line.get("name", "")
        content_type = line.get("contentType", "")
        used_id = line.get("used_id", "")
        osa_cfg = line.get("config_file", "")

        # filter grain
        session_tag = line.get("session_tag", "0:0")
        tag_activity, _ = session_tag.split(":")
        if tag_activity != filter_step and not session_id:
            continue

        # remove subruns info (remember the subrun file paths so that
        # run-wise collections can be built at the end)
        if name == "DL1CheckSubrunDataset":
            ckfilepath_str = filepath
        elif name == "DL1SubrunDataset":
            dl1filepath_str = filepath
        elif name == "DL2SubrunDataset":
            dl2filepath_str = filepath
        elif name == "MuonsSubrunDataset":
            mufilepath_str = filepath
        elif name == "R0SubrunDataset":
            r0filepath_str = filepath
        if "Subrun" in name or "subrun" in used_role or "subrun" in generated_role:
            remove = True
        if parameters and "ObservationSubRun" in parameters:
            del line["parameters"]["ObservationSubRun"]

        # remove sub-runs activities and info
        if name == filter_step and not id_activity_run:
            id_activity_run = get_activity_id()
        if name in container or used_id in container:
            remove = True
        if parameters and "parameters" in container:
            remove = True
        if name:
            container[name] = True
        if used_id:
            container[used_id] = True
        if parameters:
            container["parameters"] = True
        if endTime:
            remove = True
            end_time_line = line
            size += 1

        # remove duplicated produced files
        if generated_role in container:
            remove = True
        if name == "DL2MergedFile":
            container[name] = True
        if "merged" in generated_role:
            container[generated_role] = True
        if name == "DL1CheckHDF5File":
            container[name] = True
        if "DL1Check HDF5 file" in generated_role:
            container[generated_role] = True
        if name == "DL1CheckPDFFile":
            container[name] = True
        if "DL1Check PDF file" in generated_role:
            container[generated_role] = True

        # replace with new run-wise activity_id
        if activity_id:
            line["activity_id"] = id_activity_run

        # copy used files not subruns not RFs not mergedDL2
        if (
            filepath
            and content_type != "application/x-spss-sav"
            and name != "DL2MergedFile"
            and not name.startswith("DL1Check")
            and not remove
        ):
            copy_used_file(filepath, out)
        if session_id and osa_cfg and not osa_config_copied:
            copy_used_file(osa_cfg, out)
            osa_config_copied = True

        if not remove:
            working_lines.append(line)

    # append collections used and generated at endtime line of last activity
    if end_time_line:
        working_lines.append(end_time_line)
        if r0filepath_str and filter_step == "r0_to_dl1":
            r0_entity_id = get_file_hash(r0filepath_str + "r0", buffer="path")
            r0filepath_str = r0filepath_str.replace(PurePath(r0filepath_str).name, "")
            used = {"entity_id": r0_entity_id}
            used.update({"name": "R0Collection"})
            used.update({"type": "SetCollection"})
            used.update({"size": size})
            used.update({"filepath": r0filepath_str})
            working_lines.append(used)
            used = {"activity_id": id_activity_run}
            used.update({"used_id": r0_entity_id})
            used.update({"used_role": "R0 Collection"})
            working_lines.append(used)
        if dl1filepath_str:
            dl1filepath_str = dl1filepath_str.replace(PurePath(dl1filepath_str).name, "")
            dl1_entity_id = get_file_hash(dl1filepath_str + "dl1", buffer="path")
            dl1 = {"entity_id": dl1_entity_id}
            dl1.update({"name": "DL1Collection"})
            dl1.update({"type": "SetCollection"})
            dl1.update({"size": size})
            dl1.update({"filepath": dl1filepath_str})
            working_lines.append(dl1)
        if mufilepath_str:
            mufilepath_str = mufilepath_str.replace(PurePath(mufilepath_str).name, "")
            mu_entity_id = get_file_hash(mufilepath_str + "muons", buffer="path")
            muons = {"entity_id": mu_entity_id}
            muons.update({"name": "MuonsCollection"})
            muons.update({"type": "SetCollection"})
            muons.update({"size": size})
            muons.update({"filepath": mufilepath_str})
            working_lines.append(muons)
        if mufilepath_str and filter_step == "r0_to_dl1":
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": mu_entity_id})
            generated.update({"generated_role": "Muons Collection"})
            working_lines.append(generated)
        if dl1filepath_str and filter_step in ["r0_to_dl1", "dl1ab"]:
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": dl1_entity_id})
            generated.update({"generated_role": "DL1 Collection"})
            working_lines.append(generated)
        if dl1filepath_str and filter_step in ["dl1_to_dl2", "dl1ab"]:
            used = {"activity_id": id_activity_run}
            used.update({"used_id": dl1_entity_id})
            used.update({"used_role": "DL1 Collection"})
            working_lines.append(used)
        if dl1filepath_str and filter_step == "dl1_datacheck":
            used = {"activity_id": id_activity_run}
            used.update({"used_id": dl1_entity_id})
            used.update({"used_role": "DL1 Collection"})
            working_lines.append(used)
        if mufilepath_str and filter_step == "dl1_datacheck":
            used = {"activity_id": id_activity_run}
            used.update({"used_id": mu_entity_id})
            used.update({"used_role": "Muons Collection"})
            working_lines.append(used)
        if ckfilepath_str and filter_step == "dl1_datacheck":
            ckfilepath_str = ckfilepath_str.replace(PurePath(ckfilepath_str).name, "")
            chk_entity_id = get_file_hash(ckfilepath_str + "check", buffer="path")
            dl1check = {"entity_id": chk_entity_id}
            dl1check.update({"name": "DL1CheckCollection"})
            dl1check.update({"type": "SetCollection"})
            dl1check.update({"size": size})
            dl1check.update({"filepath": ckfilepath_str})
            working_lines.append(dl1check)
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": chk_entity_id})
            generated.update({"generated_role": "DL1Checks Collection"})
            working_lines.append(generated)
        if dl2filepath_str and filter_step == "dl1_to_dl2":
            dl2_entity_id = get_file_hash(dl2filepath_str + "dl2", buffer="path")
            dl2filepath_str = dl2filepath_str.replace(PurePath(dl2filepath_str).name, "")
            used = {"entity_id": dl2_entity_id}
            used.update({"name": "DL2Collection"})
            used.update({"type": "SetCollection"})
            used.update({"size": size})
            used.update({"filepath": dl2filepath_str})
            working_lines.append(used)
            used = {"activity_id": id_activity_run}
            used.update({"generated_id": dl2_entity_id})
            used.update({"generated_role": "DL2 Collection"})
            working_lines.append(used)
    else:
        # No endTime record was seen: the activity never finished, so the
        # whole session is discarded.
        working_lines = []

    return working_lines
def define_paths(grain, start_path, end_path, base_filename):
    """Build the provenance product paths for a given granularity.

    Returns a dict with keys ``out_path`` (the ``log/`` folder, created
    if missing), ``log_path``, ``json_filepath`` and ``graph_filepath``.
    """
    step_path = Path(start_path) / options.date / options.prod_id / end_path
    # warn -- but keep going -- when the destination tree is missing
    if not step_path.exists():
        log.error(f"Path {step_path} does not exist")
    # provenance products live under a log/ sub-folder
    out_path = step_path / "log"
    out_path.mkdir(parents=True, exist_ok=True)
    stem = f"{grain}_{base_filename}"
    return {
        "out_path": out_path,
        "log_path": out_path / f"{stem}.log",
        "json_filepath": out_path / f"{stem}.json",
        "graph_filepath": out_path / f"{stem}.pdf",
    }
def produce_provenance_files(processed_lines, paths):
    """Write the provenance products: plain log, JSON serialisation, PDF graph."""
    # plain-text log: one provenance record per line
    with open(paths["log_path"], "w") as log_file:
        log_file.writelines(f"{record}\n" for record in processed_lines)
    log.info(f"creating {paths['log_path']}")
    document = provlist2provdoc(processed_lines)
    # JSON serialisation -- failures are logged, not fatal
    try:
        provdoc2json(document, str(paths["json_filepath"]))
        log.info(f"creating {paths['json_filepath']}")
    except Exception as ex:
        log.exception(f"problem while creating json: {ex}")
    # PDF graph -- failures are logged, not fatal
    try:
        provdoc2graph(document, str(paths["graph_filepath"]), "pdf")
        log.info(f"creating {paths['graph_filepath']}")
    except Exception as ex:
        log.exception(f"problem while creating graph: {ex}")
def produce_provenance(session_log_filename, base_filename):
    """
    Create run-wise provenance products as JSON logs
    and graphs according to granularity.

    Parameters
    ----------
    session_log_filename : str
        Temporal session log file the provenance records are read from.
    base_filename : str
        Base name used by ``define_paths`` to build the product file names.
    """
    if options.filter == "calibration" or not options.filter:
        paths_calibration = define_paths(
            "calibration_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        plines_drs4 = parse_lines_run(
            "drs4_pedestal",
            read_prov(filename=session_log_filename),
            str(paths_calibration["out_path"]),
        )
        plines_calib = parse_lines_run(
            "calibrate_charge",
            read_prov(filename=session_log_filename),
            str(paths_calibration["out_path"]),
        )
        # [1:] drops the first record of the second step when chaining
        # (presumably the shared session-start record) -- TODO confirm
        calibration_lines = plines_drs4 + plines_calib[1:]
        # TODO
        # create calibration prov files only if filtering
        if options.filter == "calibration":
            pass
    if options.filter == "r0_to_dl1" or not options.filter:
        paths_r0_dl1 = define_paths(
            "r0_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        plines_r0 = parse_lines_run(
            "r0_to_dl1",
            read_prov(filename=session_log_filename),
            str(paths_r0_dl1["out_path"]),
        )
        plines_ab = parse_lines_run(
            "dl1ab",
            read_prov(filename=session_log_filename),
            str(paths_r0_dl1["out_path"]),
        )
        dl1_lines = plines_r0 + plines_ab[1:]
        # create r0_to_dl1 prov files only if filtering
        if options.filter == "r0_to_dl1":
            produce_provenance_files(plines_r0 + plines_ab[1:], paths_r0_dl1)
    if options.filter == "dl1_to_dl2" or not options.filter:
        paths_dl1_dl2 = define_paths(
            "dl1_to_dl2", PATH_DL2, options.dl2_prod_id, base_filename
        )
        plines_check = parse_lines_run(
            "dl1_datacheck",
            read_prov(filename=session_log_filename),
            str(paths_dl1_dl2["out_path"]),
        )
        plines_dl2 = parse_lines_run(
            "dl1_to_dl2",
            read_prov(filename=session_log_filename),
            str(paths_dl1_dl2["out_path"]),
        )
        dl1_dl2_lines = plines_check + plines_dl2[1:]
        # create dl1_to_dl2 prov files only if filtering
        if options.filter == "dl1_to_dl2":
            produce_provenance_files(plines_check + plines_dl2[1:], paths_dl1_dl2)
    # create calibration_to_dl1 and calibration_to_dl2 prov files
    # (only when no filter was given all three branches above ran, so the
    # *_lines variables are guaranteed to exist here)
    if not options.filter:
        calibration_to_dl1 = define_paths(
            "calibration_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        calibration_to_dl2 = define_paths(
            "calibration_to_dl2", PATH_DL2, options.dl2_prod_id, base_filename
        )
        calibration_to_dl1_lines = calibration_lines + dl1_lines[1:]
        # deep copies keep the two products independent: the records are
        # dicts shared between the concatenated lists
        lines_dl1 = copy.deepcopy(calibration_to_dl1_lines)
        calibration_to_dl2_lines = calibration_to_dl1_lines + dl1_dl2_lines[1:]
        lines_dl2 = copy.deepcopy(calibration_to_dl2_lines)
        produce_provenance_files(lines_dl1, calibration_to_dl1)
        produce_provenance_files(lines_dl2, calibration_to_dl2)
def main():
    """Extract the provenance information."""
    provprocessparsing()
    # logging verbosity follows the CLI flag
    if options.verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)
    log_path = Path(LOG_FILENAME)
    # bail out early: a missing log file would otherwise crash with an
    # unhandled FileNotFoundError on the .stat() call below
    if not log_path.exists():
        log.error(f"file {LOG_FILENAME} does not exist")
        sys.exit(1)
    # an empty log file means there is nothing to parse
    if not log_path.stat().st_size:
        log.warning(f"file {LOG_FILENAME} is empty")
        sys.exit(1)
    # build base_filename for the session products
    base_filename = f"{options.run}_prov"
    session_log_filename = f"{base_filename}.log"
    # parse LOG_FILENAME content for a specific run / process
    calib_runs = f"{options.drs4_pedestal_run_id}-{options.pedcal_run_id}"
    parsed_content = parse_lines_log(options.filter, calib_runs, options.run)
    # create temporal session log file
    with open(session_log_filename, "w") as f:
        for line in parsed_content:
            f.write(line)
    try:
        # create run-wise JSON logs and graphs for each granularity
        produce_provenance(session_log_filename, base_filename)
    finally:
        # remove temporal session log file
        Path(session_log_filename).unlink()
    # remove LOG_FILENAME only when explicitly requested
    if options.quit:
        Path(LOG_FILENAME).unlink()


if __name__ == "__main__":
    main()
| 36.396078 | 89 | 0.619815 |
import copy
import logging
import shutil
import sys
from pathlib import Path, PurePath
import yaml
from osa.configs import options
from osa.configs.config import cfg
from osa.provenance.capture import get_activity_id, get_file_hash
from osa.provenance.io import provdoc2graph, provdoc2json, provlist2provdoc, read_prov
from osa.provenance.utils import get_log_config
from osa.utils.cliopts import provprocessparsing
from osa.utils.logging import myLogger
__all__ = ["copy_used_file", "parse_lines_log", "parse_lines_run", "produce_provenance"]
# module-level logger shared by all helpers below
log = myLogger(logging.getLogger())
# provenance logging configuration loaded via get_log_config() (YAML)
provconfig = yaml.safe_load(get_log_config())
# global provenance capture log written by the provHandler
LOG_FILENAME = provconfig["handlers"]["provHandler"]["filename"]
# field separator used in every provenance log line (see parse_lines_log)
PROV_PREFIX = provconfig["PREFIX"]
# base directories for DL1/DL2 products from the OSA configuration
PATH_DL1 = cfg.get("LST1", "DL1_DIR")
PATH_DL2 = cfg.get("LST1", "DL2_DIR")
def copy_used_file(src, outdir):
    """Copy ``src`` into ``outdir`` unless an identical copy is already there.

    File identity is decided by a content hash.  When ``outdir`` already
    holds a *different* file under the same name, the copy is stored under
    ``<name>_`` instead.  Failures are logged, never raised.
    """
    # warning only -- hashing/copying below may still fail and is caught there
    if not Path(src).is_file():
        log.warning(f"{src} file cannot be accessed")
    hash_src = get_file_hash(src, buffer="content")
    filename = PurePath(src).name
    destpath = Path(outdir) / filename
    hash_out = ""
    # a file with the same name already exists: hash it and fall back to
    # the "<name>_" destination for a potential copy
    if destpath.exists():
        hash_out = get_file_hash(str(destpath), buffer="content")
        filename = filename + "_"
        destpath = Path(outdir) / filename
    # skip the copy when the destination already has identical content
    if hash_src != hash_out:
        try:
            shutil.copyfile(src, str(destpath))
            log.info(f"copying {destpath}")
        except Exception as ex:
            log.warning(f"could not copy {src} file into {destpath}: {ex}")
def parse_lines_log(filter_cut, calib_runs, run_number):
    """Filter the global provenance log down to the records of one run.

    Parameters
    ----------
    filter_cut : str
        Processing-step filter ("calibration", "r0_to_dl1", "dl1_to_dl2");
        a falsy value selects all steps.
    calib_runs : str
        Calibration run identifier string ("<drs4_pedestal>-<pedcal>").
    run_number
        Observation run identifier, matched against the session tag.

    Returns
    -------
    list of str
        The matching raw log lines; the first session record (unfiltered
        parse only) is rewritten with run-wise identifiers.
    """
    filtered = []
    # falsy filter means: keep every known activity
    if not filter_cut:
        filter_cut = "all"
    # activities belonging to each processing-step filter
    cuts = {
        "calibration": ["drs4_pedestal", "calibrate_charge"],
        "r0_to_dl1": ["r0_to_dl1", "dl1ab"],
        "dl1_to_dl2": ["dl1_datacheck", "dl1_to_dl2"],
    }
    cuts["all"] = cuts["calibration"] + cuts["r0_to_dl1"] + cuts["dl1_to_dl2"]
    with open(LOG_FILENAME, "r") as f:
        for line in f.readlines():
            # each line is "<head><PREFIX><meta><PREFIX><yaml record>"
            ll = line.split(PROV_PREFIX)
            if len(ll) != 3:
                log.warning(
                    f"format {PROV_PREFIX} mismatch in log file {LOG_FILENAME}\n{line}"
                )
                continue
            prov_str = ll.pop()
            prov_dict = yaml.safe_load(prov_str)
            keep = False
            # session_tag is "<activity>:<run>"
            session_tag = prov_dict.get("session_tag", "0:0")
            session_id = prov_dict.get("session_id", False)
            tag_activity, tag_run = session_tag.split(":")
            if tag_run in [run_number, calib_runs]:
                keep = True
            if tag_activity not in cuts[filter_cut]:
                keep = False
            # session records for this run are always kept
            if session_id and (tag_run in [run_number, calib_runs]):
                keep = True
            # the very first session record of an unfiltered parse is
            # rewritten with run-wise identifiers
            if session_id and filter_cut == "all" and not filtered:
                prov_dict["session_id"] = f"{options.date}{run_number}"
                prov_dict["name"] = run_number
                prov_dict["observation_run"] = run_number
                line = f"{ll[0]}{PROV_PREFIX}{ll[1]}{PROV_PREFIX}{prov_dict}\n"
            # only one session record is kept overall
            if session_id and filtered:
                keep = False
            if keep:
                filtered.append(line)
    return filtered
def parse_lines_run(filter_step, prov_lines, out):
    """Process the provenance records of one processing step of a run.

    Keeps the records whose ``session_tag`` activity matches
    ``filter_step``, drops sub-run level records and duplicates, copies
    the files referenced by the kept records into ``out``, and appends
    synthetic "Collection" records aggregating the per-sub-run datasets.

    Returns the processed list of record dicts; an empty list when no
    ``endTime`` record was found for the step (incomplete step).
    """
    size = 0
    container = {}
    working_lines = []
    r0filepath_str = ""
    dl1filepath_str = ""
    dl2filepath_str = ""
    mufilepath_str = ""
    ckfilepath_str = ""
    id_activity_run = ""
    end_time_line = ""
    osa_config_copied = False
    for line in prov_lines:
        remove = False
        endTime = line.get("endTime", "")
        session_id = line.get("session_id", "")
        activity_id = line.get("activity_id", "")
        filepath = line.get("filepath", "")
        used_role = line.get("used_role", "")
        generated_role = line.get("generated_role", "")
        parameters = line.get("parameters", "")
        name = line.get("name", "")
        content_type = line.get("contentType", "")
        used_id = line.get("used_id", "")
        osa_cfg = line.get("config_file", "")
        session_tag = line.get("session_tag", "0:0")
        tag_activity, _ = session_tag.split(":")
        # skip records from other activities (session records pass through)
        if tag_activity != filter_step and not session_id:
            continue
        # remember the sub-run file paths so the Collection records can be
        # built after the loop
        if name == "DL1CheckSubrunDataset":
            ckfilepath_str = filepath
        elif name == "DL1SubrunDataset":
            dl1filepath_str = filepath
        elif name == "DL2SubrunDataset":
            dl2filepath_str = filepath
        elif name == "MuonsSubrunDataset":
            mufilepath_str = filepath
        elif name == "R0SubrunDataset":
            r0filepath_str = filepath
        # sub-run level records are dropped from the output
        if "Subrun" in name or "subrun" in used_role or "subrun" in generated_role:
            remove = True
            if parameters and "ObservationSubRun" in parameters:
                del line["parameters"]["ObservationSubRun"]
        # one activity id is created per run for this step
        if name == filter_step and not id_activity_run:
            id_activity_run = get_activity_id()
        # keep only the first occurrence of each entity/name/parameters
        if name in container or used_id in container:
            remove = True
        if parameters and "parameters" in container:
            remove = True
        if name:
            container[name] = True
        if used_id:
            container[used_id] = True
        if parameters:
            container["parameters"] = True
        # the endTime record is held back and re-appended at the very end;
        # size counts the number of endTime records seen (sub-runs)
        if endTime:
            remove = True
            end_time_line = line
            size += 1
        if generated_role in container:
            remove = True
        if name == "DL2MergedFile":
            container[name] = True
        if "merged" in generated_role:
            container[generated_role] = True
        if name == "DL1CheckHDF5File":
            container[name] = True
        if "DL1Check HDF5 file" in generated_role:
            container[generated_role] = True
        if name == "DL1CheckPDFFile":
            container[name] = True
        if "DL1Check PDF file" in generated_role:
            container[generated_role] = True
        if activity_id:
            line["activity_id"] = id_activity_run
        # copy files used in the process next to the provenance products
        if (
            filepath
            and content_type != "application/x-spss-sav"
            and name != "DL2MergedFile"
            and not name.startswith("DL1Check")
            and not remove
        ):
            copy_used_file(filepath, out)
        # the OSA config file is copied once per session
        if session_id and osa_cfg and not osa_config_copied:
            copy_used_file(osa_cfg, out)
            osa_config_copied = True
        if not remove:
            working_lines.append(line)
    # a step is only considered complete when an endTime record was seen
    if end_time_line:
        working_lines.append(end_time_line)
        # append synthetic Collection records per data level
        if r0filepath_str and filter_step == "r0_to_dl1":
            r0_entity_id = get_file_hash(r0filepath_str + "r0", buffer="path")
            r0filepath_str = r0filepath_str.replace(PurePath(r0filepath_str).name, "")
            used = {"entity_id": r0_entity_id}
            used.update({"name": "R0Collection"})
            used.update({"type": "SetCollection"})
            used.update({"size": size})
            used.update({"filepath": r0filepath_str})
            working_lines.append(used)
            used = {"activity_id": id_activity_run}
            used.update({"used_id": r0_entity_id})
            used.update({"used_role": "R0 Collection"})
            working_lines.append(used)
        if dl1filepath_str:
            dl1filepath_str = dl1filepath_str.replace(PurePath(dl1filepath_str).name, "")
            dl1_entity_id = get_file_hash(dl1filepath_str + "dl1", buffer="path")
            dl1 = {"entity_id": dl1_entity_id}
            dl1.update({"name": "DL1Collection"})
            dl1.update({"type": "SetCollection"})
            dl1.update({"size": size})
            dl1.update({"filepath": dl1filepath_str})
            working_lines.append(dl1)
        if mufilepath_str:
            mufilepath_str = mufilepath_str.replace(PurePath(mufilepath_str).name, "")
            mu_entity_id = get_file_hash(mufilepath_str + "muons", buffer="path")
            muons = {"entity_id": mu_entity_id}
            muons.update({"name": "MuonsCollection"})
            muons.update({"type": "SetCollection"})
            muons.update({"size": size})
            muons.update({"filepath": mufilepath_str})
            working_lines.append(muons)
        if mufilepath_str and filter_step == "r0_to_dl1":
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": mu_entity_id})
            generated.update({"generated_role": "Muons Collection"})
            working_lines.append(generated)
        if dl1filepath_str and filter_step in ["r0_to_dl1", "dl1ab"]:
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": dl1_entity_id})
            generated.update({"generated_role": "DL1 Collection"})
            working_lines.append(generated)
        if dl1filepath_str and filter_step in ["dl1_to_dl2", "dl1ab"]:
            used = {"activity_id": id_activity_run}
            used.update({"used_id": dl1_entity_id})
            used.update({"used_role": "DL1 Collection"})
            working_lines.append(used)
        if dl1filepath_str and filter_step == "dl1_datacheck":
            used = {"activity_id": id_activity_run}
            used.update({"used_id": dl1_entity_id})
            used.update({"used_role": "DL1 Collection"})
            working_lines.append(used)
        if mufilepath_str and filter_step == "dl1_datacheck":
            used = {"activity_id": id_activity_run}
            used.update({"used_id": mu_entity_id})
            used.update({"used_role": "Muons Collection"})
            working_lines.append(used)
        if ckfilepath_str and filter_step == "dl1_datacheck":
            ckfilepath_str = ckfilepath_str.replace(PurePath(ckfilepath_str).name, "")
            chk_entity_id = get_file_hash(ckfilepath_str + "check", buffer="path")
            dl1check = {"entity_id": chk_entity_id}
            dl1check.update({"name": "DL1CheckCollection"})
            dl1check.update({"type": "SetCollection"})
            dl1check.update({"size": size})
            dl1check.update({"filepath": ckfilepath_str})
            working_lines.append(dl1check)
            generated = {"activity_id": id_activity_run}
            generated.update({"generated_id": chk_entity_id})
            generated.update({"generated_role": "DL1Checks Collection"})
            working_lines.append(generated)
        if dl2filepath_str and filter_step == "dl1_to_dl2":
            dl2_entity_id = get_file_hash(dl2filepath_str + "dl2", buffer="path")
            dl2filepath_str = dl2filepath_str.replace(PurePath(dl2filepath_str).name, "")
            used = {"entity_id": dl2_entity_id}
            used.update({"name": "DL2Collection"})
            used.update({"type": "SetCollection"})
            used.update({"size": size})
            used.update({"filepath": dl2filepath_str})
            working_lines.append(used)
            used = {"activity_id": id_activity_run}
            used.update({"generated_id": dl2_entity_id})
            used.update({"generated_role": "DL2 Collection"})
            working_lines.append(used)
    # no endTime found: discard everything
    else:
        working_lines = []
    return working_lines
def define_paths(grain, start_path, end_path, base_filename):
    """Build the provenance product paths for a given granularity.

    Returns a dict with keys ``out_path`` (the ``log/`` folder, created
    if missing), ``log_path``, ``json_filepath`` and ``graph_filepath``.
    """
    step_path = Path(start_path) / options.date / options.prod_id / end_path
    # warn -- but keep going -- when the destination tree is missing
    if not step_path.exists():
        log.error(f"Path {step_path} does not exist")
    out_path = step_path / "log"
    out_path.mkdir(parents=True, exist_ok=True)
    stem = f"{grain}_{base_filename}"
    return {
        "out_path": out_path,
        "log_path": out_path / f"{stem}.log",
        "json_filepath": out_path / f"{stem}.json",
        "graph_filepath": out_path / f"{stem}.pdf",
    }
def produce_provenance_files(processed_lines, paths):
    """Write the provenance products: plain log, JSON serialisation, PDF graph."""
    # plain-text log: one provenance record per line
    with open(paths["log_path"], "w") as log_file:
        log_file.writelines(f"{record}\n" for record in processed_lines)
    log.info(f"creating {paths['log_path']}")
    document = provlist2provdoc(processed_lines)
    # JSON serialisation -- failures are logged, not fatal
    try:
        provdoc2json(document, str(paths["json_filepath"]))
        log.info(f"creating {paths['json_filepath']}")
    except Exception as ex:
        log.exception(f"problem while creating json: {ex}")
    # PDF graph -- failures are logged, not fatal
    try:
        provdoc2graph(document, str(paths["graph_filepath"]), "pdf")
        log.info(f"creating {paths['graph_filepath']}")
    except Exception as ex:
        log.exception(f"problem while creating graph: {ex}")
def produce_provenance(session_log_filename, base_filename):
    """
    Create run-wise provenance products as JSON logs
    and graphs according to granularity.

    Parameters
    ----------
    session_log_filename : str
        Temporal session log file the provenance records are read from.
    base_filename : str
        Base name used by ``define_paths`` to build the product file names.
    """
    if options.filter == "calibration" or not options.filter:
        paths_calibration = define_paths(
            "calibration_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        plines_drs4 = parse_lines_run(
            "drs4_pedestal",
            read_prov(filename=session_log_filename),
            str(paths_calibration["out_path"]),
        )
        plines_calib = parse_lines_run(
            "calibrate_charge",
            read_prov(filename=session_log_filename),
            str(paths_calibration["out_path"]),
        )
        # [1:] drops the first record of the second step when chaining
        # (presumably the shared session-start record) -- TODO confirm
        calibration_lines = plines_drs4 + plines_calib[1:]
        # calibration prov files are currently not produced when filtering
        if options.filter == "calibration":
            pass
    if options.filter == "r0_to_dl1" or not options.filter:
        paths_r0_dl1 = define_paths(
            "r0_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        plines_r0 = parse_lines_run(
            "r0_to_dl1",
            read_prov(filename=session_log_filename),
            str(paths_r0_dl1["out_path"]),
        )
        plines_ab = parse_lines_run(
            "dl1ab",
            read_prov(filename=session_log_filename),
            str(paths_r0_dl1["out_path"]),
        )
        dl1_lines = plines_r0 + plines_ab[1:]
        # create r0_to_dl1 prov files only if filtering
        if options.filter == "r0_to_dl1":
            produce_provenance_files(plines_r0 + plines_ab[1:], paths_r0_dl1)
    if options.filter == "dl1_to_dl2" or not options.filter:
        paths_dl1_dl2 = define_paths(
            "dl1_to_dl2", PATH_DL2, options.dl2_prod_id, base_filename
        )
        plines_check = parse_lines_run(
            "dl1_datacheck",
            read_prov(filename=session_log_filename),
            str(paths_dl1_dl2["out_path"]),
        )
        plines_dl2 = parse_lines_run(
            "dl1_to_dl2",
            read_prov(filename=session_log_filename),
            str(paths_dl1_dl2["out_path"]),
        )
        dl1_dl2_lines = plines_check + plines_dl2[1:]
        # create dl1_to_dl2 prov files only if filtering
        if options.filter == "dl1_to_dl2":
            produce_provenance_files(plines_check + plines_dl2[1:], paths_dl1_dl2)
    # create calibration_to_dl1 and calibration_to_dl2 prov files
    # (without a filter all three branches above ran, so the *_lines
    # variables are guaranteed to exist here)
    if not options.filter:
        calibration_to_dl1 = define_paths(
            "calibration_to_dl1", PATH_DL1, options.dl1_prod_id, base_filename
        )
        calibration_to_dl2 = define_paths(
            "calibration_to_dl2", PATH_DL2, options.dl2_prod_id, base_filename
        )
        calibration_to_dl1_lines = calibration_lines + dl1_lines[1:]
        # deep copies keep the two products independent: the records are
        # dicts shared between the concatenated lists
        lines_dl1 = copy.deepcopy(calibration_to_dl1_lines)
        calibration_to_dl2_lines = calibration_to_dl1_lines + dl1_dl2_lines[1:]
        lines_dl2 = copy.deepcopy(calibration_to_dl2_lines)
        produce_provenance_files(lines_dl1, calibration_to_dl1)
        produce_provenance_files(lines_dl2, calibration_to_dl2)
def main():
    """Extract the provenance information."""
    provprocessparsing()
    # logging verbosity follows the CLI flag
    if options.verbose:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)
    log_path = Path(LOG_FILENAME)
    # bail out early: a missing log file would otherwise crash with an
    # unhandled FileNotFoundError on the .stat() call below
    if not log_path.exists():
        log.error(f"file {LOG_FILENAME} does not exist")
        sys.exit(1)
    # an empty log file means there is nothing to parse
    if not log_path.stat().st_size:
        log.warning(f"file {LOG_FILENAME} is empty")
        sys.exit(1)
    # build base_filename for the session products
    base_filename = f"{options.run}_prov"
    session_log_filename = f"{base_filename}.log"
    # parse LOG_FILENAME content for a specific run / process
    calib_runs = f"{options.drs4_pedestal_run_id}-{options.pedcal_run_id}"
    parsed_content = parse_lines_log(options.filter, calib_runs, options.run)
    # create temporal session log file
    with open(session_log_filename, "w") as f:
        for line in parsed_content:
            f.write(line)
    try:
        # create run-wise JSON logs and graphs for each granularity
        produce_provenance(session_log_filename, base_filename)
    finally:
        # remove temporal session log file
        Path(session_log_filename).unlink()
    # remove LOG_FILENAME only when explicitly requested
    if options.quit:
        Path(LOG_FILENAME).unlink()


if __name__ == "__main__":
    main()
| true | true |
f71b75660891063679e5531c8f4789ea15fdf36c | 17,910 | py | Python | skimage/transform/radon_transform.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | [
"BSD-3-Clause"
] | 2 | 2017-03-30T11:22:11.000Z | 2019-03-03T05:18:01.000Z | skimage/transform/radon_transform.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | [
"BSD-3-Clause"
] | 3 | 2021-03-19T14:27:58.000Z | 2022-03-12T00:42:39.000Z | skimage/transform/radon_transform.py | jjhelmus/scikit-image | b9b5fde0821fe8bcece2528b30d012c65c64ad6f | [
"BSD-3-Clause"
] | 1 | 2019-12-17T14:53:28.000Z | 2019-12-17T14:53:28.000Z | # -*- coding: utf-8 -*-
"""
radon.py - Radon and inverse radon transforms
Based on code of Justin K. Romberg
(http://www.clear.rice.edu/elec431/projects96/DSP/bpanalysis.html)
J. Gillam and Chris Griffin.
References:
-B.R. Ramesh, N. Srinivasa, K. Rajgopal, "An Algorithm for Computing
the Discrete Radon Transform With Some Applications", Proceedings of
the Fourth IEEE Region 10 International Conference, TENCON '89, 1989.
-A. C. Kak, Malcolm Slaney, "Principles of Computerized Tomographic
Imaging", IEEE Press 1988.
"""
from __future__ import division
import numpy as np
from scipy.fftpack import fft, ifft, fftfreq
from scipy.interpolate import interp1d
from ._warps_cy import _warp_fast
from ._radon_transform import sart_projection_update
from .. import util
from warnings import warn
__all__ = ["radon", "iradon", "iradon_sart"]
def radon(image, theta=None, circle=False):
    """
    Calculates the radon transform of an image given specified
    projection angles.

    Parameters
    ----------
    image : array_like, dtype=float
        Input image. The rotation axis will be located in the pixel with
        indices ``(image.shape[0] // 2, image.shape[1] // 2)``.
    theta : array_like, dtype=float, optional (default np.arange(180))
        Projection angles (in degrees).
    circle : boolean, optional
        Assume image is zero outside the inscribed circle, making the
        width of each projection (the first dimension of the sinogram)
        equal to ``min(image.shape)``.

    Returns
    -------
    radon_image : ndarray
        Radon transform (sinogram). The tomography rotation axis will lie
        at the pixel index ``radon_image.shape[0] // 2`` along the 0th
        dimension of ``radon_image``.
    """
    if image.ndim != 2:
        raise ValueError('The input image must be 2-D')
    if theta is None:
        theta = np.arange(180)
    if circle:
        radius = min(image.shape) // 2
        c0, c1 = np.ogrid[0:image.shape[0], 0:image.shape[1]]
        reconstruction_circle = ((c0 - image.shape[0] // 2) ** 2
                                 + (c1 - image.shape[1] // 2) ** 2)
        reconstruction_circle = reconstruction_circle <= radius ** 2
        # warn (do not fail) when the circle assumption is violated
        if not np.all(reconstruction_circle | (image == 0)):
            warn('Radon transform: image must be zero outside the '
                 'reconstruction circle')
        # Crop image to make it square
        slices = []
        for d in (0, 1):
            if image.shape[d] > min(image.shape):
                excess = image.shape[d] - min(image.shape)
                slices.append(slice(int(np.ceil(excess / 2)),
                                    int(np.ceil(excess / 2)
                                        + min(image.shape))))
            else:
                slices.append(slice(None))
        slices = tuple(slices)
        padded_image = image[slices]
    else:
        # pad to the diagonal so no content is rotated out of the frame
        diagonal = np.sqrt(2) * max(image.shape)
        pad = [int(np.ceil(diagonal - s)) for s in image.shape]
        new_center = [(s + p) // 2 for s, p in zip(image.shape, pad)]
        old_center = [s // 2 for s in image.shape]
        pad_before = [nc - oc for oc, nc in zip(old_center, new_center)]
        pad_width = [(pb, p - pb) for pb, p in zip(pad_before, pad)]
        padded_image = util.pad(image, pad_width, mode='constant',
                                constant_values=0)
    # padded_image is always square
    assert padded_image.shape[0] == padded_image.shape[1]
    radon_image = np.zeros((padded_image.shape[0], len(theta)))
    center = padded_image.shape[0] // 2

    # homogeneous transforms shifting the rotation center to/from the origin
    shift0 = np.array([[1, 0, -center],
                       [0, 1, -center],
                       [0, 0, 1]])
    shift1 = np.array([[1, 0, center],
                       [0, 1, center],
                       [0, 0, 1]])

    def build_rotation(theta):
        # rotation about the image center by ``theta`` degrees
        T = np.deg2rad(theta)
        R = np.array([[np.cos(T), np.sin(T), 0],
                      [-np.sin(T), np.cos(T), 0],
                      [0, 0, 1]])
        return shift1.dot(R).dot(shift0)

    # each projection is the column sum of the rotated image
    for i in range(len(theta)):
        rotated = _warp_fast(padded_image, build_rotation(theta[i]))
        radon_image[:, i] = rotated.sum(0)
    return radon_image
def _sinogram_circle_to_square(sinogram):
    """Zero-pad a circle-cropped sinogram along axis 0 up to the diagonal size.

    The padding is centered so the rotation axis stays at index
    ``shape[0] // 2`` of the padded sinogram.
    """
    size = sinogram.shape[0]
    target = int(np.ceil(np.sqrt(2) * size))
    # split the padding so the old center lands on the new center
    before = target // 2 - size // 2
    after = (target - size) - before
    return util.pad(sinogram, ((before, after), (0, 0)),
                    mode='constant', constant_values=0)
def iradon(radon_image, theta=None, output_size=None,
           filter="ramp", interpolation="linear", circle=False):
    """
    Inverse radon transform.

    Reconstruct an image from the radon transform, using the filtered
    back projection algorithm.

    Parameters
    ----------
    radon_image : array_like, dtype=float
        Image containing radon transform (sinogram). Each column of
        the image corresponds to a projection along a different angle. The
        tomography rotation axis should lie at the pixel index
        ``radon_image.shape[0] // 2`` along the 0th dimension of
        ``radon_image``.
    theta : array_like, dtype=float, optional
        Reconstruction angles (in degrees). Default: m angles evenly spaced
        between 0 and 180 (if the shape of `radon_image` is (N, M)).
    output_size : int
        Number of rows and columns in the reconstruction.
    filter : str, optional (default ramp)
        Filter used in frequency domain filtering. Ramp filter used by default.
        Filters available: ramp, shepp-logan, cosine, hamming, hann.
        Assign None to use no filter.
    interpolation : str, optional (default 'linear')
        Interpolation method used in reconstruction. Methods available:
        'linear', 'nearest', and 'cubic' ('cubic' is slow).
    circle : boolean, optional
        Assume the reconstructed image is zero outside the inscribed circle.
        Also changes the default output_size to match the behaviour of
        ``radon`` called with ``circle=True``.

    Returns
    -------
    reconstructed : ndarray
        Reconstructed image. The rotation axis will be located in the pixel
        with indices
        ``(reconstructed.shape[0] // 2, reconstructed.shape[1] // 2)``.

    Notes
    -----
    It applies the Fourier slice theorem to reconstruct an image by
    multiplying the frequency domain of the filter with the FFT of the
    projection data. This algorithm is called filtered back projection.
    """
    if radon_image.ndim != 2:
        raise ValueError('The input image must be 2-D')
    if theta is None:
        m, n = radon_image.shape
        theta = np.linspace(0, 180, n, endpoint=False)
    else:
        theta = np.asarray(theta)
    if len(theta) != radon_image.shape[1]:
        raise ValueError("The given ``theta`` does not match the number of "
                         "projections in ``radon_image``.")
    interpolation_types = ('linear', 'nearest', 'cubic')
    if not interpolation in interpolation_types:
        raise ValueError("Unknown interpolation: %s" % interpolation)
    if not output_size:
        # If output size not specified, estimate from input radon image
        if circle:
            output_size = radon_image.shape[0]
        else:
            output_size = int(np.floor(np.sqrt((radon_image.shape[0]) ** 2
                                               / 2.0)))
    if circle:
        radon_image = _sinogram_circle_to_square(radon_image)
    # angles in radians
    th = (np.pi / 180.0) * theta
    # resize image to next power of two (but no less than 64) for
    # Fourier analysis; speeds up Fourier and lessens artifacts
    projection_size_padded = \
        max(64, int(2 ** np.ceil(np.log2(2 * radon_image.shape[0]))))
    pad_width = ((0, projection_size_padded - radon_image.shape[0]), (0, 0))
    img = util.pad(radon_image, pad_width, mode='constant', constant_values=0)
    # Construct the Fourier filter
    f = fftfreq(projection_size_padded).reshape(-1, 1) # digital frequency
    omega = 2 * np.pi * f # angular frequency
    fourier_filter = 2 * np.abs(f) # ramp filter
    if filter == "ramp":
        pass
    elif filter == "shepp-logan":
        # Start from first element to avoid divide by zero
        fourier_filter[1:] = fourier_filter[1:] * np.sin(omega[1:]) / omega[1:]
    elif filter == "cosine":
        fourier_filter *= np.cos(omega)
    elif filter == "hamming":
        fourier_filter *= (0.54 + 0.46 * np.cos(omega / 2))
    elif filter == "hann":
        fourier_filter *= (1 + np.cos(omega / 2)) / 2
    elif filter is None:
        fourier_filter[:] = 1
    else:
        raise ValueError("Unknown filter: %s" % filter)
    # Apply filter in Fourier domain (Fourier slice theorem)
    projection = fft(img, axis=0) * fourier_filter
    radon_filtered = np.real(ifft(projection, axis=0))
    # Resize filtered image back to original size
    radon_filtered = radon_filtered[:radon_image.shape[0], :]
    reconstructed = np.zeros((output_size, output_size))
    # Determine the center of the projections (= center of sinogram)
    mid_index = radon_image.shape[0] // 2
    [X, Y] = np.mgrid[0:output_size, 0:output_size]
    xpr = X - int(output_size) // 2
    ypr = Y - int(output_size) // 2
    # Reconstruct image by interpolation: accumulate the back-projection
    # of every filtered projection
    for i in range(len(theta)):
        t = ypr * np.cos(th[i]) - xpr * np.sin(th[i])
        x = np.arange(radon_filtered.shape[0]) - mid_index
        if interpolation == 'linear':
            backprojected = np.interp(t, x, radon_filtered[:, i],
                                      left=0, right=0)
        else:
            interpolant = interp1d(x, radon_filtered[:, i], kind=interpolation,
                                   bounds_error=False, fill_value=0)
            backprojected = interpolant(t)
        reconstructed += backprojected
    if circle:
        # zero everything outside the inscribed reconstruction circle
        radius = output_size // 2
        reconstruction_circle = (xpr ** 2 + ypr ** 2) <= radius ** 2
        reconstructed[~reconstruction_circle] = 0.
    return reconstructed * np.pi / (2 * len(th))
def order_angles_golden_ratio(theta):
    """
    Yield indices into ``theta`` in an approximate golden-ratio ordering.

    Ordering projections this way reduces the amount of correlated
    information in subsequent projections.

    Parameters
    ----------
    theta : 1D array of floats
        Projection angles in degrees. Duplicate angles are not allowed.

    Yields
    ------
    int
        Indices into ``theta``; every index in ``range(len(theta))`` is
        yielded exactly once.

    References
    ----------
    .. [1] Kohler, T. "A projection access scheme for iterative
           reconstruction based on the golden section." Nuclear Science
           Symposium Conference Record, 2004 IEEE. Vol. 6. IEEE, 2004.
    .. [2] Winkelmann, Stefanie, et al. "An optimal radial profile order
           based on the Golden Ratio for time-resolved MRI."
           Medical Imaging, IEEE Transactions on 26.1 (2007): 68-76.
    """
    period = 180

    def circular_distance(a, b):
        # distance between two angles on a circle of circumference ``period``
        diff = a - b
        return min(abs(diff % period), abs(diff % -period))

    # indices of the not-yet-served projections, sorted by angle
    pending = list(np.argsort(theta))
    # start from the smallest angle
    current_index = pending.pop(0)
    current_angle = theta[current_index]
    yield current_index
    # golden-section step between successive target angles
    step = period * (1 - (np.sqrt(5) - 1) / 2)
    while pending:
        current_angle = (current_angle + step) % period
        # locate the pending angle closest (circularly) to the target
        pos = np.searchsorted(theta[pending], current_angle)
        below = pos - 1
        above = pos if pos < len(pending) else 0
        d_below = circular_distance(current_angle, theta[pending[below]])
        d_above = circular_distance(current_angle, theta[pending[above]])
        chosen = below if d_below < d_above else above
        yield pending.pop(chosen)
def iradon_sart(radon_image, theta=None, image=None, projection_shifts=None,
                clip=None, relaxation=0.15):
    """
    Inverse radon transform

    Reconstruct an image from the radon transform, using a single iteration of
    the Simultaneous Algebraic Reconstruction Technique (SART) algorithm.

    Parameters
    ----------
    radon_image : 2D array, dtype=float
        Image containing radon transform (sinogram). Each column of
        the image corresponds to a projection along a different angle. The
        tomography rotation axis should lie at the pixel index
        ``radon_image.shape[0] // 2`` along the 0th dimension of
        ``radon_image``.
    theta : 1D array, dtype=float, optional
        Reconstruction angles (in degrees). Default: m angles evenly spaced
        between 0 and 180 (if the shape of `radon_image` is (N, M)).
    image : 2D array, dtype=float, optional
        Image containing an initial reconstruction estimate. Shape of this
        array should be ``(radon_image.shape[0], radon_image.shape[0])``. The
        default is an array of zeros.
    projection_shifts : 1D array, dtype=float
        Shift the projections contained in ``radon_image`` (the sinogram) by
        this many pixels before reconstructing the image. The i'th value
        defines the shift of the i'th column of ``radon_image``.
    clip : length-2 sequence of floats
        Force all values in the reconstructed tomogram to lie in the range
        ``[clip[0], clip[1]]``
    relaxation : float
        Relaxation parameter for the update step. A higher value can
        improve the convergence rate, but one runs the risk of instabilities.
        Values close to or higher than 1 are not recommended.

    Returns
    -------
    reconstructed : ndarray
        Reconstructed image. The rotation axis will be located in the pixel
        with indices
        ``(reconstructed.shape[0] // 2, reconstructed.shape[1] // 2)``.

    Notes
    -----
    Algebraic Reconstruction Techniques are based on formulating the tomography
    reconstruction problem as a set of linear equations. Along each ray,
    the projected value is the sum of all the values of the cross section along
    the ray. A typical feature of SART (and a few other variants of algebraic
    techniques) is that it samples the cross section at equidistant points
    along the ray, using linear interpolation between the pixel values of the
    cross section. The resulting set of linear equations are then solved using
    a slightly modified Kaczmarz method.

    When using SART, a single iteration is usually sufficient to obtain a good
    reconstruction. Further iterations will tend to enhance high-frequency
    information, but will also often increase the noise.

    References
    ----------
    .. [1] AC Kak, M Slaney, "Principles of Computerized Tomographic
           Imaging", IEEE Press 1988.
    .. [2] AH Andersen, AC Kak, "Simultaneous algebraic reconstruction
           technique (SART): a superior implementation of the ART algorithm",
           Ultrasonic Imaging 6 pp 81--94 (1984)
    .. [3] S Kaczmarz, "Angenäherte auflösung von systemen linearer
           gleichungen", Bulletin International de l’Academie Polonaise des
           Sciences et des Lettres 35 pp 355--357 (1937)
    .. [4] Kohler, T. "A projection access scheme for iterative
           reconstruction based on the golden section." Nuclear Science
           Symposium Conference Record, 2004 IEEE. Vol. 6. IEEE, 2004.
    .. [5] Kaczmarz' method, Wikipedia,
           http://en.wikipedia.org/wiki/Kaczmarz_method
    """
    if radon_image.ndim != 2:
        raise ValueError('radon_image must be two dimensional')
    reconstructed_shape = (radon_image.shape[0], radon_image.shape[0])
    if theta is None:
        theta = np.linspace(0, 180, radon_image.shape[1], endpoint=False)
    else:
        # accept any array-like sequence of angles
        theta = np.asarray(theta, dtype=float)
        if theta.shape != (radon_image.shape[1],):
            # bug fix: the message previously interpolated
            # ``projection_shifts.shape`` (wrong variable, and an
            # AttributeError when projection_shifts is None)
            raise ValueError('Shape of theta (%s) does not match the '
                             'number of projections (%d)'
                             % (theta.shape, radon_image.shape[1]))
    if image is None:
        # ``np.float`` was removed from NumPy (>= 1.24); the builtin
        # ``float`` is the equivalent dtype
        image = np.zeros(reconstructed_shape, dtype=float)
    elif image.shape != reconstructed_shape:
        raise ValueError('Shape of image (%s) does not match first dimension '
                         'of radon_image (%s)'
                         % (image.shape, reconstructed_shape))
    if projection_shifts is None:
        projection_shifts = np.zeros((radon_image.shape[1],), dtype=float)
    elif projection_shifts.shape != (radon_image.shape[1],):
        raise ValueError('Shape of projection_shifts (%s) does not match the '
                         'number of projections (%d)'
                         % (projection_shifts.shape, radon_image.shape[1]))
    if clip is not None:
        if len(clip) != 2:
            raise ValueError('clip must be a length-2 sequence')
        clip = (float(clip[0]), float(clip[1]))
    relaxation = float(relaxation)
    # golden-ratio ordering reduces correlation between successive updates
    for angle_index in order_angles_golden_ratio(theta):
        image_update = sart_projection_update(image, theta[angle_index],
                                              radon_image[:, angle_index],
                                              projection_shifts[angle_index])
        image += relaxation * image_update
        if clip is not None:
            image = np.clip(image, clip[0], clip[1])
    return image
| 42.541568 | 79 | 0.629202 |
from __future__ import division
import numpy as np
from scipy.fftpack import fft, ifft, fftfreq
from scipy.interpolate import interp1d
from ._warps_cy import _warp_fast
from ._radon_transform import sart_projection_update
from .. import util
from warnings import warn
__all__ = ["radon", "iradon", "iradon_sart"]
def radon(image, theta=None, circle=False):
if image.ndim != 2:
raise ValueError('The input image must be 2-D')
if theta is None:
theta = np.arange(180)
if circle:
radius = min(image.shape) // 2
c0, c1 = np.ogrid[0:image.shape[0], 0:image.shape[1]]
reconstruction_circle = ((c0 - image.shape[0] // 2) ** 2
+ (c1 - image.shape[1] // 2) ** 2)
reconstruction_circle = reconstruction_circle <= radius ** 2
if not np.all(reconstruction_circle | (image == 0)):
warn('Radon transform: image must be zero outside the '
'reconstruction circle')
slices = []
for d in (0, 1):
if image.shape[d] > min(image.shape):
excess = image.shape[d] - min(image.shape)
slices.append(slice(int(np.ceil(excess / 2)),
int(np.ceil(excess / 2)
+ min(image.shape))))
else:
slices.append(slice(None))
slices = tuple(slices)
padded_image = image[slices]
else:
diagonal = np.sqrt(2) * max(image.shape)
pad = [int(np.ceil(diagonal - s)) for s in image.shape]
new_center = [(s + p) // 2 for s, p in zip(image.shape, pad)]
old_center = [s // 2 for s in image.shape]
pad_before = [nc - oc for oc, nc in zip(old_center, new_center)]
pad_width = [(pb, p - pb) for pb, p in zip(pad_before, pad)]
padded_image = util.pad(image, pad_width, mode='constant',
constant_values=0)
assert padded_image.shape[0] == padded_image.shape[1]
radon_image = np.zeros((padded_image.shape[0], len(theta)))
center = padded_image.shape[0] // 2
shift0 = np.array([[1, 0, -center],
[0, 1, -center],
[0, 0, 1]])
shift1 = np.array([[1, 0, center],
[0, 1, center],
[0, 0, 1]])
def build_rotation(theta):
T = np.deg2rad(theta)
R = np.array([[np.cos(T), np.sin(T), 0],
[-np.sin(T), np.cos(T), 0],
[0, 0, 1]])
return shift1.dot(R).dot(shift0)
for i in range(len(theta)):
rotated = _warp_fast(padded_image, build_rotation(theta[i]))
radon_image[:, i] = rotated.sum(0)
return radon_image
def _sinogram_circle_to_square(sinogram):
diagonal = int(np.ceil(np.sqrt(2) * sinogram.shape[0]))
pad = diagonal - sinogram.shape[0]
old_center = sinogram.shape[0] // 2
new_center = diagonal // 2
pad_before = new_center - old_center
pad_width = ((pad_before, pad - pad_before), (0, 0))
return util.pad(sinogram, pad_width, mode='constant', constant_values=0)
def iradon(radon_image, theta=None, output_size=None,
filter="ramp", interpolation="linear", circle=False):
if radon_image.ndim != 2:
raise ValueError('The input image must be 2-D')
if theta is None:
m, n = radon_image.shape
theta = np.linspace(0, 180, n, endpoint=False)
else:
theta = np.asarray(theta)
if len(theta) != radon_image.shape[1]:
raise ValueError("The given ``theta`` does not match the number of "
"projections in ``radon_image``.")
interpolation_types = ('linear', 'nearest', 'cubic')
if not interpolation in interpolation_types:
raise ValueError("Unknown interpolation: %s" % interpolation)
if not output_size:
if circle:
output_size = radon_image.shape[0]
else:
output_size = int(np.floor(np.sqrt((radon_image.shape[0]) ** 2
/ 2.0)))
if circle:
radon_image = _sinogram_circle_to_square(radon_image)
th = (np.pi / 180.0) * theta
projection_size_padded = \
max(64, int(2 ** np.ceil(np.log2(2 * radon_image.shape[0]))))
pad_width = ((0, projection_size_padded - radon_image.shape[0]), (0, 0))
img = util.pad(radon_image, pad_width, mode='constant', constant_values=0)
f = fftfreq(projection_size_padded).reshape(-1, 1)
omega = 2 * np.pi * f
fourier_filter = 2 * np.abs(f)
if filter == "ramp":
pass
elif filter == "shepp-logan":
fourier_filter[1:] = fourier_filter[1:] * np.sin(omega[1:]) / omega[1:]
elif filter == "cosine":
fourier_filter *= np.cos(omega)
elif filter == "hamming":
fourier_filter *= (0.54 + 0.46 * np.cos(omega / 2))
elif filter == "hann":
fourier_filter *= (1 + np.cos(omega / 2)) / 2
elif filter is None:
fourier_filter[:] = 1
else:
raise ValueError("Unknown filter: %s" % filter)
projection = fft(img, axis=0) * fourier_filter
radon_filtered = np.real(ifft(projection, axis=0))
radon_filtered = radon_filtered[:radon_image.shape[0], :]
reconstructed = np.zeros((output_size, output_size))
mid_index = radon_image.shape[0] // 2
[X, Y] = np.mgrid[0:output_size, 0:output_size]
xpr = X - int(output_size) // 2
ypr = Y - int(output_size) // 2
for i in range(len(theta)):
t = ypr * np.cos(th[i]) - xpr * np.sin(th[i])
x = np.arange(radon_filtered.shape[0]) - mid_index
if interpolation == 'linear':
backprojected = np.interp(t, x, radon_filtered[:, i],
left=0, right=0)
else:
interpolant = interp1d(x, radon_filtered[:, i], kind=interpolation,
bounds_error=False, fill_value=0)
backprojected = interpolant(t)
reconstructed += backprojected
if circle:
radius = output_size // 2
reconstruction_circle = (xpr ** 2 + ypr ** 2) <= radius ** 2
reconstructed[~reconstruction_circle] = 0.
return reconstructed * np.pi / (2 * len(th))
def order_angles_golden_ratio(theta):
interval = 180
def angle_distance(a, b):
difference = a - b
return min(abs(difference % interval), abs(difference % -interval))
remaining = list(np.argsort(theta))
index = remaining.pop(0)
angle = theta[index]
yield index
angle_increment = interval * (1 - (np.sqrt(5) - 1) / 2)
while remaining:
angle = (angle + angle_increment) % interval
insert_point = np.searchsorted(theta[remaining], angle)
index_below = insert_point - 1
index_above = 0 if insert_point == len(remaining) else insert_point
distance_below = angle_distance(angle, theta[remaining[index_below]])
distance_above = angle_distance(angle, theta[remaining[index_above]])
if distance_below < distance_above:
yield remaining.pop(index_below)
else:
yield remaining.pop(index_above)
def iradon_sart(radon_image, theta=None, image=None, projection_shifts=None,
clip=None, relaxation=0.15):
if radon_image.ndim != 2:
raise ValueError('radon_image must be two dimensional')
reconstructed_shape = (radon_image.shape[0], radon_image.shape[0])
if theta is None:
theta = np.linspace(0, 180, radon_image.shape[1], endpoint=False)
elif theta.shape != (radon_image.shape[1],):
raise ValueError('Shape of theta (%s) does not match the '
'number of projections (%d)'
% (projection_shifts.shape, radon_image.shape[1]))
if image is None:
image = np.zeros(reconstructed_shape, dtype=np.float)
elif image.shape != reconstructed_shape:
raise ValueError('Shape of image (%s) does not match first dimension '
'of radon_image (%s)'
% (image.shape, reconstructed_shape))
if projection_shifts is None:
projection_shifts = np.zeros((radon_image.shape[1],), dtype=np.float)
elif projection_shifts.shape != (radon_image.shape[1],):
raise ValueError('Shape of projection_shifts (%s) does not match the '
'number of projections (%d)'
% (projection_shifts.shape, radon_image.shape[1]))
if not clip is None:
if len(clip) != 2:
raise ValueError('clip must be a length-2 sequence')
clip = (float(clip[0]), float(clip[1]))
relaxation = float(relaxation)
for angle_index in order_angles_golden_ratio(theta):
image_update = sart_projection_update(image, theta[angle_index],
radon_image[:, angle_index],
projection_shifts[angle_index])
image += relaxation * image_update
if not clip is None:
image = np.clip(image, clip[0], clip[1])
return image
| true | true |
f71b763ae51e39e2fc92c256e70e59add5287950 | 12,944 | py | Python | stix2/datastore/memory.py | 2xyo/cti-python-stix2 | cffee92c7ed18c4cdd54c4370c6a17878dfd36cd | [
"BSD-3-Clause"
] | 1 | 2020-08-17T23:53:48.000Z | 2020-08-17T23:53:48.000Z | stix2/datastore/memory.py | 2xyo/cti-python-stix2 | cffee92c7ed18c4cdd54c4370c6a17878dfd36cd | [
"BSD-3-Clause"
] | null | null | null | stix2/datastore/memory.py | 2xyo/cti-python-stix2 | cffee92c7ed18c4cdd54c4370c6a17878dfd36cd | [
"BSD-3-Clause"
] | null | null | null | """Python STIX2 Memory Source/Sink"""
import io
import itertools
import json
import os
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import FilterSet, apply_common_filters
from stix2.parsing import parse
def _add(store, stix_data, allow_custom=True, version=None):
"""Add STIX objects to MemoryStore/Sink.
Adds STIX objects to an in-memory dictionary for fast lookup.
Recursive function, breaks down STIX Bundles and lists.
Args:
store: A MemoryStore, MemorySink or MemorySource object.
stix_data (list OR dict OR STIX object): STIX objects to be added
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
version (str): Which STIX2 version to lock the parser to. (e.g. "2.0",
"2.1"). If None, the library makes the best effort to figure
out the spec representation of the object.
"""
if isinstance(stix_data, list):
# STIX objects are in a list- recurse on each object
for stix_obj in stix_data:
_add(store, stix_obj, allow_custom, version)
elif stix_data["type"] == "bundle":
# adding a json bundle - so just grab STIX objects
for stix_obj in stix_data.get("objects", []):
_add(store, stix_obj, allow_custom, version)
else:
# Adding a single non-bundle object
if isinstance(stix_data, _STIXBase):
stix_obj = stix_data
else:
stix_obj = parse(stix_data, allow_custom, version)
# Map ID to a _ObjectFamily if the object is versioned, so we can track
# multiple versions. Otherwise, map directly to the object. All
# versioned objects should have a "modified" property.
if "modified" in stix_obj:
if stix_obj["id"] in store._data:
obj_family = store._data[stix_obj["id"]]
else:
obj_family = _ObjectFamily()
store._data[stix_obj["id"]] = obj_family
obj_family.add(stix_obj)
else:
store._data[stix_obj["id"]] = stix_obj
class _ObjectFamily(object):
"""
An internal implementation detail of memory sources/sinks/stores.
Represents a "family" of STIX objects: all objects with a particular
ID. (I.e. all versions.) The latest version is also tracked so that it
can be obtained quickly.
"""
def __init__(self):
self.all_versions = {}
self.latest_version = None
def add(self, obj):
self.all_versions[obj["modified"]] = obj
if (self.latest_version is None or
obj["modified"] > self.latest_version["modified"]):
self.latest_version = obj
def __str__(self):
return "<<{}; latest={}>>".format(
self.all_versions,
self.latest_version["modified"],
)
def __repr__(self):
return str(self)
class MemoryStore(DataStoreMixin):
"""Interface to an in-memory dictionary of STIX objects.
MemoryStore is a wrapper around a paired MemorySink and MemorySource.
Note: It doesn't make sense to create a MemoryStore by passing
in existing MemorySource and MemorySink because there could
be data concurrency issues. As well, just as easy to create new MemoryStore.
Args:
stix_data (list OR dict OR STIX object): STIX content to be added
allow_custom (bool): whether to allow custom STIX content.
Only applied when export/input functions called, i.e.
load_from_file() and save_to_file(). Defaults to True.
Attributes:
_data (dict): the in-memory dict that holds STIX objects
source (MemorySource): MemorySource
sink (MemorySink): MemorySink
"""
def __init__(self, stix_data=None, allow_custom=True, version=None):
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
super(MemoryStore, self).__init__(
source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
)
def save_to_file(self, *args, **kwargs):
"""Write SITX objects from in-memory dictionary to JSON file, as a STIX
Bundle. If a directory is given, the Bundle 'id' will be used as
filename. Otherwise, the provided value will be used.
Args:
path (str): file path to write STIX data to.
encoding (str): The file encoding. Default utf-8.
"""
return self.sink.save_to_file(*args, **kwargs)
def load_from_file(self, *args, **kwargs):
"""Load STIX data from JSON file.
File format is expected to be a single JSON STIX object or JSON STIX
bundle.
Args:
path (str): file path to load STIX data from
"""
return self.source.load_from_file(*args, **kwargs)
class MemorySink(DataSink):
"""Interface for adding/pushing STIX objects to an in-memory dictionary.
Designed to be paired with a MemorySource, together as the two
components of a MemoryStore.
Args:
stix_data (dict OR list): valid STIX 2.0 content in
bundle or a list.
_store (bool): whether the MemorySink is a part of a MemoryStore,
in which case "stix_data" is a direct reference to
shared memory with DataSource. Not user supplied
allow_custom (bool): whether to allow custom objects/properties
when exporting STIX content to file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
If part of a MemoryStore, the dict is shared with a MemorySource
"""
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySink, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def add(self, stix_data, version=None):
_add(self, stix_data, self.allow_custom, version)
add.__doc__ = _add.__doc__
def save_to_file(self, path, encoding="utf-8"):
path = os.path.abspath(path)
all_objs = list(itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
))
if any("spec_version" in x for x in all_objs):
bundle = v21.Bundle(all_objs, allow_custom=self.allow_custom)
else:
bundle = v20.Bundle(all_objs, allow_custom=self.allow_custom)
if path.endswith(".json"):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
else:
if not os.path.exists(path):
os.makedirs(path)
# if the user only provided a directory, use the bundle id for filename
path = os.path.join(path, bundle["id"] + ".json")
with io.open(path, "w", encoding=encoding) as f:
bundle = bundle.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(bundle)
return path
save_to_file.__doc__ = MemoryStore.save_to_file.__doc__
class MemorySource(DataSource):
"""Interface for searching/retrieving STIX objects from an in-memory
dictionary.
Designed to be paired with a MemorySink, together as the two
components of a MemoryStore.
Args:
stix_data (dict OR list OR STIX object): valid STIX 2.0 content in
bundle or list.
_store (bool): if the MemorySource is a part of a MemoryStore,
in which case "stix_data" is a direct reference to shared
memory with DataSink. Not user supplied
allow_custom (bool): whether to allow custom objects/properties
when importing STIX content from file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
If part of a MemoryStore, the dict is shared with a MemorySink
"""
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySource, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def get(self, stix_id, _composite_filters=None):
"""Retrieve STIX object from in-memory dict via STIX ID.
Args:
stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
Returns:
(STIX object): STIX object that has the supplied ID.
"""
stix_obj = None
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_obj = mapped_value.latest_version
else:
stix_obj = mapped_value
if stix_obj:
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
stix_obj = next(apply_common_filters([stix_obj], all_filters), None)
return stix_obj
def all_versions(self, stix_id, _composite_filters=None):
"""Retrieve STIX objects from in-memory dict via STIX ID, all versions
of it.
Args:
stix_id (str): The STIX ID of the STIX 2 object to retrieve.
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that have the supplied ID.
"""
results = []
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_objs_to_filter = mapped_value.all_versions.values()
else:
stix_objs_to_filter = [mapped_value]
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
results.extend(
apply_common_filters(stix_objs_to_filter, all_filters),
)
return results
def query(self, query=None, _composite_filters=None):
"""Search and retrieve STIX objects based on the complete query.
A "complete query" includes the filters from the query, the filters
attached to this MemorySource, and any filters passed from a
CompositeDataSource (i.e. _composite_filters).
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that match the supplied query.
"""
query = FilterSet(query)
# combine all query filters
if self.filters:
query.add(self.filters)
if _composite_filters:
query.add(_composite_filters)
all_objs = itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
)
# Apply STIX common property filters.
all_data = list(apply_common_filters(all_objs, query))
return all_data
def load_from_file(self, file_path, version=None, encoding='utf-8'):
with io.open(os.path.abspath(file_path), "r", encoding=encoding) as f:
stix_data = json.load(f)
_add(self, stix_data, self.allow_custom, version)
load_from_file.__doc__ = MemoryStore.load_from_file.__doc__
| 35.56044 | 111 | 0.627704 |
import io
import itertools
import json
import os
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import FilterSet, apply_common_filters
from stix2.parsing import parse
def _add(store, stix_data, allow_custom=True, version=None):
if isinstance(stix_data, list):
for stix_obj in stix_data:
_add(store, stix_obj, allow_custom, version)
elif stix_data["type"] == "bundle":
for stix_obj in stix_data.get("objects", []):
_add(store, stix_obj, allow_custom, version)
else:
if isinstance(stix_data, _STIXBase):
stix_obj = stix_data
else:
stix_obj = parse(stix_data, allow_custom, version)
if "modified" in stix_obj:
if stix_obj["id"] in store._data:
obj_family = store._data[stix_obj["id"]]
else:
obj_family = _ObjectFamily()
store._data[stix_obj["id"]] = obj_family
obj_family.add(stix_obj)
else:
store._data[stix_obj["id"]] = stix_obj
class _ObjectFamily(object):
def __init__(self):
self.all_versions = {}
self.latest_version = None
def add(self, obj):
self.all_versions[obj["modified"]] = obj
if (self.latest_version is None or
obj["modified"] > self.latest_version["modified"]):
self.latest_version = obj
def __str__(self):
return "<<{}; latest={}>>".format(
self.all_versions,
self.latest_version["modified"],
)
def __repr__(self):
return str(self)
class MemoryStore(DataStoreMixin):
def __init__(self, stix_data=None, allow_custom=True, version=None):
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
super(MemoryStore, self).__init__(
source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
)
def save_to_file(self, *args, **kwargs):
return self.sink.save_to_file(*args, **kwargs)
def load_from_file(self, *args, **kwargs):
return self.source.load_from_file(*args, **kwargs)
class MemorySink(DataSink):
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySink, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def add(self, stix_data, version=None):
_add(self, stix_data, self.allow_custom, version)
add.__doc__ = _add.__doc__
def save_to_file(self, path, encoding="utf-8"):
path = os.path.abspath(path)
all_objs = list(itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
))
if any("spec_version" in x for x in all_objs):
bundle = v21.Bundle(all_objs, allow_custom=self.allow_custom)
else:
bundle = v20.Bundle(all_objs, allow_custom=self.allow_custom)
if path.endswith(".json"):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
else:
if not os.path.exists(path):
os.makedirs(path)
path = os.path.join(path, bundle["id"] + ".json")
with io.open(path, "w", encoding=encoding) as f:
bundle = bundle.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(bundle)
return path
save_to_file.__doc__ = MemoryStore.save_to_file.__doc__
class MemorySource(DataSource):
def __init__(self, stix_data=None, allow_custom=True, version=None, _store=False):
super(MemorySource, self).__init__()
self.allow_custom = allow_custom
if _store:
self._data = stix_data
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version)
def get(self, stix_id, _composite_filters=None):
stix_obj = None
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_obj = mapped_value.latest_version
else:
stix_obj = mapped_value
if stix_obj:
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
stix_obj = next(apply_common_filters([stix_obj], all_filters), None)
return stix_obj
def all_versions(self, stix_id, _composite_filters=None):
results = []
mapped_value = self._data.get(stix_id)
if mapped_value:
if isinstance(mapped_value, _ObjectFamily):
stix_objs_to_filter = mapped_value.all_versions.values()
else:
stix_objs_to_filter = [mapped_value]
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters,
),
)
results.extend(
apply_common_filters(stix_objs_to_filter, all_filters),
)
return results
def query(self, query=None, _composite_filters=None):
query = FilterSet(query)
if self.filters:
query.add(self.filters)
if _composite_filters:
query.add(_composite_filters)
all_objs = itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
)
all_data = list(apply_common_filters(all_objs, query))
return all_data
def load_from_file(self, file_path, version=None, encoding='utf-8'):
with io.open(os.path.abspath(file_path), "r", encoding=encoding) as f:
stix_data = json.load(f)
_add(self, stix_data, self.allow_custom, version)
load_from_file.__doc__ = MemoryStore.load_from_file.__doc__
| true | true |
f71b769c9bfe02547922c7632d7e21d4cba88350 | 6,126 | py | Python | aoc/solutions/day08/solution.py | SebastiaanZ/aoc-2020 | e5480be10da053a6ad382dc27fcea7890986cd8e | [
"MIT"
] | 3 | 2020-12-08T13:36:32.000Z | 2020-12-15T11:37:25.000Z | aoc/solutions/day08/solution.py | SebastiaanZ/aoc-2020 | e5480be10da053a6ad382dc27fcea7890986cd8e | [
"MIT"
] | null | null | null | aoc/solutions/day08/solution.py | SebastiaanZ/aoc-2020 | e5480be10da053a6ad382dc27fcea7890986cd8e | [
"MIT"
] | null | null | null | from __future__ import annotations
import collections
import logging
import typing
from aoc.helpers import Puzzle
__all__ = ["part_one", "part_two", "prepare_puzzle"]
log = logging.getLogger(__name__)
class Instruction(typing.NamedTuple):
"""A ConsoleApplication instruction."""
operation: str
argument: int
@classmethod
def from_text(cls, instruction: str) -> Instruction:
"""Parse a raw text instruction and return an Instruction instance."""
operation, raw_argument = instruction.split(" ")
return cls(operation=operation, argument=int(raw_argument))
class ApplicationState(typing.NamedTuple):
"""An application exit state."""
success: bool
value: int
class ConsoleApplication:
"""A virtual handheld game console."""
def __init__(self, instructions: dict[int, Instruction]) -> None:
"""Parse the instructions and load the application into memory."""
self.instructions = dict(instructions)
self.pointer = 0
self.accumulator = 0
@classmethod
def from_raw_instructions(
cls: type[ConsoleApplication],
instructions: list[str]
) -> ConsoleApplication:
"""Create an application from a raw instruction set."""
instructions = {
i: Instruction.from_text(instruction) for i, instruction in enumerate(instructions)
}
return cls(instructions=instructions)
def copy(self) -> ConsoleApplication:
"""Create a copy of the application."""
return type(self)(self.instructions)
def run(self, debug_mode: bool = False) -> ApplicationState:
"""
Run the application and return the final accumulator value as the exit code.
If run in safe mode, the application returns whenever it detects it has
entered an infinite loop by keeping track of the instructions it has
executed previously.
"""
if debug_mode:
seen = set()
while True:
if self.pointer in seen:
return ApplicationState(success=False, value=self.accumulator)
if self.pointer == len(self.instructions):
return ApplicationState(success=True, value=self.accumulator)
seen.add(self.pointer)
self.step()
else:
while True:
self.step()
if self.pointer == len(self.instructions):
return ApplicationState(success=True, value=self.accumulator)
def step(self) -> None:
"""Perform a single step in the application."""
operation, argument = self.instructions[self.pointer]
getattr(self, operation)(argument)
def acc(self, value: int) -> None:
"""Add a `value` to the accumulator and increase the pointer by one."""
self.accumulator += value
self.pointer += 1
def jmp(self, steps: int) -> None:
"""Execute a jump to another instruction relative to its own location."""
self.pointer += steps
def nop(self, _argument: int) -> None:
"""Do not do anything at all except going to the next instruction."""
self.pointer += 1
def debugger(application: ConsoleApplication) -> int:
"""
Debug a ConsoleApplication by tracing terminating paths.
This debugger works by taking the followings steps:
1. For each instruction position, determine which instructions end up there;
2. Use the instruction targets to trace which instructions will end up at
the termination location;
3. Run to the application, checking if an operation flip would make us jump
to a halting path target location.
It returns the final value after the application has halted successfully.
"""
# 1. For each instruction location, determine which instructions end up there.
instruction_destinations = collections.defaultdict(list)
for i, (instruction, value) in reversed(application.instructions.items()):
if instruction == "jmp":
instruction_destinations[i + value].append(i)
else:
instruction_destinations[i + 1].append(i)
# 2. Use the target locations of instructions to determine which
# instructions already lead naturally to the halting position.
targets = {len(application.instructions)}
targets_to_check = {len(application.instructions)}
while True:
new_targets = set()
for target in targets_to_check:
new_targets.update(instruction_destinations[target])
if not new_targets:
# No other instructions end up at an identified target instruction.
break
targets_to_check = new_targets
targets.update(new_targets)
# 3. Run the application, checking for each `jmp` or `nop` instruction if
# flipping it would result in the application hitting a target instruction.
debugged = False
while application.pointer != len(application.instructions):
operation, argument = application.instructions[application.pointer]
if not debugged and operation == "jmp" and application.pointer + 1 in targets:
application.pointer += 1
debugged = True
elif not debugged and operation == "nop" and application.pointer + argument in targets:
application.pointer += argument
debugged = True
else:
getattr(application, operation)(argument)
# Return the final value of the accumulator
return application.accumulator
def prepare_puzzle(puzzle: Puzzle) -> None:
"""Prepare the ConsoleApplication for today's puzzle."""
puzzle["application"] = ConsoleApplication.from_raw_instructions(puzzle.lines)
def part_one(puzzle: Puzzle) -> typing.Optional[typing.Union[str, int]]:
"""Return the solution for part one of this day."""
return puzzle["application"].run(debug_mode=True).value
def part_two(puzzle: Puzzle) -> typing.Optional[typing.Union[str, int]]:
"""Return the solution for part two of this day."""
return debugger(puzzle["application"].copy())
| 36.464286 | 95 | 0.663728 | from __future__ import annotations
import collections
import logging
import typing
from aoc.helpers import Puzzle
__all__ = ["part_one", "part_two", "prepare_puzzle"]
log = logging.getLogger(__name__)
class Instruction(typing.NamedTuple):
operation: str
argument: int
@classmethod
def from_text(cls, instruction: str) -> Instruction:
operation, raw_argument = instruction.split(" ")
return cls(operation=operation, argument=int(raw_argument))
class ApplicationState(typing.NamedTuple):
success: bool
value: int
class ConsoleApplication:
def __init__(self, instructions: dict[int, Instruction]) -> None:
self.instructions = dict(instructions)
self.pointer = 0
self.accumulator = 0
@classmethod
def from_raw_instructions(
cls: type[ConsoleApplication],
instructions: list[str]
) -> ConsoleApplication:
instructions = {
i: Instruction.from_text(instruction) for i, instruction in enumerate(instructions)
}
return cls(instructions=instructions)
def copy(self) -> ConsoleApplication:
return type(self)(self.instructions)
def run(self, debug_mode: bool = False) -> ApplicationState:
if debug_mode:
seen = set()
while True:
if self.pointer in seen:
return ApplicationState(success=False, value=self.accumulator)
if self.pointer == len(self.instructions):
return ApplicationState(success=True, value=self.accumulator)
seen.add(self.pointer)
self.step()
else:
while True:
self.step()
if self.pointer == len(self.instructions):
return ApplicationState(success=True, value=self.accumulator)
def step(self) -> None:
operation, argument = self.instructions[self.pointer]
getattr(self, operation)(argument)
def acc(self, value: int) -> None:
self.accumulator += value
self.pointer += 1
def jmp(self, steps: int) -> None:
self.pointer += steps
def nop(self, _argument: int) -> None:
self.pointer += 1
def debugger(application: ConsoleApplication) -> int:
instruction_destinations = collections.defaultdict(list)
for i, (instruction, value) in reversed(application.instructions.items()):
if instruction == "jmp":
instruction_destinations[i + value].append(i)
else:
instruction_destinations[i + 1].append(i)
targets = {len(application.instructions)}
targets_to_check = {len(application.instructions)}
while True:
new_targets = set()
for target in targets_to_check:
new_targets.update(instruction_destinations[target])
if not new_targets:
break
targets_to_check = new_targets
targets.update(new_targets)
debugged = False
while application.pointer != len(application.instructions):
operation, argument = application.instructions[application.pointer]
if not debugged and operation == "jmp" and application.pointer + 1 in targets:
application.pointer += 1
debugged = True
elif not debugged and operation == "nop" and application.pointer + argument in targets:
application.pointer += argument
debugged = True
else:
getattr(application, operation)(argument)
return application.accumulator
def prepare_puzzle(puzzle: Puzzle) -> None:
puzzle["application"] = ConsoleApplication.from_raw_instructions(puzzle.lines)
def part_one(puzzle: Puzzle) -> typing.Optional[typing.Union[str, int]]:
    """Return the accumulator value just before any instruction would run twice
    (the program is executed with loop detection enabled)."""
    return puzzle["application"].run(debug_mode=True).value
def part_two(puzzle: Puzzle) -> typing.Optional[typing.Union[str, int]]:
    """Return the accumulator after repairing the single corrupted jmp/nop.

    Operates on a copy so the application cached by part one stays unmodified.
    """
    return debugger(puzzle["application"].copy())
| true | true |
f71b78508ce2826de11a533ccf49d10b6b2ff055 | 25,387 | py | Python | pyabc/epsilon/temperature.py | chrhck/pyABC | 731cfdec26bef3898bf6e244daa5c8f83f3fe19d | [
"BSD-3-Clause"
] | null | null | null | pyabc/epsilon/temperature.py | chrhck/pyABC | 731cfdec26bef3898bf6e244daa5c8f83f3fe19d | [
"BSD-3-Clause"
] | null | null | null | pyabc/epsilon/temperature.py | chrhck/pyABC | 731cfdec26bef3898bf6e244daa5c8f83f3fe19d | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import scipy as sp
import pandas as pd
import numbers
from typing import Callable, List, Union
import logging
from .base import Epsilon
from ..distance import SCALE_LIN
from ..sampler import Sampler
from ..storage import save_dict_to_json
logger = logging.getLogger("Epsilon")
class TemperatureBase(Epsilon):
    """
    A temperature scheme handles the decrease of the temperatures employed
    by a :class:`pyabc.acceptor.StochasticAcceptor` over time.

    This class is not functional on its own, its derivatives must be used.
    """
class ListTemperature(TemperatureBase):
    """
    Use a fixed, user-supplied sequence of temperature values.

    Parameters
    ----------
    values:
        The temperature to employ at each time point, in order.
        For exact inference, the sequence should finish with 1.
    """

    def __init__(self, values: List[float]):
        self.values = values

    def __call__(self, t: int) -> float:
        # The time index directly selects the scheduled temperature.
        return self.values[t]
class Temperature(TemperatureBase):
    """
    This class implements a highly adaptive and configurable temperature
    scheme. Via the argument `schemes`, arbitrary temperature schemes can be
    passed to calculate the next generation's temperature, via `aggregate_fun`
    one can define how to combine multiple guesses, via `initial_temperature`
    the initial temperature can be set.

    Parameters
    ----------
    schemes: Union[Callable, List[Callable]], optional
        Temperature schemes returning proposed
        temperatures for the next time point, e.g.
        instances of :class:`pyabc.epsilon.TemperatureScheme`.
    aggregate_fun: Callable[List[float], float], optional
        The function to aggregate the schemes by, of the form
        ``Callable[List[float], float]``.
        Defaults to taking the minimum.
    initial_temperature: float, optional
        The initial temperature. If None provided, an AcceptanceRateScheme
        is used.
    enforce_exact_final_temperature: bool, optional
        Whether to force the final temperature (if max_nr_populations < inf)
        to be 1.0, giving exact inference.
    log_file: str, optional
        A log file for storing data of the temperature that are currently not
        saved in the database. The data are saved in json format.

    Properties
    ----------
    max_nr_populations: int
        The maximum number of iterations as passed to ABCSMC.
        May be inf, but not all schemes can handle that (and will complain).
    temperatures: Dict[int, float]
        Times as keys and temperatures as values.
    """

    def __init__(
            self,
            schemes: Union[Callable, List[Callable]] = None,
            aggregate_fun: Callable[[List[float]], float] = None,
            initial_temperature: float = None,
            enforce_exact_final_temperature: bool = True,
            log_file: str = None):
        self.schemes = schemes

        if aggregate_fun is None:
            # use minimum over all proposed temperature values
            aggregate_fun = min
        self.aggregate_fun = aggregate_fun

        if initial_temperature is None:
            # default: derive the first temperature from the acceptance rate
            initial_temperature = AcceptanceRateScheme()
        self.initial_temperature = initial_temperature

        self.enforce_exact_final_temperature = enforce_exact_final_temperature
        self.log_file = log_file

        # to be filled later
        self.max_nr_populations = None
        self.temperatures = {}
        self.temperature_proposals = {}

    def initialize(self,
                   t: int,
                   get_weighted_distances: Callable[[], pd.DataFrame],
                   get_all_records: Callable[[], List[dict]],
                   max_nr_populations: int,
                   acceptor_config: dict):
        """Set up schemes and compute the initial temperature for time `t`."""
        self.max_nr_populations = max_nr_populations

        # set default schemes
        if self.schemes is None:
            # this combination proved rather stable
            acc_rate_scheme = AcceptanceRateScheme()
            decay_scheme = (
                ExpDecayFixedIterScheme() if np.isfinite(max_nr_populations)
                else ExpDecayFixedRatioScheme())
            self.schemes = [acc_rate_scheme, decay_scheme]

        # set initial temperature for time t
        self._update(t, get_weighted_distances, get_all_records,
                     1.0, acceptor_config)

    def configure_sampler(self, sampler: Sampler):
        """Propagate sampler configuration to the initial scheme and all schemes."""
        # NOTE(review): assumes `initialize` has populated self.schemes when
        # it was constructed with schemes=None; otherwise iterating None
        # below raises — confirm call order in ABCSMC.
        if callable(self.initial_temperature):
            self.initial_temperature.configure_sampler(sampler)
        for scheme in self.schemes:
            scheme.configure_sampler(sampler)

    def update(self,
               t: int,
               get_weighted_distances: Callable[[], pd.DataFrame],
               get_all_records: Callable[[], List[dict]],
               acceptance_rate: float,
               acceptor_config: dict):
        """Compute and record the temperature for time `t`."""
        # set temperature for time t
        self._update(t, get_weighted_distances,
                     get_all_records, acceptance_rate,
                     acceptor_config)

    def _update(self,
                t: int,
                get_weighted_distances: Callable[[], pd.DataFrame],
                get_all_records: Callable[[], List[dict]],
                acceptance_rate: float,
                acceptor_config):
        """
        Compute the temperature for time `t`.

        Evaluates the configured schemes (or the initial temperature in the
        first call), aggregates the proposals, clips the result to
        [1.0, previous temperature], and stores it in `self.temperatures`.
        """
        # scheme arguments
        kwargs = dict(
            t=t,
            get_weighted_distances=get_weighted_distances,
            get_all_records=get_all_records,
            max_nr_populations=self.max_nr_populations,
            pdf_norm=acceptor_config['pdf_norm'],
            kernel_scale=acceptor_config['kernel_scale'],
            prev_temperature=self.temperatures.get(t-1, None),
            acceptance_rate=acceptance_rate,
        )

        if t >= self.max_nr_populations - 1 \
                and self.enforce_exact_final_temperature:
            # t is last time
            temps = [1.0]
        elif not self.temperatures:  # need an initial value
            if callable(self.initial_temperature):
                # execute scheme
                temps = [self.initial_temperature(**kwargs)]
            elif isinstance(self.initial_temperature, numbers.Number):
                temps = [self.initial_temperature]
            else:
                raise ValueError(
                    "Initial temperature must be a float or a callable")
        else:
            # evaluate schemes
            temps = []
            for scheme in self.schemes:
                temp = scheme(**kwargs)
                temps.append(temp)

        # compute next temperature based on proposals and fallback
        # should not be higher than before
        fallback = self.temperatures[t-1] \
            if t-1 in self.temperatures else np.inf
        temperature = self.aggregate_fun(temps)
        # also a value lower than 1.0 does not make sense
        temperature = max(min(temperature, fallback), 1.0)

        if not np.isfinite(temperature):
            raise ValueError("Temperature must be finite.")
        # record found value
        self.temperatures[t] = temperature

        # logging
        logger.debug(f"Proposed temperatures for {t}: {temps}.")
        self.temperature_proposals[t] = temps
        if self.log_file:
            save_dict_to_json(self.temperature_proposals, self.log_file)

    def __call__(self,
                 t: int) -> float:
        """Return the temperature recorded for time `t`."""
        return self.temperatures[t]
class TemperatureScheme:
    """
    A TemperatureScheme suggests the next temperature value. It is used as
    one of potentially multiple schemes employed in the Temperature class.
    This class is abstract.

    Parameters
    ----------
    t:
        The time to compute for.
    get_weighted_distances:
        Callable to obtain the weights and kernel values to be used for
        the scheme.
    get_all_records:
        Callable returning a List[dict] of all recorded particles.
    max_nr_populations:
        The maximum number of populations that are supposed to be taken.
    pdf_norm:
        The normalization constant c that will be used in the acceptance step.
    kernel_scale:
        Scale on which the pdf values are (linear or logarithmic).
    prev_temperature:
        The temperature that was used last time (or None if not applicable).
    acceptance_rate:
        The recently obtained rate.
    """

    def __init__(self):
        pass

    def configure_sampler(self, sampler: Sampler):
        """
        Modify the sampler. As in, and redirected from,
        :func:`pyabc.epsilon.Temperature.configure_sampler`.
        """

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # abstract: subclasses return the proposed temperature
        pass
class AcceptanceRateScheme(TemperatureScheme):
    """
    Propose the temperature that is predicted to yield an acceptance rate
    of ``target_rate``.

    This scheme may fail to reduce the temperature sufficiently in late
    iterations when the problem's inherent acceptance rate is lower, but it
    has been observed to give large feasible temperature leaps early on.
    In particular, it can be used to propose an initial temperature.

    Parameters
    ----------
    target_rate: float, optional
        The acceptance rate to aim for.
    min_rate: float, optional
        Below this observed acceptance rate the scheme is no longer applied
        (an infinite temperature is returned instead). Setting e.g. 0.05 can
        make sense, since 1) a useful proposal is unlikely at such low
        acceptance levels, and 2) it avoids unnecessary computations.
    """

    def __init__(self, target_rate: float = 0.3, min_rate: float = None):
        self.target_rate = target_rate
        self.min_rate = min_rate

    def configure_sampler(self, sampler: Sampler):
        # Rejected particles must be recorded too, so that acceptance
        # probabilities can be estimated over all proposals.
        sampler.sample_factory.record_rejected = True

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # below the minimum rate, propose nothing (i.e. +inf)
        if self.min_rate is not None and acceptance_rate < self.min_rate:
            return np.inf

        # fetch all recorded particles (expensive while calibrating) and
        # convert to a dataframe for easier column extraction
        records = pd.DataFrame(get_all_records())

        # transition densities under previous and current distribution
        t_pd_prev = np.array(records['transition_pd_prev'], dtype=float)
        t_pd = np.array(records['transition_pd'], dtype=float)
        # acceptance kernel likelihoods
        pds = np.array(records['distance'], dtype=float)

        # normalized importance weights of the recorded particles
        weights = t_pd / t_pd_prev
        weights /= sum(weights)

        return match_acceptance_rate(
            weights, pds, pdf_norm, kernel_scale, self.target_rate)
def match_acceptance_rate(
        weights, pds, pdf_norm, kernel_scale, target_rate):
    """
    Find the temperature whose predicted acceptance rate matches
    ``target_rate``, by a root search over ``b = log(beta)`` with
    ``beta = 1 / T``.

    For large temperatures, changes become effective on an exponential
    scale, hence the optimization in log-space; close to T=1 the logarithm
    is nearly linear anyway, so subtle changes remain possible.
    """
    def rate_deviation(b):
        # inverse temperature
        beta = np.exp(b)
        # rescaled acceptance kernel values
        if kernel_scale == SCALE_LIN:
            acc_probs = (pds / pdf_norm) ** beta
        else:  # kernel_scale == SCALE_LOG
            acc_probs = np.exp((pds - pdf_norm) * beta)
        # clip to valid acceptance probabilities
        acc_probs = np.minimum(acc_probs, 1.0)
        # deviation of expected rate from the target
        return np.sum(weights * acc_probs) - target_rate

    # TODO the lower boundary min_b is somewhat arbitrary
    min_b = -100
    if rate_deviation(0) > 0:
        # the function decreases monotonically in b, and even the smallest
        # feasible b (i.e. T=1) accepts more than targeted
        b_opt = 0
    elif rate_deviation(min_b) < 0:
        # it is rate_deviation(-inf) > 0 always
        logger.info("AcceptanceRateScheme: Numerics limit temperature.")
        b_opt = min_b
    else:
        # a sign change exists in [min_b, 0]: binary search for the root
        b_opt = sp.optimize.bisect(rate_deviation, min_b, 0, maxiter=100000)

    return 1. / np.exp(b_opt)
class ExpDecayFixedIterScheme(TemperatureScheme):
    """
    The next temperature is set as

    .. math::
        T_j = T_{max}^{(n-j)/n}

    where n denotes the number of populations, and j=1,...,n the iteration.
    This translates to

    .. math::
        T_j = T_{j-1}^{(n-j)/(n-(j-1))}.

    This ensures that a temperature of 1.0 is reached after exactly the
    remaining number of steps.
    So, in both cases the sequence of temperatures follows an exponential
    decay, also known as a geometric progression, or a linear progression
    in log-space.

    Note that the formula is applied anew in each iteration.
    This is advantageous if also other schemes are used s.t. T_{j-1}
    is smaller than by the above.

    This scheme takes no parameters, and requires a finite
    `max_nr_populations`.
    """

    def __init__(self):
        pass

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # needs a finite number of iterations
        if max_nr_populations == np.inf:
            raise ValueError(
                "The ExpDecayFixedIterScheme requires a finite "
                "`max_nr_populations`.")

        # needs a starting temperature
        # if not available, return infinite temperature
        if prev_temperature is None:
            return np.inf

        # base temperature
        temp_base = prev_temperature

        # how many steps left?
        t_to_go = max_nr_populations - t

        # compute next temperature according to exponential decay
        temperature = temp_base ** ((t_to_go - 1) / t_to_go)

        return temperature
class ExpDecayFixedRatioScheme(TemperatureScheme):
    """
    The next temperature is chosen as

    .. math::
        T_j = \\alpha \\cdot T_{j-1}.

    Like the :class:`pyabc.epsilon.ExpDecayFixedIterScheme`,
    this yields a geometric progression, however with a fixed ratio,
    irrespective of the number of iterations. If a finite number of
    iterations is specified in ABCSMC, there is no influence on the final
    jump to a temperature of 1.0.

    This is quite similar to the :class:`pyabc.epsilon.DalyScheme`, although
    simpler in implementation. The alpha value here corresponds to a value of
    1 - alpha there.

    Parameters
    ----------
    alpha: float, optional
        The ratio of subsequent temperatures.
    min_rate: float, optional
        A minimum acceptance rate. If this rate has been violated in the
        previous iteration, the alpha value is increased.
    max_rate: float, optional
        Maximum rate to not be exceeded, otherwise the alpha value is
        decreased.
    """

    def __init__(self, alpha: float = 0.5,
                 min_rate: float = 1e-4, max_rate: float = 0.5):
        self.alpha = alpha
        self.min_rate = min_rate
        self.max_rate = max_rate
        # alpha values actually used per time point (adapted to the rates)
        self.alphas = {}

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # needs a starting temperature; if not available, propose infinity
        if prev_temperature is None:
            return np.inf

        # previous alpha
        alpha = self.alphas.get(t-1, self.alpha)

        # check if acceptance rate criterion violated
        if acceptance_rate > self.max_rate and t > 1:
            logger.debug("ExpDecayFixedRatioScheme: "
                         "Reacting to high acceptance rate.")
            # decrease alpha, i.e. take a larger temperature step
            alpha = max(alpha / 2, alpha - (1 - alpha) * 2)
        if acceptance_rate < self.min_rate:
            logger.debug("ExpDecayFixedRatioScheme: "
                         "Reacting to low acceptance rate.")
            # increase alpha, i.e. take a smaller temperature step
            alpha = alpha + (1 - alpha) / 2

        # record
        self.alphas[t] = alpha

        # reduce temperature
        temperature = self.alphas[t] * prev_temperature
        return temperature
class PolynomialDecayFixedIterScheme(TemperatureScheme):
    """
    Compute the next temperature as the pre-last entry in::

        np.linspace(1, temp_base**(1 / exponent), t_to_go + 1) ** exponent

    Requires finite `max_nr_populations`.

    Note that this is similar to the
    :class:`pyabc.epsilon.ExpDecayFixedIterScheme`, which is
    indeed the limit for `exponent -> infinity`. For smaller
    exponent, the sequence makes larger steps for low temperatures. This
    can be useful in cases, where lower temperatures (which are usually
    more expensive) can be traversed in few larger steps, however also
    the opposite may be true, i.e. that more steps at low temperatures
    are advantageous.

    Parameters
    ----------
    exponent: float, optional
        The exponent to use in the scheme.
    """

    def __init__(self, exponent: float = 3):
        self.exponent = exponent

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # needs a starting temperature
        # if not available, return infinite temperature
        if prev_temperature is None:
            return np.inf

        # base temperature
        temp_base = prev_temperature

        # check if we can compute a decay step
        if max_nr_populations == np.inf:
            raise ValueError("Can only perform PolynomialDecayScheme step "
                             "with a finite max_nr_populations.")

        # how many steps left?
        t_to_go = max_nr_populations - t

        # compute sequence
        temps = np.linspace(1, (temp_base)**(1 / self.exponent),
                            t_to_go+1) ** self.exponent

        logger.debug(f"Temperatures proposed by polynomial decay method: "
                     f"{temps}.")

        # pre-last step is the next step
        temperature = temps[-2]
        return temperature
class DalyScheme(TemperatureScheme):
    """
    This scheme is loosely based on [#daly2017]_, however note that it does
    not try to replicate it entirely. In particular, the implementation
    of pyABC does not allow the sampling to be stopped when encountering
    too low acceptance rates, such that this can only be done ex-posteriori
    here.

    Parameters
    ----------
    alpha: float, optional
        The ratio by which to decrease the temperature value. More
        specifically, the next temperature is given as
        `(1-alpha) * temperature`.
    min_rate: float, optional
        A minimum acceptance rate. If this rate has been violated in the
        previous iteration, the alpha value is decreased.


    .. [#daly2017] Daly Aidan C., Cooper Jonathan, Gavaghan David J.,
        and Holmes Chris. "Comparing two sequential Monte Carlo samplers
        for exact and approximate Bayesian inference on biological
        models". Journal of The Royal Society Interface, 2017.
    """

    def __init__(self, alpha: float = 0.5, min_rate: float = 1e-4):
        self.alpha = alpha
        self.min_rate = min_rate
        # reduction magnitudes k_t per time point, on the std (epsilon) scale
        self.k = {}

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # needs a starting temperature
        # if not available, return infinite temperature
        if prev_temperature is None:
            return np.inf

        # base temperature
        temp_base = prev_temperature

        # addressing the std, not the var
        eps_base = np.sqrt(temp_base)

        if not self.k:
            # initial iteration
            self.k[t - 1] = eps_base

        k_base = self.k[t - 1]

        if acceptance_rate < self.min_rate:
            logger.debug("DalyScheme: Reacting to low acceptance rate.")
            # reduce reduction
            k_base = self.alpha * k_base

        # cap the step by alpha times the current epsilon
        self.k[t] = min(k_base, self.alpha * eps_base)

        eps = eps_base - self.k[t]
        # back from std scale to temperature (variance) scale
        temperature = eps**2

        return temperature
class FrielPettittScheme(TemperatureScheme):
    """
    Take linear steps in the space of the inverse temperature (log-space
    of the acceptance kernel). See [#vyshemirsky2008]_.

    .. [#vyshemirsky2008] Vyshemirsky, Vladislav, and Mark A. Girolami.
        "Bayesian ranking of biochemical system models."
        Bioinformatics 24.6 (2007): 833-839.
    """

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # without a starting temperature, propose infinity
        if prev_temperature is None:
            return np.inf

        # a decay step is only defined for finitely many iterations
        if max_nr_populations == np.inf:
            raise ValueError("Can only perform FrielPettittScheme step with a "
                             "finite max_nr_populations.")

        # inverse of the previous temperature
        beta_prev = 1. / prev_temperature
        # remaining number of iterations
        steps_left = max_nr_populations - t

        # advance the inverse temperature towards 1
        beta_next = beta_prev + ((1. - beta_prev) * 1 / steps_left) ** 2
        return 1. / beta_next
class EssScheme(TemperatureScheme):
    """
    Try to keep the effective sample size (ESS) constant.

    Parameters
    ----------
    target_relative_ess: float
        Target relative effective sample size.
    """

    def __init__(self, target_relative_ess: float = 0.8):
        self.target_relative_ess = target_relative_ess

    def __call__(self,
                 t: int,
                 get_weighted_distances: Callable[[], pd.DataFrame],
                 get_all_records: Callable[[], List[dict]],
                 max_nr_populations: int,
                 pdf_norm: float,
                 kernel_scale: str,
                 prev_temperature: float,
                 acceptance_rate: float):
        # execute function (expensive if in calibration)
        df = get_weighted_distances()

        weights = np.array(df['w'], dtype=float)
        pdfs = np.array(df['distance'], dtype=float)

        # compute rescaled posterior densities
        if kernel_scale == SCALE_LIN:
            values = pdfs / pdf_norm
        else:  # kernel_scale == SCALE_LOG
            values = np.exp(pdfs - pdf_norm)

        # to probability mass function (i.e. normalize)
        weights /= np.sum(weights)

        target_ess = len(weights) * self.target_relative_ess

        if prev_temperature is None:
            beta_base = 0.0
        else:
            beta_base = 1. / prev_temperature

        # objective to minimize
        def obj(beta):
            return (_ess(values, weights, beta) - target_ess)**2

        bounds = sp.optimize.Bounds(lb=np.array([beta_base]),
                                    ub=np.array([1.]))
        # TODO make more efficient by providing gradients
        ret = sp.optimize.minimize(
            obj, x0=np.array([0.5 * (1 + beta_base)]),
            bounds=bounds)
        # bug fix: `ret.x` is a length-1 ndarray; extract the scalar so a
        # plain float temperature is returned, consistent with the other
        # schemes (previously an array was returned)
        beta = float(ret.x[0])

        temperature = 1. / beta
        return temperature
def _ess(pdfs, weights, beta):
"""
Effective sample size (ESS) of importance samples.
"""
num = np.sum(weights * pdfs**beta)**2
den = np.sum((weights * pdfs**beta)**2)
return num / den
| 34.168237 | 79 | 0.612873 | import numpy as np
import scipy as sp
import pandas as pd
import numbers
from typing import Callable, List, Union
import logging
from .base import Epsilon
from ..distance import SCALE_LIN
from ..sampler import Sampler
from ..storage import save_dict_to_json
logger = logging.getLogger("Epsilon")
class TemperatureBase(Epsilon):
class ListTemperature(TemperatureBase):
def __init__(self, values: List[float]):
self.values = values
def __call__(self,
t: int) -> float:
return self.values[t]
class Temperature(TemperatureBase):
def __init__(
self,
schemes: Union[Callable, List[Callable]] = None,
aggregate_fun: Callable[[List[float]], float] = None,
initial_temperature: float = None,
enforce_exact_final_temperature: bool = True,
log_file: str = None):
self.schemes = schemes
if aggregate_fun is None:
aggregate_fun = min
self.aggregate_fun = aggregate_fun
if initial_temperature is None:
initial_temperature = AcceptanceRateScheme()
self.initial_temperature = initial_temperature
self.enforce_exact_final_temperature = enforce_exact_final_temperature
self.log_file = log_file
self.max_nr_populations = None
self.temperatures = {}
self.temperature_proposals = {}
def initialize(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
acceptor_config: dict):
self.max_nr_populations = max_nr_populations
if self.schemes is None:
acc_rate_scheme = AcceptanceRateScheme()
decay_scheme = (
ExpDecayFixedIterScheme() if np.isfinite(max_nr_populations)
else ExpDecayFixedRatioScheme())
self.schemes = [acc_rate_scheme, decay_scheme]
self._update(t, get_weighted_distances, get_all_records,
1.0, acceptor_config)
def configure_sampler(self, sampler: Sampler):
if callable(self.initial_temperature):
self.initial_temperature.configure_sampler(sampler)
for scheme in self.schemes:
scheme.configure_sampler(sampler)
def update(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
acceptance_rate: float,
acceptor_config: dict):
self._update(t, get_weighted_distances,
get_all_records, acceptance_rate,
acceptor_config)
def _update(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
acceptance_rate: float,
acceptor_config):
kwargs = dict(
t=t,
get_weighted_distances=get_weighted_distances,
get_all_records=get_all_records,
max_nr_populations=self.max_nr_populations,
pdf_norm=acceptor_config['pdf_norm'],
kernel_scale=acceptor_config['kernel_scale'],
prev_temperature=self.temperatures.get(t-1, None),
acceptance_rate=acceptance_rate,
)
if t >= self.max_nr_populations - 1 \
and self.enforce_exact_final_temperature:
temps = [1.0]
elif not self.temperatures:
if callable(self.initial_temperature):
temps = [self.initial_temperature(**kwargs)]
elif isinstance(self.initial_temperature, numbers.Number):
temps = [self.initial_temperature]
else:
raise ValueError(
"Initial temperature must be a float or a callable")
else:
temps = []
for scheme in self.schemes:
temp = scheme(**kwargs)
temps.append(temp)
fallback = self.temperatures[t-1] \
if t-1 in self.temperatures else np.inf
temperature = self.aggregate_fun(temps)
temperature = max(min(temperature, fallback), 1.0)
if not np.isfinite(temperature):
raise ValueError("Temperature must be finite.")
self.temperatures[t] = temperature
logger.debug(f"Proposed temperatures for {t}: {temps}.")
self.temperature_proposals[t] = temps
if self.log_file:
save_dict_to_json(self.temperature_proposals, self.log_file)
def __call__(self,
t: int) -> float:
return self.temperatures[t]
class TemperatureScheme:
def __init__(self):
pass
def configure_sampler(self, sampler: Sampler):
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
pass
class AcceptanceRateScheme(TemperatureScheme):
def __init__(self, target_rate: float = 0.3, min_rate: float = None):
self.target_rate = target_rate
self.min_rate = min_rate
def configure_sampler(self, sampler: Sampler):
sampler.sample_factory.record_rejected = True
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if self.min_rate is not None and acceptance_rate < self.min_rate:
return np.inf
records = get_all_records()
records = pd.DataFrame(records)
t_pd_prev = np.array(records['transition_pd_prev'], dtype=float)
t_pd = np.array(records['transition_pd'], dtype=float)
pds = np.array(records['distance'], dtype=float)
weights = t_pd / t_pd_prev
weights /= sum(weights)
temperature = match_acceptance_rate(
weights, pds, pdf_norm, kernel_scale, self.target_rate)
return temperature
def match_acceptance_rate(
weights, pds, pdf_norm, kernel_scale, target_rate):
def obj(b):
beta = np.exp(b)
if kernel_scale == SCALE_LIN:
acc_probs = (pds / pdf_norm) ** beta
else:
acc_probs = np.exp((pds - pdf_norm) * beta)
acc_probs = np.minimum(acc_probs, 1.0)
val = np.sum(weights * acc_probs) - target_rate
return val
min_b = -100
if obj(0) > 0:
b_opt = 0
elif obj(min_b) < 0:
logger.info("AcceptanceRateScheme: Numerics limit temperature.")
b_opt = min_b
else:
b_opt = sp.optimize.bisect(obj, min_b, 0, maxiter=100000)
beta_opt = np.exp(b_opt)
temperature = 1. / beta_opt
return temperature
class ExpDecayFixedIterScheme(TemperatureScheme):
def __init__(self):
pass
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if max_nr_populations == np.inf:
raise ValueError(
"The ExpDecayFixedIterScheme requires a finite "
"`max_nr_populations`.")
if prev_temperature is None:
return np.inf
temp_base = prev_temperature
t_to_go = max_nr_populations - t
temperature = temp_base ** ((t_to_go - 1) / t_to_go)
return temperature
class ExpDecayFixedRatioScheme(TemperatureScheme):
def __init__(self, alpha: float = 0.5,
min_rate: float = 1e-4, max_rate: float = 0.5):
self.alpha = alpha
self.min_rate = min_rate
self.max_rate = max_rate
self.alphas = {}
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if prev_temperature is None:
return np.inf
alpha = self.alphas.get(t-1, self.alpha)
if acceptance_rate > self.max_rate and t > 1:
logger.debug("ExpDecayFixedRatioScheme: "
"Reacting to high acceptance rate.")
alpha = max(alpha / 2, alpha - (1 - alpha) * 2)
if acceptance_rate < self.min_rate:
logger.debug("ExpDecayFixedRatioScheme: "
"Reacting to low acceptance rate.")
alpha = alpha + (1 - alpha) / 2
self.alphas[t] = alpha
temperature = self.alphas[t] * prev_temperature
return temperature
class PolynomialDecayFixedIterScheme(TemperatureScheme):
def __init__(self, exponent: float = 3):
self.exponent = exponent
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if prev_temperature is None:
return np.inf
temp_base = prev_temperature
if max_nr_populations == np.inf:
raise ValueError("Can only perform PolynomialDecayScheme step "
"with a finite max_nr_populations.")
t_to_go = max_nr_populations - t
temps = np.linspace(1, (temp_base)**(1 / self.exponent),
t_to_go+1) ** self.exponent
logger.debug(f"Temperatures proposed by polynomial decay method: "
f"{temps}.")
temperature = temps[-2]
return temperature
class DalyScheme(TemperatureScheme):
def __init__(self, alpha: float = 0.5, min_rate: float = 1e-4):
self.alpha = alpha
self.min_rate = min_rate
self.k = {}
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if prev_temperature is None:
return np.inf
temp_base = prev_temperature
eps_base = np.sqrt(temp_base)
if not self.k:
self.k[t - 1] = eps_base
k_base = self.k[t - 1]
if acceptance_rate < self.min_rate:
logger.debug("DalyScheme: Reacting to low acceptance rate.")
k_base = self.alpha * k_base
self.k[t] = min(k_base, self.alpha * eps_base)
eps = eps_base - self.k[t]
temperature = eps**2
return temperature
class FrielPettittScheme(TemperatureScheme):
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
if prev_temperature is None:
return np.inf
if max_nr_populations == np.inf:
raise ValueError("Can only perform FrielPettittScheme step with a "
"finite max_nr_populations.")
temp_base = prev_temperature
beta_base = 1. / temp_base
t_to_go = max_nr_populations - t
beta = beta_base + ((1. - beta_base) * 1 / t_to_go) ** 2
temperature = 1. / beta
return temperature
class EssScheme(TemperatureScheme):
def __init__(self, target_relative_ess: float = 0.8):
self.target_relative_ess = target_relative_ess
def __call__(self,
t: int,
get_weighted_distances: Callable[[], pd.DataFrame],
get_all_records: Callable[[], List[dict]],
max_nr_populations: int,
pdf_norm: float,
kernel_scale: str,
prev_temperature: float,
acceptance_rate: float):
df = get_weighted_distances()
weights = np.array(df['w'], dtype=float)
pdfs = np.array(df['distance'], dtype=float)
if kernel_scale == SCALE_LIN:
values = pdfs / pdf_norm
else:
values = np.exp(pdfs - pdf_norm)
weights /= np.sum(weights)
target_ess = len(weights) * self.target_relative_ess
if prev_temperature is None:
beta_base = 0.0
else:
beta_base = 1. / prev_temperature
def obj(beta):
return (_ess(values, weights, beta) - target_ess)**2
bounds = sp.optimize.Bounds(lb=np.array([beta_base]),
ub=np.array([1.]))
ret = sp.optimize.minimize(
obj, x0=np.array([0.5 * (1 + beta_base)]),
bounds=bounds)
beta = ret.x
temperature = 1. / beta
return temperature
def _ess(pdfs, weights, beta):
num = np.sum(weights * pdfs**beta)**2
den = np.sum((weights * pdfs**beta)**2)
return num / den
| true | true |
f71b787fa525196b698da70d6376942add8376c6 | 12,785 | py | Python | tests/db_stats.py | chrisjsewell/aiida-performance | 160606f07fe092a9e2bacdf62bfecec460fac642 | [
"MIT"
] | null | null | null | tests/db_stats.py | chrisjsewell/aiida-performance | 160606f07fe092a9e2bacdf62bfecec460fac642 | [
"MIT"
] | null | null | null | tests/db_stats.py | chrisjsewell/aiida-performance | 160606f07fe092a9e2bacdf62bfecec460fac642 | [
"MIT"
] | null | null | null | """Useful queries for profiling PostgreSQL databases
These queries are mainly adapted from
https://gist.github.com/anvk/475c22cbca1edc5ce94546c871460fdd
"""
from functools import wraps
from pathlib import Path
def execute_raw(raw):
    """Execute a raw SQL string against the loaded AiiDA profile's database.

    NOTE(review): relies on the private ``_load_backend`` API with
    ``schema_check=False`` — confirm compatibility with the installed
    aiida-core version.
    """
    from aiida.manage.manager import get_manager

    backend = get_manager()._load_backend(schema_check=False)
    return backend.execute_raw(raw)
# ------------------
# -- Memory Size --
# ------------------
def memory_db_df():
    """Return per-database on-disk size in megabytes, smallest first."""
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT
        datname,
        pg_database_size(datname)
    from pg_database
    order by pg_database_size(datname);
    """
    )
    frame = pd.DataFrame(rows, columns=["database", "size_mb"])
    # pg_database_size reports bytes; convert to MB.
    frame["size_mb"] = frame["size_mb"].mul(1e-6)
    return frame
def memory_pg_classes_df():
    """Return total size of ``pg_class`` entries grouped by namespace and kind.

    `pg_class` catalogs tables and most everything else that has columns,
    or is otherwise similar to a table.
    See https://www.postgresql.org/docs/9.3/catalog-pg-class.html
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT
        sum(pg_relation_size(pg_class.oid))::bigint,
        nspname,
        CASE pg_class.relkind
            WHEN 'r' THEN 'table'
            WHEN 'i' THEN 'index'
            WHEN 'S' THEN 'sequence'
            WHEN 'v' THEN 'view'
            WHEN 't' THEN 'toast'
            ELSE pg_class.relkind::text
        END
    FROM pg_class
    LEFT OUTER JOIN pg_namespace ON (pg_namespace.oid = pg_class.relnamespace)
    GROUP BY pg_class.relkind, nspname
    ORDER BY sum(pg_relation_size(pg_class.oid)) DESC;
    """
    )
    frame = pd.DataFrame(rows, columns=["size_mb", "namespace", "relkind"])
    frame = frame.sort_index(axis=1)
    # pg_relation_size reports bytes; convert to MB.
    frame["size_mb"] = frame.size_mb.mul(1e-6)
    return frame
def memory_tables_df():
    """Return per-table size split into table and index contributions (MB).

    See https://www.postgresql.org/docs/current/monitoring-stats.html
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    select
        relname,
        pg_relation_size(relname::regclass) as table_size,
        pg_total_relation_size(relname::regclass) - pg_relation_size(relname::regclass) as index_size,
        pg_total_relation_size(relname::regclass) as total_size
    from pg_stat_user_tables
    """
    )
    frame = (
        pd.DataFrame(rows, columns=["name", "table_mb", "indices_mb", "total_mb"])
        .set_index("name")
        .mul(1e-6)  # sizes are reported in bytes
    )
    return frame.sort_values("total_mb", ascending=False)
# -------------
# -- Indices --
# -------------
def indices_list_df():
    """Return the list of user indices, keyed by table and column set."""
    import pandas as pd

    rows = execute_raw(
        r"""
    select
        t.relname as table_name,
        i.relname as index_name,
        string_agg(a.attname, ',') as column_name
    from
        pg_class t,
        pg_class i,
        pg_index ix,
        pg_attribute a
    where
        t.oid = ix.indrelid
        and i.oid = ix.indexrelid
        and a.attrelid = t.oid
        and a.attnum = ANY(ix.indkey)
        and t.relkind = 'r'
        and t.relname not like 'pg_%'
    group by
        t.relname,
        i.relname
    order by
        t.relname,
        i.relname;
    """
    )
    frame = pd.DataFrame(rows, columns=["table", "index", "columns"])
    return frame.set_index(["table", "columns"])
def indices_stats_df(sort_size=False, with_sql=False):
    """Return statistics on indices.

    See https://www.postgresql.org/docs/current/monitoring-stats.html

    Parameters
    ----------
    sort_size : bool (default=False)
        If True, sort by descending index size instead of by name.
    with_sql : bool (default=False)
        If True, keep the ``sql`` column with the index definition.
    """
    import pandas as pd

    result = execute_raw(
        r"""
    SELECT
        pt.tablename AS TableName,
        t.indexname AS IndexName,
        pc.reltuples AS TotalRows,
        pg_relation_size(quote_ident(pt.tablename)::text) AS TableSize,
        pg_relation_size(quote_ident(t.indexrelname)::text) AS IndexSize,
        t.idx_scan AS TotalNumberOfScan,
        t.idx_tup_read AS TotalTupleRead,
        t.idx_tup_fetch AS TotalTupleFetched,
        pgi.indexdef AS IndexDef
    FROM pg_tables AS pt
    LEFT OUTER JOIN pg_class AS pc
        ON pt.tablename=pc.relname
    LEFT OUTER JOIN
    (
        SELECT
            pc.relname AS TableName,
            pc2.relname AS IndexName,
            psai.idx_scan,
            psai.idx_tup_read,
            psai.idx_tup_fetch,
            psai.indexrelname
        FROM
            pg_index AS pi
        JOIN pg_class AS pc
            ON pc.oid = pi.indrelid
        JOIN pg_class AS pc2
            ON pc2.oid = pi.indexrelid
        JOIN pg_stat_all_indexes AS psai
            ON pi.indexrelid = psai.indexrelid
    ) AS T
        ON pt.tablename = T.TableName
    LEFT OUTER JOIN pg_indexes as pgi
        ON T.indexname = pgi.indexname
    WHERE pt.schemaname='public'
    ORDER BY 1;
    """
    )
    columns = [
        "table",
        "index",
        "rows",
        "table_size_mb",
        "index_size_mb",
        # Number of index scans initiated on this index
        "scans",
        # Number of index entries returned by scans on this index
        "read",
        # Number of live rows fetched by index scans
        "fetched",
        "sql",
    ]
    df = pd.DataFrame(result, columns=columns)
    df.set_index(["table", "index"], inplace=True)
    # pg_relation_size reports bytes; use 1e-6 to convert to MB
    # (the previous 10e-6 factor overstated sizes tenfold).
    df["table_size_mb"] = df.table_size_mb * 1e-6
    df["index_size_mb"] = df.index_size_mb * 1e-6
    if not with_sql:
        df.drop("sql", axis=1, inplace=True)
    if sort_size:
        df.sort_values("index_size_mb", ascending=False, inplace=True)
    else:
        df.sort_index(axis=0, inplace=True)
    return df
def indices_check_df(min_size_mb=0.1):
    """Check for tables that may require an index.

    Parameters
    ----------
    min_size_mb : float (default=0.1)
        Only consider tables at least this large.
    """
    import pandas as pd

    threshold_bytes = int(min_size_mb * 1e6)
    rows = execute_raw(
        r"""
    SELECT
        relname,
        seq_scan,
        idx_scan,
        pg_relation_size(relname::regclass) AS rel_size,
        n_live_tup
    FROM pg_stat_all_tables
    WHERE schemaname='public' AND pg_relation_size(relname::regclass)>{min_size};
    """.format(
            min_size=threshold_bytes
        )
    )
    cols = [
        "table",
        # Number of sequential scans initiated on this table
        "seq_scans",
        # Number of index scans initiated on this table
        "idx_scans",
        "size_mb",
        "live_rows",
    ]
    frame = pd.DataFrame(rows, columns=cols)
    # Share of scans served by indices; tables with more sequential than
    # index scans are flagged as candidates for a new index.
    frame["idx_usage"] = 100 * frame.idx_scans / (frame.seq_scans + frame.idx_scans)
    frame["idx_required"] = (frame.seq_scans - frame.idx_scans) > 0
    frame["size_mb"] = frame["size_mb"] * 1e-6
    return frame.set_index("table")
# --------------------
# -- Data Integrity --
# --------------------
def cache_hit_ratio():
    """Return the shared-buffer cache hit ratio (ideally > 90%)."""
    rows = execute_raw(
        r"""
    SELECT
        sum(blks_hit)*100/sum(blks_hit+blks_read) as hit_ratio
    from pg_stat_database;
    """
    )
    # Single row, single column.
    return float(rows[0][0])
def anomalies_df():
    """Return per-database health indicators.

    Guidelines:
    - commit_ratio should be > 95%
    - rollback_ratio should be < 5%
    - deadlocks should be close to 0
    - conflicts should be close to 0
    - temp_files and temp_size_mb should be watched
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT
        datname,
        (xact_commit*100)/nullif(xact_commit+xact_rollback,0) as c_commit_ratio,
        (xact_rollback*100)/nullif(xact_commit+xact_rollback, 0) as c_rollback_ratio,
        deadlocks,
        conflicts,
        temp_files,
        temp_bytes
    FROM pg_stat_database;
    """
    )
    cols = [
        "database",
        "commit_ratio",
        "rollback_ratio",
        "deadlocks",
        "conflicts",
        "temp_files",
        "temp_size_mb",
    ]
    frame = pd.DataFrame(rows, columns=cols)
    # temp_bytes is reported in bytes; convert to MB.
    frame["temp_size_mb"] = frame["temp_size_mb"].mul(1e-6)
    return frame
def write_activity_df(limit=50):
    """Return the tables with the most write activity.

    hot_rate = rows HOT updated / total rows updated
    (Heap Only Tuple means with no separate index update required)

    Heap Only Tuple (HOT) means creating the new update tuple, when
    possible, on the same page as the old tuple. Ideally hot_rate should be
    close to 100. Indexes on frequently updated columns can block HOT
    updates; dropping expendable ones may improve overall performance.
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT
        s.relname,
        pg_relation_size(relid),
        coalesce(n_tup_ins,0) + 2 * coalesce(n_tup_upd,0) -
        coalesce(n_tup_hot_upd,0) + coalesce(n_tup_del,0) AS total_writes,
        (coalesce(n_tup_hot_upd,0)::float * 100 / (case when n_tup_upd > 0 then n_tup_upd else 1 end)::float) AS hot_rate
        /* This returns None
        (SELECT v[1] FROM regexp_matches(reloptions::text,E'fillfactor=(d+)') as r(v) limit 1) AS fillfactor
        */
    from pg_stat_all_tables
    s join pg_class c ON c.oid=relid
    order by total_writes desc
    limit {limit};
    """.format(
            limit=limit
        )
    )
    cols = [
        "table",
        "size_mb",
        "writes",
        "hot_rate",
        # "fill_factor"
    ]
    frame = pd.DataFrame(rows, columns=cols)
    # pg_relation_size reports bytes; convert to MB.
    frame["size_mb"] = frame["size_mb"] * 1e-6
    return frame.set_index("table")
# How many indexes are in cache
def cached_indices():
    """Return index cache usage: blocks read from disk, blocks hit in
    cache, and the resulting hit ratio.

    Returns the raw query result (a single row). Previously this function
    mistakenly returned the function object itself instead of the result.
    """
    result = execute_raw(
        r"""
    SELECT
        sum(idx_blks_read) as idx_read,
        sum(idx_blks_hit) as idx_hit,
        (sum(idx_blks_hit) - sum(idx_blks_read)) / sum(idx_blks_hit) as ratio
    FROM pg_statio_user_indexes;
    """
    )
    return result
def dirty_pages():
    """Return background-writer counters.

    maxwritten_clean and buffers_backend_fsync should be 0.
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT buffers_clean, maxwritten_clean, buffers_backend_fsync from pg_stat_bgwriter;
    """
    )
    labels = ("buffers_clean", "maxwritten_clean", "buffers_backend_fsync")
    return pd.Series(dict(zip(labels, rows[0])))
# -------------
# -- Queries --
# -------------
def requires_pg_stat(func):
    """Decorator translating a missing ``pg_stat_statements`` relation into
    a clear RuntimeError; any other exception is re-raised unchanged."""
    @wraps(func)
    def wrapper(*args, **kwds):
        try:
            return func(*args, **kwds)
        except Exception as err:
            if 'relation "pg_stat_statements" does not exist' not in str(err):
                raise
            raise RuntimeError(
                "This function requires that the pg_stat_statements extension is initialised on your database"
            )

    return wrapper
@requires_pg_stat
def query_reset_stats():
    """Discard all statistics gathered so far by pg_stat_statements."""
    return execute_raw("select pg_stat_statements_reset();")
@requires_pg_stat
def query_stats_df(limit=100):
    """Return most CPU intensive queries.

    See: https://www.postgresql.org/docs/9.4/pgstatstatements.html

    Parameters
    ----------
    limit : int (default=100)
        Maximum number of queries returned, ordered by total time.
    """
    import pandas as pd
    result = execute_raw(
        r"""
    SELECT
        query,
        round(total_time::numeric, 2) AS total_time,
        calls,
        rows,
        round((100 * total_time / sum(total_time::numeric) OVER ())::numeric, 2) AS percentage_cpu
    FROM pg_stat_statements
    ORDER BY total_time DESC
    LIMIT {limit};
    """.format(
            limit=limit
        )
    )
    # avg_time = total_time / calls
    df = pd.DataFrame(
        result, columns=["sql", "time_seconds", "calls", "rows", "cpu_percent"]
    )
    # pg_stat_statements.total_time is reported in milliseconds, so the
    # milliseconds -> seconds conversion factor is 1e-3 (the previous 1e-6
    # factor under-reported times by a factor of 1000).
    df["time_seconds"] = df["time_seconds"].astype(float) * 1e-3
    # First SQL keyword (SELECT, INSERT, ...) as a coarse query type.
    df["type"] = df.sql.apply(lambda s: s.split()[0].upper())
    return df
@requires_pg_stat
def query_write_df():
    """Return the queries writing most blocks to shared_buffers.

    See: https://www.postgresql.org/docs/9.4/pgstatstatements.html
    """
    import pandas as pd

    rows = execute_raw(
        r"""
    SELECT
        query,
        shared_blks_dirtied
    from pg_stat_statements
    where shared_blks_dirtied > 0
    order by 2 desc;
    """
    )
    return pd.DataFrame(rows, columns=["sql", "blocks_written"])
if __name__ == "__main__":
    # CLI entry point: dump query/index statistics to HTML reports, or reset
    # the pg_stat_statements counters.
    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument("commands", choices=["queries", "indices", "reset"], nargs='+')
    parser.add_argument("-n", "--name", default="test")
    parser.add_argument("-p", "--path", default=os.getcwd())
    args = parser.parse_args()

    for _command in args.commands:
        # Consistent elif chain: each command is handled by exactly one branch
        # (the previous if/if/elif mix re-tested "indices" after "queries").
        if _command == "queries":
            out_file = Path(args.path).joinpath(args.name + "_queries.html")
            out_file.write_text(query_stats_df().to_html())
        elif _command == "indices":
            out_file = Path(args.path).joinpath(args.name + "_indices.html")
            out_file.write_text(indices_stats_df().to_html())
        elif _command == "reset":
            query_reset_stats()
| 27.67316 | 159 | 0.600078 | from functools import wraps
from pathlib import Path
def execute_raw(raw):
from aiida.manage.manager import get_manager
backend = get_manager()._load_backend(schema_check=False)
return backend.execute_raw(raw)
def memory_db_df():
import pandas as pd
result = execute_raw(
r"""
SELECT
datname,
pg_database_size(datname)
from pg_database
order by pg_database_size(datname);
"""
)
df = pd.DataFrame(result, columns=["database", "size_mb"])
df["size_mb"] = df["size_mb"] * 1e-6
return df
def memory_pg_classes_df():
import pandas as pd
result = execute_raw(
r"""
SELECT
sum(pg_relation_size(pg_class.oid))::bigint,
nspname,
CASE pg_class.relkind
WHEN 'r' THEN 'table'
WHEN 'i' THEN 'index'
WHEN 'S' THEN 'sequence'
WHEN 'v' THEN 'view'
WHEN 't' THEN 'toast'
ELSE pg_class.relkind::text
END
FROM pg_class
LEFT OUTER JOIN pg_namespace ON (pg_namespace.oid = pg_class.relnamespace)
GROUP BY pg_class.relkind, nspname
ORDER BY sum(pg_relation_size(pg_class.oid)) DESC;
"""
)
df = pd.DataFrame(result, columns=["size_mb", "namespace", "relkind"])
df.sort_index(axis=1, inplace=True)
df["size_mb"] = df.size_mb * 1e-6
return df
def memory_tables_df():
import pandas as pd
result = execute_raw(
r"""
select
relname,
pg_relation_size(relname::regclass) as table_size,
pg_total_relation_size(relname::regclass) - pg_relation_size(relname::regclass) as index_size,
pg_total_relation_size(relname::regclass) as total_size
from pg_stat_user_tables
"""
)
df = pd.DataFrame(result, columns=["name", "table_mb", "indices_mb", "total_mb"])
df.set_index("name", inplace=True)
df = df * 1e-6
df.sort_values("total_mb", ascending=False, inplace=True)
return df
def indices_list_df():
import pandas as pd
result = execute_raw(
r"""
select
t.relname as table_name,
i.relname as index_name,
string_agg(a.attname, ',') as column_name
from
pg_class t,
pg_class i,
pg_index ix,
pg_attribute a
where
t.oid = ix.indrelid
and i.oid = ix.indexrelid
and a.attrelid = t.oid
and a.attnum = ANY(ix.indkey)
and t.relkind = 'r'
and t.relname not like 'pg_%'
group by
t.relname,
i.relname
order by
t.relname,
i.relname;
"""
)
df = pd.DataFrame(result, columns=["table", "index", "columns"])
df.set_index(["table", "columns"], inplace=True)
return df
def indices_stats_df(sort_size=False, with_sql=False):
import pandas as pd
result = execute_raw(
r"""
SELECT
pt.tablename AS TableName,
t.indexname AS IndexName,
pc.reltuples AS TotalRows,
pg_relation_size(quote_ident(pt.tablename)::text) AS TableSize,
pg_relation_size(quote_ident(t.indexrelname)::text) AS IndexSize,
t.idx_scan AS TotalNumberOfScan,
t.idx_tup_read AS TotalTupleRead,
t.idx_tup_fetch AS TotalTupleFetched,
pgi.indexdef AS IndexDef
FROM pg_tables AS pt
LEFT OUTER JOIN pg_class AS pc
ON pt.tablename=pc.relname
LEFT OUTER JOIN
(
SELECT
pc.relname AS TableName,
pc2.relname AS IndexName,
psai.idx_scan,
psai.idx_tup_read,
psai.idx_tup_fetch,
psai.indexrelname
FROM
pg_index AS pi
JOIN pg_class AS pc
ON pc.oid = pi.indrelid
JOIN pg_class AS pc2
ON pc2.oid = pi.indexrelid
JOIN pg_stat_all_indexes AS psai
ON pi.indexrelid = psai.indexrelid
) AS T
ON pt.tablename = T.TableName
LEFT OUTER JOIN pg_indexes as pgi
ON T.indexname = pgi.indexname
WHERE pt.schemaname='public'
ORDER BY 1;
"""
)
columns = [
"table",
"index",
"rows",
"table_size_mb",
"index_size_mb",
"scans",
"read",
"fetched",
"sql",
]
df = pd.DataFrame(result, columns=columns)
df.set_index(["table", "index"], inplace=True)
df["table_size_mb"] = df.table_size_mb * 10e-6
df["index_size_mb"] = df.index_size_mb * 10e-6
if not with_sql:
df.drop("sql", axis=1, inplace=True)
if sort_size:
df.sort_values("index_size_mb", ascending=False, inplace=True)
else:
df.sort_index(axis=0, inplace=True)
return df
def indices_check_df(min_size_mb=0.1):
import pandas as pd
result = execute_raw(
r"""
SELECT
relname,
seq_scan,
idx_scan,
pg_relation_size(relname::regclass) AS rel_size,
n_live_tup
FROM pg_stat_all_tables
WHERE schemaname='public' AND pg_relation_size(relname::regclass)>{min_size};
""".format(
min_size=int(min_size_mb * 1e6)
)
)
df = pd.DataFrame(
result,
columns=[
"table",
"seq_scans",
"idx_scans",
"size_mb",
"live_rows",
],
)
df["idx_usage"] = 100 * df.idx_scans / (df.seq_scans + df.idx_scans)
df["idx_required"] = (df.seq_scans - df.idx_scans) > 0
df["size_mb"] = df["size_mb"] * 1e-6
df.set_index("table", inplace=True)
return df
def cache_hit_ratio():
result = execute_raw(
r"""
SELECT
sum(blks_hit)*100/sum(blks_hit+blks_read) as hit_ratio
from pg_stat_database;
"""
)
return float(result[0][0])
def anomalies_df():
import pandas as pd
result = execute_raw(
r"""
SELECT
datname,
(xact_commit*100)/nullif(xact_commit+xact_rollback,0) as c_commit_ratio,
(xact_rollback*100)/nullif(xact_commit+xact_rollback, 0) as c_rollback_ratio,
deadlocks,
conflicts,
temp_files,
temp_bytes
FROM pg_stat_database;
"""
)
df = pd.DataFrame(
result,
columns=[
"database",
"commit_ratio",
"rollback_ratio",
"deadlocks",
"conflicts",
"temp_files",
"temp_size_mb",
],
)
df["temp_size_mb"] = df["temp_size_mb"] * 1e-6
return df
def write_activity_df(limit=50):
import pandas as pd
result = execute_raw(
r"""
SELECT
s.relname,
pg_relation_size(relid),
coalesce(n_tup_ins,0) + 2 * coalesce(n_tup_upd,0) -
coalesce(n_tup_hot_upd,0) + coalesce(n_tup_del,0) AS total_writes,
(coalesce(n_tup_hot_upd,0)::float * 100 / (case when n_tup_upd > 0 then n_tup_upd else 1 end)::float) AS hot_rate
/* This returns None
(SELECT v[1] FROM regexp_matches(reloptions::text,E'fillfactor=(d+)') as r(v) limit 1) AS fillfactor
*/
from pg_stat_all_tables
s join pg_class c ON c.oid=relid
order by total_writes desc
limit {limit};
""".format(
limit=limit
)
)
columns = [
"table",
"size_mb",
"writes",
"hot_rate",
]
df = pd.DataFrame(result, columns=columns)
df["size_mb"] = df["size_mb"] * 1e-6
df.set_index("table", inplace=True)
return df
def cached_indices():
result = execute_raw(
r"""
SELECT
sum(idx_blks_read) as idx_read,
sum(idx_blks_hit) as idx_hit,
(sum(idx_blks_hit) - sum(idx_blks_read)) / sum(idx_blks_hit) as ratio
FROM pg_statio_user_indexes;
"""
)
return cached_indices
def dirty_pages():
import pandas as pd
result = execute_raw(
r"""
SELECT buffers_clean, maxwritten_clean, buffers_backend_fsync from pg_stat_bgwriter;
"""
)
return pd.Series(
dict(
zip(
("buffers_clean", "maxwritten_clean", "buffers_backend_fsync"),
result[0],
)
)
)
def requires_pg_stat(func):
@wraps(func)
def wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except Exception as err:
if 'relation "pg_stat_statements" does not exist' in str(err):
raise RuntimeError(
"This function requires that the pg_stat_statements extension is initialised on your database"
)
raise
return wrapper
@requires_pg_stat
def query_reset_stats():
return execute_raw("select pg_stat_statements_reset();")
@requires_pg_stat
def query_stats_df(limit=100):
import pandas as pd
result = execute_raw(
r"""
SELECT
query,
round(total_time::numeric, 2) AS total_time,
calls,
rows,
round((100 * total_time / sum(total_time::numeric) OVER ())::numeric, 2) AS percentage_cpu
FROM pg_stat_statements
ORDER BY total_time DESC
LIMIT {limit};
""".format(
limit=limit
)
)
df = pd.DataFrame(
result, columns=["sql", "time_seconds", "calls", "rows", "cpu_percent"]
)
df["time_seconds"] = df["time_seconds"].astype(float) * 1e-6
df["type"] = df.sql.apply(lambda s: s.split()[0].upper())
return df
@requires_pg_stat
def query_write_df():
import pandas as pd
result = execute_raw(
r"""
SELECT
query,
shared_blks_dirtied
from pg_stat_statements
where shared_blks_dirtied > 0
order by 2 desc;
"""
)
return pd.DataFrame(result, columns=["sql", "blocks_written"])
if __name__ == "__main__":
import argparse, os
parser = argparse.ArgumentParser()
parser.add_argument("commands", choices=["queries", "indices", "reset"], nargs='+')
parser.add_argument("-n", "--name", default="test")
parser.add_argument("-p", "--path", default=os.getcwd())
args = parser.parse_args()
for _command in args.commands:
if _command == "queries":
Path(args.path).joinpath(args.name + "_queries.html").write_text(query_stats_df().to_html())
if _command == "indices":
Path(args.path).joinpath(args.name + "_indices.html").write_text(indices_stats_df().to_html())
elif _command == "reset":
query_reset_stats()
| true | true |
f71b7bba9bd0bdbe0b8034d90c61427bb3dc3f64 | 16,878 | py | Python | optbinning/binning/piecewise/continuous_binning.py | mnicstruwig/optbinning | 6ce991e1ca75b4d41835f3b3bf8e0f294f6ba780 | [
"Apache-2.0"
] | 1 | 2021-02-09T02:49:32.000Z | 2021-02-09T02:49:32.000Z | optbinning/binning/piecewise/continuous_binning.py | mnicstruwig/optbinning | 6ce991e1ca75b4d41835f3b3bf8e0f294f6ba780 | [
"Apache-2.0"
] | null | null | null | optbinning/binning/piecewise/continuous_binning.py | mnicstruwig/optbinning | 6ce991e1ca75b4d41835f3b3bf8e0f294f6ba780 | [
"Apache-2.0"
] | null | null | null | """
Optimal piecewise binning for continuous target.
"""
# Guillermo Navas-Palencia <g.navas.palencia@gmail.com>
# Copyright (C) 2020
import time
import numpy as np
from .base import _check_parameters
from .base import BasePWBinning
from .binning_statistics import PWContinuousBinningTable
from .metrics import continuous_metrics
from .transformations import transform_continuous_target
class ContinuousOptimalPWBinning(BasePWBinning):
"""Optimal Piecewise binning of a numerical variable with respect to a
binary target.
Parameters
----------
name : str, optional (default="")
The variable name.
objective : str, optional (default="l2")
The objective function. Supported objectives are "l2", "l1", "huber"
and "quantile". Note that "l1", "huber" and "quantile" are robust
objective functions.
degree : int (default=1)
The degree of the polynomials.
* degree = 0: piecewise constant functions.
* degree = 1: piecewise linear functions.
* degree > 1: piecewise polynomial functions.
continuous : bool (default=True)
Whether to fit a continuous or discontinuous piecewise regression.
prebinning_method : str, optional (default="cart")
The pre-binning method. Supported methods are "cart" for a CART
decision tree, "quantile" to generate prebins with approximately same
frequency and "uniform" to generate prebins with equal width. Method
"cart" uses `sklearn.tree.DecistionTreeClassifier
<https://scikit-learn.org/stable/modules/generated/sklearn.tree.
DecisionTreeClassifier.html>`_.
max_n_prebins : int (default=20)
The maximum number of bins after pre-binning (prebins).
min_prebin_size : float (default=0.05)
The fraction of mininum number of records for each prebin.
min_n_bins : int or None, optional (default=None)
The minimum number of bins. If None, then ``min_n_bins`` is
a value in ``[0, max_n_prebins]``.
max_n_bins : int or None, optional (default=None)
The maximum number of bins. If None, then ``max_n_bins`` is
a value in ``[0, max_n_prebins]``.
min_bin_size : float or None, optional (default=None)
The fraction of minimum number of records for each bin. If None,
``min_bin_size = min_prebin_size``.
max_bin_size : float or None, optional (default=None)
The fraction of maximum number of records for each bin. If None,
``max_bin_size = 1.0``.
monotonic_trend : str or None, optional (default="auto")
The monotonic trend. Supported trends are “auto”, "auto_heuristic" and
"auto_asc_desc" to automatically determine the trend maximizing IV
using a machine learning classifier, "ascending", "descending",
"concave", "convex", "peak" and "peak_heuristic" to allow a peak change
point, and "valley" and "valley_heuristic" to allow a valley change
point. Trends "auto_heuristic", "peak_heuristic" and "valley_heuristic"
use a heuristic to determine the change point, and are significantly
faster for large size instances (``max_n_prebins > 20``). Trend
"auto_asc_desc" is used to automatically select the best monotonic
trend between "ascending" and "descending". If None, then the
monotonic constraint is disabled.
n_subsamples : int or None (default=None)
Number of subsamples to fit the piecewise regression algorithm. If
None, all values are considered.
max_pvalue : float or None, optional (default=0.05)
The maximum p-value among bins. The Z-test is used to detect bins
not satisfying the p-value constraint. Option supported by solvers
"cp" and "mip".
max_pvalue_policy : str, optional (default="consecutive")
The method to determine bins not satisfying the p-value constraint.
Supported methods are "consecutive" to compare consecutive bins and
"all" to compare all bins.
outlier_detector : str or None, optional (default=None)
The outlier detection method. Supported methods are "range" to use
the interquartile range based method or "zcore" to use the modified
Z-score method.
outlier_params : dict or None, optional (default=None)
Dictionary of parameters to pass to the outlier detection method.
user_splits : array-like or None, optional (default=None)
The list of pre-binning split points when ``dtype`` is "numerical" or
the list of prebins when ``dtype`` is "categorical".
user_splits_fixed : array-like or None (default=None)
The list of pre-binning split points that must be fixed.
special_codes : array-like or None, optional (default=None)
List of special codes. Use special codes to specify the data values
that must be treated separately.
split_digits : int or None, optional (default=None)
The significant digits of the split points. If ``split_digits`` is set
to 0, the split points are integers. If None, then all significant
digits in the split points are considered.
solver : str, optional (default="auto")
The optimizer to solve the underlying mathematical optimization
problem. Supported solvers are `"ecos"
<https://github.com/embotech/ecos>`_, `"osqp"
<https://github.com/oxfordcontrol/osqp>`_, "direct", to choose the
direct solver, and "auto", to choose the most appropriate solver for
the problem.
h_epsilon: float (default=1.35)
The parameter h_epsilon used when ``objective="huber"``, controls the
number of samples that should be classified as outliers.
quantile : float (default=0.5)
The parameter quantile is the q-th quantile to be used when
``objective="quantile"``.
regularization: str or None (default=None)
Type of regularization. Supported regularization are "l1" (Lasso) and
"l2" (Ridge). If None, no regularization is applied.
reg_l1 : float (default=1.0)
L1 regularization term. Increasing this value will smooth the
regression model. Only applicable if ``regularization="l1"``.
reg_l2 : float (default=1.0)
L2 regularization term. Increasing this value will smooth the
regression model. Only applicable if ``regularization="l2"``.
random_state : int, RandomState instance or None, (default=None)
If ``n_subsamples < n_samples``, controls the shuffling applied to the
data before applying the split.
verbose : bool (default=False)
Enable verbose output.
"""
    def __init__(self, name="", objective="l2", degree=1,
                 continuous=True, prebinning_method="cart", max_n_prebins=20,
                 min_prebin_size=0.05, min_n_bins=None, max_n_bins=None,
                 min_bin_size=None, max_bin_size=None, monotonic_trend="auto",
                 n_subsamples=None, max_pvalue=None,
                 max_pvalue_policy="consecutive", outlier_detector=None,
                 outlier_params=None, user_splits=None, user_splits_fixed=None,
                 special_codes=None, split_digits=None, solver="auto",
                 h_epsilon=1.35, quantile=0.5, regularization=None, reg_l1=1.0,
                 reg_l2=1.0, random_state=None, verbose=False):
        # Delegate all shared parameter handling to BasePWBinning.
        # NOTE(review): the second positional argument is passed as None —
        # presumably the estimator slot of the base class; confirm against
        # BasePWBinning.__init__.
        super().__init__(name, None, objective, degree, continuous,
                         prebinning_method, max_n_prebins, min_prebin_size,
                         min_n_bins, max_n_bins, min_bin_size, max_bin_size,
                         monotonic_trend, n_subsamples, max_pvalue,
                         max_pvalue_policy, outlier_detector, outlier_params,
                         user_splits, user_splits_fixed, special_codes,
                         split_digits, solver, h_epsilon, quantile,
                         regularization, reg_l1, reg_l2, random_state, verbose)

        self._problem_type = "regression"

        # Statistics for the special-codes and missing-value groups; all are
        # populated by _fit and consumed by transform / the binning table.
        self._n_records_missing = None
        self._n_records_special = None
        self._sum_special = None
        self._sum_missing = None
        self._std_special = None
        self._std_missing = None
        self._min_target_missing = None
        self._min_target_special = None
        self._max_target_missing = None
        self._max_target_special = None
        self._n_zeros_missing = None
        self._n_zeros_special = None
def fit_transform(self, x, y, metric_special=0, metric_missing=0,
lb=None, ub=None, check_input=False):
"""Fit the optimal piecewise binning according to the given training
data, then transform it.
Parameters
----------
x : array-like, shape = (n_samples,)
Training vector, where n_samples is the number of samples.
y : array-like, shape = (n_samples,)
Target vector relative to x.
metric_special : float or str (default=0)
The metric value to transform special codes in the input vector.
Supported metrics are "empirical" to use the empirical mean and any
numerical value.
metric_missing : float or str (default=0)
The metric value to transform missing values in the input vector.
Supported metrics are "empirical" to use the empirical mean and any
numerical value.
lb : float or None (default=None)
Avoid values below the lower bound lb.
ub : float or None (default=None)
Avoid values above the upper bound ub.
check_input : bool (default=False)
Whether to check input arrays.
Returns
-------
x_new : numpy array, shape = (n_samples,)
Transformed array.
"""
return self.fit(x, y, check_input).transform(
x, metric_special, metric_missing, lb, ub, check_input)
def transform(self, x, metric_special=0, metric_missing=0,
lb=None, ub=None, check_input=False):
"""Transform given data using bins from the fitted optimal piecewise
binning.
Parameters
----------
x : array-like, shape = (n_samples,)
Training vector, where n_samples is the number of samples.
metric_special : float or str (default=0)
The metric value to transform special codes in the input vector.
Supported metrics are "empirical" to use the empirical mean and any
numerical value.
metric_missing : float or str (default=0)
The metric value to transform missing values in the input vector.
Supported metrics are "empirical" to use the empirical mean and any
numerical value.
lb : float or None (default=None)
Avoid values below the lower bound lb.
ub : float or None (default=None)
Avoid values above the upper bound ub.
check_input : bool (default=False)
Whether to check input arrays.
Returns
-------
x_new : numpy array, shape = (n_samples,)
Transformed array.
"""
self._check_is_fitted()
return transform_continuous_target(
self._optb.splits, x, self._c, lb, ub, self._n_records_special,
self._sum_special, self._n_records_missing, self._sum_missing,
self.special_codes, metric_special, metric_missing, check_input)
    def _fit(self, x, y, lb, ub, check_input):
        """Run the full fitting pipeline: parameter check, pre-processing,
        optimal binning + piecewise fit, and binning-table construction.

        Sets the fitted state and returns ``self``.
        """
        time_init = time.perf_counter()

        if self.verbose:
            self._logger.info("Optimal piecewise binning started.")
            self._logger.info("Options: check parameters.")

        _check_parameters(**self.get_params(deep=False), estimator=None,
                          problem_type=self._problem_type)

        # Pre-processing: split samples into clean / missing / special groups
        # (and optionally remove outliers).
        if self.verbose:
            self._logger.info("Pre-processing started.")

        self._n_samples = len(x)

        if self.verbose:
            self._logger.info("Pre-processing: number of samples: {}"
                              .format(self._n_samples))

        time_preprocessing = time.perf_counter()

        # Only the first six outputs are used for a continuous target; the
        # remaining slots (categorical/weight related) are discarded.
        [x_clean, y_clean, x_missing, y_missing, x_special, y_special,
         _, _, _, _, _, _, _] = self._fit_preprocessing(x, y, check_input)

        self._time_preprocessing = time.perf_counter() - time_preprocessing

        if self.verbose:
            n_clean = len(x_clean)
            n_missing = len(x_missing)
            n_special = len(x_special)

            self._logger.info("Pre-processing: number of clean samples: {}"
                              .format(n_clean))

            self._logger.info("Pre-processing: number of missing samples: {}"
                              .format(n_missing))

            self._logger.info("Pre-processing: number of special samples: {}"
                              .format(n_special))

            if self.outlier_detector is not None:
                # Outliers are whatever pre-processing removed beyond the
                # clean/missing/special partition.
                n_outlier = self._n_samples-(n_clean + n_missing + n_special)
                self._logger.info("Pre-processing: number of outlier samples: "
                                  "{}".format(n_outlier))

            self._logger.info("Pre-processing terminated. Time: {:.4f}s"
                              .format(self._time_preprocessing))

        # Pre-binning
        self._time_estimator = 0

        # Fit optimal binning algorithm for continuous target. Use optimal
        # split points to compute optimal piecewise functions
        self._fit_binning(x_clean, y_clean, y_clean, lb, ub)

        # Post-processing
        if self.verbose:
            self._logger.info("Post-processing started.")
            self._logger.info("Post-processing: compute binning information.")

        time_postprocessing = time.perf_counter()

        # Compute n_records and sum for special and missing
        self._n_records_special = len(y_special)
        self._sum_special = np.sum(y_special)
        self._n_zeros_special = np.count_nonzero(y_special == 0)
        if len(y_special):
            # std/min/max are undefined for an empty group; the attributes
            # then keep their None defaults from __init__.
            self._std_special = np.std(y_special)
            self._min_target_special = np.min(y_special)
            self._max_target_special = np.max(y_special)

        self._n_records_missing = len(y_missing)
        self._sum_missing = np.sum(y_missing)
        self._n_zeros_missing = np.count_nonzero(y_missing == 0)
        if len(y_missing):
            self._std_missing = np.std(y_missing)
            self._min_target_missing = np.min(y_missing)
            self._max_target_missing = np.max(y_missing)

        bt = self._optb.binning_table.build(add_totals=False)
        n_records = bt["Count"].values
        sums = bt["Sum"].values
        stds = bt["Std"].values
        min_target = bt["Min"].values
        max_target = bt["Max"].values
        n_zeros = bt["Zeros count"].values

        # The last two rows of the binning table (positions _n_bins and
        # _n_bins + 1) hold the special and missing groups respectively.
        n_records[self._n_bins] = self._n_records_special
        n_records[self._n_bins + 1] = self._n_records_missing
        sums[self._n_bins] = self._sum_special
        sums[self._n_bins + 1] = self._sum_missing
        stds[self._n_bins] = self._std_special
        stds[self._n_bins + 1] = self._std_missing
        min_target[self._n_bins] = self._min_target_special
        min_target[self._n_bins + 1] = self._min_target_missing
        max_target[self._n_bins] = self._max_target_special
        max_target[self._n_bins + 1] = self._max_target_missing
        n_zeros[self._n_bins] = self._n_zeros_special
        n_zeros[self._n_bins + 1] = self._n_zeros_missing

        # Compute metrics
        if self.verbose:
            self._logger.info("Post-processing: compute performance metrics.")

        d_metrics = continuous_metrics(
            x_clean, y_clean, self._optb.splits, self._c, lb, ub,
            self._n_records_special, self._sum_special,
            self._n_records_missing, self._sum_missing, self.special_codes)

        # Binning table
        self._binning_table = PWContinuousBinningTable(
            self.name, self._optb.splits, self._c, n_records, sums, stds,
            min_target, max_target, n_zeros, lb, ub, x_clean.min(),
            x_clean.max(), d_metrics)

        self._time_postprocessing = time.perf_counter() - time_postprocessing

        if self.verbose:
            self._logger.info("Post-processing terminated. Time: {:.4f}s"
                              .format(self._time_postprocessing))

        self._time_total = time.perf_counter() - time_init

        if self.verbose:
            self._logger.info("Optimal piecewise binning terminated. "
                              "Status: {}. Time: {:.4f}s"
                              .format(self._status, self._time_total))

        # Completed successfully
        self._class_logger.close()

        self._is_fitted = True

        return self
| 41.266504 | 79 | 0.640656 |
import time
import numpy as np
from .base import _check_parameters
from .base import BasePWBinning
from .binning_statistics import PWContinuousBinningTable
from .metrics import continuous_metrics
from .transformations import transform_continuous_target
class ContinuousOptimalPWBinning(BasePWBinning):
def __init__(self, name="", objective="l2", degree=1,
continuous=True, prebinning_method="cart", max_n_prebins=20,
min_prebin_size=0.05, min_n_bins=None, max_n_bins=None,
min_bin_size=None, max_bin_size=None, monotonic_trend="auto",
n_subsamples=None, max_pvalue=None,
max_pvalue_policy="consecutive", outlier_detector=None,
outlier_params=None, user_splits=None, user_splits_fixed=None,
special_codes=None, split_digits=None, solver="auto",
h_epsilon=1.35, quantile=0.5, regularization=None, reg_l1=1.0,
reg_l2=1.0, random_state=None, verbose=False):
super().__init__(name, None, objective, degree, continuous,
prebinning_method, max_n_prebins, min_prebin_size,
min_n_bins, max_n_bins, min_bin_size, max_bin_size,
monotonic_trend, n_subsamples, max_pvalue,
max_pvalue_policy, outlier_detector, outlier_params,
user_splits, user_splits_fixed, special_codes,
split_digits, solver, h_epsilon, quantile,
regularization, reg_l1, reg_l2, random_state, verbose)
self._problem_type = "regression"
self._n_records_missing = None
self._n_records_special = None
self._sum_special = None
self._sum_missing = None
self._std_special = None
self._std_missing = None
self._min_target_missing = None
self._min_target_special = None
self._max_target_missing = None
self._max_target_special = None
self._n_zeros_missing = None
self._n_zeros_special = None
def fit_transform(self, x, y, metric_special=0, metric_missing=0,
lb=None, ub=None, check_input=False):
return self.fit(x, y, check_input).transform(
x, metric_special, metric_missing, lb, ub, check_input)
def transform(self, x, metric_special=0, metric_missing=0,
lb=None, ub=None, check_input=False):
self._check_is_fitted()
return transform_continuous_target(
self._optb.splits, x, self._c, lb, ub, self._n_records_special,
self._sum_special, self._n_records_missing, self._sum_missing,
self.special_codes, metric_special, metric_missing, check_input)
def _fit(self, x, y, lb, ub, check_input):
time_init = time.perf_counter()
if self.verbose:
self._logger.info("Optimal piecewise binning started.")
self._logger.info("Options: check parameters.")
_check_parameters(**self.get_params(deep=False), estimator=None,
problem_type=self._problem_type)
if self.verbose:
self._logger.info("Pre-processing started.")
self._n_samples = len(x)
if self.verbose:
self._logger.info("Pre-processing: number of samples: {}"
.format(self._n_samples))
time_preprocessing = time.perf_counter()
[x_clean, y_clean, x_missing, y_missing, x_special, y_special,
_, _, _, _, _, _, _] = self._fit_preprocessing(x, y, check_input)
self._time_preprocessing = time.perf_counter() - time_preprocessing
if self.verbose:
n_clean = len(x_clean)
n_missing = len(x_missing)
n_special = len(x_special)
self._logger.info("Pre-processing: number of clean samples: {}"
.format(n_clean))
self._logger.info("Pre-processing: number of missing samples: {}"
.format(n_missing))
self._logger.info("Pre-processing: number of special samples: {}"
.format(n_special))
if self.outlier_detector is not None:
n_outlier = self._n_samples-(n_clean + n_missing + n_special)
self._logger.info("Pre-processing: number of outlier samples: "
"{}".format(n_outlier))
self._logger.info("Pre-processing terminated. Time: {:.4f}s"
.format(self._time_preprocessing))
self._time_estimator = 0
self._fit_binning(x_clean, y_clean, y_clean, lb, ub)
if self.verbose:
self._logger.info("Post-processing started.")
self._logger.info("Post-processing: compute binning information.")
time_postprocessing = time.perf_counter()
self._n_records_special = len(y_special)
self._sum_special = np.sum(y_special)
self._n_zeros_special = np.count_nonzero(y_special == 0)
if len(y_special):
self._std_special = np.std(y_special)
self._min_target_special = np.min(y_special)
self._max_target_special = np.max(y_special)
self._n_records_missing = len(y_missing)
self._sum_missing = np.sum(y_missing)
self._n_zeros_missing = np.count_nonzero(y_missing == 0)
if len(y_missing):
self._std_missing = np.std(y_missing)
self._min_target_missing = np.min(y_missing)
self._max_target_missing = np.max(y_missing)
bt = self._optb.binning_table.build(add_totals=False)
n_records = bt["Count"].values
sums = bt["Sum"].values
stds = bt["Std"].values
min_target = bt["Min"].values
max_target = bt["Max"].values
n_zeros = bt["Zeros count"].values
n_records[self._n_bins] = self._n_records_special
n_records[self._n_bins + 1] = self._n_records_missing
sums[self._n_bins] = self._sum_special
sums[self._n_bins + 1] = self._sum_missing
stds[self._n_bins] = self._std_special
stds[self._n_bins + 1] = self._std_missing
min_target[self._n_bins] = self._min_target_special
min_target[self._n_bins + 1] = self._min_target_missing
max_target[self._n_bins] = self._max_target_special
max_target[self._n_bins + 1] = self._max_target_missing
n_zeros[self._n_bins] = self._n_zeros_special
n_zeros[self._n_bins + 1] = self._n_zeros_missing
if self.verbose:
self._logger.info("Post-processing: compute performance metrics.")
d_metrics = continuous_metrics(
x_clean, y_clean, self._optb.splits, self._c, lb, ub,
self._n_records_special, self._sum_special,
self._n_records_missing, self._sum_missing, self.special_codes)
self._binning_table = PWContinuousBinningTable(
self.name, self._optb.splits, self._c, n_records, sums, stds,
min_target, max_target, n_zeros, lb, ub, x_clean.min(),
x_clean.max(), d_metrics)
self._time_postprocessing = time.perf_counter() - time_postprocessing
if self.verbose:
self._logger.info("Post-processing terminated. Time: {:.4f}s"
.format(self._time_postprocessing))
self._time_total = time.perf_counter() - time_init
if self.verbose:
self._logger.info("Optimal piecewise binning terminated. "
"Status: {}. Time: {:.4f}s"
.format(self._status, self._time_total))
self._class_logger.close()
self._is_fitted = True
return self
| true | true |
f71b7bd3333e78e80ac35643dea7e4992006e7c1 | 5,065 | py | Python | a3c_train.py | mmwebster/DeepRL-Grounding | aa7fa63fbc26e8b0fa3fe289a5fe5a00ef3e6278 | [
"MIT"
] | null | null | null | a3c_train.py | mmwebster/DeepRL-Grounding | aa7fa63fbc26e8b0fa3fe289a5fe5a00ef3e6278 | [
"MIT"
] | null | null | null | a3c_train.py | mmwebster/DeepRL-Grounding | aa7fa63fbc26e8b0fa3fe289a5fe5a00ef3e6278 | [
"MIT"
] | null | null | null | import torch.optim as optim
import env as grounding_env
from models import *
from torch.autograd import Variable
import logging
def ensure_shared_grads(model, shared_model):
for param, shared_param in zip(model.parameters(),
shared_model.parameters()):
if shared_param.grad is not None:
return
shared_param._grad = param.grad
def train(rank, args, shared_model):
torch.manual_seed(args.seed + rank)
env = grounding_env.GroundingEnv(args)
env.game_init()
model = A3C_LSTM_GA(args)
if (args.load != "0"):
print(str(rank) + " Loading model ... "+args.load)
model.load_state_dict(
torch.load(args.load, map_location=lambda storage, loc: storage))
model.train()
optimizer = optim.SGD(shared_model.parameters(), lr=args.lr)
p_losses = []
v_losses = []
(image, instruction), _, _, _ = env.reset()
instruction_idx = []
for word in instruction.split(" "):
instruction_idx.append(env.word_to_idx[word])
instruction_idx = np.array(instruction_idx)
image = torch.from_numpy(image).float()/255.0
instruction_idx = torch.from_numpy(instruction_idx).view(1, -1)
done = True
episode_length = 0
num_iters = 0
while True:
# Sync with the shared model
model.load_state_dict(shared_model.state_dict())
if done:
episode_length = 0
cx = Variable(torch.zeros(1, 256))
hx = Variable(torch.zeros(1, 256))
else:
cx = Variable(cx.data)
hx = Variable(hx.data)
values = []
log_probs = []
rewards = []
entropies = []
for step in range(args.num_steps):
episode_length += 1
tx = Variable(torch.from_numpy(np.array([episode_length])).long())
value, logit, (hx, cx) = model((Variable(image.unsqueeze(0)),
Variable(instruction_idx),
(tx, hx, cx)))
prob = F.softmax(logit)
log_prob = F.log_softmax(logit)
entropy = -(log_prob * prob).sum(1)
entropies.append(entropy)
action = prob.multinomial(num_samples=1).data
log_prob = log_prob.gather(1, Variable(action))
action = action.numpy()[0, 0]
(image, _), reward, done, _ = env.step(action)
done = done or episode_length >= args.max_episode_length
if done:
(image, instruction), _, _, _ = env.reset()
instruction_idx = []
for word in instruction.split(" "):
instruction_idx.append(env.word_to_idx[word])
instruction_idx = np.array(instruction_idx)
instruction_idx = torch.from_numpy(
instruction_idx).view(1, -1)
image = torch.from_numpy(image).float()/255.0
values.append(value)
log_probs.append(log_prob)
rewards.append(reward)
if done:
break
R = torch.zeros(1, 1)
if not done:
tx = Variable(torch.from_numpy(np.array([episode_length])).long())
value, _, _ = model((Variable(image.unsqueeze(0)),
Variable(instruction_idx), (tx, hx, cx)))
R = value.data
values.append(Variable(R))
policy_loss = 0
value_loss = 0
R = Variable(R)
gae = torch.zeros(1, 1)
for i in reversed(range(len(rewards))):
R = args.gamma * R + rewards[i]
advantage = R - values[i]
value_loss = value_loss + 0.5 * advantage.pow(2)
# Generalized Advantage Estimataion
delta_t = rewards[i] + args.gamma * \
values[i + 1].data - values[i].data
gae = gae * args.gamma * args.tau + delta_t
policy_loss = policy_loss - \
log_probs[i] * Variable(gae) - 0.01 * entropies[i]
optimizer.zero_grad()
p_losses.append(policy_loss.data[0, 0])
v_losses.append(value_loss.data[0, 0])
if(len(p_losses) > 1000):
num_iters += 1
print(" ".join([
"Training thread: {}".format(rank),
"Num iters: {}K".format(num_iters),
"Avg policy loss: {}".format(np.mean(p_losses)),
"Avg value loss: {}".format(np.mean(v_losses))]))
logging.info(" ".join([
"Training thread: {}".format(rank),
"Num iters: {}K".format(num_iters),
"Avg policy loss: {}".format(np.mean(p_losses)),
"Avg value loss: {}".format(np.mean(v_losses))]))
p_losses = []
v_losses = []
(policy_loss + 0.5 * value_loss).backward()
torch.nn.utils.clip_grad_norm(model.parameters(), 40)
ensure_shared_grads(model, shared_model)
optimizer.step()
| 32.261146 | 78 | 0.541165 | import torch.optim as optim
import env as grounding_env
from models import *
from torch.autograd import Variable
import logging
def ensure_shared_grads(model, shared_model):
for param, shared_param in zip(model.parameters(),
shared_model.parameters()):
if shared_param.grad is not None:
return
shared_param._grad = param.grad
def train(rank, args, shared_model):
torch.manual_seed(args.seed + rank)
env = grounding_env.GroundingEnv(args)
env.game_init()
model = A3C_LSTM_GA(args)
if (args.load != "0"):
print(str(rank) + " Loading model ... "+args.load)
model.load_state_dict(
torch.load(args.load, map_location=lambda storage, loc: storage))
model.train()
optimizer = optim.SGD(shared_model.parameters(), lr=args.lr)
p_losses = []
v_losses = []
(image, instruction), _, _, _ = env.reset()
instruction_idx = []
for word in instruction.split(" "):
instruction_idx.append(env.word_to_idx[word])
instruction_idx = np.array(instruction_idx)
image = torch.from_numpy(image).float()/255.0
instruction_idx = torch.from_numpy(instruction_idx).view(1, -1)
done = True
episode_length = 0
num_iters = 0
while True:
model.load_state_dict(shared_model.state_dict())
if done:
episode_length = 0
cx = Variable(torch.zeros(1, 256))
hx = Variable(torch.zeros(1, 256))
else:
cx = Variable(cx.data)
hx = Variable(hx.data)
values = []
log_probs = []
rewards = []
entropies = []
for step in range(args.num_steps):
episode_length += 1
tx = Variable(torch.from_numpy(np.array([episode_length])).long())
value, logit, (hx, cx) = model((Variable(image.unsqueeze(0)),
Variable(instruction_idx),
(tx, hx, cx)))
prob = F.softmax(logit)
log_prob = F.log_softmax(logit)
entropy = -(log_prob * prob).sum(1)
entropies.append(entropy)
action = prob.multinomial(num_samples=1).data
log_prob = log_prob.gather(1, Variable(action))
action = action.numpy()[0, 0]
(image, _), reward, done, _ = env.step(action)
done = done or episode_length >= args.max_episode_length
if done:
(image, instruction), _, _, _ = env.reset()
instruction_idx = []
for word in instruction.split(" "):
instruction_idx.append(env.word_to_idx[word])
instruction_idx = np.array(instruction_idx)
instruction_idx = torch.from_numpy(
instruction_idx).view(1, -1)
image = torch.from_numpy(image).float()/255.0
values.append(value)
log_probs.append(log_prob)
rewards.append(reward)
if done:
break
R = torch.zeros(1, 1)
if not done:
tx = Variable(torch.from_numpy(np.array([episode_length])).long())
value, _, _ = model((Variable(image.unsqueeze(0)),
Variable(instruction_idx), (tx, hx, cx)))
R = value.data
values.append(Variable(R))
policy_loss = 0
value_loss = 0
R = Variable(R)
gae = torch.zeros(1, 1)
for i in reversed(range(len(rewards))):
R = args.gamma * R + rewards[i]
advantage = R - values[i]
value_loss = value_loss + 0.5 * advantage.pow(2)
delta_t = rewards[i] + args.gamma * \
values[i + 1].data - values[i].data
gae = gae * args.gamma * args.tau + delta_t
policy_loss = policy_loss - \
log_probs[i] * Variable(gae) - 0.01 * entropies[i]
optimizer.zero_grad()
p_losses.append(policy_loss.data[0, 0])
v_losses.append(value_loss.data[0, 0])
if(len(p_losses) > 1000):
num_iters += 1
print(" ".join([
"Training thread: {}".format(rank),
"Num iters: {}K".format(num_iters),
"Avg policy loss: {}".format(np.mean(p_losses)),
"Avg value loss: {}".format(np.mean(v_losses))]))
logging.info(" ".join([
"Training thread: {}".format(rank),
"Num iters: {}K".format(num_iters),
"Avg policy loss: {}".format(np.mean(p_losses)),
"Avg value loss: {}".format(np.mean(v_losses))]))
p_losses = []
v_losses = []
(policy_loss + 0.5 * value_loss).backward()
torch.nn.utils.clip_grad_norm(model.parameters(), 40)
ensure_shared_grads(model, shared_model)
optimizer.step()
| true | true |
f71b7c3e0333f4799644586439f574a601d048f9 | 942 | py | Python | tema1/gym-master/gym/envs/tests/spec_list.py | oscarramos2001/Oscar-Marino-Ramos | c05e497b467aab4572f3578f1b9068d4585106d2 | [
"MIT"
] | 112 | 2018-11-19T17:23:40.000Z | 2022-03-29T05:36:14.000Z | tema1/gym-master/gym/envs/tests/spec_list.py | BrujitoOz/ia-course | c05e497b467aab4572f3578f1b9068d4585106d2 | [
"MIT"
] | 2 | 2020-03-23T01:17:45.000Z | 2020-07-02T07:01:06.000Z | tema1/gym-master/gym/envs/tests/spec_list.py | BrujitoOz/ia-course | c05e497b467aab4572f3578f1b9068d4585106d2 | [
"MIT"
] | 187 | 2018-11-28T11:38:02.000Z | 2022-03-16T11:18:39.000Z | from gym import envs, logger
import os
def should_skip_env_spec_for_tests(spec):
# We skip tests for envs that require dependencies or are otherwise
# troublesome to run frequently
ep = spec._entry_point
# Skip mujoco tests for pull request CI
skip_mujoco = not (os.environ.get('MUJOCO_KEY_BUNDLE') or os.path.exists(os.path.expanduser('~/.mujoco/mjkey.txt')))
if skip_mujoco and (ep.startswith('gym.envs.mujoco:') or ep.startswith('gym.envs.robotics:')):
return True
if ( 'GoEnv' in ep or
'HexEnv' in ep or
(ep.startswith("gym.envs.atari") and not spec.id.startswith("Pong") and not spec.id.startswith("Seaquest"))
):
logger.warn("Skipping tests for env {}".format(ep))
return True
return False
spec_list = [spec for spec in sorted(envs.registry.all(), key=lambda x: x.id) if spec._entry_point is not None and not should_skip_env_spec_for_tests(spec)]
| 44.857143 | 156 | 0.691083 | from gym import envs, logger
import os
def should_skip_env_spec_for_tests(spec):
ep = spec._entry_point
skip_mujoco = not (os.environ.get('MUJOCO_KEY_BUNDLE') or os.path.exists(os.path.expanduser('~/.mujoco/mjkey.txt')))
if skip_mujoco and (ep.startswith('gym.envs.mujoco:') or ep.startswith('gym.envs.robotics:')):
return True
if ( 'GoEnv' in ep or
'HexEnv' in ep or
(ep.startswith("gym.envs.atari") and not spec.id.startswith("Pong") and not spec.id.startswith("Seaquest"))
):
logger.warn("Skipping tests for env {}".format(ep))
return True
return False
spec_list = [spec for spec in sorted(envs.registry.all(), key=lambda x: x.id) if spec._entry_point is not None and not should_skip_env_spec_for_tests(spec)]
| true | true |
f71b7dce623850ef58b71a5f7bfbb56a6401aee0 | 495 | py | Python | python/concurrency/async_hello.py | cbare/Etudes | 8a803621f2abd20966843ccec696aec397d3c9f9 | [
"Apache-2.0"
] | null | null | null | python/concurrency/async_hello.py | cbare/Etudes | 8a803621f2abd20966843ccec696aec397d3c9f9 | [
"Apache-2.0"
] | null | null | null | python/concurrency/async_hello.py | cbare/Etudes | 8a803621f2abd20966843ccec696aec397d3c9f9 | [
"Apache-2.0"
] | null | null | null | import asyncio
async def upper_cased(value: str) -> str:
await asyncio.sleep(1)
return value.upper()
coroutines = [
upper_cased("h"),
upper_cased("e"),
upper_cased("l"),
upper_cased("l"),
upper_cased("o"),
upper_cased(" "),
upper_cased("w"),
upper_cased("o"),
upper_cased("r"),
upper_cased("l"),
upper_cased("d"),
]
async def main():
print("".join(await asyncio.gather(*coroutines)))
if __name__ == '__main__':
asyncio.run(main())
| 19.038462 | 53 | 0.60404 | import asyncio
async def upper_cased(value: str) -> str:
await asyncio.sleep(1)
return value.upper()
coroutines = [
upper_cased("h"),
upper_cased("e"),
upper_cased("l"),
upper_cased("l"),
upper_cased("o"),
upper_cased(" "),
upper_cased("w"),
upper_cased("o"),
upper_cased("r"),
upper_cased("l"),
upper_cased("d"),
]
async def main():
print("".join(await asyncio.gather(*coroutines)))
if __name__ == '__main__':
asyncio.run(main())
| true | true |
f71b7f85ca2d04fe797bbe93b9985f9eedf2ad7c | 2,785 | py | Python | main.py | mwojcik96/dtw-utterance-recognition | 9371393dfe92abb5b85c40828d099ceca599aa89 | [
"MIT"
] | null | null | null | main.py | mwojcik96/dtw-utterance-recognition | 9371393dfe92abb5b85c40828d099ceca599aa89 | [
"MIT"
] | null | null | null | main.py | mwojcik96/dtw-utterance-recognition | 9371393dfe92abb5b85c40828d099ceca599aa89 | [
"MIT"
] | null | null | null | import glob
import struct
import wave
from collections import Counter
from operator import itemgetter
import librosa
import numpy as np
from tslearn.metrics import dtw
def compute_mfcc_from_file(file):
time_characteristic = create_time_characteristics_of_a_file(file)
mfcc = librosa.feature.mfcc(y=time_characteristic, sr=16000, n_mfcc=13)
return mfcc
def create_time_characteristics_of_a_file(file):
wave_file = wave.open(file, 'r')
# rate = wave_file.getframerate()
length = wave_file.getnframes()
time_plot = []
for i in range(0, length):
wave_data = wave_file.readframes(1)
data = struct.unpack("<h", wave_data)
time_plot.append(int(data[0]))
return np.array(time_plot, dtype=np.float32)
def compute_spectral_roloff(file):
chars = create_time_characteristics_of_a_file(file)
return librosa.feature.spectral_rolloff(chars, sr=16000)[0]
def calculate_dict(mfcc_values, rolloff_values, names, labels):
final_dict = dict()
for i in names:
final_dict[i] = []
for id1, (mf1, ro1, nm1, lb1) in enumerate(zip(mfcc_values, rolloff_values, names, labels)):
for id2, (mf2, ro2, nm2, lb2) in enumerate(zip(mfcc_values, rolloff_values, names, labels)):
if id1 < id2:
current_dtw = dtw(mf1, mf2)
# current_dtw = dtw(mf1 + ro1, mf2 + ro2)
final_dict[nm1].append({"name": nm2, "label": lb2, "distance": current_dtw})
final_dict[nm2].append({"name": nm1, "label": lb1, "distance": current_dtw})
for final_key, final_item in final_dict.items():
final_dict[final_key] = sorted(final_item, key=itemgetter('distance'))
# print(key, len(final_dict[key]))
return final_dict
def recognize_speech(vector, k=1):
nearest_neighbours = Counter(elem["label"] for elem in vector[:k])
return nearest_neighbours.most_common(1)[0][0]
if __name__ == '__main__':
mfcc_list = []
rolloff_list = []
name_list = []
label_list = []
for wav_name in glob.glob("./*/*.WAV"):
mfcc_list.append(compute_mfcc_from_file(wav_name).T)
rolloff_list.append(compute_spectral_roloff(wav_name))
name_list.append(wav_name.split("/")[-1])
label_list.append(wav_name.split("/")[-2])
dist_dict = calculate_dict(mfcc_list, rolloff_list, name_list, label_list)
for n in range(1, 11):
accuracy = 0
print("KNN for k =", n)
for key, item in dist_dict.items():
real = label_list[name_list.index(key)]
predicted = recognize_speech(item, n)
# print(key, "Real:", real, "Predicted:", predicted)
if real == predicted:
accuracy += 1
print("Accuracy:", accuracy / len(name_list))
| 35.705128 | 100 | 0.656732 | import glob
import struct
import wave
from collections import Counter
from operator import itemgetter
import librosa
import numpy as np
from tslearn.metrics import dtw
def compute_mfcc_from_file(file):
time_characteristic = create_time_characteristics_of_a_file(file)
mfcc = librosa.feature.mfcc(y=time_characteristic, sr=16000, n_mfcc=13)
return mfcc
def create_time_characteristics_of_a_file(file):
wave_file = wave.open(file, 'r')
length = wave_file.getnframes()
time_plot = []
for i in range(0, length):
wave_data = wave_file.readframes(1)
data = struct.unpack("<h", wave_data)
time_plot.append(int(data[0]))
return np.array(time_plot, dtype=np.float32)
def compute_spectral_roloff(file):
chars = create_time_characteristics_of_a_file(file)
return librosa.feature.spectral_rolloff(chars, sr=16000)[0]
def calculate_dict(mfcc_values, rolloff_values, names, labels):
final_dict = dict()
for i in names:
final_dict[i] = []
for id1, (mf1, ro1, nm1, lb1) in enumerate(zip(mfcc_values, rolloff_values, names, labels)):
for id2, (mf2, ro2, nm2, lb2) in enumerate(zip(mfcc_values, rolloff_values, names, labels)):
if id1 < id2:
current_dtw = dtw(mf1, mf2)
final_dict[nm1].append({"name": nm2, "label": lb2, "distance": current_dtw})
final_dict[nm2].append({"name": nm1, "label": lb1, "distance": current_dtw})
for final_key, final_item in final_dict.items():
final_dict[final_key] = sorted(final_item, key=itemgetter('distance'))
return final_dict
def recognize_speech(vector, k=1):
nearest_neighbours = Counter(elem["label"] for elem in vector[:k])
return nearest_neighbours.most_common(1)[0][0]
if __name__ == '__main__':
mfcc_list = []
rolloff_list = []
name_list = []
label_list = []
for wav_name in glob.glob("./*/*.WAV"):
mfcc_list.append(compute_mfcc_from_file(wav_name).T)
rolloff_list.append(compute_spectral_roloff(wav_name))
name_list.append(wav_name.split("/")[-1])
label_list.append(wav_name.split("/")[-2])
dist_dict = calculate_dict(mfcc_list, rolloff_list, name_list, label_list)
for n in range(1, 11):
accuracy = 0
print("KNN for k =", n)
for key, item in dist_dict.items():
real = label_list[name_list.index(key)]
predicted = recognize_speech(item, n)
if real == predicted:
accuracy += 1
print("Accuracy:", accuracy / len(name_list))
| true | true |
f71b7feab878e3386680bd41b4a9588ddf08c6c1 | 308 | py | Python | Main.py | MynorSaban1906/vacas | 5ca5b483b48088a409cb75cb5d18603a09274498 | [
"MIT"
] | null | null | null | Main.py | MynorSaban1906/vacas | 5ca5b483b48088a409cb75cb5d18603a09274498 | [
"MIT"
] | null | null | null | Main.py | MynorSaban1906/vacas | 5ca5b483b48088a409cb75cb5d18603a09274498 | [
"MIT"
] | null | null | null | import tkinter as tk
from Windows import StorageGui, NavBar
class Main:
root = tk.Tk()
root.geometry("1000x600")
root.title(" [EDD] Fase-1" )
app = StorageGui(master=root)
app.configure(bg='#2C3E50')
app.place(x=200,width=200,height=200)
app.mainloop()
start = Main() | 18.117647 | 41 | 0.636364 | import tkinter as tk
from Windows import StorageGui, NavBar
class Main:
root = tk.Tk()
root.geometry("1000x600")
root.title(" [EDD] Fase-1" )
app = StorageGui(master=root)
app.configure(bg='#2C3E50')
app.place(x=200,width=200,height=200)
app.mainloop()
start = Main() | true | true |
f71b801eb247a574b85aeca0cc7b8ec2789deb6f | 37,260 | py | Python | src/azure-cli/azure/cli/command_modules/servicefabric/_help.py | zackliu/azure-cli | 680f8339ac010a89d4063566fabc5991abc8a4c2 | [
"MIT"
] | 2 | 2021-03-24T21:06:25.000Z | 2021-03-24T21:07:59.000Z | src/azure-cli/azure/cli/command_modules/servicefabric/_help.py | zackliu/azure-cli | 680f8339ac010a89d4063566fabc5991abc8a4c2 | [
"MIT"
] | null | null | null | src/azure-cli/azure/cli/command_modules/servicefabric/_help.py | zackliu/azure-cli | 680f8339ac010a89d4063566fabc5991abc8a4c2 | [
"MIT"
] | 9 | 2020-02-12T22:53:00.000Z | 2021-06-09T18:59:41.000Z | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.help_files import helps # pylint: disable=unused-import
# pylint: disable=line-too-long, too-many-lines
helps['sf'] = """
type: group
short-summary: Manage and administer Azure Service Fabric clusters.
"""
helps['sf application'] = """
type: group
short-summary: Manage applications running on an Azure Service Fabric cluster. Only support ARM deployed applications.
"""
helps['sf application create'] = """
type: command
short-summary: Create a new application on an Azure Service Fabric cluster.
examples:
- name: Create application "testApp" with parameters. The application type "TestAppType" version "v1" should already exist in the cluster, and the application parameters should be defined in the application manifest.
text: >
az sf application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --application-parameters key0=value0
- name: Create application "testApp" and app type version using the package url provided.
text: >
az sf application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg" \\
--application-parameters key0=value0
"""
# Help for `az sf application update`: application parameters, upgrade-policy
# settings, app type version (triggers a rolling upgrade), and min/max nodes.
# Fixes user-facing text: "a Azure" -> "an Azure", "upgreade" -> "upgrade",
# and parallel phrasing "updating ... and/or upgrading ...".
helps['sf application update'] = """
type: command
short-summary: Update an Azure Service Fabric application. This allows updating the application parameters and/or upgrading the application type version which will trigger an application upgrade.
examples:
  - name: Update application parameters and upgrade policy values and app type version to v2.
    text: >
      az sf application update -g testRG -c testCluster --application-name testApp --application-type-version v2 \\
        --application-parameters key0=value0 --health-check-stable-duration 0 --health-check-wait-duration 0 --health-check-retry-timeout 0 \\
        --upgrade-domain-timeout 5000 --upgrade-timeout 7000 --failure-action Rollback --upgrade-replica-set-check-timeout 300 --force-restart
  - name: Update application minimum and maximum nodes.
    text: >
      az sf application update -g testRG -c testCluster --application-name testApp --minimum-nodes 1 --maximum-nodes 3
"""
helps['sf application certificate'] = """
type: group
short-summary: Manage the certificate of an application.
"""
helps['sf application certificate add'] = """
type: command
short-summary: Add a new certificate to the Virtual Machine Scale Sets that make up the cluster to be used by hosted applications.
examples:
- name: Add an application certificate.
text: >
az sf application certificate add -g group-name -c cluster1 --secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}'
"""
helps['sf application show'] = """
type: command
short-summary: Show the properties of an application on an Azure Service Fabric cluster.
examples:
- name: Get application.
text: >
az sf application show -g testRG -c testCluster --application-name testApp
"""
helps['sf application list'] = """
type: command
short-summary: List applications of a given cluster.
examples:
- name: List applications for a given cluster.
text: >
az sf application list -g testRG -c testCluster
"""
helps['sf application delete'] = """
type: command
short-summary: Delete an application.
examples:
- name: Delete application.
text: >
az sf application delete -g testRG -c testCluster --application-name testApp
"""
# NOTE(review): this key was previously assigned twice in a row; the first
# entry was dead code, silently shadowed by the second. Keep a single entry,
# carrying the ARM-deployment note for consistency with the sibling
# 'sf application' and 'sf service' group summaries.
helps['sf application-type'] = """
type: group
short-summary: Manage application types on an Azure Service Fabric cluster. Only support ARM deployed application types.
"""
helps['sf application-type create'] = """
type: command
short-summary: Create a new application type on an Azure Service Fabric cluster.
examples:
- name: Create new application type.
text: >
az sf application-type create -g testRG -c testCluster --application-type-name testAppType
"""
helps['sf application-type show'] = """
type: command
short-summary: Show the properties of an application type on an Azure Service Fabric cluster.
examples:
- name: Get application type.
text: >
az sf application-type show -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type list'] = """
type: command
short-summary: List application types of a given cluster.
examples:
- name: List application types for a given cluster.
text: >
az sf application-type list -g testRG -c testCluster
"""
helps['sf application-type delete'] = """
type: command
short-summary: Delete an application type.
examples:
- name: Delete application type.
text: >
az sf application-type delete -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type version'] = """
type: group
short-summary: Manage application type versions on an Azure Service Fabric cluster. Only support ARM deployed application type versions.
"""
# Fixes user-facing text: this command creates an application type *version*,
# not an application type; the short-summary previously said the latter.
helps['sf application-type version create'] = """
type: command
short-summary: Create a new application type version on an Azure Service Fabric cluster.
examples:
  - name: Create new application type version using the provided package url. The version in the application manifest contained in the package should have the same version as the one specified in --version.
    text: >
      az sf application-type version create -g testRG -c testCluster --application-type-name testAppType \\
        --version 1.0 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg"
"""
helps['sf application-type version show'] = """
type: command
short-summary: Show the properties of an application type version on an Azure Service Fabric cluster.
examples:
- name: Show the properties of an application type version on an Azure Service Fabric cluster.
text: >
az sf application-type version show -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
# Fixes user-facing grammar: "List version of" -> "List versions of".
helps['sf application-type version list'] = """
type: command
short-summary: List versions of a given application type.
examples:
  - name: List versions for a particular application type.
    text: >
      az sf application-type version list -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type version delete'] = """
type: command
short-summary: Delete an application type version.
examples:
- name: Delete application type version.
text: >
az sf application-type version delete -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
# --- `az sf service` group and `service create` -------------------------------
# Fixes user-facing text: the first example's description said
# "testApp~testService1" while its command creates "testApp~testService";
# the description now matches the command.
helps['sf service'] = """
type: group
short-summary: Manage services running on an Azure Service Fabric cluster. Only support ARM deployed services.
"""
helps['sf service create'] = """
type: command
short-summary: Create a new service on an Azure Service Fabric cluster.
examples:
  - name: Create a new stateless service "testApp~testService" with instance count -1 (on all the nodes).
    text: >
      az sf service create -g testRG -c testCluster --application-name testApp --state stateless --service-name testApp~testService \\
        --service-type testStateless --instance-count -1 --partition-scheme singleton
  - name: Create a new stateful service "testApp~testService2" with a target of 5 nodes.
    text: >
      az sf service create -g testRG -c testCluster --application-name testApp --state stateful --service-name testApp~testService2 \\
        --service-type testStatefulType --min-replica-set-size 3 --target-replica-set-size 5
"""
helps['sf service show'] = """
type: command
short-summary: Get a service.
examples:
- name: Show the properties of a service on an Azure Service Fabric cluster.
text: >
az sf service show -g testRG -c testCluster --application-name testApp --service-name testApp~testService
"""
helps['sf service list'] = """
type: command
short-summary: List services of a given application.
examples:
- name: List services.
text: >
az sf service list -g testRG -c testCluster --application-name testApp
"""
helps['sf service delete'] = """
type: command
short-summary: Delete a service.
examples:
- name: Delete service.
text: >
az sf service delete -g testRG -c testCluster --application-name testApp --service-name testApp~testService
"""
helps['sf cluster'] = """
type: group
short-summary: Manage an Azure Service Fabric cluster.
"""
helps['sf cluster certificate'] = """
type: group
short-summary: Manage a cluster certificate.
"""
helps['sf cluster certificate add'] = """
type: command
short-summary: Add a secondary cluster certificate to the cluster.
examples:
- name: Add a certificate to a cluster using a keyvault secret identifier.
text: |
az sf cluster certificate add -g group-name -c cluster1 \\
--secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}'
- name: Add a self-signed certificate to a cluster.
text: >
az sf cluster certificate add -g group-name -c cluster1 --certificate-subject-name test.com
- name: Add a secondary cluster certificate to the cluster. (autogenerated)
text: az sf cluster certificate add --cluster-name cluster1 --resource-group group-name --secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}' --vault-name MyVault
crafted: true
"""
helps['sf cluster certificate remove'] = """
type: command
short-summary: Remove a certificate from a cluster.
examples:
- name: Remove a certificate by thumbprint.
text: >
az sf cluster certificate remove -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster client-certificate'] = """
type: group
short-summary: Manage the client certificate of a cluster.
"""
helps['sf cluster client-certificate add'] = """
type: command
short-summary: Add a common name or certificate thumbprint to the cluster for client authentication.
examples:
- name: Add client certificate by thumbprint
text: >
az sf cluster client-certificate add -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster client-certificate remove'] = """
type: command
short-summary: Remove client certificates or subject names used for authentication.
examples:
- name: Remove a client certificate by thumbprint.
text: >
az sf cluster client-certificate remove -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster create'] = """
type: command
short-summary: Create a new Azure Service Fabric cluster.
examples:
- name: Create a cluster with a given size and self-signed certificate that is downloaded locally.
text: >
az sf cluster create -g group-name -c cluster1 -l westus --cluster-size 4 --vm-password Password#1234 --certificate-output-folder MyCertificates --certificate-subject-name cluster1
- name: Use a keyvault certificate and custom template to deploy a cluster.
text: >
az sf cluster create -g group-name -c cluster1 -l westus --template-file template.json \\
--parameter-file parameter.json --secret-identifier https://{KeyVault}.vault.azure.net:443/secrets/{MyCertificate}
"""
helps['sf cluster durability'] = """
type: group
short-summary: Manage the durability of a cluster.
"""
helps['sf cluster durability update'] = """
type: command
short-summary: Update the durability tier or VM SKU of a node type in the cluster.
examples:
- name: Change the cluster durability level to 'Silver'.
text: >
az sf cluster durability update -g group-name -c cluster1 --durability-level Silver --node-type nt1
"""
helps['sf cluster list'] = """
type: command
short-summary: List cluster resources.
"""
helps['sf cluster node'] = """
type: group
short-summary: Manage the node instance of a cluster.
"""
helps['sf cluster node add'] = """
type: command
short-summary: Add nodes to a node type in a cluster.
examples:
- name: Add 2 'nt1' nodes to a cluster.
text: >
az sf cluster node add -g group-name -c cluster1 --number-of-nodes-to-add 2 --node-type 'nt1'
"""
helps['sf cluster node remove'] = """
type: command
short-summary: Remove nodes from a node type in a cluster.
examples:
- name: Remove 2 'nt1' nodes from a cluster.
text: >
az sf cluster node remove -g group-name -c cluster1 --node-type 'nt1' --number-of-nodes-to-remove 2
"""
helps['sf cluster node-type'] = """
type: group
short-summary: Manage the node-type of a cluster.
"""
helps['sf cluster node-type add'] = """
type: command
short-summary: Add a new node type to a cluster.
examples:
- name: Add a new node type to a cluster.
text: >
az sf cluster node-type add -g group-name -c cluster1 --node-type 'n2' --capacity 5 --vm-user-name 'adminName' --vm-password testPassword0
"""
helps['sf cluster reliability'] = """
type: group
short-summary: Manage the reliability of a cluster.
"""
helps['sf cluster reliability update'] = """
type: command
short-summary: Update the reliability tier for the primary node in a cluster.
examples:
- name: Change the cluster reliability level to 'Silver'.
text: >
az sf cluster reliability update -g group-name -c cluster1 --reliability-level Silver
"""
helps['sf cluster setting'] = """
type: group
short-summary: Manage a cluster's settings.
"""
helps['sf cluster setting remove'] = """
type: command
short-summary: Remove settings from a cluster.
examples:
- name: Remove the `MaxFileOperationTimeout` setting from a cluster.
text: >
az sf cluster setting remove -g group-name -c cluster1 --section 'NamingService' --parameter 'MaxFileOperationTimeout'
"""
helps['sf cluster setting set'] = """
type: command
short-summary: Update the settings of a cluster.
examples:
- name: Set the `MaxFileOperationTimeout` setting for a cluster to 5 seconds.
text: >
az sf cluster setting set -g group-name -c cluster1 --section 'NamingService' --parameter 'MaxFileOperationTimeout' --value 5000
"""
helps['sf cluster upgrade-type'] = """
type: group
short-summary: Manage the upgrade type of a cluster.
"""
helps['sf cluster upgrade-type set'] = """
type: command
short-summary: Change the upgrade type for a cluster.
examples:
- name: Set a cluster to use the 'Automatic' upgrade mode.
text: >
az sf cluster upgrade-type set -g group-name -c cluster1 --upgrade-mode Automatic
"""
helps['sf managed-cluster'] = """
type: group
short-summary: Manage an Azure Service Fabric managed cluster.
"""
helps['sf managed-cluster show'] = """
type: command
short-summary: Show the properties of an Azure Service Fabric managed cluster.
examples:
- name: Get cluster.
text: >
az sf managed-cluster show -g testRG -c testCluster
"""
helps['sf managed-cluster list'] = """
type: command
short-summary: List managed clusters.
examples:
- name: List clusters by resource group.
text: >
az sf managed-cluster list -g testRG
- name: List clusters by subscription.
text: >
az sf managed-cluster list
"""
# Help for `az sf managed-cluster create`.
# NOTE(fix): short-summary previously said "Delete a managed cluster." — copy/paste error.
helps['sf managed-cluster create'] = """
    type: command
    short-summary: Create a managed cluster.
    examples:
      - name: Create cluster with standard sku and client cert by thumbprint.
        text: >
          az sf managed-cluster create -g testRG -c testCluster -l eastus2 --cert-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --cert-is-admin --admin-password PassTest123@ --sku Standard
      - name: Create cluster with standard sku and client cert by common name.
        text: >
          az sf managed-cluster create -g testRG -c testCluster -l eastus2 --cert-common-name Contoso.com --cert-issuer-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --cert-is-admin --admin-password PassTest123@ --sku Standard
"""
helps['sf managed-cluster update'] = """
type: command
short-summary: Update a managed cluster.
examples:
- name: Update cluster client port and dns name.
text: >
az sf managed-cluster update -g testRG -c testCluster --client-port 50000 --dns-name testnewdns
"""
helps['sf managed-cluster delete'] = """
type: command
short-summary: Delete a managed cluster.
examples:
- name: Delete cluster.
text: >
az sf managed-cluster delete -g testRG -c testCluster
"""
# Help for the `az sf managed-cluster client-certificate` command group.
# NOTE(fix): corrected "manged" typo in the summary.
helps['sf managed-cluster client-certificate'] = """
    type: group
    short-summary: Manage client certificates of a managed cluster.
"""
helps['sf managed-cluster client-certificate add'] = """
type: command
short-summary: Add a new client certificate to the managed cluster.
examples:
- name: Add admin client certificate by thumbprint.
text: >
az sf managed-cluster client-certificate add -g testRG -c testCluster --thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --is-admin
- name: Add non admin client certificate by common name.
text: >
az sf managed-cluster client-certificate add -g testRG -c testCluster --common-name Contoso.com --issuer-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
"""
helps['sf managed-cluster client-certificate delete'] = """
type: command
short-summary: Delete a client certificate from the managed cluster.
examples:
- name: Delete client certificate by thumbprint.
text: >
az sf managed-cluster client-certificate delete -g testRG -c testCluster --thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
- name: Delete client certificate by common name.
text: >
az sf managed-cluster client-certificate delete -g testRG -c testCluster --common-name Contoso.com
"""
helps['sf managed-node-type'] = """
type: group
short-summary: Manage a node type of an Azure Service Fabric managed cluster.
"""
helps['sf managed-node-type show'] = """
type: command
short-summary: Show the properties of a node type.
examples:
- name: Get node type.
text: >
az sf managed-node-type show -g testRG -c testCluster -n pnt
"""
helps['sf managed-node-type list'] = """
type: command
short-summary: List node types of a managed cluster.
examples:
- name: List node types by cluster.
text: >
az sf managed-node-type list -g testRG -c testCluster
"""
# Help for `az sf managed-node-type create`.
# NOTE(fix): short-summary previously said "Delete a managed cluster." — copy/paste error;
# also corrected "properities" typo in the second example name.
helps['sf managed-node-type create'] = """
    type: command
    short-summary: Create a node type in a managed cluster.
    examples:
      - name: Create primary node type with 5 nodes.
        text: >
          az sf managed-node-type create -g testRG -c testCluster -n pnt --instance-count 5 --primary
      - name: Create non primary node type with placement properties, capacities and ports.
        text: >
          az sf managed-node-type create -g testRG -c testCluster -n snt --instance-count 5 --placement-property NodeColor=Green SomeProperty=5 --capacity ClientConnections=65536 --app-start-port 20575 --app-end-port 20605 --ephemeral-start-port 20606 --ephemeral-end-port 20861
"""
# Help for `az sf managed-node-type update`.
# NOTE(fix): short-summary previously said "Update a managed cluster." — copy/paste error.
helps['sf managed-node-type update'] = """
    type: command
    short-summary: Update a node type of a managed cluster.
    examples:
      - name: Update the instance count of the node type.
        text: >
          az sf managed-node-type update -g testRG -c testCluster -n snt --instance-count 7
      - name: Update placement properties of the node type. This will overwrite older placement properties if any.
        text: >
          az sf managed-node-type update -g testRG -c testCluster -n snt --placement-property NodeColor=Red SomeProperty=6
"""
# Help for `az sf managed-node-type delete`.
# NOTE(fix): example name previously said "Delete cluster." although the command deletes a node type.
helps['sf managed-node-type delete'] = """
    type: command
    short-summary: Delete node type from a cluster.
    examples:
      - name: Delete node type.
        text: >
          az sf managed-node-type delete -g testRG -c testCluster -n snt
"""
helps['sf managed-node-type node'] = """
type: group
short-summary: Perform operations on nodes of a node type on managed clusters.
"""
helps['sf managed-node-type node restart'] = """
type: command
short-summary: Restart nodes of a node type.
examples:
- name: Restart 2 nodes.
text: >
az sf managed-node-type node restart -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
helps['sf managed-node-type node reimage'] = """
type: command
short-summary: Reimage nodes of a node type.
examples:
- name: Reimage 2 nodes.
text: >
az sf managed-node-type node reimage -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
helps['sf managed-node-type node delete'] = """
type: command
short-summary: Delete nodes of a node type.
examples:
- name: Delete 2 nodes.
text: >
az sf managed-node-type node delete -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
# Help for the `az sf managed-node-type vm-extension` command group.
# NOTE(fix): summary previously read "Managed vm extension..." — typo for "Manage".
helps['sf managed-node-type vm-extension'] = """
    type: group
    short-summary: Manage vm extensions on a node type on managed clusters.
"""
helps['sf managed-node-type vm-extension add'] = """
type: command
short-summary: Add an extension to the node type.
examples:
- name: Add bg extension.
text: >
az sf managed-node-type vm-extension add -g testRG -c testCluster -n snt --extension-name csetest --publisher Microsoft.Compute --extension-type BGInfo --type-handler-version 2.1 --auto-upgrade-minor-version
"""
# Help for `az sf managed-node-type vm-extension delete`.
# NOTE(fix): "Delete an extension to the node type" → "from the node type".
helps['sf managed-node-type vm-extension delete'] = """
    type: command
    short-summary: Delete an extension from the node type.
    examples:
      - name: Delete extension by name.
        text: >
          az sf managed-node-type vm-extension delete -g testRG -c testCluster -n snt --extension-name csetest
"""
# Help for the `az sf managed-node-type vm-secret` command group.
# NOTE(fix): summary previously read "Managed vm secrets..." — typo for "Manage".
helps['sf managed-node-type vm-secret'] = """
    type: group
    short-summary: Manage vm secrets on a node type on managed clusters.
"""
helps['sf managed-node-type vm-secret add'] = """
type: command
short-summary: Add a secret to the node type.
examples:
- name: Add certificate to the node type as a secret.
text: >
az sf managed-node-type vm-secret add -g testRG -c testCluster -n snt --source-vault-id /subscriptions/XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/resourceGroups/testRG/providers/Microsoft.KeyVault/vaults/testkv --certificate-url https://testskv.vault.azure.net:443/secrets/TestCert/xxxxxxxxxxxxxxxxxxxxxxxx --certificate-store my
"""
helps['sf managed-application'] = """
type: group
short-summary: Manage applications running on an Azure Service Fabric managed cluster. Only support ARM deployed applications.
"""
helps['sf managed-application create'] = """
type: command
short-summary: Create a new managed application on an Azure Service Fabric managed cluster.
examples:
- name: Create managed application "testApp" with parameters. The application type "TestAppType" version "v1" should already exist in the cluster, and the application parameters should be defined in the application manifest.
text: >
az sf managed-application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --application-parameters key0=value0 --tags key1=value1
- name: Create application "testApp" and app type version using the package url provided.
text: >
az sf managed-application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg" \\
--application-parameters key0=value0
"""
# Help for `az sf managed-application update`.
# NOTE(fix): corrected "a Azure" → "an Azure", "upgreade" → "upgrade", and the garbled
# long-summary wording ("...parameters, value is the application UpgradePolicy...").
helps['sf managed-application update'] = """
    type: command
    short-summary: Update an Azure Service Fabric managed application.
    long-summary: This allows for updating the tags, the application parameters, the application UpgradePolicy and/or upgrading the application type version which will trigger an application upgrade.
    examples:
      - name: Update application parameters and upgrade policy values and app type version to v2.
        text: >
          az sf managed-application update -g testRG -c testCluster --application-name testApp --application-type-version v2 \\
            --application-parameters key0=value0 --health-check-stable-duration 0 --health-check-wait-duration 0 --health-check-retry-timeout 0 \\
            --upgrade-domain-timeout 5000 --upgrade-timeout 7000 --failure-action Rollback --upgrade-replica-set-check-timeout 300 --force-restart
      - name: Update managed application service type health policy map.
        text: >
          az sf managed-application update -g testRG -c testCluster --application-name testApp --service-type-health-policy-map \"ServiceTypeName01\"=\"5,10,5\" \"ServiceTypeName02\"=\"5,5,5\"
"""
helps['sf managed-application show'] = """
type: command
short-summary: Show the properties of a managed application on an Azure Service Fabric managed cluster.
examples:
- name: Get managed application.
text: >
az sf managed-application show -g testRG -c testCluster --application-name testApp
"""
helps['sf managed-application list'] = """
type: command
short-summary: List managed applications of a given managed cluster.
examples:
- name: List managed applications for a given managed cluster.
text: >
az sf managed-application list -g testRG -c testCluster
"""
helps['sf managed-application delete'] = """
type: command
short-summary: Delete a managed application.
examples:
- name: Delete managed application.
text: >
az sf managed-application delete -g testRG -c testCluster --application-name testApp
"""
# Help for the `az sf managed-application-type` command group.
# NOTE(fix): this key was previously assigned twice in a row; the second assignment
# silently overwrote the first. Collapsed into a single entry keeping the more
# specific (managed-cluster) summary, with "applications types" grammar corrected.
helps['sf managed-application-type'] = """
    type: group
    short-summary: Manage application types and its versions running on an Azure Service Fabric managed cluster. Only support ARM deployed application types.
"""
helps['sf managed-application-type create'] = """
type: command
short-summary: Create a new managed application type on an Azure Service Fabric managed cluster.
examples:
- name: Create new managed application type.
text: >
az sf managed-application-type create -g testRG -c testCluster --application-type-name testAppType
"""
helps['sf managed-application-type show'] = """
type: command
short-summary: Show the properties of a managed application type on an Azure Service Fabric managed cluster.
examples:
- name: Get managed application type.
text: >
az sf managed-application-type show -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type list'] = """
type: command
short-summary: List managed application types of a given managed cluster.
examples:
- name: List managed application types for a given managed cluster.
text: >
az sf managed-application-type list -g testRG -c testCluster
"""
# Help for `az sf managed-application-type update`.
# NOTE(fix): "Update an managed" → "Update a managed".
helps['sf managed-application-type update'] = """
    type: command
    short-summary: Update a managed application type.
    long-summary: This allows for updating of application type tags.
    examples:
      - name: Update application type tags.
        text: >
          az sf managed-application-type update -g testRG -c testCluster --application-type-name CalcServiceApp --tags new=tags are=nice
"""
helps['sf managed-application-type delete'] = """
type: command
short-summary: Delete a managed application type.
examples:
- name: Delete managed application type.
text: >
az sf managed-application-type delete -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type version'] = """
type: group
short-summary: Manage application type versions on an Azure Service Fabric managed cluster. Only support ARM deployed application type versions.
"""
# Help for `az sf managed-application-type version create`.
# NOTE(fix): short-summary omitted "version" — it previously described the
# `managed-application-type create` command instead of the version subcommand.
helps['sf managed-application-type version create'] = """
    type: command
    short-summary: Create a new managed application type version on an Azure Service Fabric managed cluster.
    examples:
      - name: Create new managed application type version using the provided package url. The version in the application manifest contained in the package should have the same version as the one specified in --version.
        text: >
          az sf managed-application-type version create -g testRG -c testCluster --application-type-name testAppType \\
            --version 1.0 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg"
"""
helps['sf managed-application-type version show'] = """
type: command
short-summary: Show the properties of a managed application type version on an Azure Service Fabric managed cluster.
examples:
- name: Show the properties of a managed application type version on an Azure Service Fabric managed cluster.
text: >
az sf managed-application-type version show -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf managed-application-type version list'] = """
type: command
short-summary: List versions of a given managed application type.
examples:
- name: List versions for a particular managed application type.
text: >
az sf managed-application-type version list -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type version update'] = """
type: command
short-summary: Update a managed application type version.
long-summary: This allows for updating of application type version tags and the package url.
examples:
- name: Update managed application type version.
text: >
az sf managed-application-type version update -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0 --tags new=tags
"""
helps['sf managed-application-type version delete'] = """
type: command
short-summary: Delete a managed application type version.
examples:
- name: Delete managed application type version.
text: >
az sf managed-application-type version delete -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf managed-service'] = """
type: group
short-summary: Manage services running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service create'] = """
type: command
short-summary: Create a new managed service on an Azure Service Fabric managed cluster.
examples:
- name: Create a new stateless managed service "testService1" with instance count -1 (on all the nodes).
text: >
az sf managed-service create -g testRG -c testCluster --application-name testApp --state stateless --service-name testService \\
--service-type testStateless --instance-count -1 --partition-scheme singleton
- name: Create a new stateful service "testService2" with a target of 5 nodes.
text: >
az sf managed-service create -g testRG -c testCluster --application-name testApp --state stateful --service-name testService2 --has-persisted-state \\
--service-type testStatefulType --min-replica-set-size 3 --target-replica-set-size 5 --partition-scheme uniformint64range --partition-count 1 --low-key 0 --high-key 25
"""
helps['sf managed-service show'] = """
type: command
short-summary: Get a service.
examples:
- name: Show the properties of a managed service on an Azure Service Fabric managed cluster.
text: >
az sf managed-service show -g testRG -c testCluster --application-name testApp --service-name testService
"""
helps['sf managed-service list'] = """
type: command
short-summary: List managed services of a given managed application.
examples:
- name: List managed services.
text: >
az sf managed-service list -g testRG -c testCluster --application-name testApp
"""
helps['sf managed-service update'] = """
type: command
short-summary: Update a managed service.
examples:
- name: Update managed stateless service.
text: >
az sf managed-service update -g testRG -c testCluster --application-name testApp --service-name testService --min-instance-count 2 \\
--min-instance-percentage 20 --instance-close-delay-duration '00:11:00'
- name: Update managed stateful service.
text: >
az sf managed-service update -g testRG -c testCluster --application-name testApp --service-name testService2 --service-placement-time-limit '00:11:00' \\
--stand-by-replica-keep-duration '00:11:00' --replica-restart-wait-duration '00:11:00' --quorum-loss-wait-duration '00:11:00'
"""
helps['sf managed-service delete'] = """
type: command
short-summary: Delete a managed service.
examples:
- name: Delete managed service.
text: >
az sf managed-service delete -g testRG -c testCluster --application-name testApp --service-name testService
"""
helps['sf managed-service correlation-scheme'] = """
type: group
short-summary: Manage correlation schemes of services running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service correlation-scheme create'] = """
type: command
short-summary: Create a new managed service correlation scheme on an Azure Service Fabric managed cluster.
long-summary: Create a new managed service correlation scheme on an Azure Service Fabric managed cluster. NOTE You can only have one service correlation per service.
examples:
- name: Create a new managed service correlation scheme.
text: >
az sf managed-service correlation-scheme create -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2" \\
--scheme AlignedAffinity
"""
helps['sf managed-service correlation-scheme update'] = """
type: command
short-summary: Update a managed service correlation scheme.
examples:
- name: Update managed service correlation scheme.
text: >
az sf managed-service correlation-scheme update -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2" \\
--scheme NonAlignedAffinity
"""
helps['sf managed-service correlation-scheme delete'] = """
type: command
short-summary: Delete a managed service correlation scheme.
examples:
- name: Delete managed service correlation scheme.
text: >
az sf managed-service correlation-scheme delete -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2"
"""
helps['sf managed-service load-metrics'] = """
type: group
short-summary: Manage service load metrics running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service load-metrics create'] = """
type: command
short-summary: Create a new managed service load metric on an Azure Service Fabric managed cluster.
examples:
- name: Create a new stateless managed service load metric.
text: >
az sf managed-service load-metrics create -g testRG -c testCluster --application-name testApp --service-name testService \\
--metric-name Metric1 --weight Low --default-load 3
- name: Create a new stateful service load metric.
text: >
az sf managed-service load-metrics create -g testRG -c testCluster --application-name testApp --service-name testService2 \\
--metric-name Metric2 --weight High --primary-default-load 3 --secondary-default-load 2
"""
# Help for `az sf managed-service load-metrics update`.
# NOTE(fix): short-summary previously said "Update a managed service." — this
# command updates a service *load metric*.
helps['sf managed-service load-metrics update'] = """
    type: command
    short-summary: Update a managed service load metric.
    examples:
      - name: Update a new stateless managed service load metric.
        text: >
          az sf managed-service load-metrics update -g testRG -c testCluster --application-name testApp --service-name testService \\
            --metric-name Metric1 --weight Medium --default-load 5
      - name: Update a new stateful service load metric.
        text: >
          az sf managed-service load-metrics update -g testRG -c testCluster --application-name testApp --service-name testService2 \\
            --metric-name Metric2 --weight Low --primary-default-load 2 --secondary-default-load 1
"""
# Help for `az sf managed-service load-metrics delete`.
# NOTE(fix): short-summary and example name previously said "Delete a managed
# service." — this command deletes a service *load metric*.
helps['sf managed-service load-metrics delete'] = """
    type: command
    short-summary: Delete a managed service load metric.
    examples:
      - name: Delete a managed service load metric.
        text: >
          az sf managed-service load-metrics delete -g testRG -c testCluster --application-name testApp --service-name testService2 \\
            --metric-name Metric1
"""
| 39.892934 | 328 | 0.725819 |
from knack.help_files import helps
helps['sf'] = """
type: group
short-summary: Manage and administer Azure Service Fabric clusters.
"""
helps['sf application'] = """
type: group
short-summary: Manage applications running on an Azure Service Fabric cluster. Only support ARM deployed applications.
"""
helps['sf application create'] = """
type: command
short-summary: Create a new application on an Azure Service Fabric cluster.
examples:
- name: Create application "testApp" with parameters. The application type "TestAppType" version "v1" should already exist in the cluster, and the application parameters should be defined in the application manifest.
text: >
az sf application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --application-parameters key0=value0
- name: Create application "testApp" and app type version using the package url provided.
text: >
az sf application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg" \\
--application-parameters key0=value0
"""
# Help text for `az sf application update`.
# Fixes: "a Azure" -> "an Azure", typo "upgreade" -> "upgrade".
helps['sf application update'] = """
    type: command
    short-summary: Update an Azure Service Fabric application. This allows updating the application parameters and/or upgrade the application type version which will trigger an application upgrade.
    examples:
      - name: Update application parameters and upgrade policy values and app type version to v2.
        text: >
          az sf application update -g testRG -c testCluster --application-name testApp --application-type-version v2 \\
          --application-parameters key0=value0 --health-check-stable-duration 0 --health-check-wait-duration 0 --health-check-retry-timeout 0 \\
          --upgrade-domain-timeout 5000 --upgrade-timeout 7000 --failure-action Rollback --upgrade-replica-set-check-timeout 300 --force-restart
      - name: Update application minimum and maximum nodes.
        text: >
          az sf application update -g testRG -c testCluster --application-name testApp --minimum-nodes 1 --maximum-nodes 3
"""
helps['sf application certificate'] = """
type: group
short-summary: Manage the certificate of an application.
"""
helps['sf application certificate add'] = """
type: command
short-summary: Add a new certificate to the Virtual Machine Scale Sets that make up the cluster to be used by hosted applications.
examples:
- name: Add an application certificate.
text: >
az sf application certificate add -g group-name -c cluster1 --secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}'
"""
helps['sf application show'] = """
type: command
short-summary: Show the properties of an application on an Azure Service Fabric cluster.
examples:
- name: Get application.
text: >
az sf application show -g testRG -c testCluster --application-name testApp
"""
helps['sf application list'] = """
type: command
short-summary: List applications of a given cluster.
examples:
- name: List applications for a given cluster.
text: >
az sf application list -g testRG -c testCluster
"""
helps['sf application delete'] = """
type: command
short-summary: Delete an application.
examples:
- name: Delete application.
text: >
az sf application delete -g testRG -c testCluster --application-name testApp
"""
# Help text for the `az sf application-type` group.
# This key was previously assigned twice in a row; the first value was dead
# (immediately overwritten). Only the effective assignment is kept, so the
# rendered help output is unchanged.
helps['sf application-type'] = """
    type: group
    short-summary: Manage application types on an Azure Service Fabric cluster.
"""
helps['sf application-type create'] = """
type: command
short-summary: Create a new application type on an Azure Service Fabric cluster.
examples:
- name: Create new application type.
text: >
az sf application-type create -g testRG -c testCluster --application-type-name testAppType
"""
helps['sf application-type show'] = """
type: command
short-summary: Show the properties of an application type on an Azure Service Fabric cluster.
examples:
- name: Get application type.
text: >
az sf application-type show -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type list'] = """
type: command
short-summary: List application types of a given cluster.
examples:
- name: List application types for a given cluster.
text: >
az sf application-type list -g testRG -c testCluster
"""
helps['sf application-type delete'] = """
type: command
short-summary: Delete an application type.
examples:
- name: Delete application type.
text: >
az sf application-type delete -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type version'] = """
type: group
short-summary: Manage application type versions on an Azure Service Fabric cluster. Only support ARM deployed application type versions.
"""
helps['sf application-type version create'] = """
type: command
short-summary: Create a new application type on an Azure Service Fabric cluster.
examples:
- name: Create new application type version using the provided package url. The version in the application manifest contained in the package should have the same version as the one specified in --version.
text: >
az sf application-type version create -g testRG -c testCluster --application-type-name testAppType \\
--version 1.0 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg"
"""
helps['sf application-type version show'] = """
type: command
short-summary: Show the properties of an application type version on an Azure Service Fabric cluster.
examples:
- name: Show the properties of an application type version on an Azure Service Fabric cluster.
text: >
az sf application-type version show -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf application-type version list'] = """
type: command
short-summary: List version of a given application type.
examples:
- name: List versions for a particular application type.
text: >
az sf application-type version list -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf application-type version delete'] = """
type: command
short-summary: Delete an application type version.
examples:
- name: Delete application type version.
text: >
az sf application-type version delete -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf service'] = """
type: group
short-summary: Manage services running on an Azure Service Fabric cluster. Only support ARM deployed services.
"""
helps['sf service create'] = """
type: command
short-summary: Create a new service on an Azure Service Fabric cluster.
examples:
- name: Create a new stateless service "testApp~testService1" with instance count -1 (on all the nodes).
text: >
az sf service create -g testRG -c testCluster --application-name testApp --state stateless --service-name testApp~testService \\
--service-type testStateless --instance-count -1 --partition-scheme singleton
- name: Create a new stateful service "testApp~testService2" with a target of 5 nodes.
text: >
az sf service create -g testRG -c testCluster --application-name testApp --state stateful --service-name testApp~testService2 \\
--service-type testStatefulType --min-replica-set-size 3 --target-replica-set-size 5
"""
helps['sf service show'] = """
type: command
short-summary: Get a service.
examples:
- name: Show the properties of a service on an Azure Service Fabric cluster.
text: >
az sf service show -g testRG -c testCluster --application-name testApp --service-name testApp~testService
"""
helps['sf service list'] = """
type: command
short-summary: List services of a given application.
examples:
- name: List services.
text: >
az sf service list -g testRG -c testCluster --application-name testApp
"""
helps['sf service delete'] = """
type: command
short-summary: Delete a service.
examples:
- name: Delete service.
text: >
az sf service delete -g testRG -c testCluster --application-name testApp --service-name testApp~testService
"""
helps['sf cluster'] = """
type: group
short-summary: Manage an Azure Service Fabric cluster.
"""
helps['sf cluster certificate'] = """
type: group
short-summary: Manage a cluster certificate.
"""
helps['sf cluster certificate add'] = """
type: command
short-summary: Add a secondary cluster certificate to the cluster.
examples:
- name: Add a certificate to a cluster using a keyvault secret identifier.
text: |
az sf cluster certificate add -g group-name -c cluster1 \\
--secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}'
- name: Add a self-signed certificate to a cluster.
text: >
az sf cluster certificate add -g group-name -c cluster1 --certificate-subject-name test.com
- name: Add a secondary cluster certificate to the cluster. (autogenerated)
text: az sf cluster certificate add --cluster-name cluster1 --resource-group group-name --secret-identifier 'https://{KeyVault}.vault.azure.net/secrets/{Secret}' --vault-name MyVault
crafted: true
"""
helps['sf cluster certificate remove'] = """
type: command
short-summary: Remove a certificate from a cluster.
examples:
- name: Remove a certificate by thumbprint.
text: >
az sf cluster certificate remove -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster client-certificate'] = """
type: group
short-summary: Manage the client certificate of a cluster.
"""
helps['sf cluster client-certificate add'] = """
type: command
short-summary: Add a common name or certificate thumbprint to the cluster for client authentication.
examples:
- name: Add client certificate by thumbprint
text: >
az sf cluster client-certificate add -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster client-certificate remove'] = """
type: command
short-summary: Remove client certificates or subject names used for authentication.
examples:
- name: Remove a client certificate by thumbprint.
text: >
az sf cluster client-certificate remove -g group-name -c cluster1 --thumbprint '5F3660C715EBBDA31DB1FFDCF508302348DE8E7A'
"""
helps['sf cluster create'] = """
type: command
short-summary: Create a new Azure Service Fabric cluster.
examples:
- name: Create a cluster with a given size and self-signed certificate that is downloaded locally.
text: >
az sf cluster create -g group-name -c cluster1 -l westus --cluster-size 4 --vm-password Password#1234 --certificate-output-folder MyCertificates --certificate-subject-name cluster1
- name: Use a keyvault certificate and custom template to deploy a cluster.
text: >
az sf cluster create -g group-name -c cluster1 -l westus --template-file template.json \\
--parameter-file parameter.json --secret-identifier https://{KeyVault}.vault.azure.net:443/secrets/{MyCertificate}
"""
helps['sf cluster durability'] = """
type: group
short-summary: Manage the durability of a cluster.
"""
helps['sf cluster durability update'] = """
type: command
short-summary: Update the durability tier or VM SKU of a node type in the cluster.
examples:
- name: Change the cluster durability level to 'Silver'.
text: >
az sf cluster durability update -g group-name -c cluster1 --durability-level Silver --node-type nt1
"""
helps['sf cluster list'] = """
type: command
short-summary: List cluster resources.
"""
helps['sf cluster node'] = """
type: group
short-summary: Manage the node instance of a cluster.
"""
helps['sf cluster node add'] = """
type: command
short-summary: Add nodes to a node type in a cluster.
examples:
- name: Add 2 'nt1' nodes to a cluster.
text: >
az sf cluster node add -g group-name -c cluster1 --number-of-nodes-to-add 2 --node-type 'nt1'
"""
helps['sf cluster node remove'] = """
type: command
short-summary: Remove nodes from a node type in a cluster.
examples:
- name: Remove 2 'nt1' nodes from a cluster.
text: >
az sf cluster node remove -g group-name -c cluster1 --node-type 'nt1' --number-of-nodes-to-remove 2
"""
helps['sf cluster node-type'] = """
type: group
short-summary: Manage the node-type of a cluster.
"""
helps['sf cluster node-type add'] = """
type: command
short-summary: Add a new node type to a cluster.
examples:
- name: Add a new node type to a cluster.
text: >
az sf cluster node-type add -g group-name -c cluster1 --node-type 'n2' --capacity 5 --vm-user-name 'adminName' --vm-password testPassword0
"""
helps['sf cluster reliability'] = """
type: group
short-summary: Manage the reliability of a cluster.
"""
helps['sf cluster reliability update'] = """
type: command
short-summary: Update the reliability tier for the primary node in a cluster.
examples:
- name: Change the cluster reliability level to 'Silver'.
text: >
az sf cluster reliability update -g group-name -c cluster1 --reliability-level Silver
"""
helps['sf cluster setting'] = """
type: group
short-summary: Manage a cluster's settings.
"""
helps['sf cluster setting remove'] = """
type: command
short-summary: Remove settings from a cluster.
examples:
- name: Remove the `MaxFileOperationTimeout` setting from a cluster.
text: >
az sf cluster setting remove -g group-name -c cluster1 --section 'NamingService' --parameter 'MaxFileOperationTimeout'
"""
helps['sf cluster setting set'] = """
type: command
short-summary: Update the settings of a cluster.
examples:
- name: Set the `MaxFileOperationTimeout` setting for a cluster to 5 seconds.
text: >
az sf cluster setting set -g group-name -c cluster1 --section 'NamingService' --parameter 'MaxFileOperationTimeout' --value 5000
"""
helps['sf cluster upgrade-type'] = """
type: group
short-summary: Manage the upgrade type of a cluster.
"""
helps['sf cluster upgrade-type set'] = """
type: command
short-summary: Change the upgrade type for a cluster.
examples:
- name: Set a cluster to use the 'Automatic' upgrade mode.
text: >
az sf cluster upgrade-type set -g group-name -c cluster1 --upgrade-mode Automatic
"""
helps['sf managed-cluster'] = """
type: group
short-summary: Manage an Azure Service Fabric managed cluster.
"""
helps['sf managed-cluster show'] = """
type: command
short-summary: Show the properties of an Azure Service Fabric managed cluster.
examples:
- name: Get cluster.
text: >
az sf managed-cluster show -g testRG -c testCluster
"""
helps['sf managed-cluster list'] = """
type: command
short-summary: List managed clusters.
examples:
- name: List clusters by resource group.
text: >
az sf managed-cluster list -g testRG
- name: List clusters by subscription.
text: >
az sf managed-cluster list
"""
# Help text for `az sf managed-cluster create`.
# Fixes copy-paste bug: short-summary said "Delete a managed cluster." for a
# create command.
helps['sf managed-cluster create'] = """
    type: command
    short-summary: Create a managed cluster.
    examples:
      - name: Create cluster with standard sku and client cert by thumbprint.
        text: >
          az sf managed-cluster create -g testRG -c testCluster -l eastus2 --cert-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --cert-is-admin --admin-password PassTest123@ --sku Standard
      - name: Create cluster with standard sku and client cert by common name.
        text: >
          az sf managed-cluster create -g testRG -c testCluster -l eastus2 --cert-common-name Contoso.com --cert-issuer-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --cert-is-admin --admin-password PassTest123@ --sku Standard
"""
helps['sf managed-cluster update'] = """
type: command
short-summary: Update a managed cluster.
examples:
- name: Update cluster client port and dns name.
text: >
az sf managed-cluster update -g testRG -c testCluster --client-port 50000 --dns-name testnewdns
"""
helps['sf managed-cluster delete'] = """
type: command
short-summary: Delete a managed cluster.
examples:
- name: Delete cluster.
text: >
az sf managed-cluster delete -g testRG -c testCluster
"""
# Help text for the `az sf managed-cluster client-certificate` group.
# Fixes typo: "manged" -> "managed".
helps['sf managed-cluster client-certificate'] = """
    type: group
    short-summary: Manage client certificates of a managed cluster.
"""
helps['sf managed-cluster client-certificate add'] = """
type: command
short-summary: Add a new client certificate to the managed cluster.
examples:
- name: Add admin client certificate by thumbprint.
text: >
az sf managed-cluster client-certificate add -g testRG -c testCluster --thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX --is-admin
- name: Add non admin client certificate by common name.
text: >
az sf managed-cluster client-certificate add -g testRG -c testCluster --common-name Contoso.com --issuer-thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
"""
helps['sf managed-cluster client-certificate delete'] = """
type: command
short-summary: Delete a client certificate from the managed cluster.
examples:
- name: Delete client certificate by thumbprint.
text: >
az sf managed-cluster client-certificate delete -g testRG -c testCluster --thumbprint XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
- name: Delete client certificate by common name.
text: >
az sf managed-cluster client-certificate delete -g testRG -c testCluster --common-name Contoso.com
"""
helps['sf managed-node-type'] = """
type: group
short-summary: Manage a node type of an Azure Service Fabric managed cluster.
"""
helps['sf managed-node-type show'] = """
type: command
short-summary: Show the properties of a node type.
examples:
- name: Get node type.
text: >
az sf managed-node-type show -g testRG -c testCluster -n pnt
"""
helps['sf managed-node-type list'] = """
type: command
short-summary: List node types of a managed cluster.
examples:
- name: List node types by cluster.
text: >
az sf managed-node-type list -g testRG -c testCluster
"""
# Help text for `az sf managed-node-type create`.
# Fixes copy-paste bug (short-summary said "Delete a managed cluster.") and
# typo "properities" -> "properties".
helps['sf managed-node-type create'] = """
    type: command
    short-summary: Create a node type in a managed cluster.
    examples:
      - name: Create primary node type with 5 nodes.
        text: >
          az sf managed-node-type create -g testRG -c testCluster -n pnt --instance-count 5 --primary
      - name: Create non primary node type with placement properties, capacities and ports.
        text: >
          az sf managed-node-type create -g testRG -c testCluster -n snt --instance-count 5 --placement-property NodeColor=Green SomeProperty=5 --capacity ClientConnections=65536 --app-start-port 20575 --app-end-port 20605 --ephemeral-start-port 20606 --ephemeral-end-port 20861
"""
# Help text for `az sf managed-node-type update`.
# Fixes copy-paste bug: short-summary said "Update a managed cluster." but the
# command updates a node type.
helps['sf managed-node-type update'] = """
    type: command
    short-summary: Update a node type of a managed cluster.
    examples:
      - name: Update the instance count of the node type.
        text: >
          az sf managed-node-type update -g testRG -c testCluster -n snt --instance-count 7
      - name: Update placement properties of the node type. This will overwrite older placement properties if any.
        text: >
          az sf managed-node-type update -g testRG -c testCluster -n snt --placement-property NodeColor=Red SomeProperty=6
"""
helps['sf managed-node-type delete'] = """
type: command
short-summary: Delete node type from a cluster.
examples:
- name: Delete cluster.
text: >
az sf managed-node-type delete -g testRG -c testCluster -n snt
"""
helps['sf managed-node-type node'] = """
type: group
short-summary: Perform operations on nodes of a node type on managed clusters.
"""
helps['sf managed-node-type node restart'] = """
type: command
short-summary: Restart nodes of a node type.
examples:
- name: Restart 2 nodes.
text: >
az sf managed-node-type node restart -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
helps['sf managed-node-type node reimage'] = """
type: command
short-summary: Reimage nodes of a node type.
examples:
- name: Reimage 2 nodes.
text: >
az sf managed-node-type node reimage -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
helps['sf managed-node-type node delete'] = """
type: command
short-summary: Delete nodes of a node type.
examples:
- name: Delete 2 nodes.
text: >
az sf managed-node-type node delete -g testRG -c testCluster -n snt --node-name snt_0 snt_1
"""
# Help text for the `az sf managed-node-type vm-extension` group.
# Fixes typo: "Managed vm extension" -> "Manage vm extensions" (this is a
# management group, matching the phrasing of the sibling groups).
helps['sf managed-node-type vm-extension'] = """
    type: group
    short-summary: Manage vm extensions of a node type on managed clusters.
"""
helps['sf managed-node-type vm-extension add'] = """
type: command
short-summary: Add an extension to the node type.
examples:
- name: Add bg extension.
text: >
az sf managed-node-type vm-extension add -g testRG -c testCluster -n snt --extension-name csetest --publisher Microsoft.Compute --extension-type BGInfo --type-handler-version 2.1 --auto-upgrade-minor-version
"""
# Help text for `az sf managed-node-type vm-extension delete`.
# Fixes wording: an extension is deleted "from", not "to", the node type.
helps['sf managed-node-type vm-extension delete'] = """
    type: command
    short-summary: Delete an extension from the node type.
    examples:
      - name: Delete extension by name.
        text: >
          az sf managed-node-type vm-extension delete -g testRG -c testCluster -n snt --extension-name csetest
"""
# Help text for the `az sf managed-node-type vm-secret` group.
# Fixes typo: "Managed vm secrets" -> "Manage vm secrets".
helps['sf managed-node-type vm-secret'] = """
    type: group
    short-summary: Manage vm secrets of a node type on managed clusters.
"""
helps['sf managed-node-type vm-secret add'] = """
type: command
short-summary: Add a secret to the node type.
examples:
- name: Add certificate to the node type as a secret.
text: >
az sf managed-node-type vm-secret add -g testRG -c testCluster -n snt --source-vault-id /subscriptions/XXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/resourceGroups/testRG/providers/Microsoft.KeyVault/vaults/testkv --certificate-url https://testskv.vault.azure.net:443/secrets/TestCert/xxxxxxxxxxxxxxxxxxxxxxxx --certificate-store my
"""
helps['sf managed-application'] = """
type: group
short-summary: Manage applications running on an Azure Service Fabric managed cluster. Only support ARM deployed applications.
"""
helps['sf managed-application create'] = """
type: command
short-summary: Create a new managed application on an Azure Service Fabric managed cluster.
examples:
- name: Create managed application "testApp" with parameters. The application type "TestAppType" version "v1" should already exist in the cluster, and the application parameters should be defined in the application manifest.
text: >
az sf managed-application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --application-parameters key0=value0 --tags key1=value1
- name: Create application "testApp" and app type version using the package url provided.
text: >
az sf managed-application create -g testRG -c testCluster --application-name testApp --application-type-name TestAppType \\
--application-type-version v1 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg" \\
--application-parameters key0=value0
"""
# Help text for `az sf managed-application update`.
# Fixes: "a Azure" -> "an Azure", typo "upgreade" -> "upgrade", and garbled
# long-summary phrase "value is the application UpgradePolicy".
helps['sf managed-application update'] = """
    type: command
    short-summary: Update an Azure Service Fabric managed application.
    long-summary: This allows for updating the tags, the application parameters, the values in the application UpgradePolicy and/or upgrade the application type version which will trigger an application upgrade.
    examples:
      - name: Update application parameters and upgrade policy values and app type version to v2.
        text: >
          az sf managed-application update -g testRG -c testCluster --application-name testApp --application-type-version v2 \\
          --application-parameters key0=value0 --health-check-stable-duration 0 --health-check-wait-duration 0 --health-check-retry-timeout 0 \\
          --upgrade-domain-timeout 5000 --upgrade-timeout 7000 --failure-action Rollback --upgrade-replica-set-check-timeout 300 --force-restart
      - name: Update managed application service type health policy map.
        text: >
          az sf managed-application update -g testRG -c testCluster --application-name testApp --service-type-health-policy-map \"ServiceTypeName01\"=\"5,10,5\" \"ServiceTypeName02\"=\"5,5,5\"
"""
helps['sf managed-application show'] = """
type: command
short-summary: Show the properties of a managed application on an Azure Service Fabric managed cluster.
examples:
- name: Get managed application.
text: >
az sf managed-application show -g testRG -c testCluster --application-name testApp
"""
helps['sf managed-application list'] = """
type: command
short-summary: List managed applications of a given managed cluster.
examples:
- name: List managed applications for a given managed cluster.
text: >
az sf managed-application list -g testRG -c testCluster
"""
helps['sf managed-application delete'] = """
type: command
short-summary: Delete a managed application.
examples:
- name: Delete managed application.
text: >
az sf managed-application delete -g testRG -c testCluster --application-name testApp
"""
# Help text for the `az sf managed-application-type` group.
# This key was previously assigned twice in a row; the first value was dead
# (immediately overwritten). Only the effective assignment is kept, so the
# rendered help output is unchanged.
helps['sf managed-application-type'] = """
    type: group
    short-summary: Manage application types on an Azure Service Fabric cluster.
"""
helps['sf managed-application-type create'] = """
type: command
short-summary: Create a new managed application type on an Azure Service Fabric managed cluster.
examples:
- name: Create new managed application type.
text: >
az sf managed-application-type create -g testRG -c testCluster --application-type-name testAppType
"""
helps['sf managed-application-type show'] = """
type: command
short-summary: Show the properties of a managed application type on an Azure Service Fabric managed cluster.
examples:
- name: Get managed application type.
text: >
az sf managed-application-type show -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type list'] = """
type: command
short-summary: List managed application types of a given managed cluster.
examples:
- name: List managed application types for a given managed cluster.
text: >
az sf managed-application-type list -g testRG -c testCluster
"""
# Help text for `az sf managed-application-type update`.
# Fixes grammar: "an managed" -> "a managed".
helps['sf managed-application-type update'] = """
    type: command
    short-summary: Update a managed application type.
    long-summary: This allows for updating of application type tags.
    examples:
      - name: Update application type tags.
        text: >
          az sf managed-application-type update -g testRG -c testCluster --application-type-name CalcServiceApp --tags new=tags are=nice
"""
helps['sf managed-application-type delete'] = """
type: command
short-summary: Delete a managed application type.
examples:
- name: Delete managed application type.
text: >
az sf managed-application-type delete -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type version'] = """
type: group
short-summary: Manage application type versions on an Azure Service Fabric managed cluster. Only support ARM deployed application type versions.
"""
helps['sf managed-application-type version create'] = """
type: command
short-summary: Create a new managed application type on an Azure Service Fabric managed cluster.
examples:
- name: Create new managed application type version using the provided package url. The version in the application manifest contained in the package should have the same version as the one specified in --version.
text: >
az sf managed-application-type version create -g testRG -c testCluster --application-type-name testAppType \\
--version 1.0 --package-url "https://sftestapp.blob.core.windows.net/sftestapp/testApp_1.0.sfpkg"
"""
helps['sf managed-application-type version show'] = """
type: command
short-summary: Show the properties of a managed application type version on an Azure Service Fabric managed cluster.
examples:
- name: Show the properties of a managed application type version on an Azure Service Fabric managed cluster.
text: >
az sf managed-application-type version show -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf managed-application-type version list'] = """
type: command
short-summary: List versions of a given managed application type.
examples:
- name: List versions for a particular managed application type.
text: >
az sf managed-application-type version list -g testRG -c testCluster --application-type-name CalcServiceApp
"""
helps['sf managed-application-type version update'] = """
type: command
short-summary: Update a managed application type version.
long-summary: This allows for updating of application type version tags and the package url.
examples:
- name: Update managed application type version.
text: >
az sf managed-application-type version update -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0 --tags new=tags
"""
helps['sf managed-application-type version delete'] = """
type: command
short-summary: Delete a managed application type version.
examples:
- name: Delete managed application type version.
text: >
az sf managed-application-type version delete -g testRG -c testCluster --application-type-name CalcServiceApp --version 1.0
"""
helps['sf managed-service'] = """
type: group
short-summary: Manage services running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service create'] = """
type: command
short-summary: Create a new managed service on an Azure Service Fabric managed cluster.
examples:
- name: Create a new stateless managed service "testService1" with instance count -1 (on all the nodes).
text: >
az sf managed-service create -g testRG -c testCluster --application-name testApp --state stateless --service-name testService \\
--service-type testStateless --instance-count -1 --partition-scheme singleton
- name: Create a new stateful service "testService2" with a target of 5 nodes.
text: >
az sf managed-service create -g testRG -c testCluster --application-name testApp --state stateful --service-name testService2 --has-persisted-state \\
--service-type testStatefulType --min-replica-set-size 3 --target-replica-set-size 5 --partition-scheme uniformint64range --partition-count 1 --low-key 0 --high-key 25
"""
helps['sf managed-service show'] = """
type: command
short-summary: Get a service.
examples:
- name: Show the properties of a managed service on an Azure Service Fabric managed cluster.
text: >
az sf managed-service show -g testRG -c testCluster --application-name testApp --service-name testService
"""
helps['sf managed-service list'] = """
type: command
short-summary: List managed services of a given managed application.
examples:
- name: List managed services.
text: >
az sf managed-service list -g testRG -c testCluster --application-name testApp
"""
helps['sf managed-service update'] = """
type: command
short-summary: Update a managed service.
examples:
- name: Update managed stateless service.
text: >
az sf managed-service update -g testRG -c testCluster --application-name testApp --service-name testService --min-instance-count 2 \\
--min-instance-percentage 20 --instance-close-delay-duration '00:11:00'
- name: Update managed stateful service.
text: >
az sf managed-service update -g testRG -c testCluster --application-name testApp --service-name testService2 --service-placement-time-limit '00:11:00' \\
--stand-by-replica-keep-duration '00:11:00' --replica-restart-wait-duration '00:11:00' --quorum-loss-wait-duration '00:11:00'
"""
helps['sf managed-service delete'] = """
type: command
short-summary: Delete a managed service.
examples:
- name: Delete managed service.
text: >
az sf managed-service delete -g testRG -c testCluster --application-name testApp --service-name testService
"""
helps['sf managed-service correlation-scheme'] = """
type: group
short-summary: Manage correlation schemes of services running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service correlation-scheme create'] = """
type: command
short-summary: Create a new managed service correlation scheme on an Azure Service Fabric managed cluster.
long-summary: Create a new managed service correlation scheme on an Azure Service Fabric managed cluster. NOTE You can only have one service correlation per service.
examples:
- name: Create a new managed service correlation scheme.
text: >
az sf managed-service correlation-scheme create -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2" \\
--scheme AlignedAffinity
"""
helps['sf managed-service correlation-scheme update'] = """
type: command
short-summary: Update a managed service correlation scheme.
examples:
- name: Update managed service correlation scheme.
text: >
az sf managed-service correlation-scheme update -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2" \\
--scheme NonAlignedAffinity
"""
helps['sf managed-service correlation-scheme delete'] = """
type: command
short-summary: Delete a managed service correlation scheme.
examples:
- name: Delete managed service correlation scheme.
text: >
az sf managed-service correlation-scheme delete -g testRG -c testCluster --application-name testApp --service-name testService \\
--correlated-service-name "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/testRg/providers/Microsoft.ServiceFabric/managedclusters/testCluster/applications/testApp/services/testService2"
"""
helps['sf managed-service load-metrics'] = """
type: group
short-summary: Manage service load metrics running on an Azure Service Fabric managed cluster. Only support ARM deployed services.
"""
helps['sf managed-service load-metrics create'] = """
type: command
short-summary: Create a new managed service load metric on an Azure Service Fabric managed cluster.
examples:
- name: Create a new stateless managed service load metric.
text: >
az sf managed-service load-metrics create -g testRG -c testCluster --application-name testApp --service-name testService \\
--metric-name Metric1 --weight Low --default-load 3
- name: Create a new stateful service load metric.
text: >
az sf managed-service load-metrics create -g testRG -c testCluster --application-name testApp --service-name testService2 \\
--metric-name Metric2 --weight High --primary-default-load 3 --secondary-default-load 2
"""
# Help text for `az sf managed-service load-metrics update`.
# Fixes copy-paste bug: short-summary said "Update a managed service." but the
# command updates a service load metric; example names also said "a new".
helps['sf managed-service load-metrics update'] = """
    type: command
    short-summary: Update a managed service load metric.
    examples:
      - name: Update a stateless managed service load metric.
        text: >
          az sf managed-service load-metrics update -g testRG -c testCluster --application-name testApp --service-name testService \\
          --metric-name Metric1 --weight Medium --default-load 5
      - name: Update a stateful service load metric.
        text: >
          az sf managed-service load-metrics update -g testRG -c testCluster --application-name testApp --service-name testService2 \\
          --metric-name Metric2 --weight Low --primary-default-load 2 --secondary-default-load 1
"""
# Help text for `az sf managed-service load-metrics delete`.
# Fixes copy-paste bug: short-summary said "Delete a managed service." but the
# command deletes a service load metric.
helps['sf managed-service load-metrics delete'] = """
    type: command
    short-summary: Delete a managed service load metric.
    examples:
      - name: Delete a managed service load metric.
        text: >
          az sf managed-service load-metrics delete -g testRG -c testCluster --application-name testApp --service-name testService2 \\
          --metric-name Metric1
"""
| true | true |
f71b8100e2c77204d39461f764e255793c25b730 | 1,884 | py | Python | american_gut_project_pipeline/pipeline/metrics.py | mas-dse-ringhilt/DSE-American-Gut-Project | dadb3be8d40d6fb325d26920b145c04c837a6869 | [
"CC-BY-4.0"
] | 1 | 2020-05-02T21:15:21.000Z | 2020-05-02T21:15:21.000Z | american_gut_project_pipeline/pipeline/metrics.py | ringhilterra/DSE-American-Gut-Project | dadb3be8d40d6fb325d26920b145c04c837a6869 | [
"CC-BY-4.0"
] | null | null | null | american_gut_project_pipeline/pipeline/metrics.py | ringhilterra/DSE-American-Gut-Project | dadb3be8d40d6fb325d26920b145c04c837a6869 | [
"CC-BY-4.0"
] | 2 | 2019-06-26T02:07:41.000Z | 2019-07-15T16:28:44.000Z | import pandas as pd
from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
def evaluate(clf, x_train, x_test, y_train, y_test, name, training_data_name, embedding, params=None):
    """Score a fitted classifier on the train and test splits.

    Parameters
    ----------
    clf : fitted estimator exposing ``predict``
    x_train, x_test : feature matrices for the two splits
    y_train, y_test : label vectors for the two splits
    name : str
        Identifier of the model, recorded in the result row.
    training_data_name : str
        Identifier of the training data set, recorded in the result row.
    embedding : str
        Identifier of the feature embedding, recorded in the result row.
    params : optional
        Hyper-parameter description to record alongside the metrics.

    Returns
    -------
    pandas.DataFrame
        A single-row frame with accuracy and weighted F1 for both splits.
    """
    # Dead commented-out confusion-matrix / precision / recall code removed;
    # only accuracy and weighted F1 were ever computed and reported.
    train_pred = clf.predict(x_train)
    train_accuracy = accuracy_score(y_train, train_pred)
    train_f1 = f1_score(y_train, train_pred, average='weighted')

    test_pred = clf.predict(x_test)
    test_accuracy = accuracy_score(y_test, test_pred)
    test_f1 = f1_score(y_test, test_pred, average='weighted')

    # Keys must stay stable: downstream code concatenates these frames.
    result_dict = {
        'name': [name],
        'embedding': [embedding],
        'params': [params],
        'training_data_name': [training_data_name],
        'train_accuracy': [train_accuracy],
        'train_f1_score': [train_f1],
        'test_accuracy': [test_accuracy],
        'test_f1_score': [test_f1],
    }
    return pd.DataFrame(result_dict)
| 41.866667 | 102 | 0.684183 | import pandas as pd
from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
def evaluate(clf, x_train, x_test, y_train, y_test, name, training_data_name, embedding, params=None):
predictions = clf.predict(x_train)
train_accuracy = accuracy_score(y_train, predictions)
train_f1_score = f1_score(y_train, predictions, average='weighted')
predictions = clf.predict(x_test)
test_accuracy = accuracy_score(y_test, predictions)
test_f1_score = f1_score(y_test, predictions, average='weighted')
result_dict = {
'name': [name],
'embedding': [embedding],
'params': [params],
'training_data_name': [training_data_name],
'train_accuracy': [train_accuracy],
'train_f1_score': [train_f1_score],
'test_accuracy': [test_accuracy],
'test_f1_score': [test_f1_score],
}
return pd.DataFrame(result_dict)
| true | true |
f71b824fba4f7bb51835a6ef5657cce8b66fe369 | 112 | py | Python | ABC183/B.py | shimomura314/AtcoderCodes | db1d62a7715f5f1b3c40eceff8d34f0f34839f41 | [
"MIT"
] | null | null | null | ABC183/B.py | shimomura314/AtcoderCodes | db1d62a7715f5f1b3c40eceff8d34f0f34839f41 | [
"MIT"
] | null | null | null | ABC183/B.py | shimomura314/AtcoderCodes | db1d62a7715f5f1b3c40eceff8d34f0f34839f41 | [
"MIT"
] | null | null | null | sx, sy, gx, gy = map(int, input().split())
if sx == gx:
print(sx)
exit()
print(sx + sy*(gx-sx)/(gy+sy))
| 18.666667 | 42 | 0.517857 | sx, sy, gx, gy = map(int, input().split())
if sx == gx:
print(sx)
exit()
print(sx + sy*(gx-sx)/(gy+sy))
| true | true |
f71b826b9f28eb525f8e2cb61594898e1ab461e2 | 685 | py | Python | ted_sws/rml_to_html/resources/query_registry.py | meaningfy-ws/ted-xml-2-rdf | ac26a19f3761b7cf79d79a46be6323b658f067eb | [
"Apache-2.0"
] | 1 | 2022-03-21T12:32:52.000Z | 2022-03-21T12:32:52.000Z | ted_sws/rml_to_html/resources/query_registry.py | meaningfy-ws/ted-xml-2-rdf | ac26a19f3761b7cf79d79a46be6323b658f067eb | [
"Apache-2.0"
] | 24 | 2022-02-10T10:43:56.000Z | 2022-03-29T12:36:21.000Z | ted_sws/rml_to_html/resources/query_registry.py | meaningfy-ws/ted-sws | d1e351eacb2900f84ec7edc457e49d8202fbaff5 | [
"Apache-2.0"
] | null | null | null | from ted_sws.rml_to_html.resources import get_sparql_query
class QueryRegistry:
@property
def TRIPLE_MAP(self):
return get_sparql_query(query_file_name="get_triple_maps.rq")
@property
def LOGICAL_SOURCE(self):
return get_sparql_query(query_file_name="get_logical_source.rq")
@property
def SUBJECT_MAP(self):
return get_sparql_query(query_file_name="get_subject_map.rq")
@property
def PREDICATE_OBJECT_MAP(self):
return get_sparql_query(query_file_name="get_predicate_object_map.rq") \
@property
def TRIPLE_MAP_COMMENT_LABEL(self):
return get_sparql_query(query_file_name="get_label_comment.rq")
| 27.4 | 83 | 0.743066 | from ted_sws.rml_to_html.resources import get_sparql_query
class QueryRegistry:
@property
def TRIPLE_MAP(self):
return get_sparql_query(query_file_name="get_triple_maps.rq")
@property
def LOGICAL_SOURCE(self):
return get_sparql_query(query_file_name="get_logical_source.rq")
@property
def SUBJECT_MAP(self):
return get_sparql_query(query_file_name="get_subject_map.rq")
@property
def PREDICATE_OBJECT_MAP(self):
return get_sparql_query(query_file_name="get_predicate_object_map.rq") \
@property
def TRIPLE_MAP_COMMENT_LABEL(self):
return get_sparql_query(query_file_name="get_label_comment.rq")
| true | true |
f71b8275c618ced19c9930e8361174690cf06e80 | 10,326 | py | Python | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/18-extending_bound_32.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/18-extending_bound_32.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/18-extending_bound_32.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode ``arg0 < arg1`` as ``not (arg0 >= arg1)``."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode ``arg0 >= arg1`` by swapping the operands of ``<=``."""
    return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode ``arg0 > arg1`` as ``not (arg0 <= arg1)``."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode the implication ``arg0 -> arg1`` as ``(not arg0) or arg1``."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
                                                        msat_term, msat_term]:
    """Build the transition system and LTL property for this benchmark.

    Returns the current->next symbol map, the initial-state formula, the
    transition relation and the LTL property ``(G F inc_i) -> ! G F r > i``.
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    bool_type = msat_get_bool_type(menv)
    real_type = msat_get_rational_type(menv)
    # State variables (counter i, reference r, bound l, flag inc_i), each
    # with its primed (next-state) copy x_*.
    i = msat_declare_function(menv, "i", real_type)
    i = msat_make_constant(menv, i)
    r = msat_declare_function(menv, "r", real_type)
    r = msat_make_constant(menv, r)
    l = msat_declare_function(menv, "l", real_type)
    l = msat_make_constant(menv, l)
    inc_i = msat_declare_function(menv, "inc_i", bool_type)
    inc_i = msat_make_constant(menv, inc_i)
    x_i = msat_declare_function(menv, name_next("i"), real_type)
    x_i = msat_make_constant(menv, x_i)
    x_r = msat_declare_function(menv, name_next("r"), real_type)
    x_r = msat_make_constant(menv, x_r)
    x_l = msat_declare_function(menv, name_next("l"), real_type)
    x_l = msat_make_constant(menv, x_l)
    x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
    x_inc_i = msat_make_constant(menv, x_inc_i)
    curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
    zero = msat_make_number(menv, "0")
    one = msat_make_number(menv, "1")
    r_gt_0 = msat_make_gt(menv, r, zero)
    r_lt_l = msat_make_lt(menv, r, l)
    i_geq_0 = msat_make_geq(menv, i, zero)
    # init: 0 < r < l  &  i >= 0  &  !inc_i  &  l > 0
    init = msat_make_and(menv, r_gt_0, r_lt_l)
    init = msat_make_and(menv, init,
                         msat_make_and(menv, i_geq_0,
                                       msat_make_not(menv, inc_i)))
    init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
    # r' = r
    trans = msat_make_equal(menv, x_r, r)
    # i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
    i_lt_l = msat_make_lt(menv, i, l)
    x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
                                 msat_make_equal(menv, x_i,
                                                 msat_make_plus(menv, i, one)))
    x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
                             msat_make_equal(menv, x_i, i))
    x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
    x_l_eq_l = msat_make_equal(menv, x_l, l)
    x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
                                                   x_l_eq_l)
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_lt_l,
                                         x_i_eq_i_p_1_or_i_and_x_l_eq_l))
    # i >= l -> i' = 0 & l' = l + 1 & !inc_i'
    i_geq_l = msat_make_geq(menv, i, l)
    x_i_eq_0 = msat_make_equal(menv, x_i, zero)
    x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
    x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
                                              msat_make_and(menv, x_i_eq_0,
                                                            x_l_eq_l_p_1),
                                              msat_make_not(menv, x_inc_i))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_geq_l,
                                         x_i_eq_0_and_x_l_eq_l_p_1))
    # LTL property: (G F inc_i) -> ! G F r > i
    G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
    r_gt_i = msat_make_gt(menv, r, i)
    n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
    ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
    return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Return the set of candidate loop hints for this model.

    Several Hint variants (1, 2 and 3 locations) are produced for each of
    the symbols {i, r, l, inc_i}; the benchmark deliberately includes
    "wrong" hints the search must discard.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    i = mgr.Symbol("i", types.REAL)
    r = mgr.Symbol("r", types.REAL)
    l = mgr.Symbol("l", types.REAL)
    inc_i = mgr.Symbol("inc_i", types.BOOL)
    symbs = frozenset([i, r, l, inc_i])
    x_i = symb_to_next(mgr, i)
    x_r = symb_to_next(mgr, r)
    x_l = symb_to_next(mgr, l)
    x_inc_i = symb_to_next(mgr, inc_i)
    res = []
    n0 = mgr.Real(0)
    n1 = mgr.Real(1)
    # --- single-location hints: each variable non-negative and increasing ---
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
    h_i = Hint("h_i0", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    loc = Location(env, mgr.GE(r, n0))
    loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r0", env, frozenset([r]), symbs)
    h_r.set_locs([loc])
    res.append(h_r)
    loc = Location(env, mgr.GE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
    h_l = Hint("h_l0", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)
    loc = Location(env, inc_i)
    loc.set_progress(0, x_inc_i)
    h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)
    # --- single-location hints: non-positive and decreasing variants ---
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
    h_i = Hint("h_i1", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    loc = Location(env, mgr.LE(r, n0))
    loc.set_progress(0, mgr.Equals(x_r, mgr.Minus(r, n1)))
    h_r = Hint("h_r1", env, frozenset([r]), symbs)
    h_r.set_locs([loc])
    res.append(h_r)
    loc = Location(env, mgr.LE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
    h_l = Hint("h_l1", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)
    loc = Location(env, mgr.Not(inc_i))
    loc.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)
    # --- two-location hints ---
    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r2", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i)
    loc1.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1])
    res.append(h_inc)
    loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
                    stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i3", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1])
    res.append(h_i)
    loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
                    stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r3", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)
    loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
                    stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l3", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i, stutterT=x_inc_i)
    loc1.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1])
    res.append(h_inc)
    # --- three-location hints ---
    loc0 = Location(env, mgr.GE(i, n0))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(2, mgr.Equals(x_i, i))
    loc2 = Location(env, mgr.GE(i, n0))
    loc2.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i4", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1, loc2])
    res.append(h_i)
    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
    loc2 = Location(env, mgr.GE(r, n0))
    loc2.set_progress(0, mgr.Equals(x_r, r))
    h_r = Hint("h_r4", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1, loc2])
    res.append(h_r)
    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(2, mgr.Equals(x_l, l))
    loc2 = Location(env, mgr.GE(l, n0))
    loc2.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l4", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1, loc2])
    res.append(h_l)
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i, stutterT=x_inc_i)
    loc1.set_progress(2, mgr.Not(x_inc_i))
    loc2 = Location(env, mgr.Not(inc_i))
    loc2.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1, loc2])
    res.append(h_inc)
    return frozenset(res)
| 35.242321 | 89 | 0.62454 | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
msat_term, msat_term]:
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
bool_type = msat_get_bool_type(menv)
real_type = msat_get_rational_type(menv)
i = msat_declare_function(menv, "i", real_type)
i = msat_make_constant(menv, i)
r = msat_declare_function(menv, "r", real_type)
r = msat_make_constant(menv, r)
l = msat_declare_function(menv, "l", real_type)
l = msat_make_constant(menv, l)
inc_i = msat_declare_function(menv, "inc_i", bool_type)
inc_i = msat_make_constant(menv, inc_i)
x_i = msat_declare_function(menv, name_next("i"), real_type)
x_i = msat_make_constant(menv, x_i)
x_r = msat_declare_function(menv, name_next("r"), real_type)
x_r = msat_make_constant(menv, x_r)
x_l = msat_declare_function(menv, name_next("l"), real_type)
x_l = msat_make_constant(menv, x_l)
x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
x_inc_i = msat_make_constant(menv, x_inc_i)
curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
r_gt_0 = msat_make_gt(menv, r, zero)
r_lt_l = msat_make_lt(menv, r, l)
i_geq_0 = msat_make_geq(menv, i, zero)
init = msat_make_and(menv, r_gt_0, r_lt_l)
init = msat_make_and(menv, init,
msat_make_and(menv, i_geq_0,
msat_make_not(menv, inc_i)))
init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
trans = msat_make_equal(menv, x_r, r)
# i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
i_lt_l = msat_make_lt(menv, i, l)
x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
msat_make_equal(menv, x_i,
msat_make_plus(menv, i, one)))
x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
msat_make_equal(menv, x_i, i))
x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
x_l_eq_l = msat_make_equal(menv, x_l, l)
x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
x_l_eq_l)
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_lt_l,
x_i_eq_i_p_1_or_i_and_x_l_eq_l))
i_geq_l = msat_make_geq(menv, i, l)
x_i_eq_0 = msat_make_equal(menv, x_i, zero)
x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
msat_make_and(menv, x_i_eq_0,
x_l_eq_l_p_1),
msat_make_not(menv, x_inc_i))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, i_geq_l,
x_i_eq_0_and_x_l_eq_l_p_1))
# (G F inc_i) -> ! G F r > i
G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
r_gt_i = msat_make_gt(menv, r, i)
n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
i = mgr.Symbol("i", types.REAL)
r = mgr.Symbol("r", types.REAL)
l = mgr.Symbol("l", types.REAL)
inc_i = mgr.Symbol("inc_i", types.BOOL)
symbs = frozenset([i, r, l, inc_i])
x_i = symb_to_next(mgr, i)
x_r = symb_to_next(mgr, r)
x_l = symb_to_next(mgr, l)
x_inc_i = symb_to_next(mgr, inc_i)
res = []
n0 = mgr.Real(0)
n1 = mgr.Real(1)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
h_i = Hint("h_i0", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, mgr.GE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r0", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.GE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
h_l = Hint("h_l0", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, inc_i)
loc.set_progress(0, x_inc_i)
h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
h_i = Hint("h_i1", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, mgr.LE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Minus(r, n1)))
h_r = Hint("h_r1", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.LE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
h_l = Hint("h_l1", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc = Location(env, mgr.Not(inc_i))
loc.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
loc0 = Location(env, mgr.GE(r, n0))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r2", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1])
res.append(h_r)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i3", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r3", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l3", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(2, mgr.Equals(x_i, i))
loc2 = Location(env, mgr.GE(i, n0))
loc2.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i4", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1, loc2])
res.append(h_i)
loc0 = Location(env, mgr.GE(r, n0))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
loc2 = Location(env, mgr.GE(r, n0))
loc2.set_progress(0, mgr.Equals(x_r, r))
h_r = Hint("h_r4", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1, loc2])
res.append(h_r)
loc0 = Location(env, mgr.GE(l, n0))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(2, mgr.Equals(x_l, l))
loc2 = Location(env, mgr.GE(l, n0))
loc2.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l4", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1, loc2])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(2, mgr.Not(x_inc_i))
loc2 = Location(env, mgr.Not(inc_i))
loc2.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1, loc2])
res.append(h_inc)
return frozenset(res)
| true | true |
f71b839ea462d9355fe88e220fc9c5b89f52ab5a | 827 | py | Python | virtex/serial/__init__.py | chrislarson1/virtex | 36eb47d1ace297951cae36edc8a00544b85fed79 | [
"Apache-2.0"
] | 5 | 2020-06-17T06:22:32.000Z | 2022-03-04T09:25:31.000Z | virtex/serial/__init__.py | virtexlabs/virtex | 36eb47d1ace297951cae36edc8a00544b85fed79 | [
"Apache-2.0"
] | null | null | null | virtex/serial/__init__.py | virtexlabs/virtex | 36eb47d1ace297951cae36edc8a00544b85fed79 | [
"Apache-2.0"
] | null | null | null | # -------------------------------------------------------------------
# Copyright 2021 Virtex authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# -------------------------------------------------------------------
from .bytes import *
from .pillow import *
from .pickle import *
from .numpy import *
| 39.380952 | 69 | 0.629988 |
from .bytes import *
from .pillow import *
from .pickle import *
from .numpy import *
| true | true |
f71b83f83a3906286de03127e55e5392ecaea99d | 1,613 | py | Python | ishuhui/__init__.py | lawnight/flask_ishuhui | be42684a4cf461aaccd691fc61548450869abc17 | [
"MIT"
] | null | null | null | ishuhui/__init__.py | lawnight/flask_ishuhui | be42684a4cf461aaccd691fc61548450869abc17 | [
"MIT"
] | null | null | null | ishuhui/__init__.py | lawnight/flask_ishuhui | be42684a4cf461aaccd691fc61548450869abc17 | [
"MIT"
] | 1 | 2021-05-20T10:19:19.000Z | 2021-05-20T10:19:19.000Z | from flask import Flask
from . import csrf
import ishuhui.data as data
import env
from flask_assets import Environment, Bundle
def create_app(config, should_register_blueprints=True):
    """Application factory: build and configure the Flask app.

    :param config: object (or import path) holding Flask config attributes.
    :param should_register_blueprints: when False, skip blueprint
        registration (useful for tests that wire their own blueprints).
    :return: the configured Flask instance.
    """
    app = Flask(__name__,static_folder = env.ASSETS,static_url_path='/assets')
    # Register the JS/CSS bundle under the name 'assets'.
    assets = Environment(app)
    js = Bundle('app.js','style.css')
    assets.register('assets',js)
    app.config.from_object(config)
    # Optional overrides from the file named by $FLASKR_SETTINGS.
    app.config.from_envvar('FLASKR_SETTINGS', silent=True)
    # Imported inside the factory -- presumably to avoid circular imports.
    from ishuhui.extensions.loginmanger import login_manager
    from ishuhui.extensions.flasksqlalchemy import db
    login_manager.setup_app(app)
    db.init_app(app)
    csrf.init(app)
    from ishuhui.logger import init_logger
    init_logger(app)
    if should_register_blueprints:
        register_blueprints(app)
    # Create tables and (re)seed demo data on every startup.
    with app.app_context():
        db.create_all()
        fake_db()
    return app
def fake_db():
    """Reset the comics table and reseed it from ``env.COMICS``."""
    from ishuhui.extensions.flasksqlalchemy import db

    # Wipe existing rows, then create one record per configured comic.
    data.Comic.query.delete()
    for entry in env.COMICS:
        record = data.Comic()
        record.title = entry['title']
        record.description = entry['description']
        record.classify_id = entry['classify_id']
        db.session.add(record)
    db.session.commit()
def register_blueprints(app):
    """Attach every controller blueprint to *app*."""
    from ishuhui.controllers.comic import bp_comic
    app.register_blueprint(bp_comic)

    from ishuhui.controllers.admin import bp_admin
    app.register_blueprint(bp_admin)

    from ishuhui.controllers.auth import bp_auth
    app.register_blueprint(bp_auth)

    from ishuhui.controllers.error import bp_error
    app.register_blueprint(bp_error)
| 26.016129 | 78 | 0.716677 | from flask import Flask
from . import csrf
import ishuhui.data as data
import env
from flask_assets import Environment, Bundle
def create_app(config, should_register_blueprints=True):
app = Flask(__name__,static_folder = env.ASSETS,static_url_path='/assets')
assets = Environment(app)
js = Bundle('app.js','style.css')
assets.register('assets',js)
app.config.from_object(config)
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
from ishuhui.extensions.loginmanger import login_manager
from ishuhui.extensions.flasksqlalchemy import db
login_manager.setup_app(app)
db.init_app(app)
csrf.init(app)
from ishuhui.logger import init_logger
init_logger(app)
if should_register_blueprints:
register_blueprints(app)
with app.app_context():
db.create_all()
fake_db()
return app
def fake_db():
from ishuhui.extensions.flasksqlalchemy import db
data.Comic.query.delete()
for item in env.COMICS:
comic = data.Comic()
comic.title = item['title']
comic.description = item['description']
comic.classify_id = item['classify_id']
db.session.add(comic)
db.session.commit()
def register_blueprints(app):
from ishuhui.controllers.comic import bp_comic
app.register_blueprint(bp_comic)
from ishuhui.controllers.admin import bp_admin
app.register_blueprint(bp_admin)
from ishuhui.controllers.auth import bp_auth
app.register_blueprint(bp_auth)
from ishuhui.controllers.error import bp_error
app.register_blueprint(bp_error)
| true | true |
f71b8422d45790abd3999a3b0b1534cd72a75c0b | 19,515 | py | Python | prody/chromatin/hic.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 210 | 2015-01-26T08:17:56.000Z | 2022-03-30T01:40:34.000Z | prody/chromatin/hic.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 555 | 2015-01-05T21:51:54.000Z | 2022-03-31T16:51:41.000Z | prody/chromatin/hic.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 99 | 2015-02-09T18:00:39.000Z | 2022-03-07T12:52:51.000Z | from numbers import Integral
from numpy import ma
import numpy as np
from scipy.sparse import coo_matrix
from scipy.stats import mode
from prody.chromatin.norm import VCnorm, SQRTVCnorm, Filenorm
from prody.chromatin.functions import div0, showDomains, _getEigvecs
from prody import PY2K
from prody.dynamics import GNM, MaskedGNM
from prody.dynamics.functions import writeArray
from prody.dynamics.mode import Mode
from prody.dynamics.modeset import ModeSet
from prody.utilities import openFile, importLA, showMatrix, isURL, fixArraySize, makeSymmetric
__all__ = ['HiC', 'parseHiC', 'parseHiCStream', 'parseHiCBinary', 'saveHiC', 'loadHiC', 'writeMap']
class HiC(object):
"""This class is used to store and preprocess Hi-C contact map. A :class:`.GNM`
instance for analyzing the contact map can be also created by using this class.
"""
    def __init__(self, title='Unknown', map=None, bin=None):
        # NOTE: parameters ``map`` and ``bin`` shadow builtins but are part
        # of the public signature and must keep their names.
        # Assignment order matters: the ``map`` property setter (run last)
        # symmetrizes the matrix and rebuilds mask/labels, and may read the
        # attributes initialized above it.
        self._title = title
        self._map = None
        self.mask = False     # False (scalar) until a map defines unmapped loci
        self._labels = 0      # per-locus domain labels; reset by the map setter
        self.masked = True    # serve the trimmed map from the ``map`` property
        self.bin = bin        # bin size -- presumably map resolution; confirm
        self.map = map        # triggers the property setter side effects
@property
def map(self):
if self.masked:
return self.getTrimedMap()
else:
return self._map
    @map.setter
    def map(self, value):
        # Assigning a new map symmetrizes it and refreshes mask and labels.
        if value is None:
            self._map = None
        else:
            self._map = np.asarray(value)
            # Enforce exact symmetry of the contact matrix.
            self._map = makeSymmetric(self._map)
            # Recompute the unmapped-region mask (presumably updates
            # ``self.mask``; the helper's body is defined further below).
            self._maskUnmappedRegions()
            # Reset per-locus domain labels to "no domain".
            self._labels = np.zeros(len(self._map), dtype=int)
def __repr__(self):
mask = self.mask
if np.isscalar(mask):
return '<HiC: {0} ({1} loci)>'.format(self._title, len(self._map))
else:
return '<HiC: {0} ({1} mapped loci; {2} in total)>'.format(self._title, np.count_nonzero(mask), len(self._map))
def __str__(self):
return 'HiC ' + self._title
def __getitem__(self, index):
if isinstance(index, Integral):
return self.map.flatten()[index]
else:
i, j = index
return self.map[i,j]
def __len__(self):
mask = self.mask
if np.isscalar(mask):
return len(self._map)
else:
return np.count_nonzero(mask)
    def numAtoms(self):
        """Returns the number of loci in the current (possibly trimmed) map."""
        return len(self.map)
    def getTitle(self):
        """Returns the title string of the instance."""
        return self._title
    def setTitle(self, title):
        """Sets title of the instance (coerced to ``str``)."""
        self._title = str(title)
    def getCompleteMap(self):
        """Obtains the complete contact map with unmapped regions.

        Returns the raw stored matrix (``None`` if no map has been assigned).
        """
        return self._map
def getTrimedMap(self):
"""Obtains the contact map without unmapped regions."""
if self._map is None:
return None
if np.isscalar(self.mask):
return self._map
M = ma.array(self._map)
M.mask = np.diag(~self.mask)
return ma.compress_rowcols(M)
def align(self, array, axis=None):
if not isinstance(array, np.ndarray):
array = np.array(array)
ret = array = array.copy()
if np.isscalar(self.mask):
return ret
mask = self.mask.copy()
l_full = self.getCompleteMap().shape[0]
l_trim = self.getTrimedMap().shape[0]
if len(array.shape) == 0:
raise ValueError('array cannot be empty')
elif len(array.shape) == 1:
l = array.shape[0]
if l == l_trim:
N = len(mask)
ret = np.zeros(N, dtype=array.dtype)
ret[mask] = array
elif l == l_full:
ret = array[mask]
else:
raise ValueError('The length of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(l, l_full, l_trim))
elif len(array.shape) == 2:
s = array.shape
if axis is None:
if s[0] != s[1]:
raise ValueError('The array must be a square matrix '
'if axis is set to None.')
if s[0] == l_trim:
N = len(mask)
whole_mat = np.zeros((N,N), dtype=array.dtype)
mask = np.outer(mask, mask)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[0] == l_full:
M = ma.array(array)
M.mask = np.diag(mask)
ret = ma.compress_rowcols(M)
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(s[0], l_full, l_trim))
else:
new_shape = list(s)
otheraxis = 0 if axis!=0 else 1
if s[axis] == l_trim:
N = len(mask)
new_shape[axis] = N
whole_mat = np.zeros(new_shape)
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis], axis=otheraxis)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[axis] == l_full:
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis])
ret = self._map[mask]
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(s[0], l_full, l_trim))
return ret
def getKirchhoff(self):
"""Builds a Kirchhoff matrix based on the contact map."""
if self._map is None:
return None
else:
M = self.map
I = np.eye(M.shape[0], dtype=bool)
A = M.copy()
A[I] = 0.
D = np.diag(np.sum(A, axis=0))
K = D - A
return K
def _maskUnmappedRegions(self, diag=False):
"""Finds and masks unmapped regions in the contact map."""
M = self._map
if M is None: return
if diag:
# Obtain the diagonal values, need to make sure d is an array
# instead of a matrix, otherwise diag() later will not work as
# intended.
d = np.array(np.diag(M))
else:
d = np.array(M.sum(0))
# mask if a diagonal value is zero
mask_zero = np.array(d==0)
# mask if a diagonal value is NAN
mask_nan = np.isnan(d)
# combine two masks
mask = np.logical_or(mask_nan, mask_zero)
self.mask = ~mask
return self.mask
def calcGNM(self, n_modes=None, **kwargs):
"""Calculates GNM on the current Hi-C map. By default, ``n_modes`` is
set to **None** and ``zeros`` to **True**."""
if 'zeros' not in kwargs:
kwargs['zeros'] = True
if self.masked:
gnm = MaskedGNM(self._title, self.mask)
else:
gnm = GNM(self._title)
gnm.setKirchhoff(self.getKirchhoff())
gnm.calcModes(n_modes=n_modes, **kwargs)
return gnm
def normalize(self, method=VCnorm, **kwargs):
"""Applies chosen normalization on the current Hi-C map."""
M = self._map
N = method(M, **kwargs)
self.map = N
return N
def setDomains(self, labels, **kwargs):
"""Uses spectral clustering to identify structural domains on the chromosome.
:arg labels: domain labels
:type labels: :class:`~numpy.ndarray`, list
:arg method: Label assignment algorithm used after Laplacian embedding.
:type method: func
"""
wastrimmed = self.masked
self.masked = True
if len(labels) == self.numAtoms():
full_length = self.numAtoms()
if full_length != len(labels):
_labels = np.empty(full_length)
_labels.fill(np.nan)
_labels[self.mask] = labels
currlbl = labels[0]
for i in range(len(_labels)):
l = _labels[i]
if np.isnan(l):
_labels[i] = currlbl
elif currlbl != l:
currlbl = l
labels = _labels
else:
self.masked = False
if len(labels) != self.numAtoms():
raise ValueError('The length of the labels should match either the length '
'of masked or complete Hi-C map. Turn off "masked" if '
'you intended to set the labels to the full map.')
self.masked = wastrimmed
self._labels = labels
return self.getDomains()
def getDomains(self):
"""Returns an 1D :class:`numpy.ndarray` whose length is the number of loci. Each
element is an index denotes to which domain the locus belongs."""
lbl = self._labels
mask = self.mask
if self.masked:
lbl = lbl[mask]
return lbl
def getDomainList(self):
"""Returns a list of domain separations. The list has two columns: the first is for
the domain starts and the second is for the domain ends."""
indicators = np.diff(self.getDomains())
indicators = np.append(1., indicators)
indicators[-1] = 1
sites = np.where(indicators != 0)[0]
starts = sites[:-1]
ends = sites[1:]
domains = np.array([starts, ends]).T
return domains
def view(self, spec='p', **kwargs):
"""Visualization of the Hi-C map and domains (if present). The function makes use
of :func:`.showMatrix`.
:arg spec: a string specifies how to preprocess the matrix. Blank for no preprocessing,
'p' for showing only data from *p*-th to *100-p*-th percentile. '_' is to suppress
creating a new figure and paint to the current one instead. The letter specifications
can be applied sequentially, e.g. 'p_'.
:type spec: str
:arg p: specifies the percentile threshold.
:type p: double
"""
dm_kwargs = {}
keys = list(kwargs.keys())
for k in keys:
if k.startswith('dm_'):
dm_kwargs[k[3:]] = kwargs.pop(k)
elif k.startswith('domain_'):
dm_kwargs[k[7:]] = kwargs.pop(k)
M = self.map
if 'p' in spec:
p = kwargs.pop('p', 5)
lp = kwargs.pop('lp', p)
hp = kwargs.pop('hp', 100-p)
vmin = np.percentile(M, lp)
vmax = np.percentile(M, hp)
else:
vmin = vmax = None
if not 'vmin' in kwargs:
kwargs['vmin'] = vmin
if not 'vmax' in kwargs:
kwargs['vmax'] = vmax
im = showMatrix(M, **kwargs)
domains = self.getDomainList()
if len(domains) > 1:
showDomains(domains, **dm_kwargs)
return im
def copy(self):
new = type(self)()
new.__dict__.update(self.__dict__)
return new
__copy__ = copy
def parseHiC(filename, **kwargs):
    """Returns an :class:`.HiC` from a Hi-C data file.

    This function extends :func:`.parseHiCStream`.

    :arg filename: the filename to the Hi-C data file.
    :type filename: str
    """
    import os, struct

    # Pop *title* so it is never left in kwargs while also being passed
    # explicitly below (title=None used to trigger a duplicate-keyword
    # TypeError); fall back to the file's basename when not provided.
    title = kwargs.pop('title', None)
    if title is None:
        title = os.path.basename(filename)

    if isURL(filename):
        # URLs are routed to the binary (.hic) parser.
        M, res = parseHiCBinary(filename, title=title, **kwargs)
    else:
        # Sniff the 3-byte magic number to distinguish binary .hic files
        # from delimited text formats.
        with open(filename, 'rb') as req:
            magic_number = struct.unpack('<3s', req.read(3))[0]
        if magic_number == b"HIC":
            M, res = parseHiCBinary(filename, title=title, **kwargs)
        else:
            with open(filename, 'r') as filestream:
                M, res = parseHiCStream(filestream, title=title, **kwargs)

    hic = HiC(title=title, map=M, bin=res)
    return hic
def _sparse2dense(I, J, values, bin=None):
I = np.asarray(I, dtype=int)
J = np.asarray(J, dtype=int)
values = np.asarray(values, dtype=float)
# determine the bin size by the most frequent interval
if bin is None:
loci = np.unique(np.sort(I))
bins = np.diff(loci)
bin = mode(bins)[0][0]
# convert coordinate from basepair to locus index
bin = int(bin)
I = I // bin
J = J // bin
# make sure that the matrix is square
# if np.max(I) != np.max(J):
# b = np.max(np.append(I, J))
# I = np.append(I, b)
# J = np.append(J, b)
# values = np.append(values, 0.)
# Convert to sparse matrix format, then full matrix format
# and finally array type. Matrix format is avoided because
# diag() won't work as intended for Matrix instances.
M = np.array(coo_matrix((values, (I, J))).todense())
return M, bin
def parseHiCStream(stream, **kwargs):
    """Returns a contact matrix and resolution parsed from a stream of
    Hi-C data lines.

    :arg stream: Anything that implements the method ``read``, ``seek``
        (e.g. :class:`file`, buffer, stdin)
    """
    import csv

    issparse = kwargs.get('sparse', None)

    # Let the csv module guess the delimiter from a sample, then rewind.
    dialect = csv.Sniffer().sniff(stream.read(1024))
    stream.seek(0)
    reader = csv.reader(stream, dialect)

    # Parse every cell as a double-precision number.
    rows = [[np.double(cell) for cell in line] for line in reader]
    D = np.array(rows)

    res = kwargs.get('bin', None)
    if res is not None:
        res = int(res)

    size = D.shape
    if len(D.shape) <= 1:
        raise ValueError("cannot parse the file: input file only contains one column.")
    if issparse is None:
        # Three columns conventionally mean (i, j, value) triplets.
        issparse = size[1] == 3

    if issparse:
        try:
            I, J, values = D.T[:3]
        except ValueError:
            raise ValueError('the sparse matrix format should have three columns')
        M, res = _sparse2dense(I, J, values, bin=res)
    else:
        # Dense input: the parsed table is the contact map itself.
        M = D
    return M, res
def parseHiCBinary(filename, **kwargs):
    """Parses a contact matrix from a binary .hic file using straw.

    Requires ``chrom`` (or ``chrom1``/``chrom2``) and a bin size given as
    ``bin`` or ``binsize``; ``norm`` defaults to ``'NONE'`` and ``unit``
    to ``'BP'``. Returns the dense matrix and the resolution.
    """
    chrloc = kwargs.get('chrom', None)
    if chrloc is None:
        raise ValueError('chrom needs to be specified when parsing .hic format')
    chrloc1 = kwargs.get('chrom1', chrloc)
    chrloc2 = kwargs.get('chrom2', chrloc)

    norm = kwargs.get('norm', 'NONE')
    unit = kwargs.get('unit', 'BP')

    # 'bin' takes precedence over 'binsize' when both are supplied.
    res = kwargs.get('bin', kwargs.get('binsize', None))
    if res is None:
        raise ValueError('bin needs to be specified when parsing .hic format')
    res = int(res)

    from .straw import straw
    result = straw(norm, filename, chrloc1, chrloc2, unit, res)
    return _sparse2dense(*result, bin=res)
def writeMap(filename, map, bin=None, format='%f'):
    """Writes *map* to the file designated by *filename*.

    :arg filename: the file to be written.
    :type filename: str

    :arg map: a Hi-C contact map.
    :type map: :class:`numpy.ndarray`

    :arg bin: bin size of the *map*. If bin is `None`, *map* will be
        written in full matrix format.
    :type bin: int

    :arg format: output format for map elements.
    :type format: str
    """

    assert isinstance(map, np.ndarray), 'map must be a numpy.ndarray.'

    if bin is None:
        # Full (dense) matrix output.
        return writeArray(filename, map, format=format)
    else:
        # Sparse triplet output: one (i*bin, j*bin, value) row per upper-
        # triangle entry (diagonal included). numpy.triu_indices replaces
        # the original quadratic Python double loop.
        m, n = map.shape
        i_idx, j_idx = np.triu_indices(m, m=n)
        spmat = np.column_stack((i_idx * bin, j_idx * bin, map[i_idx, j_idx]))
        fmt = ['%d', '%d', format]
        return writeArray(filename, spmat, format=fmt)
def saveHiC(hic, filename=None, map=True, **kwargs):
    """Saves *HiC* model data as :file:`filename.hic.npz`. When *map* is
    **False** the Hi-C contact map is dropped from the archive, so it must be
    re-loaded from the raw data file later. If *filename* is **None**, name
    of the Hi-C instance will be used as the filename, after ``" "`` (white
    spaces) in the name are replaced with ``"_"`` (underscores). Upon
    successful completion of saving, filename is returned. This function
    makes use of :func:`numpy.savez` function."""

    assert isinstance(hic, HiC), 'hic must be a HiC instance.'

    # Derive the filename from the title when none is given.
    name = hic.getTitle().replace(' ', '_') if filename is None else filename
    if name.endswith('.hic'):
        name += '.npz'
    elif not name.endswith('.hic.npz'):
        name += '.hic.npz'

    # Persist the whole attribute dict, optionally minus the contact map.
    state = dict(hic.__dict__)
    if not map:
        del state['_map']

    ostream = openFile(name, 'wb', **kwargs)
    np.savez_compressed(ostream, **state)
    ostream.close()

    return name
def loadHiC(filename):
    """Returns HiC instance after loading it from file (*filename*).

    This function makes use of :func:`numpy.load` function. See also
    :func:`saveHiC`."""

    attr_dict = np.load(filename)
    hic = HiC()

    keys = attr_dict.keys()

    for k in keys:
        val = attr_dict[k]
        # 0-d arrays are unwrapped back to Python scalars. ndarray.item()
        # replaces np.asscalar, which was removed in NumPy 1.23.
        if len(val.shape) == 0:
            val = val.item()
        setattr(hic, k, val)
    return hic
def saveHiC_h5(hic, filename=None, **kwargs):
    """Saves *HiC* model data as :file:`filename.hic.h5`. If *filename* is
    **None**, name of the Hi-C instance will be used as
    the filename, after ``" "`` (white spaces) in the name are replaced with
    ``"_"`` (underscores). Upon successful completion of saving, filename is
    returned. This function requires :mod:`h5py`."""

    try:
        import h5py
    except ImportError:
        raise ImportError('h5py needs to be installed for using this function')

    assert isinstance(hic, HiC), 'hic must be a HiC instance.'

    if filename is None:
        filename = hic.getTitle().replace(' ', '_')

    # Bug fix: appending '.hic' again produced names like 'title.hic.hic';
    # the intended extension is '.hic.h5' (cf. saveHiC's '.hic.npz').
    if filename.endswith('.hic'):
        filename += '.h5'
    elif not filename.endswith('.hic.h5'):
        filename += '.hic.h5'

    attr_dict = hic.__dict__.copy()
    with h5py.File(filename, 'w') as f:
        for key in attr_dict:
            value = attr_dict[key]
            # Scalar datasets cannot be gzip-compressed by h5py.
            compression = None if np.isscalar(value) else 'gzip'
            f.create_dataset(key, data=value, compression=compression)

    return filename
def loadHiC_h5(filename):
    """Returns HiC instance after loading it from an HDF5 file (*filename*).

    Requires :mod:`h5py`. See also :func:`saveHiC_h5`."""

    try:
        import h5py
    except:
        raise ImportError('h5py needs to be installed for using this function')

    hic = HiC()
    with h5py.File(filename, 'r') as f:
        for key in f.keys():
            dset = f[key]
            try:
                # Array datasets support slice reads...
                value = dset[:]
            except:
                # ...while scalar datasets need the empty-tuple index.
                value = dset[()]
            setattr(hic, key, value)

    return hic
| 31.887255 | 123 | 0.544146 | from numbers import Integral
from numpy import ma
import numpy as np
from scipy.sparse import coo_matrix
from scipy.stats import mode
from prody.chromatin.norm import VCnorm, SQRTVCnorm, Filenorm
from prody.chromatin.functions import div0, showDomains, _getEigvecs
from prody import PY2K
from prody.dynamics import GNM, MaskedGNM
from prody.dynamics.functions import writeArray
from prody.dynamics.mode import Mode
from prody.dynamics.modeset import ModeSet
from prody.utilities import openFile, importLA, showMatrix, isURL, fixArraySize, makeSymmetric
__all__ = ['HiC', 'parseHiC', 'parseHiCStream', 'parseHiCBinary', 'saveHiC', 'loadHiC', 'writeMap']
class HiC(object):
def __init__(self, title='Unknown', map=None, bin=None):
self._title = title
self._map = None
self.mask = False
self._labels = 0
self.masked = True
self.bin = bin
self.map = map
@property
def map(self):
if self.masked:
return self.getTrimedMap()
else:
return self._map
@map.setter
def map(self, value):
if value is None:
self._map = None
else:
self._map = np.asarray(value)
self._map = makeSymmetric(self._map)
self._maskUnmappedRegions()
self._labels = np.zeros(len(self._map), dtype=int)
def __repr__(self):
mask = self.mask
if np.isscalar(mask):
return '<HiC: {0} ({1} loci)>'.format(self._title, len(self._map))
else:
return '<HiC: {0} ({1} mapped loci; {2} in total)>'.format(self._title, np.count_nonzero(mask), len(self._map))
def __str__(self):
return 'HiC ' + self._title
def __getitem__(self, index):
if isinstance(index, Integral):
return self.map.flatten()[index]
else:
i, j = index
return self.map[i,j]
def __len__(self):
mask = self.mask
if np.isscalar(mask):
return len(self._map)
else:
return np.count_nonzero(mask)
def numAtoms(self):
return len(self.map)
def getTitle(self):
return self._title
def setTitle(self, title):
self._title = str(title)
def getCompleteMap(self):
return self._map
def getTrimedMap(self):
if self._map is None:
return None
if np.isscalar(self.mask):
return self._map
M = ma.array(self._map)
M.mask = np.diag(~self.mask)
return ma.compress_rowcols(M)
def align(self, array, axis=None):
if not isinstance(array, np.ndarray):
array = np.array(array)
ret = array = array.copy()
if np.isscalar(self.mask):
return ret
mask = self.mask.copy()
l_full = self.getCompleteMap().shape[0]
l_trim = self.getTrimedMap().shape[0]
if len(array.shape) == 0:
raise ValueError('array cannot be empty')
elif len(array.shape) == 1:
l = array.shape[0]
if l == l_trim:
N = len(mask)
ret = np.zeros(N, dtype=array.dtype)
ret[mask] = array
elif l == l_full:
ret = array[mask]
else:
raise ValueError('The length of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(l, l_full, l_trim))
elif len(array.shape) == 2:
s = array.shape
if axis is None:
if s[0] != s[1]:
raise ValueError('The array must be a square matrix '
'if axis is set to None.')
if s[0] == l_trim:
N = len(mask)
whole_mat = np.zeros((N,N), dtype=array.dtype)
mask = np.outer(mask, mask)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[0] == l_full:
M = ma.array(array)
M.mask = np.diag(mask)
ret = ma.compress_rowcols(M)
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(s[0], l_full, l_trim))
else:
new_shape = list(s)
otheraxis = 0 if axis!=0 else 1
if s[axis] == l_trim:
N = len(mask)
new_shape[axis] = N
whole_mat = np.zeros(new_shape)
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis], axis=otheraxis)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[axis] == l_full:
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis])
ret = self._map[mask]
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
'or trimed (%d).'
%(s[0], l_full, l_trim))
return ret
def getKirchhoff(self):
if self._map is None:
return None
else:
M = self.map
I = np.eye(M.shape[0], dtype=bool)
A = M.copy()
A[I] = 0.
D = np.diag(np.sum(A, axis=0))
K = D - A
return K
def _maskUnmappedRegions(self, diag=False):
M = self._map
if M is None: return
if diag:
d = np.array(np.diag(M))
else:
d = np.array(M.sum(0))
mask_zero = np.array(d==0)
mask_nan = np.isnan(d)
mask = np.logical_or(mask_nan, mask_zero)
self.mask = ~mask
return self.mask
def calcGNM(self, n_modes=None, **kwargs):
if 'zeros' not in kwargs:
kwargs['zeros'] = True
if self.masked:
gnm = MaskedGNM(self._title, self.mask)
else:
gnm = GNM(self._title)
gnm.setKirchhoff(self.getKirchhoff())
gnm.calcModes(n_modes=n_modes, **kwargs)
return gnm
def normalize(self, method=VCnorm, **kwargs):
M = self._map
N = method(M, **kwargs)
self.map = N
return N
def setDomains(self, labels, **kwargs):
wastrimmed = self.masked
self.masked = True
if len(labels) == self.numAtoms():
full_length = self.numAtoms()
if full_length != len(labels):
_labels = np.empty(full_length)
_labels.fill(np.nan)
_labels[self.mask] = labels
currlbl = labels[0]
for i in range(len(_labels)):
l = _labels[i]
if np.isnan(l):
_labels[i] = currlbl
elif currlbl != l:
currlbl = l
labels = _labels
else:
self.masked = False
if len(labels) != self.numAtoms():
raise ValueError('The length of the labels should match either the length '
'of masked or complete Hi-C map. Turn off "masked" if '
'you intended to set the labels to the full map.')
self.masked = wastrimmed
self._labels = labels
return self.getDomains()
def getDomains(self):
lbl = self._labels
mask = self.mask
if self.masked:
lbl = lbl[mask]
return lbl
def getDomainList(self):
indicators = np.diff(self.getDomains())
indicators = np.append(1., indicators)
indicators[-1] = 1
sites = np.where(indicators != 0)[0]
starts = sites[:-1]
ends = sites[1:]
domains = np.array([starts, ends]).T
return domains
def view(self, spec='p', **kwargs):
dm_kwargs = {}
keys = list(kwargs.keys())
for k in keys:
if k.startswith('dm_'):
dm_kwargs[k[3:]] = kwargs.pop(k)
elif k.startswith('domain_'):
dm_kwargs[k[7:]] = kwargs.pop(k)
M = self.map
if 'p' in spec:
p = kwargs.pop('p', 5)
lp = kwargs.pop('lp', p)
hp = kwargs.pop('hp', 100-p)
vmin = np.percentile(M, lp)
vmax = np.percentile(M, hp)
else:
vmin = vmax = None
if not 'vmin' in kwargs:
kwargs['vmin'] = vmin
if not 'vmax' in kwargs:
kwargs['vmax'] = vmax
im = showMatrix(M, **kwargs)
domains = self.getDomainList()
if len(domains) > 1:
showDomains(domains, **dm_kwargs)
return im
def copy(self):
new = type(self)()
new.__dict__.update(self.__dict__)
return new
__copy__ = copy
def parseHiC(filename, **kwargs):
import os, struct
title = kwargs.get('title')
if title is None:
title = os.path.basename(filename)
else:
title = kwargs.pop('title')
if isURL(filename):
M, res = parseHiCBinary(filename, title=title, **kwargs)
else:
with open(filename,'rb') as req:
magic_number = struct.unpack('<3s',req.read(3))[0]
if magic_number == b"HIC":
M, res = parseHiCBinary(filename, title=title, **kwargs)
else:
with open(filename, 'r') as filestream:
M, res = parseHiCStream(filestream, title=title, **kwargs)
hic = HiC(title=title, map=M, bin=res)
return hic
def _sparse2dense(I, J, values, bin=None):
I = np.asarray(I, dtype=int)
J = np.asarray(J, dtype=int)
values = np.asarray(values, dtype=float)
if bin is None:
loci = np.unique(np.sort(I))
bins = np.diff(loci)
bin = mode(bins)[0][0]
bin = int(bin)
I = I // bin
J = J // bin
M = np.array(coo_matrix((values, (I, J))).todense())
return M, bin
def parseHiCStream(stream, **kwargs):
issparse = kwargs.get('sparse', None)
import csv
dialect = csv.Sniffer().sniff(stream.read(1024))
stream.seek(0)
reader = csv.reader(stream, dialect)
D = list()
for row in reader:
d = list()
for element in row:
d.append(np.double(element))
D.append(d)
D = np.array(D)
res = kwargs.get('bin', None)
if res is not None:
res = int(res)
size = D.shape
if len(D.shape) <= 1:
raise ValueError("cannot parse the file: input file only contains one column.")
if issparse is None:
issparse = size[1] == 3
if not issparse:
M = D
else:
try:
I, J, values = D.T[:3]
except ValueError:
raise ValueError('the sparse matrix format should have three columns')
M, res = _sparse2dense(I, J, values, bin=res)
return M, res
def parseHiCBinary(filename, **kwargs):
chrloc = kwargs.get('chrom', None)
if chrloc is None:
raise ValueError('chrom needs to be specified when parsing .hic format')
chrloc1 = kwargs.get('chrom1', chrloc)
chrloc2 = kwargs.get('chrom2', chrloc)
norm = kwargs.get('norm', 'NONE')
unit = kwargs.get('unit', 'BP')
res = kwargs.get('binsize', None)
res = kwargs.get('bin', res)
if res is None:
raise ValueError('bin needs to be specified when parsing .hic format')
res = int(res)
from .straw import straw
result = straw(norm, filename, chrloc1, chrloc2, unit, res)
M, res = _sparse2dense(*result, bin=res)
return M, res
def writeMap(filename, map, bin=None, format='%f'):
assert isinstance(map, np.ndarray), 'map must be a numpy.ndarray.'
if bin is None:
return writeArray(filename, map, format=format)
else:
L = int(map.size - np.diag(map).size)//2 + np.diag(map).size
spmat = np.zeros((L, 3))
m,n = map.shape
l = 0
for i in range(m):
for j in range(i,n):
spmat[l, 0] = i * bin
spmat[l, 1] = j * bin
spmat[l, 2] = map[i, j]
l += 1
fmt = ['%d', '%d', format]
return writeArray(filename, spmat, format=fmt)
def saveHiC(hic, filename=None, map=True, **kwargs):
assert isinstance(hic, HiC), 'hic must be a HiC instance.'
if filename is None:
filename = hic.getTitle().replace(' ', '_')
if filename.endswith('.hic'):
filename += '.npz'
elif not filename.endswith('.hic.npz'):
filename += '.hic.npz'
attr_dict = hic.__dict__.copy()
if not map:
attr_dict.pop('_map')
ostream = openFile(filename, 'wb', **kwargs)
np.savez_compressed(ostream, **attr_dict)
ostream.close()
return filename
def loadHiC(filename):
attr_dict = np.load(filename)
hic = HiC()
keys = attr_dict.keys()
for k in keys:
val = attr_dict[k]
if len(val.shape) == 0:
val = np.asscalar(val)
setattr(hic, k, val)
return hic
def saveHiC_h5(hic, filename=None, **kwargs):
try:
import h5py
except:
raise ImportError('h5py needs to be installed for using this function')
assert isinstance(hic, HiC), 'hic must be a HiC instance.'
if filename is None:
filename = hic.getTitle().replace(' ', '_')
if filename.endswith('.hic'):
filename += '.hic'
elif not filename.endswith('.hic.h5'):
filename += '.hic.h5'
attr_dict = hic.__dict__.copy()
with h5py.File(filename, 'w') as f:
for key in attr_dict:
value = attr_dict[key]
compression = None if np.isscalar(value) else 'gzip'
f.create_dataset(key, data=value, compression=compression)
return filename
def loadHiC_h5(filename):
try:
import h5py
except:
raise ImportError('h5py needs to be installed for using this function')
hic = HiC()
with h5py.File(filename, 'r') as f:
for key in f.keys():
try:
value = f[key][:]
except:
value = f[key][()]
setattr(hic, key, value)
return hic
| true | true |
f71b8454f0e6b786481174f05b105f50d177f810 | 568 | py | Python | tools/leetcode.125.Valid Palindrome/leetcode.125.Valid Palindrome.submission1.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | 4 | 2015-10-10T00:30:55.000Z | 2020-07-27T19:45:54.000Z | tools/leetcode.125.Valid Palindrome/leetcode.125.Valid Palindrome.submission1.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | null | null | null | tools/leetcode.125.Valid Palindrome/leetcode.125.Valid Palindrome.submission1.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | null | null | null | class Solution:
# @param {string} s
# @return {boolean}
def isPalindrome(self, s):
if not s:
return True
start = 0
end = len(s)-1
s = s.lower()
while start < end:
while start < end and not s[start].isalnum():
start += 1
while start < end and not s[end].isalnum():
end -= 1
if s[start] == s[end]:
start += 1
end -= 1
else:
return False
return True | 568 | 568 | 0.399648 | class Solution:
| true | true |
f71b84b71246068431940b564be47b7c900c6b87 | 3,164 | py | Python | gamestonk_terminal/common/quantitative_analysis/rolling_model.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | 1 | 2022-03-15T13:05:40.000Z | 2022-03-15T13:05:40.000Z | gamestonk_terminal/common/quantitative_analysis/rolling_model.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | null | null | null | gamestonk_terminal/common/quantitative_analysis/rolling_model.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | null | null | null | """Rolling Statistics"""
__docformat__ = "numpy"
import logging
from typing import Tuple
import pandas as pd
import pandas_ta as ta
from gamestonk_terminal.decorators import log_start_end
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
def get_rolling_avg(df: pd.DataFrame, length: int) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Return rolling mean and standard deviation

    Parameters
    ----------
    df : pd.DataFrame
        Dataframe of target data
    length : int
        Length of rolling window

    Returns
    -------
    pd.DataFrame :
        Dataframe of rolling mean
    pd.DataFrame :
        Dataframe of rolling standard deviation
    """
    # Centered window; min_periods=1 keeps the edges populated.
    window = df.rolling(length, center=True, min_periods=1)
    return pd.DataFrame(window.mean()), pd.DataFrame(window.std())
@log_start_end(log=logger)
def get_spread(df: pd.DataFrame, length: int) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Rolling standard deviation and variance

    Parameters
    ----------
    df : pd.DataFrame
        DataFrame of targeted data
    length : int
        Length of rolling window

    Returns
    -------
    df_sd : pd.DataFrame
        Dataframe of rolling standard deviation
    df_var : pd.DataFrame
        Dataframe of rolling variance
    """
    # Delegate to pandas_ta; dropna removes the warm-up rows.
    df_sd = ta.stdev(close=df, length=length).dropna()
    df_var = ta.variance(close=df, length=length).dropna()
    return pd.DataFrame(df_sd), pd.DataFrame(df_var)
@log_start_end(log=logger)
def get_quantile(
    df: pd.DataFrame, length: int, quantile_pct: float
) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Overlay Median & Quantile

    Parameters
    ----------
    df : pd.DataFrame
        Dataframe of targeted data
    length : int
        Length of window
    quantile_pct : float
        Quantile to display

    Returns
    -------
    df_med : pd.DataFrame
        Dataframe of median prices over window
    df_quantile : pd.DataFrame
        Dataframe of given quantile prices over window
    """
    # Delegate to pandas_ta; dropna removes the warm-up rows.
    df_med = ta.median(close=df, length=length).dropna()
    df_quantile = ta.quantile(df, length=length, q=quantile_pct).dropna()
    return pd.DataFrame(df_med), pd.DataFrame(df_quantile)
@log_start_end(log=logger)
def get_skew(df: pd.DataFrame, length: int) -> pd.DataFrame:
    """Rolling skewness indicator

    Parameters
    ----------
    df : pd.DataFrame
        Dataframe of targeted data
    length : int
        Length of window

    Returns
    -------
    pd.DataFrame
        Dataframe of rolling skew
    """
    # Delegate to pandas_ta; dropna removes the warm-up rows.
    return ta.skew(close=df, length=length).dropna()
@log_start_end(log=logger)
def get_kurtosis(df: pd.DataFrame, length: int) -> pd.DataFrame:
    """Rolling kurtosis indicator

    Parameters
    ----------
    df : pd.DataFrame
        Dataframe of targeted data
    length : int
        Length of window

    Returns
    -------
    pd.DataFrame
        Dataframe of rolling kurtosis
    """
    # Delegate to pandas_ta; dropna removes the warm-up rows.
    return ta.kurtosis(close=df, length=length).dropna()
| 23.094891 | 88 | 0.640645 | __docformat__ = "numpy"
import logging
from typing import Tuple
import pandas as pd
import pandas_ta as ta
from gamestonk_terminal.decorators import log_start_end
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
def get_rolling_avg(df: pd.DataFrame, length: int) -> Tuple[pd.DataFrame, pd.DataFrame]:
rolling_mean = df.rolling(length, center=True, min_periods=1).mean()
rolling_std = df.rolling(length, center=True, min_periods=1).std()
return pd.DataFrame(rolling_mean), pd.DataFrame(rolling_std)
@log_start_end(log=logger)
def get_spread(df: pd.DataFrame, length: int) -> Tuple[pd.DataFrame, pd.DataFrame]:
df_sd = ta.stdev(
close=df,
length=length,
).dropna()
df_var = ta.variance(
close=df,
length=length,
).dropna()
return pd.DataFrame(df_sd), pd.DataFrame(df_var)
@log_start_end(log=logger)
def get_quantile(
df: pd.DataFrame, length: int, quantile_pct: float
) -> Tuple[pd.DataFrame, pd.DataFrame]:
df_med = ta.median(close=df, length=length).dropna()
df_quantile = ta.quantile(
df,
length=length,
q=quantile_pct,
).dropna()
return pd.DataFrame(df_med), pd.DataFrame(df_quantile)
@log_start_end(log=logger)
def get_skew(df: pd.DataFrame, length: int) -> pd.DataFrame:
df_skew = ta.skew(close=df, length=length).dropna()
return df_skew
@log_start_end(log=logger)
def get_kurtosis(df: pd.DataFrame, length: int) -> pd.DataFrame:
df_kurt = ta.kurtosis(close=df, length=length).dropna()
return df_kurt
| true | true |
f71b8506b37bb0252f9682c2fbba2ee5c82cb403 | 729 | py | Python | utils/see.py | jack09581013/Dual-GDNet | d9d65928208caee781cbe8f8f794241d06b4bf5d | [
"MIT"
] | null | null | null | utils/see.py | jack09581013/Dual-GDNet | d9d65928208caee781cbe8f8f794241d06b4bf5d | [
"MIT"
] | null | null | null | utils/see.py | jack09581013/Dual-GDNet | d9d65928208caee781cbe8f8f794241d06b4bf5d | [
"MIT"
] | null | null | null | import tools
import os
from dataset import RandomCropper, sub_sampling
from utils import plot_flying_things3D
# Target crop size in pixels; `ratio` sub-samples both dimensions.
height = 240
width = 576
ratio = 1
height = height//ratio
width = width//ratio

# Report how many samples are available in each split.
train_files = os.listdir('/media/jack/data/Dataset/pytorch/flyingthings3d/TRAIN')
test_files = os.listdir('/media/jack/data/Dataset/pytorch/flyingthings3d/TEST')
print('number of train files:', len(train_files))
print('number of test files:', len(test_files))

# (540, 960) -- presumably the raw frame size before cropping; verify.
# Load one sample, sub-sample by `ratio`, take a reproducible random crop
# (seed=0), and visualize the result.
X, Y = tools.load('/media/jack/data/Dataset/pytorch/flyingthings3d/TRAIN/data_00000.np')
X, Y = sub_sampling(X, Y, ratio)
cropper = RandomCropper(X.shape[1:3], (height, width), seed=0)
X, Y = cropper.crop(X), cropper.crop(Y)
plot_flying_things3D(X, Y, None)
| 25.137931 | 88 | 0.747599 | import tools
import os
from dataset import RandomCropper, sub_sampling
from utils import plot_flying_things3D
height = 240
width = 576
ratio = 1
height = height//ratio
width = width//ratio
train_files = os.listdir('/media/jack/data/Dataset/pytorch/flyingthings3d/TRAIN')
test_files = os.listdir('/media/jack/data/Dataset/pytorch/flyingthings3d/TEST')
print('number of train files:', len(train_files))
print('number of test files:', len(test_files))
X, Y = tools.load('/media/jack/data/Dataset/pytorch/flyingthings3d/TRAIN/data_00000.np')
X, Y = sub_sampling(X, Y, ratio)
cropper = RandomCropper(X.shape[1:3], (height, width), seed=0)
X, Y = cropper.crop(X), cropper.crop(Y)
plot_flying_things3D(X, Y, None)
| true | true |
f71b86630d2154e3ec53d9c2d1bbc45428ac1669 | 498 | py | Python | Lib/site-packages/plotly/validators/sankey/link/concentrationscales/_label.py | tytanya/my-first-blog | 2b40adb0816c3546e90ad6ca1e7fb50d924c1536 | [
"bzip2-1.0.6"
] | 4 | 2020-02-05T11:26:47.000Z | 2021-05-26T07:48:46.000Z | Lib/site-packages/plotly/validators/sankey/link/concentrationscales/_label.py | tytanya/my-first-blog | 2b40adb0816c3546e90ad6ca1e7fb50d924c1536 | [
"bzip2-1.0.6"
] | 6 | 2021-03-18T22:27:08.000Z | 2022-03-11T23:40:50.000Z | venv/lib/python3.7/site-packages/plotly/validators/sankey/link/concentrationscales/_label.py | kylenahas/180LoginV1 | 8f64be6e6016d47dff8febfcfa3bbd56e9042f89 | [
"MIT"
] | 1 | 2020-02-02T21:17:12.000Z | 2020-02-02T21:17:12.000Z | import _plotly_utils.basevalidators
class LabelValidator(_plotly_utils.basevalidators.StringValidator):
    """Validator for the ``label`` property of
    ``sankey.link.concentrationscales``."""

    def __init__(self, plotly_name='label',
                 parent_name='sankey.link.concentrationscales', **kwargs):
        # Defaults match plotly's generated validators for this property.
        edit_type = kwargs.pop('edit_type', 'calc')
        role = kwargs.pop('role', 'info')
        super(LabelValidator, self).__init__(plotly_name=plotly_name,
                                             parent_name=parent_name,
                                             edit_type=edit_type,
                                             role=role,
                                             **kwargs)
| 26.210526 | 67 | 0.610442 | import _plotly_utils.basevalidators
class LabelValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='label',
parent_name='sankey.link.concentrationscales',
**kwargs
):
super(LabelValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
role=kwargs.pop('role', 'info'),
**kwargs
)
| true | true |
f71b867bee311ba5e70b95ace8a6be7c624ca76a | 3,503 | py | Python | tensorflow_probability/python/experimental/auto_batching/numpy_backend_test.py | matthieucoquet/probability | 2426f4fc4743ceedc1a638a03d19ce6654ebff76 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/experimental/auto_batching/numpy_backend_test.py | matthieucoquet/probability | 2426f4fc4743ceedc1a638a03d19ce6654ebff76 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/experimental/auto_batching/numpy_backend_test.py | matthieucoquet/probability | 2426f4fc4743ceedc1a638a03d19ce6654ebff76 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for implementations of batched variables."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import hypothesis as hp
from hypothesis import strategies as hps
from hypothesis.extra import numpy as hpnp
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.experimental.auto_batching import backend_test_lib as backend_test
from tensorflow_probability.python.experimental.auto_batching import instructions as inst
from tensorflow_probability.python.experimental.auto_batching import numpy_backend
NP_BACKEND = numpy_backend.NumpyBackend()
def var_init(max_stack_depth, initial_value):
  """Create a FULL numpy-backend variable and push `initial_value` into it.

  The variable's tensor type (dtype and per-item shape) is inferred from
  `initial_value`; its batch size is the leading dimension.
  """
  batch_size = initial_value.shape[0]
  tensor_type = inst.TensorType(initial_value.dtype, initial_value.shape[1:])
  var = NP_BACKEND.create_variable(
      None, inst.VariableAllocation.FULL, tensor_type,
      max_stack_depth, batch_size=batch_size)
  # Write the initial value into every batch member at once.
  return var.update(initial_value, NP_BACKEND.full_mask(batch_size))
# A TF test case for self.assertAllEqual, but doesn't use TF so doesn't care
# about Eager vs Graph mode.
class NumpyVariableTest(tf.test.TestCase, backend_test.VariableTestCase):
  """Checks the numpy backend's batched variable against the reference.

  Inherits from tf.test.TestCase only for assertion helpers; nothing here
  touches TensorFlow execution, so Eager vs Graph mode is irrelevant.
  """

  def testNumpySmoke(self):
    """Test the property on a specific example, without relying on Hypothesis."""
    init = (12, np.random.randn(3, 2, 2).astype(np.float32))
    ops = [('pop', [False, False, True]),
           ('push', [True, False, True]),
           ('update', np.ones((3, 2, 2), dtype=np.float32),
            [True, True, False]),
           ('pop', [True, False, True])]
    self.check_same_results(init, ops, var_init)

  @hp.given(hps.data())
  @hp.settings(
      deadline=None,
      max_examples=100)
  def testNumpyVariableRandomOps(self, data):
    """Property-test random op sequences against the reference variable."""
    # Hypothesis strategy:
    # Generate a random max stack depth and value shape
    # Deduce the batch size from the value shape
    # Make a random dtype
    # Generate a random initial value of that dtype and shape
    # Generate ops, some of which write random values of that dtype and shape
    max_stack_depth = data.draw(hps.integers(min_value=1, max_value=1000))
    value_shape = data.draw(hpnp.array_shapes(min_dims=1))
    batch_size = value_shape[0]
    dtype = data.draw(hpnp.scalar_dtypes())
    # `np.bool` was a deprecated alias for the builtin `bool` (removed in
    # NumPy 1.24); `np.bool_` yields the identical boolean dtype.
    masks = hpnp.arrays(dtype=np.bool_, shape=[batch_size])
    values = hpnp.arrays(dtype, value_shape)
    init_val = data.draw(values)
    ops = data.draw(
        hps.lists(
            hps.one_of(
                hps.tuples(hps.just('update'), values, masks),
                hps.tuples(hps.just('push'), masks),
                hps.tuples(hps.just('pop'), masks),  # preserve line break
                hps.tuples(hps.just('read')))))
    self.check_same_results((max_stack_depth, init_val), ops, var_init)
| 39.806818 | 101 | 0.703968 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hypothesis as hp
from hypothesis import strategies as hps
from hypothesis.extra import numpy as hpnp
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.experimental.auto_batching import backend_test_lib as backend_test
from tensorflow_probability.python.experimental.auto_batching import instructions as inst
from tensorflow_probability.python.experimental.auto_batching import numpy_backend
NP_BACKEND = numpy_backend.NumpyBackend()
def var_init(max_stack_depth, initial_value):
type_ = inst.TensorType(initial_value.dtype, initial_value.shape[1:])
var = NP_BACKEND.create_variable(
None, inst.VariableAllocation.FULL, type_,
max_stack_depth, batch_size=initial_value.shape[0])
return var.update(
initial_value, NP_BACKEND.full_mask(initial_value.shape[0]))
class NumpyVariableTest(tf.test.TestCase, backend_test.VariableTestCase):
def testNumpySmoke(self):
init = (12, np.random.randn(3, 2, 2).astype(np.float32))
ops = [('pop', [False, False, True]),
('push', [True, False, True]),
('update', np.ones((3, 2, 2), dtype=np.float32),
[True, True, False]),
('pop', [True, False, True])]
self.check_same_results(init, ops, var_init)
@hp.given(hps.data())
@hp.settings(
deadline=None,
max_examples=100)
def testNumpyVariableRandomOps(self, data):
max_stack_depth = data.draw(hps.integers(min_value=1, max_value=1000))
value_shape = data.draw(hpnp.array_shapes(min_dims=1))
batch_size = value_shape[0]
dtype = data.draw(hpnp.scalar_dtypes())
masks = hpnp.arrays(dtype=np.bool, shape=[batch_size])
values = hpnp.arrays(dtype, value_shape)
init_val = data.draw(values)
ops = data.draw(
hps.lists(
hps.one_of(
hps.tuples(hps.just('update'), values, masks),
hps.tuples(hps.just('push'), masks),
hps.tuples(hps.just('pop'), masks),
hps.tuples(hps.just('read')))))
self.check_same_results((max_stack_depth, init_val), ops, var_init)
if __name__ == '__main__':
tf.test.main()
| true | true |
f71b87f9a34ad86788ead5a5a291dfc02bf3cc77 | 138 | py | Python | modules/2.79/bpy/types/GPENCIL_UL_brush.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/GPENCIL_UL_brush.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | modules/2.79/bpy/types/GPENCIL_UL_brush.py | cmbasnett/fake-bpy-module | acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55 | [
"MIT"
] | null | null | null | class GPENCIL_UL_brush:
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
pass
| 17.25 | 96 | 0.702899 | class GPENCIL_UL_brush:
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
pass
| true | true |
f71b891108d478f5ab27a7c4e1616fd4375c19ac | 5,222 | py | Python | codes/test.py | dvschultz/BasicSR | 69f360227f02cc86fa534a82ff969dd9084ac825 | [
"Apache-2.0"
] | null | null | null | codes/test.py | dvschultz/BasicSR | 69f360227f02cc86fa534a82ff969dd9084ac825 | [
"Apache-2.0"
] | null | null | null | codes/test.py | dvschultz/BasicSR | 69f360227f02cc86fa534a82ff969dd9084ac825 | [
"Apache-2.0"
] | null | null | null | import os.path as osp
import logging
import time
import argparse
from collections import OrderedDict
import options.options as option
import utils.util as util
from data.util import bgr2ycbcr
from data import create_dataset, create_dataloader
from models import create_model
#### options
# Parse the -opt YAML path into an options dict; dict_to_nonedict makes
# missing keys return None instead of raising KeyError.
parser = argparse.ArgumentParser()
parser.add_argument('-opt', type=str, required=True, help='Path to options YMAL file.')
opt = option.parse(parser.parse_args().opt, is_train=False)
opt = option.dict_to_nonedict(opt)
# Create every configured output directory except experiment roots and
# pretrained/resume checkpoint paths (those must already exist).
util.mkdirs(
    (path for key, path in opt['path'].items()
     if not key == 'experiments_root' and 'pretrain_model' not in key and 'resume' not in key))
util.setup_logger('base', opt['path']['log'], 'test_' + opt['name'], level=logging.INFO,
                  screen=True, tofile=True)
logger = logging.getLogger('base')
logger.info(option.dict2str(opt))
#### Create test dataset and dataloader
# One loader per configured test dataset, iterated in sorted phase order.
test_loaders = []
for phase, dataset_opt in sorted(opt['datasets'].items()):
    test_set = create_dataset(dataset_opt)
    test_loader = create_dataloader(test_set, dataset_opt)
    logger.info('Number of test images in [{:s}]: {:d}'.format(dataset_opt['name'], len(test_set)))
    test_loaders.append(test_loader)
model = create_model(opt)
# Main evaluation loop: run the model over every test set, save SR outputs,
# and (when ground truth is enabled) accumulate PSNR/SSIM metrics.
for test_loader in test_loaders:
    test_set_name = test_loader.dataset.opt['name']
    logger.info('\nTesting [{:s}]...'.format(test_set_name))
    # NOTE(review): test_start_time is never used afterwards.
    test_start_time = time.time()
    dataset_dir = osp.join(opt['path']['results_root'], test_set_name)
    util.mkdir(dataset_dir)
    test_results = OrderedDict()
    test_results['psnr'] = []
    test_results['ssim'] = []
    test_results['psnr_y'] = []
    test_results['ssim_y'] = []
    for data in test_loader:
        # need_GT = False if test_loader.dataset.opt['dataroot_GT'] is None else True
        # GT comparison is hard-disabled here, so the metrics branch below
        # never runs; restore the line above to re-enable it.
        need_GT = False
        model.feed_data(data, need_GT=need_GT)
        img_path = data['GT_path'][0] if need_GT else data['LQ_path'][0]
        img_name = osp.splitext(osp.basename(img_path))[0]
        model.test()
        visuals = model.get_current_visuals(need_GT=need_GT)
        sr_img = util.tensor2img(visuals['SR'])  # uint8
        # save images
        suffix = opt['suffix']
        if suffix:
            save_img_path = osp.join(dataset_dir, img_name + suffix + '.png')
        else:
            save_img_path = osp.join(dataset_dir, img_name + '.png')
        util.save_img(sr_img, save_img_path)
        # calculate PSNR and SSIM
        if need_GT:
            gt_img = util.tensor2img(visuals['GT'])
            # Normalize to [0, 1] before cropping; metrics are computed on
            # the re-scaled [0, 255] crops below.
            gt_img = gt_img / 255.
            sr_img = sr_img / 255.
            crop_border = opt['crop_border'] if opt['crop_border'] else opt['scale']
            if crop_border == 0:
                cropped_sr_img = sr_img
                cropped_gt_img = gt_img
            else:
                cropped_sr_img = sr_img[crop_border:-crop_border, crop_border:-crop_border, :]
                cropped_gt_img = gt_img[crop_border:-crop_border, crop_border:-crop_border, :]
            psnr = util.calculate_psnr(cropped_sr_img * 255, cropped_gt_img * 255)
            ssim = util.calculate_ssim(cropped_sr_img * 255, cropped_gt_img * 255)
            test_results['psnr'].append(psnr)
            test_results['ssim'].append(ssim)
            if gt_img.shape[2] == 3:  # RGB image
                # Also measure on the luma (Y) channel only.
                sr_img_y = bgr2ycbcr(sr_img, only_y=True)
                gt_img_y = bgr2ycbcr(gt_img, only_y=True)
                if crop_border == 0:
                    cropped_sr_img_y = sr_img_y
                    cropped_gt_img_y = gt_img_y
                else:
                    cropped_sr_img_y = sr_img_y[crop_border:-crop_border, crop_border:-crop_border]
                    cropped_gt_img_y = gt_img_y[crop_border:-crop_border, crop_border:-crop_border]
                psnr_y = util.calculate_psnr(cropped_sr_img_y * 255, cropped_gt_img_y * 255)
                ssim_y = util.calculate_ssim(cropped_sr_img_y * 255, cropped_gt_img_y * 255)
                test_results['psnr_y'].append(psnr_y)
                test_results['ssim_y'].append(ssim_y)
                logger.info(
                    '{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}; PSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}.'.
                    format(img_name, psnr, ssim, psnr_y, ssim_y))
            else:
                logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}.'.format(img_name, psnr, ssim))
        else:
            logger.info(img_name)
    if need_GT:  # metrics
        # Average PSNR/SSIM results
        ave_psnr = sum(test_results['psnr']) / len(test_results['psnr'])
        ave_ssim = sum(test_results['ssim']) / len(test_results['ssim'])
        logger.info(
            '----Average PSNR/SSIM results for {}----\n\tPSNR: {:.6f} dB; SSIM: {:.6f}\n'.format(
            test_set_name, ave_psnr, ave_ssim))
        if test_results['psnr_y'] and test_results['ssim_y']:
            ave_psnr_y = sum(test_results['psnr_y']) / len(test_results['psnr_y'])
            ave_ssim_y = sum(test_results['ssim_y']) / len(test_results['ssim_y'])
            logger.info(
                '----Y channel, average PSNR/SSIM----\n\tPSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}\n'.
                format(ave_psnr_y, ave_ssim_y))
| 42.803279 | 99 | 0.617771 | import os.path as osp
import logging
import time
import argparse
from collections import OrderedDict
import options.options as option
import utils.util as util
from data.util import bgr2ycbcr
from data import create_dataset, create_dataloader
from models import create_model
ser()
parser.add_argument('-opt', type=str, required=True, help='Path to options YMAL file.')
opt = option.parse(parser.parse_args().opt, is_train=False)
opt = option.dict_to_nonedict(opt)
util.mkdirs(
(path for key, path in opt['path'].items()
if not key == 'experiments_root' and 'pretrain_model' not in key and 'resume' not in key))
util.setup_logger('base', opt['path']['log'], 'test_' + opt['name'], level=logging.INFO,
screen=True, tofile=True)
logger = logging.getLogger('base')
logger.info(option.dict2str(opt))
aset_opt)
test_loader = create_dataloader(test_set, dataset_opt)
logger.info('Number of test images in [{:s}]: {:d}'.format(dataset_opt['name'], len(test_set)))
test_loaders.append(test_loader)
model = create_model(opt)
for test_loader in test_loaders:
test_set_name = test_loader.dataset.opt['name']
logger.info('\nTesting [{:s}]...'.format(test_set_name))
test_start_time = time.time()
dataset_dir = osp.join(opt['path']['results_root'], test_set_name)
util.mkdir(dataset_dir)
test_results = OrderedDict()
test_results['psnr'] = []
test_results['ssim'] = []
test_results['psnr_y'] = []
test_results['ssim_y'] = []
for data in test_loader:
need_GT = False
model.feed_data(data, need_GT=need_GT)
img_path = data['GT_path'][0] if need_GT else data['LQ_path'][0]
img_name = osp.splitext(osp.basename(img_path))[0]
model.test()
visuals = model.get_current_visuals(need_GT=need_GT)
sr_img = util.tensor2img(visuals['SR'])
suffix = opt['suffix']
if suffix:
save_img_path = osp.join(dataset_dir, img_name + suffix + '.png')
else:
save_img_path = osp.join(dataset_dir, img_name + '.png')
util.save_img(sr_img, save_img_path)
if need_GT:
gt_img = util.tensor2img(visuals['GT'])
gt_img = gt_img / 255.
sr_img = sr_img / 255.
crop_border = opt['crop_border'] if opt['crop_border'] else opt['scale']
if crop_border == 0:
cropped_sr_img = sr_img
cropped_gt_img = gt_img
else:
cropped_sr_img = sr_img[crop_border:-crop_border, crop_border:-crop_border, :]
cropped_gt_img = gt_img[crop_border:-crop_border, crop_border:-crop_border, :]
psnr = util.calculate_psnr(cropped_sr_img * 255, cropped_gt_img * 255)
ssim = util.calculate_ssim(cropped_sr_img * 255, cropped_gt_img * 255)
test_results['psnr'].append(psnr)
test_results['ssim'].append(ssim)
if gt_img.shape[2] == 3:
sr_img_y = bgr2ycbcr(sr_img, only_y=True)
gt_img_y = bgr2ycbcr(gt_img, only_y=True)
if crop_border == 0:
cropped_sr_img_y = sr_img_y
cropped_gt_img_y = gt_img_y
else:
cropped_sr_img_y = sr_img_y[crop_border:-crop_border, crop_border:-crop_border]
cropped_gt_img_y = gt_img_y[crop_border:-crop_border, crop_border:-crop_border]
psnr_y = util.calculate_psnr(cropped_sr_img_y * 255, cropped_gt_img_y * 255)
ssim_y = util.calculate_ssim(cropped_sr_img_y * 255, cropped_gt_img_y * 255)
test_results['psnr_y'].append(psnr_y)
test_results['ssim_y'].append(ssim_y)
logger.info(
'{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}; PSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}.'.
format(img_name, psnr, ssim, psnr_y, ssim_y))
else:
logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}.'.format(img_name, psnr, ssim))
else:
logger.info(img_name)
if need_GT:
ave_psnr = sum(test_results['psnr']) / len(test_results['psnr'])
ave_ssim = sum(test_results['ssim']) / len(test_results['ssim'])
logger.info(
'----Average PSNR/SSIM results for {}----\n\tPSNR: {:.6f} dB; SSIM: {:.6f}\n'.format(
test_set_name, ave_psnr, ave_ssim))
if test_results['psnr_y'] and test_results['ssim_y']:
ave_psnr_y = sum(test_results['psnr_y']) / len(test_results['psnr_y'])
ave_ssim_y = sum(test_results['ssim_y']) / len(test_results['ssim_y'])
logger.info(
'----Y channel, average PSNR/SSIM----\n\tPSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}\n'.
format(ave_psnr_y, ave_ssim_y))
| true | true |
f71b8944d7cb24a8c9e0c2e8ab0e255b732516de | 11,057 | py | Python | script/gen_requirements_all.py | TheDatNik/home-assistant | 12b451adf5e5e894cb0707b61535218260411189 | [
"Apache-2.0"
] | 2 | 2019-07-31T16:09:15.000Z | 2019-09-05T08:07:12.000Z | script/gen_requirements_all.py | TheDatNik/home-assistant | 12b451adf5e5e894cb0707b61535218260411189 | [
"Apache-2.0"
] | 2 | 2022-01-13T04:00:03.000Z | 2022-03-12T01:02:40.000Z | script/gen_requirements_all.py | TheDatNik/home-assistant | 12b451adf5e5e894cb0707b61535218260411189 | [
"Apache-2.0"
] | 2 | 2017-10-16T07:55:03.000Z | 2019-10-07T21:26:20.000Z | #!/usr/bin/env python3
"""Generate an updated requirements_all.txt."""
import fnmatch
import importlib
import os
import pathlib
import pkgutil
import re
import sys
from script.hassfest.model import Integration
COMMENT_REQUIREMENTS = (
'Adafruit-DHT',
'Adafruit_BBIO',
'avion',
'beacontools',
'blinkt',
'bluepy',
'bme680',
'credstash',
'decora',
'envirophat',
'evdev',
'face_recognition',
'fritzconnection',
'i2csense',
'opencv-python',
'py_noaa',
'VL53L1X2',
'pybluez',
'pycups',
'PySwitchbot',
'pySwitchmate',
'python-eq3bt',
'python-lirc',
'pyuserinput',
'raspihats',
'rpi-rf',
'RPi.GPIO',
'smbus-cffi',
)
TEST_REQUIREMENTS = (
'aioambient',
'aioautomatic',
'aiobotocore',
'aiohttp_cors',
'aiohue',
'aiounifi',
'apns2',
'av',
'axis',
'caldav',
'coinmarketcap',
'defusedxml',
'dsmr_parser',
'eebrightbox',
'emulated_roku',
'ephem',
'evohomeclient',
'feedparser-homeassistant',
'foobot_async',
'geojson_client',
'georss_generic_client',
'georss_ign_sismologia_client',
'google-api-python-client',
'gTTS-token',
'ha-ffmpeg',
'hangups',
'HAP-python',
'hass-nabucasa',
'haversine',
'hbmqtt',
'hdate',
'holidays',
'home-assistant-frontend',
'homekit[IP]',
'homematicip',
'httplib2',
'influxdb',
'jsonpath',
'libpurecool',
'libsoundtouch',
'luftdaten',
'mbddns',
'mficlient',
'numpy',
'oauth2client',
'paho-mqtt',
'pexpect',
'pilight',
'pmsensor',
'prometheus_client',
'pushbullet.py',
'py-canary',
'pyblackbird',
'pydeconz',
'pydispatcher',
'pyheos',
'pyhomematic',
'pylitejet',
'pymonoprice',
'pynx584',
'pyopenuv',
'pyotp',
'pyps4-homeassistant',
'pysmartapp',
'pysmartthings',
'pysonos',
'pyqwikswitch',
'PyRMVtransport',
'PyTransportNSW',
'pyspcwebgw',
'python-forecastio',
'python-nest',
'python_awair',
'pytradfri[async]',
'pyunifi',
'pyupnp-async',
'pywebpush',
'pyHS100',
'PyNaCl',
'regenmaschine',
'restrictedpython',
'rflink',
'ring_doorbell',
'rxv',
'simplisafe-python',
'sleepyq',
'smhi-pkg',
'somecomfort',
'sqlalchemy',
'srpenergy',
'statsd',
'toonapilib',
'uvcclient',
'vsure',
'warrant',
'pythonwhois',
'wakeonlan',
'vultr',
'YesssSMS',
'ruamel.yaml',
'zigpy-homeassistant',
'bellows-homeassistant',
)
IGNORE_PACKAGES = (
'homeassistant.components.hangouts.hangups_utils',
'homeassistant.components.cloud.client',
'homeassistant.components.homekit.*',
'homeassistant.components.recorder.models',
)
IGNORE_PIN = ('colorlog>2.1,<3', 'keyring>=9.3,<10.0', 'urllib3')
IGNORE_REQ = (
'colorama<=1', # Windows only requirement in check_config
)
URL_PIN = ('https://developers.home-assistant.io/docs/'
'creating_platform_code_review.html#1-requirements')
CONSTRAINT_PATH = os.path.join(os.path.dirname(__file__),
'../homeassistant/package_constraints.txt')
CONSTRAINT_BASE = """
pycryptodome>=3.6.6
# Breaks Python 3.6 and is not needed for our supported Python versions
enum34==1000000000.0.0
# This is a old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# Contains code to modify Home Assistant to work around our rules
python-systemair-savecair==1000000000.0.0
# Newer version causes pylint to take forever
# https://github.com/timothycrosley/isort/issues/848
isort==4.3.4
"""
def explore_module(package, explore_children):
    """Return dotted names of the modules directly under *package*.

    When *explore_children* is true, also include each submodule's own
    direct children (one extra level, not a full recursion).
    """
    module = importlib.import_module(package)
    path = getattr(module, '__path__', None)
    if path is None:
        # Plain module, not a package: nothing underneath it.
        return []
    names = []
    for _, mod_name, _ in pkgutil.iter_modules(path, package + '.'):
        names.append(mod_name)
        if explore_children:
            names.extend(explore_module(mod_name, False))
    return names
def core_requirements():
    """Parse setup.py in the current directory and return the REQUIRES list."""
    with open('setup.py') as setup_file:
        contents = setup_file.read()
    # Non-greedy, DOTALL match grabs everything between "REQUIRES = [" and
    # the first closing bracket; each entry is a single-quoted string.
    match = re.search(r'REQUIRES = \[(.*?)\]', contents, re.S)
    return re.findall(r"'(.*?)'", match.group(1))
def comment_requirement(req):
    """Return True if *req* should be commented out (fails on some systems)."""
    for ignored in COMMENT_REQUIREMENTS:
        if ignored in req:
            return True
    return False
def gather_modules():
    """Collect requirement -> packages mapping, or None on import errors."""
    reqs = {}
    errors = []
    gather_requirements_from_manifests(errors, reqs)
    gather_requirements_from_modules(errors, reqs)
    # Sort package lists by dotted depth first, then alphabetically.
    for req_name in reqs:
        reqs[req_name] = sorted(
            reqs[req_name],
            key=lambda pkg: (len(pkg.split('.')), pkg))
    if not errors:
        return reqs
    print("******* ERROR")
    print("Errors while importing: ", ', '.join(errors))
    print("Make sure you import 3rd party libraries inside methods.")
    return None
def gather_requirements_from_manifests(errors, reqs):
    """Gather all of the requirements from integration manifests."""
    integrations = Integration.load_dir(pathlib.Path('homeassistant/components'))
    for domain in sorted(integrations):
        integration = integrations[domain]
        if not integration.manifest:
            errors.append(
                'The manifest for component {} is invalid.'.format(domain))
            continue
        process_requirements(
            errors,
            integration.manifest['requirements'],
            'homeassistant.components.{}'.format(domain),
            reqs)
def gather_requirements_from_modules(errors, reqs):
    """Collect REQUIREMENTS lists from scripts/auth modules directly."""
    packages = explore_module('homeassistant.scripts', True)
    packages += explore_module('homeassistant.auth', True)
    for package in sorted(packages):
        try:
            module = importlib.import_module(package)
        except ImportError as err:
            # Only report packages that are not explicitly ignored.
            if not any(fnmatch.fnmatch(package, pattern)
                       for pattern in IGNORE_PACKAGES):
                print("{}: {}".format(package.replace('.', '/') + '.py', err))
                errors.append(package)
            continue
        requirements = getattr(module, 'REQUIREMENTS', None)
        if requirements:
            process_requirements(errors, requirements, package, reqs)
def process_requirements(errors, module_requirements, package, reqs):
    """Validate each requirement of *package* and register it in *reqs*."""
    for req in module_requirements:
        if req in IGNORE_REQ:
            continue
        if '://' in req:
            errors.append(
                "{}[Only pypi dependencies are allowed: {}]".format(
                    package, req))
        # partition('==')[1] is '' when there is no '==' pin in the spec.
        unpinned = req.partition('==')[1] == ''
        if unpinned and req not in IGNORE_PIN:
            errors.append(
                "{}[Please pin requirement {}, see {}]".format(
                    package, req, URL_PIN))
        reqs.setdefault(req, []).append(package)
def generate_requirements_list(reqs):
    """Render the requirement -> packages mapping as pip-file text."""
    chunks = []
    for pkg in sorted(reqs):
        # One "# package" attribution line per requiring component.
        for req in sorted(reqs[pkg]):
            chunks.append('\n# {}'.format(req))
        # Requirements that fail to install everywhere get commented out.
        prefix = '# ' if comment_requirement(pkg) else ''
        chunks.append('\n{}{}\n'.format(prefix, pkg))
    return ''.join(chunks)
def requirements_all_output(reqs):
    """Generate the full contents of requirements_all.txt."""
    header = '# Home Assistant core' + '\n'
    core = '\n'.join(core_requirements()) + '\n'
    return header + core + generate_requirements_list(reqs)
def requirements_test_output(reqs):
    """Generate the full contents of requirements_test_all.txt."""
    pieces = ['# Home Assistant test', '\n']
    with open('requirements_test.txt') as test_file:
        pieces.append(test_file.read())
    pieces.append('\n')

    def _is_test_req(req_name):
        # A requirement counts when any TEST_REQUIREMENTS name appears at
        # the start (or after a '#') and is followed by a pin or the end.
        return any(
            re.search(r'(^|#){}($|[=><])'.format(re.escape(ign)),
                      req_name) is not None
            for ign in TEST_REQUIREMENTS)

    filtered = {req: pkgs for req, pkgs in reqs.items() if _is_test_req(req)}
    pieces.append(generate_requirements_list(filtered))
    return ''.join(pieces)
def gather_constraints():
    """Construct the core-requirements part of the constraint file."""
    lines = core_requirements()
    lines.append('')  # trailing empty entry yields a final newline on join
    return '\n'.join(lines)
def write_requirements_file(data):
    """Write *data* to requirements_all.txt with Unix newlines."""
    req_file = open('requirements_all.txt', 'w+', newline="\n")
    try:
        req_file.write(data)
    finally:
        req_file.close()
def write_test_requirements_file(data):
    """Write *data* to requirements_test_all.txt with Unix newlines."""
    req_file = open('requirements_test_all.txt', 'w+', newline="\n")
    try:
        req_file.write(data)
    finally:
        req_file.close()
def write_constraints_file(data):
    """Write the package constraints file (*data* plus pinned overrides)."""
    contents = data + CONSTRAINT_BASE
    with open(CONSTRAINT_PATH, 'w+', newline="\n") as constraints_file:
        constraints_file.write(contents)
def validate_requirements_file(data):
    """Return True if requirements_all.txt on disk matches *data*."""
    with open('requirements_all.txt', 'r') as req_file:
        on_disk = req_file.read()
    return data == on_disk
def validate_requirements_test_file(data):
    """Return True if requirements_test_all.txt on disk matches *data*."""
    with open('requirements_test_all.txt', 'r') as req_file:
        on_disk = req_file.read()
    return data == on_disk
def validate_constraints_file(data):
    """Return True if the constraints file on disk matches *data* + overrides."""
    expected = data + CONSTRAINT_BASE
    with open(CONSTRAINT_PATH, 'r') as constraints_file:
        return constraints_file.read() == expected
def main(validate):
    """Run the script.

    Args:
        validate: If True, only check that the generated files are up to
            date (CI mode); if False, rewrite them in place.

    Returns:
        Process exit code: 0 on success, 1 on failure.
    """
    # The generated files live in the repo root; refuse to run elsewhere.
    if not os.path.isfile('requirements_all.txt'):
        print('Run this from HA root dir')
        return 1
    data = gather_modules()
    if data is None:
        # gather_modules() already printed the import/manifest errors.
        return 1
    constraints = gather_constraints()
    reqs_file = requirements_all_output(data)
    reqs_test_file = requirements_test_output(data)
    if validate:
        # Compare freshly generated content with what is on disk.
        errors = []
        if not validate_requirements_file(reqs_file):
            errors.append("requirements_all.txt is not up to date")
        if not validate_requirements_test_file(reqs_test_file):
            errors.append("requirements_test_all.txt is not up to date")
        if not validate_constraints_file(constraints):
            errors.append(
                "home-assistant/package_constraints.txt is not up to date")
        if errors:
            print("******* ERROR")
            print('\n'.join(errors))
            print("Please run script/gen_requirements_all.py")
            return 1
        return 0
    # Write mode: regenerate all three files.
    write_requirements_file(reqs_file)
    write_test_requirements_file(reqs_test_file)
    write_constraints_file(constraints)
    return 0
if __name__ == '__main__':
    # Passing "validate" as the last CLI argument switches to check-only mode.
    _VAL = sys.argv[-1] == 'validate'
    sys.exit(main(_VAL))
| 25.955399 | 78 | 0.617889 |
import fnmatch
import importlib
import os
import pathlib
import pkgutil
import re
import sys
from script.hassfest.model import Integration
COMMENT_REQUIREMENTS = (
'Adafruit-DHT',
'Adafruit_BBIO',
'avion',
'beacontools',
'blinkt',
'bluepy',
'bme680',
'credstash',
'decora',
'envirophat',
'evdev',
'face_recognition',
'fritzconnection',
'i2csense',
'opencv-python',
'py_noaa',
'VL53L1X2',
'pybluez',
'pycups',
'PySwitchbot',
'pySwitchmate',
'python-eq3bt',
'python-lirc',
'pyuserinput',
'raspihats',
'rpi-rf',
'RPi.GPIO',
'smbus-cffi',
)
TEST_REQUIREMENTS = (
'aioambient',
'aioautomatic',
'aiobotocore',
'aiohttp_cors',
'aiohue',
'aiounifi',
'apns2',
'av',
'axis',
'caldav',
'coinmarketcap',
'defusedxml',
'dsmr_parser',
'eebrightbox',
'emulated_roku',
'ephem',
'evohomeclient',
'feedparser-homeassistant',
'foobot_async',
'geojson_client',
'georss_generic_client',
'georss_ign_sismologia_client',
'google-api-python-client',
'gTTS-token',
'ha-ffmpeg',
'hangups',
'HAP-python',
'hass-nabucasa',
'haversine',
'hbmqtt',
'hdate',
'holidays',
'home-assistant-frontend',
'homekit[IP]',
'homematicip',
'httplib2',
'influxdb',
'jsonpath',
'libpurecool',
'libsoundtouch',
'luftdaten',
'mbddns',
'mficlient',
'numpy',
'oauth2client',
'paho-mqtt',
'pexpect',
'pilight',
'pmsensor',
'prometheus_client',
'pushbullet.py',
'py-canary',
'pyblackbird',
'pydeconz',
'pydispatcher',
'pyheos',
'pyhomematic',
'pylitejet',
'pymonoprice',
'pynx584',
'pyopenuv',
'pyotp',
'pyps4-homeassistant',
'pysmartapp',
'pysmartthings',
'pysonos',
'pyqwikswitch',
'PyRMVtransport',
'PyTransportNSW',
'pyspcwebgw',
'python-forecastio',
'python-nest',
'python_awair',
'pytradfri[async]',
'pyunifi',
'pyupnp-async',
'pywebpush',
'pyHS100',
'PyNaCl',
'regenmaschine',
'restrictedpython',
'rflink',
'ring_doorbell',
'rxv',
'simplisafe-python',
'sleepyq',
'smhi-pkg',
'somecomfort',
'sqlalchemy',
'srpenergy',
'statsd',
'toonapilib',
'uvcclient',
'vsure',
'warrant',
'pythonwhois',
'wakeonlan',
'vultr',
'YesssSMS',
'ruamel.yaml',
'zigpy-homeassistant',
'bellows-homeassistant',
)
IGNORE_PACKAGES = (
'homeassistant.components.hangouts.hangups_utils',
'homeassistant.components.cloud.client',
'homeassistant.components.homekit.*',
'homeassistant.components.recorder.models',
)
IGNORE_PIN = ('colorlog>2.1,<3', 'keyring>=9.3,<10.0', 'urllib3')
IGNORE_REQ = (
'colorama<=1',
)
URL_PIN = ('https://developers.home-assistant.io/docs/'
'creating_platform_code_review.html#1-requirements')
CONSTRAINT_PATH = os.path.join(os.path.dirname(__file__),
'../homeassistant/package_constraints.txt')
CONSTRAINT_BASE = """
pycryptodome>=3.6.6
# Breaks Python 3.6 and is not needed for our supported Python versions
enum34==1000000000.0.0
# This is a old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# Contains code to modify Home Assistant to work around our rules
python-systemair-savecair==1000000000.0.0
# Newer version causes pylint to take forever
# https://github.com/timothycrosley/isort/issues/848
isort==4.3.4
"""
def explore_module(package, explore_children):
module = importlib.import_module(package)
found = []
if not hasattr(module, '__path__'):
return found
for _, name, _ in pkgutil.iter_modules(module.__path__, package + '.'):
found.append(name)
if explore_children:
found.extend(explore_module(name, False))
return found
def core_requirements():
with open('setup.py') as inp:
reqs_raw = re.search(
r'REQUIRES = \[(.*?)\]', inp.read(), re.S).group(1)
return re.findall(r"'(.*?)'", reqs_raw)
def comment_requirement(req):
return any(ign in req for ign in COMMENT_REQUIREMENTS)
def gather_modules():
reqs = {}
errors = []
gather_requirements_from_manifests(errors, reqs)
gather_requirements_from_modules(errors, reqs)
for key in reqs:
reqs[key] = sorted(reqs[key],
key=lambda name: (len(name.split('.')), name))
if errors:
print("******* ERROR")
print("Errors while importing: ", ', '.join(errors))
print("Make sure you import 3rd party libraries inside methods.")
return None
return reqs
def gather_requirements_from_manifests(errors, reqs):
integrations = Integration.load_dir(pathlib.Path(
'homeassistant/components'
))
for domain in sorted(integrations):
integration = integrations[domain]
if not integration.manifest:
errors.append(
'The manifest for component {} is invalid.'.format(domain)
)
continue
process_requirements(
errors,
integration.manifest['requirements'],
'homeassistant.components.{}'.format(domain),
reqs
)
def gather_requirements_from_modules(errors, reqs):
for package in sorted(
explore_module('homeassistant.scripts', True) +
explore_module('homeassistant.auth', True)):
try:
module = importlib.import_module(package)
except ImportError as err:
for pattern in IGNORE_PACKAGES:
if fnmatch.fnmatch(package, pattern):
break
else:
print("{}: {}".format(package.replace('.', '/') + '.py', err))
errors.append(package)
continue
if getattr(module, 'REQUIREMENTS', None):
process_requirements(errors, module.REQUIREMENTS, package, reqs)
def process_requirements(errors, module_requirements, package, reqs):
for req in module_requirements:
if req in IGNORE_REQ:
continue
if '://' in req:
errors.append(
"{}[Only pypi dependencies are allowed: {}]".format(
package, req))
if req.partition('==')[1] == '' and req not in IGNORE_PIN:
errors.append(
"{}[Please pin requirement {}, see {}]".format(
package, req, URL_PIN))
reqs.setdefault(req, []).append(package)
def generate_requirements_list(reqs):
output = []
for pkg, requirements in sorted(reqs.items(), key=lambda item: item[0]):
for req in sorted(requirements):
output.append('\n# {}'.format(req))
if comment_requirement(pkg):
output.append('\n# {}\n'.format(pkg))
else:
output.append('\n{}\n'.format(pkg))
return ''.join(output)
def requirements_all_output(reqs):
output = []
output.append('# Home Assistant core')
output.append('\n')
output.append('\n'.join(core_requirements()))
output.append('\n')
output.append(generate_requirements_list(reqs))
return ''.join(output)
def requirements_test_output(reqs):
output = []
output.append('# Home Assistant test')
output.append('\n')
with open('requirements_test.txt') as test_file:
output.append(test_file.read())
output.append('\n')
filtered = {key: value for key, value in reqs.items()
if any(
re.search(r'(^|#){}($|[=><])'.format(re.escape(ign)),
key) is not None for ign in TEST_REQUIREMENTS)}
output.append(generate_requirements_list(filtered))
return ''.join(output)
def gather_constraints():
    """Construct the constraint-file content: core requirements, one per line."""
    lines = core_requirements() + ['']  # trailing '' yields a final newline
    return '\n'.join(lines)
def write_requirements_file(data):
    """Write *data* to requirements_all.txt with Unix line endings."""
    with open('requirements_all.txt', 'w+', newline="\n") as fp:
        fp.write(data)
def write_test_requirements_file(data):
    """Write *data* to requirements_test_all.txt with Unix line endings."""
    with open('requirements_test_all.txt', 'w+', newline="\n") as fp:
        fp.write(data)
def write_constraints_file(data):
    """Write *data* plus the static CONSTRAINT_BASE to CONSTRAINT_PATH."""
    with open(CONSTRAINT_PATH, 'w+', newline="\n") as fp:
        fp.write(data + CONSTRAINT_BASE)
def validate_requirements_file(data):
    """Return True if requirements_all.txt exactly matches *data*."""
    with open('requirements_all.txt', 'r') as fp:
        return data == fp.read()
def validate_requirements_test_file(data):
    """Return True if requirements_test_all.txt exactly matches *data*."""
    with open('requirements_test_all.txt', 'r') as fp:
        return data == fp.read()
def validate_constraints_file(data):
    """Return True if CONSTRAINT_PATH matches *data* plus CONSTRAINT_BASE."""
    with open(CONSTRAINT_PATH, 'r') as fp:
        return data + CONSTRAINT_BASE == fp.read()
def main(validate):
    """Generate (or, if *validate* is truthy, verify) the requirement files.

    Returns a process exit code: 0 on success, 1 on failure.
    """
    if not os.path.isfile('requirements_all.txt'):
        print('Run this from HA root dir')
        return 1

    data = gather_modules()
    if data is None:
        return 1

    constraints = gather_constraints()
    reqs_file = requirements_all_output(data)
    reqs_test_file = requirements_test_output(data)

    if not validate:
        # Generation mode: (re)write all three files.
        write_requirements_file(reqs_file)
        write_test_requirements_file(reqs_test_file)
        write_constraints_file(constraints)
        return 0

    # Validation mode: compare generated content against files on disk.
    errors = []
    if not validate_requirements_file(reqs_file):
        errors.append("requirements_all.txt is not up to date")
    if not validate_requirements_test_file(reqs_test_file):
        errors.append("requirements_test_all.txt is not up to date")
    if not validate_constraints_file(constraints):
        errors.append(
            "home-assistant/package_constraints.txt is not up to date")
    if errors:
        print("******* ERROR")
        print('\n'.join(errors))
        print("Please run script/gen_requirements_all.py")
        return 1
    return 0
if __name__ == '__main__':
    # Pass 'validate' as the last CLI argument to check the generated files
    # instead of rewriting them; exit code mirrors main()'s return value.
    _VAL = sys.argv[-1] == 'validate'
    sys.exit(main(_VAL))
| true | true |
f71b8a631ab134a126402e2d0c05bb00449922c8 | 150,997 | py | Python | fastkml/test_main.py | dennereed/paleocore | d6da6c39cde96050ee4b9e7213ec1200530cbeee | [
"MIT"
] | 1 | 2021-02-05T19:50:13.000Z | 2021-02-05T19:50:13.000Z | fastkml/test_main.py | dennereed/paleocore | d6da6c39cde96050ee4b9e7213ec1200530cbeee | [
"MIT"
] | 59 | 2020-06-17T22:21:51.000Z | 2022-02-10T05:00:01.000Z | fastkml/test_main.py | dennereed/paleocore | d6da6c39cde96050ee4b9e7213ec1200530cbeee | [
"MIT"
] | 2 | 2020-07-01T14:11:09.000Z | 2020-08-10T17:27:26.000Z | # -*- coding: utf-8 -*-
# Copyright (C) 2012 Christian Ledermann
#
# This library is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Prefer the unittest2 backport on Python 2.6; fall back to stdlib unittest.
# Catch only ImportError: a bare `except:` would also swallow unrelated
# errors (e.g. KeyboardInterrupt) raised while importing unittest2.
try:
    import unittest2 as unittest  # Needed in Python 2.6
except ImportError:
    import unittest
from fastkml import kml
from fastkml import styles
from fastkml import base
from fastkml import atom
from fastkml import config
from fastkml import gx # NOQA
import datetime
from dateutil.tz import tzutc, tzoffset
from fastkml.config import etree
from fastkml.geometry import Point, LineString, Polygon
from fastkml.geometry import MultiPoint, MultiLineString, MultiPolygon
from fastkml.geometry import LinearRing, GeometryCollection
from fastkml.geometry import Geometry
class BaseClassesTestCase(unittest.TestCase):
    """ BaseClasses must raise a NotImplementedError on etree_element
    and a TypeError on from_element """
    def test_base_object(self):
        """Exercise _BaseObject defaults, attribute round-trips and error cases."""
        bo = base._BaseObject(id='id0')
        self.assertEqual(bo.id, 'id0')
        self.assertEqual(bo.ns, config.NS)
        self.assertEqual(bo.targetId, None)
        self.assertEqual(bo.__name__, None)
        bo.targetId = 'target'
        self.assertEqual(bo.targetId, 'target')
        bo.ns = ''
        bo.id = None
        self.assertEqual(bo.id, None)
        self.assertEqual(bo.ns, '')
        self.assertRaises(NotImplementedError, bo.etree_element)
        element = etree.Element(config.NS + 'Base')
        self.assertRaises(TypeError, bo.from_element)
        self.assertRaises(TypeError, bo.from_element, element)
        bo.__name__ = 'NotABaseObject'
        self.assertRaises(TypeError, bo.from_element, element)
        # Note that we can coax baseclasses not to throw errors
        bo.__name__ = 'Base'
        bo.ns = config.NS
        bo.from_element(element)
        self.assertEqual(bo.id, None)
        self.assertEqual(bo.ns, config.NS)
        self.assertFalse(bo.etree_element(), None)
        self.assertTrue(len(bo.to_string()) > 1)
    def test_feature(self):
        """A bare _Feature exposes documented defaults and serializes styleUrl."""
        f = kml._Feature(name='A Feature')
        self.assertRaises(NotImplementedError, f.etree_element)
        self.assertEqual(f.name, 'A Feature')
        self.assertEqual(f.visibility, 1)
        self.assertEqual(f.isopen, 0)
        self.assertEqual(f._atom_author, None)
        self.assertEqual(f._atom_link, None)
        self.assertEqual(f.address, None)
        # self.assertEqual(f.phoneNumber, None)
        self.assertEqual(f._snippet, None)
        self.assertEqual(f.description, None)
        self.assertEqual(f._styleUrl, None)
        self.assertEqual(f._styles, [])
        self.assertEqual(f._time_span, None)
        self.assertEqual(f._time_stamp, None)
        # self.assertEqual(f.region, None)
        # self.assertEqual(f.extended_data, None)
        f.__name__ = 'Feature'
        f.styleUrl = '#default'
        self.assertTrue('Feature>' in str(f.to_string()))
        self.assertTrue('#default' in str(f.to_string()))
    def test_container(self):
        """_Container accepts appended features but remains abstract."""
        f = kml._Container(name='A Container')
        # apparently you can add documents to containes
        # d = kml.Document()
        # self.assertRaises(TypeError, f.append, d)
        p = kml.Placemark()
        f.append(p)
        self.assertRaises(NotImplementedError, f.etree_element)
    def test_overlay(self):
        """_Overlay has empty defaults and remains abstract."""
        o = kml._Overlay(name='An Overlay')
        self.assertEqual(o._color, None)
        self.assertEqual(o._drawOrder, None)
        self.assertEqual(o._icon, None)
        self.assertRaises(NotImplementedError, o.etree_element)
    def test_atom_link(self):
        """atom.Link keeps the namespace it was constructed with."""
        ns = '{http://www.opengis.net/kml/2.2}'
        l = atom.Link(ns=ns)
        self.assertEqual(l.ns, ns)
    def test_atom_person(self):
        """atom._Person keeps the namespace it was constructed with."""
        ns = '{http://www.opengis.net/kml/2.2}'
        p = atom._Person(ns=ns)
        self.assertEqual(p.ns, ns)
class BuildKmlTestCase(unittest.TestCase):
    """ Build a simple KML File """
    def test_kml(self):
        """ kml file without contents """
        k = kml.KML()
        self.assertEqual(len(list(k.features())), 0)
        # Serialization differs per backend: lxml, ElementTree with
        # register_namespace, or older ElementTree with ns0 prefixes.
        if config.LXML:
            self.assertEqual(
                str(k.to_string())[:43],
                '<kml xmlns="http://www.opengis.net/kml/2.2"/>' [:43])
        else:
            if hasattr(etree, 'register_namespace'):
                self.assertEqual(str(k.to_string())[:51], '<kml:kml xmlns:kml="http://www.opengis.net/kml/2.2" />'[:51])
            else:
                self.assertEqual(str(k.to_string())[:51], '<ns0:kml xmlns:ns0="http://www.opengis.net/kml/2.2" />'[:51])
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_folder(self):
        """ KML file with folders """
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML()
        f = kml.Folder(ns, 'id', 'name', 'description')
        nf = kml.Folder(ns, 'nested-id', 'nested-name', 'nested-description')
        f.append(nf)
        k.append(f)
        f2 = kml.Folder(ns, 'id2', 'name2', 'description2')
        k.append(f2)
        self.assertEqual(len(list(k.features())), 2)
        self.assertEqual(len(list(list(k.features())[0].features())), 1)
        k2 = kml.KML()
        s = k.to_string()
        k2.from_string(s)
        self.assertEqual(s, k2.to_string())
    def test_placemark(self):
        """Placemarks with Point and LineString geometries round-trip."""
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Point(0.0, 0.0, 0.0)
        p2 = kml.Placemark(ns, 'id2', 'name2', 'description2')
        p2.geometry = LineString([(0, 0, 0), (1, 1, 1)])
        k.append(p)
        k.append(p2)
        self.assertEqual(len(list(k.features())), 2)
        k2 = kml.KML()
        k2.from_string(k.to_string(prettyprint=True))
        self.assertEqual(k.to_string(), k2.to_string())
    def test_schema(self):
        """Schema requires an id and accepts simple fields in several forms."""
        ns = '{http://www.opengis.net/kml/2.2}'
        self.assertRaises(ValueError, kml.Schema, ns)
        s = kml.Schema(ns, 'some_id')
        self.assertEqual(len(list(s.simple_fields)), 0)
        s.append('int', 'Integer', 'An Integer')
        self.assertEqual(list(s.simple_fields)[0]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], 'An Integer')
        s.simple_fields = None
        self.assertEqual(len(list(s.simple_fields)), 0)
        self.assertRaises(
            TypeError, s.append, ('none', 'Integer', 'An Integer'))
        self.assertRaises(
            TypeError, s.simple_fields, [('none', 'Integer', 'An Integer')])
        self.assertRaises(
            TypeError, s.simple_fields, ('int', 'Integer', 'An Integer'))
        fields = {
            'type': 'int',
            'name': 'Integer',
            'displayName': 'An Integer'
        }
        s.simple_fields = fields
        self.assertEqual(list(s.simple_fields)[0]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], 'An Integer')
        s.simple_fields = [['float', 'Float'], fields]
        self.assertEqual(list(s.simple_fields)[0]['type'], 'float')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Float')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], None)
        self.assertEqual(list(s.simple_fields)[1]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[1]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[1]['displayName'], 'An Integer')
    def test_schema_data(self):
        """SchemaData requires a schemaUrl and stores name/value pairs in order."""
        ns = '{http://www.opengis.net/kml/2.2}'
        self.assertRaises(ValueError, kml.SchemaData, ns)
        self.assertRaises(ValueError, kml.SchemaData, ns, '')
        sd = kml.SchemaData(ns, '#default')
        sd.append_data('text', 'Some Text')
        self.assertEqual(len(sd.data), 1)
        sd.append_data(value=1, name='Integer')
        self.assertEqual(len(sd.data), 2)
        self.assertEqual(sd.data[0], {'value': 'Some Text', 'name': 'text'})
        self.assertEqual(sd.data[1], {'value': 1, 'name': 'Integer'})
        data = (('text', 'Some new Text'), {'value': 2, 'name': 'Integer'})
        sd.data = data
        self.assertEqual(len(sd.data), 2)
        self.assertEqual(
            sd.data[0], {'value': 'Some new Text',
                         'name': 'text'})
        self.assertEqual(sd.data[1], {'value': 2, 'name': 'Integer'})
    def test_untyped_extended_data(self):
        """Untyped ExtendedData elements survive a serialize/parse round-trip."""
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Point(0.0, 0.0, 0.0)
        p.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(
                name='info',
                value='so much to see'), kml.UntypedExtendedDataElement(
                name='weather',
                display_name='Weather',
                value='blue skies')
        ])
        self.assertEqual(len(p.extended_data.elements), 2)
        k.append(p)
        k2 = kml.KML()
        k2.from_string(k.to_string(prettyprint=True))
        k.to_string()
        extended_data = list(k2.features())[0].extended_data
        self.assertTrue(extended_data is not None)
        self.assertTrue(len(extended_data.elements), 2)
        self.assertEqual(extended_data.elements[0].name, 'info')
        self.assertEqual(extended_data.elements[0].value, 'so much to see')
        self.assertEqual(extended_data.elements[0].display_name, None)
        self.assertEqual(extended_data.elements[1].name, 'weather')
        self.assertEqual(extended_data.elements[1].value, 'blue skies')
        self.assertEqual(extended_data.elements[1].display_name, 'Weather')
    def test_untyped_extended_data_nested(self):
        """ExtendedData on nested containers stays attached to the right feature."""
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        d = kml.Document(ns, 'docid', 'doc name', 'doc description')
        d.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(name='type',
                                           value='Document')
        ])
        f = kml.Folder(ns, 'fid', 'f name', 'f description')
        f.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(name='type',
                                           value='Folder')
        ])
        k.append(d)
        d.append(f)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        document_data = list(k2.features())[0].extended_data
        folder_data = list(list(k2.features())[0].features())[0].extended_data
        self.assertEqual(document_data.elements[0].name, 'type')
        self.assertEqual(document_data.elements[0].value, 'Document')
        self.assertEqual(folder_data.elements[0].name, 'type')
        self.assertEqual(folder_data.elements[0].value, 'Folder')
    def test_document(self):
        """A Document containing nested Folders and Placemarks round-trips."""
        k = kml.KML()
        ns = '{http://www.opengis.net/kml/2.2}'
        d = kml.Document(ns, 'docid', 'doc name', 'doc description')
        f = kml.Folder(ns, 'fid', 'f name', 'f description')
        k.append(d)
        d.append(f)
        nf = kml.Folder(
            ns, 'nested-fid', 'nested f name', 'nested f description')
        f.append(nf)
        f2 = kml.Folder(ns, 'id2', 'name2', 'description2')
        d.append(f2)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Polygon([(0, 0, 0), (1, 1, 0), (1, 0, 1)])
        p2 = kml.Placemark(ns, 'id2', 'name2', 'description2')
        # p2 does not have a geometry!
        f2.append(p)
        nf.append(p2)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(len(list((list(k.features())[0].features()))), 2)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_author(self):
        """The author property accepts a plain string or an atom.Author."""
        d = kml.Document()
        d.author = 'Christian Ledermann'
        self.assertTrue('Christian Ledermann' in str(d.to_string()))
        a = atom.Author(
            name='Nobody',
            uri='http://localhost',
            email='cl@donotreply.com')
        d.author = a
        self.assertEqual(d.author, 'Nobody')
        self.assertFalse('Christian Ledermann' in str(d.to_string()))
        self.assertTrue('Nobody' in str(d.to_string()))
        self.assertTrue('http://localhost' in str(d.to_string()))
        self.assertTrue('cl@donotreply.com' in str(d.to_string()))
        d2 = kml.Document()
        d2.from_string(d.to_string())
        self.assertEqual(d.to_string(), d2.to_string())
        d.author = None
    def test_link(self):
        """The link property accepts a plain URL string or an atom.Link."""
        d = kml.Document()
        d.link = 'http://localhost'
        self.assertTrue('http://localhost' in str(d.to_string()))
        l = atom.Link(href='#here')
        d.link = l
        self.assertTrue('#here' in str(d.to_string()))
        self.assertRaises(TypeError, d.link, object)
        d2 = kml.Document()
        d2.from_string(d.to_string())
        self.assertEqual(d.to_string(), d2.to_string())
        d.link = None
    def test_address(self):
        """The address property is serialized inside an <address> element."""
        address = '1600 Amphitheatre Parkway, Mountain View, CA 94043, USA'
        d = kml.Document()
        d.address = address
        self.assertTrue(address in str(d.to_string()))
        self.assertTrue('address>' in str(d.to_string()))
    def test_phone_number(self):
        """The phoneNumber property is serialized inside a <phoneNumber> element."""
        phone = '+1 234 567 8901'
        d = kml.Document()
        d.phoneNumber = phone
        self.assertTrue(phone in str(d.to_string()))
        self.assertTrue('phoneNumber>' in str(d.to_string()))
class KmlFromStringTestCase(unittest.TestCase):
    def test_document(self):
        """Parse a Document with a shared Style and two Placemarks; round-trip it."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document targetId="someTargetId">
          <name>Document.kml</name>
          <open>1</open>
          <Style id="exampleStyleDocument">
            <LabelStyle>
              <color>ff0000cc</color>
            </LabelStyle>
          </Style>
          <Placemark>
            <name>Document Feature 1</name>
            <styleUrl>#exampleStyleDocument</styleUrl>
            <Point>
              <coordinates>-122.371,37.816,0</coordinates>
            </Point>
          </Placemark>
          <Placemark targetId="someTargetId">
            <name>Document Feature 2</name>
            <styleUrl>#exampleStyleDocument</styleUrl>
            <Point>
              <coordinates>-122.370,37.817,0</coordinates>
            </Point>
          </Placemark>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(len(list(list(k.features())[0].features())), 2)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_document_booleans(self):
        """Boolean elements parse from both 'true'/'false' and '1'/'0' forms."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document targetId="someTargetId">
          <name>Document.kml</name>
          <visibility>true</visibility>
          <open>1</open>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(list(k.features())[0].visibility, 1)
        self.assertEqual(list(k.features())[0].isopen, 1)
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document targetId="someTargetId">
          <name>Document.kml</name>
          <visibility>0</visibility>
          <open>false</open>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(list(k.features())[0].visibility, 0)
        self.assertEqual(list(k.features())[0].isopen, 0)
    def test_folders(self):
        """Parse a Folder holding a Point, Polygon and LineString; round-trip it."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Folder>
          <name>Folder.kml</name>
          <open>1</open>
          <description>
            A folder is a container that can hold multiple other objects
          </description>
          <Placemark>
            <name>Folder object 1 (Placemark)</name>
            <Point>
              <coordinates>-122.377588,37.830266,0</coordinates>
            </Point>
          </Placemark>
          <Placemark>
            <name>Folder object 2 (Polygon)</name>
            <Polygon>
              <outerBoundaryIs>
                <LinearRing>
                  <coordinates>
                    -122.377830,37.830445,0
                    -122.377576,37.830631,0
                    -122.377840,37.830642,0
                    -122.377830,37.830445,0
                  </coordinates>
                </LinearRing>
              </outerBoundaryIs>
            </Polygon>
          </Placemark>
          <Placemark>
            <name>Folder object 3 (Path)</name>
            <LineString>
              <tessellate>1</tessellate>
              <coordinates>
                -122.378009,37.830128,0 -122.377885,37.830379,0
              </coordinates>
            </LineString>
          </Placemark>
        </Folder>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(len(list(list(k.features())[0].features())), 3)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_placemark(self):
        """Parse a simple Placemark with a Point; round-trip it."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Placemark>
          <name>Simple placemark</name>
          <description>Attached to the ground. Intelligently places itself
           at the height of the underlying terrain.</description>
          <Point>
            <coordinates>-122.0822035425683,37.42228990140251,0</coordinates>
          </Point>
        </Placemark>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(list(k.features())[0].name, "Simple placemark")
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_extended_data(self):
        """Parse ExtendedData with Data (CDATA displayName) and SchemaData."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Placemark>
          <name>Simple placemark</name>
          <description></description>
          <Point>
            <coordinates>-122.0822035425683,37.42228990140251,0</coordinates>
          </Point>
          <ExtendedData>
            <Data name="holeNumber">
              <displayName><![CDATA[
                  <b>This is hole </b>
              ]]></displayName>
              <value>1</value>
            </Data>
            <Data name="holePar">
              <displayName><![CDATA[
                  <i>The par for this hole is </i>
              ]]></displayName>
              <value>4</value>
            </Data>
            <SchemaData schemaUrl="#TrailHeadTypeId">
              <SimpleData name="TrailHeadName">Mount Everest</SimpleData>
              <SimpleData name="TrailLength">347.45</SimpleData>
              <SimpleData name="ElevationGain">10000</SimpleData>
            </SchemaData>
          </ExtendedData>
        </Placemark>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        extended_data = list(k.features())[0].extended_data
        self.assertEqual(extended_data.elements[0].name, 'holeNumber')
        self.assertEqual(extended_data.elements[0].value, '1')
        self.assertTrue(
            '<b>This is hole </b>' in extended_data.elements[0].display_name)
        self.assertEqual(extended_data.elements[1].name, 'holePar')
        self.assertEqual(extended_data.elements[1].value, '4')
        self.assertTrue(
            '<i>The par for this hole is </i>' in
            extended_data.elements[1].display_name)
        sd = extended_data.elements[2]
        self.assertEqual(sd.data[0]['name'], 'TrailHeadName')
        self.assertEqual(sd.data[1]['value'], '347.45')
    def test_polygon(self):
        """Parse a Polygon with outer and inner boundaries; round-trip it."""
        doc = """
        <kml xmlns="http://www.opengis.net/kml/2.2">
          <Placemark>
            <name>South Africa</name>
            <Polygon>
              <outerBoundaryIs>
                <LinearRing>
                  <coordinates>
                    31.521,-29.257,0
                    31.326,-29.402,0
                    30.902,-29.91,0
                    30.623,-30.424,0
                    30.056,-31.14,0
                    28.926,-32.172,0
                    28.22,-32.772,0
                    27.465,-33.227,0
                    26.419,-33.615,0
                    25.91,-33.667,0
                    25.781,-33.945,0
                    25.173,-33.797,0
                    24.678,-33.987,0
                    23.594,-33.794,0
                    22.988,-33.916,0
                    22.574,-33.864,0
                    21.543,-34.259,0
                    20.689,-34.417,0
                    20.071,-34.795,0
                    19.616,-34.819,0
                    19.193,-34.463,0
                    18.855,-34.444,0
                    18.425,-33.998,0
                    18.377,-34.137,0
                    18.244,-33.868,0
                    18.25,-33.281,0
                    17.925,-32.611,0
                    18.248,-32.429,0
                    18.222,-31.662,0
                    17.567,-30.726,0
                    17.064,-29.879,0
                    17.063,-29.876,0
                    16.345,-28.577,0
                    16.824,-28.082,0
                    17.219,-28.356,0
                    17.387,-28.784,0
                    17.836,-28.856,0
                    18.465,-29.045,0
                    19.002,-28.972,0
                    19.895,-28.461,0
                    19.896,-24.768,0
                    20.166,-24.918,0
                    20.759,-25.868,0
                    20.666,-26.477,0
                    20.89,-26.829,0
                    21.606,-26.727,0
                    22.106,-26.28,0
                    22.58,-25.979,0
                    22.824,-25.5,0
                    23.312,-25.269,0
                    23.734,-25.39,0
                    24.211,-25.67,0
                    25.025,-25.72,0
                    25.665,-25.487,0
                    25.766,-25.175,0
                    25.942,-24.696,0
                    26.486,-24.616,0
                    26.786,-24.241,0
                    27.119,-23.574,0
                    28.017,-22.828,0
                    29.432,-22.091,0
                    29.839,-22.102,0
                    30.323,-22.272,0
                    30.66,-22.152,0
                    31.191,-22.252,0
                    31.67,-23.659,0
                    31.931,-24.369,0
                    31.752,-25.484,0
                    31.838,-25.843,0
                    31.333,-25.66,0
                    31.044,-25.731,0
                    30.95,-26.023,0
                    30.677,-26.398,0
                    30.686,-26.744,0
                    31.283,-27.286,0
                    31.868,-27.178,0
                    32.072,-26.734,0
                    32.83,-26.742,0
                    32.58,-27.47,0
                    32.462,-28.301,0
                    32.203,-28.752,0
                    31.521,-29.257,0
                  </coordinates>
                </LinearRing>
              </outerBoundaryIs>
              <innerBoundaryIs>
                <LinearRing>
                  <coordinates>
                    28.978,-28.956,0
                    28.542,-28.648,0
                    28.074,-28.851,0
                    27.533,-29.243,0
                    26.999,-29.876,0
                    27.749,-30.645,0
                    28.107,-30.546,0
                    28.291,-30.226,0
                    28.848,-30.07,0
                    29.018,-29.744,0
                    29.325,-29.257,0
                    28.978,-28.956,0
                  </coordinates>
                </LinearRing>
              </innerBoundaryIs>
            </Polygon>
          </Placemark>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(isinstance(list(k.features())[0].geometry, Polygon))
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_multipoints(self):
        """Parse a MultiGeometry of 12 Points into a MultiPoint; round-trip it."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
            <Placemark id="feat_2">
              <name>MultiPoint</name>
              <styleUrl>#stylesel_9</styleUrl>
              <MultiGeometry id="geom_0">
                <Point id="geom_5">
                    <coordinates>16,-35,0.0</coordinates>
                </Point>
                <Point id="geom_6">
                    <coordinates>16,-33,0.0</coordinates>
                </Point>
                <Point id="geom_7">
                    <coordinates>16,-31,0.0</coordinates>
                </Point>
                <Point id="geom_8">
                    <coordinates>16,-29,0.0</coordinates>
                </Point>
                <Point id="geom_9">
                    <coordinates>16,-27,0.0</coordinates>
                </Point>
                <Point id="geom_10">
                    <coordinates>16,-25,0.0</coordinates>
                </Point>
                <Point id="geom_11">
                    <coordinates>16,-23,0.0</coordinates>
                </Point>
                <Point id="geom_12">
                    <coordinates>16,-21,0.0</coordinates>
                </Point>
                <Point id="geom_15">
                    <coordinates>18,-35,0.0</coordinates>
                </Point>
                <Point id="geom_16">
                    <coordinates>18,-33,0.0</coordinates>
                </Point>
                <Point id="geom_17">
                    <coordinates>18,-31,0.0</coordinates>
                </Point>
                <Point id="geom_18">
                    <coordinates>18,-29,0.0</coordinates>
                </Point>
              </MultiGeometry>
            </Placemark></kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(isinstance(list(k.features())[0].geometry, MultiPoint))
        self.assertEqual(len(list(k.features())[0].geometry.geoms), 12)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_multilinestrings(self):
        """Parse a MultiGeometry of 4 LineStrings into a MultiLineString."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Placemark>
          <name>Dnipro (Dnieper)</name>
          <MultiGeometry>
          <LineString><coordinates>33.54,46.831,0 33.606,46.869,0 33.662,46.957,0 33.739,47.05,0 33.859,47.149,0 33.976,47.307,0 33.998,47.411,0 34.155,47.49,0 34.448,47.542,0 34.712,47.553,0 34.946,47.521,0 35.088,47.528,0 35.138,47.573,0 35.149,47.657,0 35.106,47.842,0 </coordinates></LineString>
          <LineString><coordinates>33.194,49.094,0 32.884,49.225,0 32.603,49.302,0 31.886,49.555,0 </coordinates></LineString>
          <LineString><coordinates>31.44,50,0 31.48,49.933,0 31.486,49.871,0 31.467,49.754,0 </coordinates></LineString>
          <LineString><coordinates>30.508,51.217,0 30.478,50.904,0 30.479,50.749,0 30.515,50.597,0 </coordinates></LineString>
          </MultiGeometry>
        </Placemark> </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(k.features())[0].geometry, MultiLineString))
        self.assertEqual(len(list(k.features())[0].geometry.geoms), 4)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
def test_multipolygon(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>Italy</name>
<MultiGeometry><Polygon><outerBoundaryIs><LinearRing><coordinates>12.621,35.492,0 12.611,35.489,0 12.603,35.491,0 12.598,35.494,0 12.594,35.494,0 12.556,35.508,0 12.536,35.513,0 12.526,35.517,0 12.534,35.522,0 12.556,35.521,0 12.567,35.519,0 12.613,35.515,0 12.621,35.513,0 12.624,35.512,0 12.622,35.51,0 12.621,35.508,0 12.624,35.502,0 12.621,35.492,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.873,35.852,0 12.857,35.852,0 12.851,35.856,0 12.846,35.863,0 12.847,35.868,0 12.854,35.871,0 12.86,35.872,0 12.867,35.872,0 12.874,35.866,0 12.877,35.856,0 12.873,35.852,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>11.981,36.827,0 11.988,36.824,0 11.994,36.825,0 12,36.836,0 12.038,36.806,0 12.052,36.79,0 12.054,36.767,0 12.031,36.741,0 11.997,36.745,0 11.962,36.765,0 11.938,36.789,0 11.934,36.795,0 11.926,36.812,0 11.923,36.828,0 11.935,36.836,0 11.939,36.837,0 11.947,36.841,0 11.952,36.843,0 11.958,36.84,0 11.968,36.831,0 11.972,36.829,0 11.981,36.827,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.322,37.94,0 12.337,37.933,0 12.355,37.927,0 12.369,37.925,0 12.358,37.914,0 12.343,37.913,0 12.327,37.918,0 12.315,37.925,0 12.3,37.919,0 12.288,37.921,0 12.279,37.929,0 12.274,37.939,0 12.288,37.938,0 12.298,37.941,0 12.306,37.945,0 12.315,37.946,0 12.322,37.94,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.078,37.96,0 12.079,37.95,0 12.065,37.951,0 12.048,37.961,0 12.037,37.974,0 12.03,37.984,0 12.036,37.991,0 12.054,37.992,0 12.065,37.986,0 12.072,37.968,0 12.078,37.96,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>15.643,38.262,0 15.635,38.261,0 15.625,38.261,0 15.584,38.24,0 15.57,38.227,0 15.564,38.214,0 15.56,38.2,0 15.576,38.2,0 15.527,38.137,0 15.501,38.085,0 
15.393,37.976,0 15.303,37.864,0 15.284,37.833,0 15.267,37.812,0 15.242,37.795,0 15.214,37.761,0 15.207,37.747,0 15.209,37.737,0 15.219,37.718,0 15.221,37.706,0 15.217,37.696,0 15.203,37.685,0 15.2,37.675,0 15.197,37.655,0 15.185,37.626,0 15.179,37.604,0 15.164,37.567,0 15.117,37.522,0 15.097,37.494,0 15.092,37.477,0 15.09,37.459,0 15.093,37.36,0 15.097,37.343,0 15.104,37.33,0 15.111,37.322,0 15.181,37.291,0 15.218,37.285,0 15.237,37.275,0 15.253,37.257,0 15.262,37.234,0 15.245,37.246,0 15.236,37.242,0 15.229,37.23,0 15.221,37.22,0 15.222,37.237,0 15.216,37.244,0 15.206,37.244,0 15.193,37.24,0 15.2,37.227,0 15.184,37.207,0 15.195,37.176,0 15.217,37.155,0 15.234,37.165,0 15.248,37.158,0 15.248,37.152,0 15.23,37.149,0 15.232,37.135,0 15.247,37.118,0 15.265,37.11,0 15.289,37.108,0 15.304,37.101,0 15.309,37.086,0 15.303,37.062,0 15.289,37.069,0 15.283,37.061,0 15.284,37.048,0 15.292,37.042,0 15.313,37.044,0 15.322,37.04,0 15.33,37.027,0 15.333,37.011,0 15.325,37.008,0 15.315,37.012,0 15.309,37.018,0 15.304,37.016,0 15.269,37,0 15.275,36.993,0 15.267,36.989,0 15.264,36.987,0 15.269,36.98,0 15.269,36.973,0 15.245,36.972,0 15.227,36.965,0 15.212,36.956,0 15.197,36.952,0 15.175,36.944,0 15.159,36.924,0 15.108,36.82,0 15.107,36.808,0 15.095,36.799,0 15.099,36.779,0 15.118,36.747,0 15.135,36.687,0 15.135,36.675,0 15.115,36.66,0 15.094,36.655,0 15.074,36.659,0 15.056,36.671,0 15.041,36.687,0 15.034,36.694,0 15.021,36.699,0 15.008,36.703,0 14.998,36.702,0 14.994,36.696,0 14.983,36.689,0 14.958,36.698,0 14.919,36.72,0 14.883,36.73,0 14.847,36.726,0 14.781,36.699,0 14.777,36.707,0 14.774,36.71,0 14.761,36.706,0 14.745,36.719,0 14.685,36.726,0 14.672,36.744,0 14.659,36.754,0 14.601,36.772,0 14.583,36.781,0 14.566,36.778,0 14.488,36.793,0 14.476,36.805,0 14.395,36.945,0 14.37,36.973,0 14.279,37.044,0 14.209,37.081,0 14.127,37.112,0 14.089,37.117,0 13.977,37.11,0 13.968,37.108,0 13.949,37.099,0 13.939,37.096,0 13.895,37.101,0 13.833,37.139,0 13.795,37.152,0 13.752,37.159,0 
13.716,37.171,0 13.684,37.189,0 13.599,37.256,0 13.57,37.273,0 13.535,37.282,0 13.489,37.288,0 13.453,37.299,0 13.422,37.314,0 13.373,37.346,0 13.33,37.366,0 13.312,37.381,0 13.303,37.386,0 13.29,37.389,0 13.279,37.393,0 13.254,37.432,0 13.248,37.436,0 13.226,37.446,0 13.215,37.458,0 13.207,37.464,0 13.195,37.466,0 13.19,37.469,0 13.18,37.484,0 13.175,37.487,0 13.052,37.5,0 13.037,37.495,0 13.027,37.493,0 13.017,37.497,0 13.011,37.507,0 13.005,37.527,0 13.001,37.535,0 12.975,37.557,0 12.943,37.568,0 12.863,37.576,0 12.781,37.574,0 12.698,37.563,0 12.66,37.565,0 12.637,37.582,0 12.595,37.638,0 12.578,37.652,0 12.564,37.658,0 12.524,37.658,0 12.507,37.665,0 12.49,37.682,0 12.475,37.703,0 12.466,37.72,0 12.461,37.734,0 12.46,37.748,0 12.457,37.76,0 12.449,37.771,0 12.437,37.783,0 12.428,37.797,0 12.428,37.809,0 12.445,37.816,0 12.447,37.812,0 12.461,37.819,0 12.466,37.823,0 12.464,37.825,0 12.471,37.853,0 12.473,37.854,0 12.478,37.872,0 12.479,37.881,0 12.477,37.886,0 12.468,37.897,0 12.466,37.906,0 12.465,37.913,0 12.465,37.914,0 12.468,37.916,0 12.491,37.954,0 12.497,37.98,0 12.503,37.997,0 12.505,38.011,0 12.493,38.021,0 12.524,38.031,0 12.55,38.055,0 12.577,38.072,0 12.609,38.062,0 12.639,38.079,0 12.652,38.091,0 12.657,38.107,0 12.663,38.116,0 12.677,38.116,0 12.692,38.112,0 12.705,38.111,0 12.726,38.126,0 12.725,38.15,0 12.72,38.175,0 12.732,38.193,0 12.738,38.181,0 12.75,38.182,0 12.761,38.181,0 12.767,38.162,0 12.791,38.117,0 12.819,38.078,0 12.829,38.07,0 12.858,38.058,0 12.869,38.051,0 12.87,38.042,0 12.902,38.028,0 12.945,38.033,0 13.028,38.062,0 13.062,38.083,0 13.07,38.091,0 13.072,38.095,0 13.07,38.101,0 13.069,38.114,0 13.067,38.123,0 13.057,38.133,0 13.055,38.142,0 13.09,38.166,0 13.084,38.174,0 13.09,38.183,0 13.102,38.19,0 13.113,38.193,0 13.123,38.191,0 13.158,38.179,0 13.18,38.176,0 13.208,38.176,0 13.231,38.184,0 13.239,38.207,0 13.255,38.202,0 13.267,38.205,0 13.278,38.21,0 13.297,38.214,0 13.311,38.219,0 13.319,38.22,0 13.324,38.218,0 
13.326,38.211,0 13.327,38.205,0 13.329,38.2,0 13.367,38.179,0 13.372,38.173,0 13.374,38.14,0 13.377,38.131,0 13.392,38.103,0 13.514,38.11,0 13.542,38.094,0 13.54,38.077,0 13.542,38.067,0 13.548,38.056,0 13.558,38.049,0 13.588,38.039,0 13.623,38.015,0 13.652,38.001,0 13.698,37.993,0 13.712,37.988,0 13.708,37.985,0 13.708,37.984,0 13.706,37.98,0 13.727,37.981,0 13.791,37.973,0 13.813,37.978,0 13.858,37.996,0 13.899,38.004,0 13.913,38.012,0 13.925,38.022,0 13.939,38.029,0 14.008,38.038,0 14.021,38.049,0 14.063,38.03,0 14.084,38.024,0 14.107,38.021,0 14.122,38.022,0 14.152,38.029,0 14.274,38.015,0 14.332,38.018,0 14.385,38.029,0 14.433,38.049,0 14.465,38.037,0 14.512,38.044,0 14.635,38.081,0 14.668,38.099,0 14.696,38.121,0 14.734,38.157,0 14.745,38.161,0 14.778,38.159,0 14.799,38.16,0 14.875,38.175,0 14.889,38.182,0 14.898,38.186,0 14.908,38.187,0 14.936,38.186,0 14.945,38.182,0 14.963,38.163,0 14.97,38.159,0 14.982,38.158,0 15.008,38.152,0 15.04,38.153,0 15.049,38.152,0 15.054,38.148,0 15.064,38.135,0 15.069,38.131,0 15.088,38.128,0 15.106,38.133,0 15.123,38.141,0 15.178,38.156,0 15.204,38.183,0 15.241,38.241,0 15.238,38.249,0 15.237,38.251,0 15.237,38.253,0 15.241,38.261,0 15.238,38.265,0 15.244,38.265,0 15.247,38.254,0 15.241,38.23,0 15.246,38.217,0 15.258,38.21,0 15.275,38.207,0 15.292,38.207,0 15.322,38.211,0 15.4,38.232,0 15.423,38.244,0 15.434,38.253,0 15.473,38.268,0 15.513,38.297,0 15.529,38.302,0 15.56,38.3,0 15.616,38.28,0 15.652,38.275,0 15.649,38.266,0 15.643,38.262,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.999,38.371,0 14.987,38.364,0 14.964,38.381,0 14.949,38.396,0 14.946,38.412,0 14.96,38.433,0 14.967,38.433,0 14.967,38.418,0 14.983,38.412,0 14.994,38.403,0 15.002,38.391,0 15.008,38.378,0 14.999,38.371,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.967,38.453,0 14.949,38.451,0 14.935,38.458,0 14.922,38.469,0 
14.908,38.474,0 14.9,38.481,0 14.901,38.498,0 14.91,38.515,0 14.925,38.522,0 14.958,38.522,0 14.967,38.516,0 14.96,38.502,0 14.966,38.497,0 14.975,38.49,0 14.98,38.487,0 14.98,38.481,0 14.953,38.481,0 14.958,38.469,0 14.962,38.465,0 14.967,38.461,0 14.967,38.453,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.361,38.539,0 14.346,38.535,0 14.343,38.547,0 14.357,38.551,0 14.361,38.539,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.864,38.549,0 14.862,38.539,0 14.824,38.552,0 14.794,38.571,0 14.815,38.584,0 14.852,38.585,0 14.867,38.581,0 14.877,38.569,0 14.873,38.565,0 14.869,38.56,0 14.864,38.549,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.585,38.557,0 14.574,38.557,0 14.552,38.562,0 14.544,38.575,0 14.543,38.587,0 14.546,38.588,0 14.564,38.585,0 14.576,38.577,0 14.58,38.566,0 14.585,38.561,0 14.585,38.557,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.177,38.693,0 13.165,38.691,0 13.153,38.695,0 13.153,38.702,0 13.158,38.71,0 13.169,38.717,0 13.186,38.718,0 13.196,38.711,0 13.197,38.708,0 13.177,38.693,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>15.225,38.777,0 15.217,38.773,0 15.206,38.775,0 15.187,38.789,0 15.187,38.793,0 15.194,38.798,0 15.204,38.802,0 15.209,38.806,0 15.212,38.81,0 15.219,38.812,0 15.228,38.81,0 15.235,38.808,0 15.239,38.804,0 15.237,38.796,0 15.232,38.789,0 15.23,38.783,0 15.225,38.777,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.361,39.118,0 8.386,39.105,0 8.418,39.106,0 8.445,39.102,0 8.457,39.073,0 8.459,39.068,0 8.464,39.065,0 8.47,39.065,0 8.477,39.07,0 8.478,39.07,0 8.48,39.072,0 8.484,39.07,0 8.465,39.056,0 8.46,39.05,0 8.464,39.042,0 8.455,39.028,0 
8.447,38.994,0 8.438,38.967,0 8.433,38.963,0 8.422,38.96,0 8.41,38.962,0 8.407,38.967,0 8.406,38.974,0 8.402,38.981,0 8.365,39.029,0 8.35,39.062,0 8.354,39.083,0 8.354,39.091,0 8.347,39.091,0 8.347,39.097,0 8.361,39.118,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.306,39.104,0 8.291,39.099,0 8.27,39.1,0 8.255,39.107,0 8.258,39.118,0 8.258,39.124,0 8.233,39.144,0 8.225,39.157,0 8.231,39.173,0 8.246,39.181,0 8.291,39.188,0 8.306,39.193,0 8.307,39.161,0 8.313,39.12,0 8.306,39.104,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.959,40.712,0 13.945,40.701,0 13.935,40.705,0 13.92,40.704,0 13.904,40.7,0 13.891,40.694,0 13.882,40.699,0 13.86,40.707,0 13.85,40.715,0 13.857,40.735,0 13.862,40.744,0 13.871,40.749,0 13.868,40.752,0 13.863,40.762,0 13.884,40.762,0 13.947,40.745,0 13.966,40.735,0 13.963,40.729,0 13.963,40.723,0 13.966,40.715,0 13.959,40.712,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.427,40.791,0 13.415,40.786,0 13.419,40.796,0 13.424,40.8,0 13.432,40.801,0 13.427,40.791,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.333,41.105,0 8.343,41.098,0 8.345,41.086,0 8.342,41.074,0 8.333,41.064,0 8.275,41.057,0 8.252,41.043,0 8.252,41.016,0 8.247,40.993,0 8.21,40.996,0 8.218,41.005,0 8.222,41.014,0 8.224,41.024,0 8.224,41.033,0 8.229,41.042,0 8.242,41.052,0 8.261,41.064,0 8.276,41.07,0 8.278,41.081,0 8.276,41.095,0 8.278,41.105,0 8.285,41.107,0 8.303,41.105,0 8.306,41.109,0 8.309,41.114,0 8.314,41.118,0 8.327,41.126,0 8.326,41.118,0 8.328,41.112,0 8.333,41.105,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.471,41.19,0 9.474,41.184,0 9.475,41.179,0 9.47,41.172,0 9.464,41.173,0 9.456,41.181,0 9.449,41.186,0 9.442,41.183,0 9.437,41.186,0 9.448,41.205,0 
9.443,41.211,0 9.446,41.22,0 9.454,41.234,0 9.46,41.242,0 9.468,41.241,0 9.475,41.236,0 9.478,41.228,0 9.48,41.224,0 9.479,41.217,0 9.471,41.19,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.239,41.249,0 9.247,41.248,0 9.258,41.249,0 9.269,41.236,0 9.268,41.202,0 9.279,41.195,0 9.275,41.199,0 9.274,41.205,0 9.275,41.212,0 9.279,41.221,0 9.286,41.221,0 9.29,41.209,0 9.289,41.205,0 9.286,41.201,0 9.286,41.195,0 9.3,41.196,0 9.306,41.198,0 9.313,41.201,0 9.317,41.196,0 9.334,41.187,0 9.336,41.211,0 9.353,41.207,0 9.389,41.181,0 9.389,41.187,0 9.397,41.184,0 9.405,41.181,0 9.413,41.181,0 9.423,41.181,0 9.423,41.174,0 9.417,41.171,0 9.415,41.168,0 9.413,41.164,0 9.409,41.16,0 9.421,41.156,0 9.427,41.149,0 9.433,41.14,0 9.443,41.133,0 9.438,41.125,0 9.437,41.115,0 9.443,41.092,0 9.455,41.112,0 9.461,41.12,0 9.471,41.126,0 9.467,41.13,0 9.466,41.134,0 9.463,41.137,0 9.457,41.14,0 9.47,41.146,0 9.482,41.145,0 9.495,41.142,0 9.509,41.14,0 9.514,41.143,0 9.519,41.148,0 9.524,41.15,0 9.533,41.14,0 9.525,41.133,0 9.535,41.128,0 9.541,41.123,0 9.547,41.121,0 9.553,41.126,0 9.56,41.126,0 9.562,41.122,0 9.562,41.121,0 9.564,41.121,0 9.567,41.119,0 9.566,41.107,0 9.563,41.097,0 9.557,41.088,0 9.546,41.077,0 9.544,41.082,0 9.541,41.087,0 9.54,41.092,0 9.522,41.031,0 9.512,41.016,0 9.533,41.016,0 9.525,41.03,0 9.544,41.037,0 9.555,41.034,0 9.558,41.025,0 9.553,41.009,0 9.558,41.009,0 9.559,41.011,0 9.559,41.013,0 9.56,41.016,0 9.566,41.011,0 9.569,41.009,0 9.574,41.009,0 9.589,41.02,0 9.616,41.019,0 9.645,41.011,0 9.663,41.002,0 9.652,40.991,0 9.637,40.992,0 9.62,40.999,0 9.605,41.002,0 9.588,40.996,0 9.583,40.98,0 9.579,40.962,0 9.567,40.948,0 9.572,40.935,0 9.558,40.931,0 9.512,40.934,0 9.512,40.929,0 9.513,40.928,0 9.505,40.927,0 9.512,40.915,0 9.521,40.915,0 9.53,40.919,0 9.54,40.92,0 9.55,40.917,0 9.568,40.908,0 9.574,40.906,0 9.593,40.91,0 9.608,40.918,0 9.623,40.924,0 9.643,40.92,0 9.638,40.911,0 9.632,40.905,0 
9.624,40.9,0 9.615,40.899,0 9.615,40.893,0 9.651,40.879,0 9.656,40.876,0 9.658,40.864,0 9.664,40.858,0 9.672,40.859,0 9.684,40.865,0 9.69,40.856,0 9.7,40.85,0 9.712,40.847,0 9.725,40.845,0 9.691,40.836,0 9.682,40.829,0 9.69,40.817,0 9.69,40.811,0 9.675,40.814,0 9.662,40.809,0 9.658,40.8,0 9.669,40.79,0 9.67,40.801,0 9.676,40.788,0 9.705,40.759,0 9.711,40.745,0 9.715,40.727,0 9.745,40.68,0 9.749,40.667,0 9.754,40.605,0 9.757,40.595,0 9.762,40.587,0 9.769,40.584,0 9.782,40.582,0 9.786,40.576,0 9.787,40.567,0 9.793,40.557,0 9.821,40.536,0 9.827,40.529,0 9.827,40.519,0 9.816,40.502,0 9.813,40.492,0 9.809,40.471,0 9.801,40.455,0 9.779,40.427,0 9.762,40.39,0 9.75,40.377,0 9.728,40.372,0 9.713,40.366,0 9.701,40.353,0 9.684,40.324,0 9.671,40.312,0 9.646,40.296,0 9.635,40.282,0 9.627,40.263,0 9.625,40.248,0 9.629,40.205,0 9.632,40.196,0 9.655,40.144,0 9.666,40.131,0 9.68,40.126,0 9.688,40.12,0 9.711,40.096,0 9.733,40.084,0 9.731,40.068,0 9.694,39.993,0 9.688,39.961,0 9.697,39.934,0 9.703,39.937,0 9.71,39.94,0 9.716,39.94,0 9.718,39.934,0 9.715,39.924,0 9.709,39.922,0 9.702,39.922,0 9.697,39.919,0 9.69,39.906,0 9.685,39.894,0 9.684,39.882,0 9.69,39.871,0 9.684,39.871,0 9.684,39.865,0 9.688,39.863,0 9.693,39.86,0 9.697,39.858,0 9.697,39.852,0 9.685,39.84,0 9.676,39.819,0 9.671,39.793,0 9.669,39.769,0 9.67,39.756,0 9.676,39.732,0 9.677,39.718,0 9.675,39.708,0 9.665,39.691,0 9.663,39.677,0 9.661,39.67,0 9.656,39.663,0 9.652,39.652,0 9.65,39.639,0 9.656,39.594,0 9.654,39.567,0 9.629,39.502,0 9.645,39.484,0 9.64,39.452,0 9.615,39.399,0 9.603,39.355,0 9.601,39.341,0 9.604,39.326,0 9.612,39.316,0 9.635,39.303,0 9.635,39.297,0 9.608,39.289,0 9.582,39.266,0 9.568,39.238,0 9.574,39.214,0 9.566,39.205,0 9.569,39.199,0 9.577,39.194,0 9.581,39.187,0 9.578,39.179,0 9.569,39.159,0 9.567,39.149,0 9.558,39.139,0 9.54,39.134,0 9.523,39.125,0 9.519,39.104,0 9.511,39.108,0 9.508,39.111,0 9.508,39.116,0 9.512,39.124,0 9.497,39.133,0 9.481,39.135,0 9.466,39.132,0 9.451,39.124,0 9.443,39.124,0 
9.439,39.133,0 9.429,39.138,0 9.409,39.146,0 9.384,39.169,0 9.378,39.173,0 9.368,39.177,0 9.346,39.196,0 9.337,39.201,0 9.327,39.203,0 9.313,39.208,0 9.3,39.214,0 9.293,39.221,0 9.286,39.214,0 9.272,39.22,0 9.253,39.225,0 9.217,39.228,0 9.198,39.221,0 9.182,39.207,0 9.17,39.193,0 9.167,39.187,0 9.137,39.194,0 9.114,39.211,0 9.073,39.248,0 9.064,39.243,0 9.056,39.247,0 9.048,39.256,0 9.039,39.262,0 9.025,39.265,0 9.015,39.264,0 9.013,39.26,0 9.026,39.256,0 9.026,39.248,0 9.022,39.24,0 9.027,39.236,0 9.036,39.232,0 9.038,39.227,0 9.039,39.228,0 9.051,39.225,0 9.075,39.23,0 9.08,39.224,0 9.08,39.216,0 9.08,39.212,0 9.039,39.179,0 9.027,39.165,0 9.019,39.146,0 9.017,39.124,0 9.019,39.104,0 9.025,39.086,0 9.033,39.07,0 9.038,39.063,0 9.044,39.058,0 9.046,39.051,0 9.03,39.03,0 9.019,38.995,0 9.026,38.995,0 9.016,38.989,0 9.013,38.99,0 9.005,38.995,0 8.997,38.983,0 8.895,38.902,0 8.889,38.9,0 8.878,38.899,0 8.873,38.896,0 8.862,38.882,0 8.854,38.878,0 8.842,38.88,0 8.828,38.889,0 8.806,38.906,0 8.806,38.885,0 8.791,38.904,0 8.767,38.92,0 8.74,38.93,0 8.717,38.932,0 8.695,38.925,0 8.669,38.91,0 8.652,38.891,0 8.656,38.871,0 8.641,38.864,0 8.635,38.871,0 8.643,38.89,0 8.634,38.895,0 8.616,38.896,0 8.6,38.899,0 8.6,38.906,0 8.616,38.923,0 8.616,38.947,0 8.604,38.965,0 8.581,38.96,0 8.573,39.013,0 8.56,39.057,0 8.553,39.057,0 8.545,39.051,0 8.521,39.061,0 8.505,39.063,0 8.51,39.068,0 8.519,39.083,0 8.505,39.091,0 8.483,39.08,0 8.483,39.084,0 8.478,39.09,0 8.474,39.107,0 8.466,39.119,0 8.455,39.125,0 8.443,39.118,0 8.439,39.128,0 8.439,39.153,0 8.436,39.166,0 8.429,39.173,0 8.419,39.177,0 8.413,39.175,0 8.416,39.166,0 8.41,39.169,0 8.406,39.174,0 8.403,39.181,0 8.402,39.19,0 8.399,39.201,0 8.393,39.204,0 8.386,39.204,0 8.381,39.207,0 8.373,39.222,0 8.372,39.23,0 8.377,39.238,0 8.427,39.283,0 8.433,39.302,0 8.416,39.323,0 8.418,39.339,0 8.383,39.359,0 8.375,39.379,0 8.379,39.388,0 8.396,39.404,0 8.402,39.412,0 8.406,39.427,0 8.404,39.436,0 8.39,39.462,0 8.387,39.465,0 
8.387,39.47,0 8.395,39.481,0 8.422,39.508,0 8.436,39.525,0 8.452,39.558,0 8.464,39.577,0 8.457,39.584,0 8.465,39.598,0 8.463,39.617,0 8.45,39.659,0 8.447,39.704,0 8.443,39.714,0 8.443,39.721,0 8.447,39.731,0 8.445,39.757,0 8.447,39.762,0 8.46,39.76,0 8.469,39.755,0 8.5,39.716,0 8.518,39.702,0 8.539,39.696,0 8.566,39.701,0 8.515,39.713,0 8.505,39.721,0 8.507,39.738,0 8.521,39.755,0 8.536,39.771,0 8.546,39.783,0 8.539,39.783,0 8.536,39.776,0 8.531,39.77,0 8.525,39.766,0 8.519,39.762,0 8.53,39.772,0 8.541,39.789,0 8.549,39.807,0 8.553,39.821,0 8.556,39.852,0 8.554,39.864,0 8.546,39.878,0 8.524,39.899,0 8.495,39.912,0 8.464,39.914,0 8.436,39.899,0 8.443,39.893,0 8.446,39.898,0 8.45,39.899,0 8.456,39.898,0 8.464,39.899,0 8.452,39.893,0 8.445,39.883,0 8.436,39.858,0 8.429,39.865,0 8.438,39.877,0 8.432,39.885,0 8.419,39.892,0 8.404,39.903,0 8.401,39.903,0 8.399,39.905,0 8.395,39.912,0 8.394,39.92,0 8.397,39.927,0 8.4,39.933,0 8.402,39.94,0 8.394,39.977,0 8.395,39.988,0 8.407,40.01,0 8.408,40.022,0 8.395,40.036,0 8.381,40.03,0 8.378,40.033,0 8.385,40.042,0 8.402,40.05,0 8.405,40.049,0 8.435,40.051,0 8.453,40.056,0 8.46,40.057,0 8.469,40.062,0 8.48,40.074,0 8.488,40.089,0 8.491,40.104,0 8.486,40.118,0 8.468,40.144,0 8.464,40.163,0 8.46,40.216,0 8.477,40.262,0 8.477,40.292,0 8.463,40.314,0 8.442,40.331,0 8.416,40.345,0 8.409,40.338,0 8.387,40.352,0 8.384,40.372,0 8.395,40.424,0 8.391,40.442,0 8.38,40.468,0 8.366,40.492,0 8.35,40.502,0 8.332,40.51,0 8.324,40.531,0 8.32,40.555,0 8.313,40.578,0 8.292,40.595,0 8.268,40.594,0 8.217,40.57,0 8.196,40.578,0 8.206,40.598,0 8.217,40.612,0 8.194,40.617,0 8.177,40.606,0 8.167,40.586,0 8.162,40.564,0 8.154,40.578,0 8.148,40.593,0 8.141,40.619,0 8.141,40.625,0 8.158,40.632,0 8.174,40.641,0 8.186,40.656,0 8.189,40.68,0 8.192,40.68,0 8.196,40.685,0 8.198,40.691,0 8.193,40.694,0 8.18,40.695,0 8.174,40.697,0 8.168,40.701,0 8.154,40.719,0 8.146,40.726,0 8.134,40.729,0 8.21,40.865,0 8.216,40.881,0 8.217,40.899,0 8.21,40.914,0 8.193,40.92,0 
8.179,40.928,0 8.183,40.945,0 8.194,40.963,0 8.203,40.975,0 8.21,40.975,0 8.213,40.963,0 8.221,40.962,0 8.229,40.962,0 8.237,40.955,0 8.236,40.946,0 8.232,40.934,0 8.23,40.921,0 8.234,40.91,0 8.278,40.865,0 8.311,40.85,0 8.422,40.839,0 8.478,40.826,0 8.501,40.824,0 8.521,40.827,0 8.599,40.853,0 8.619,40.866,0 8.635,40.881,0 8.641,40.896,0 8.71,40.92,0 8.734,40.921,0 8.752,40.919,0 8.765,40.914,0 8.823,40.947,0 8.84,40.961,0 8.876,41.008,0 8.889,41.016,0 8.887,41.02,0 8.887,41.021,0 8.886,41.022,0 8.882,41.023,0 8.914,41.032,0 8.923,41.037,0 8.93,41.043,0 8.941,41.061,0 8.947,41.064,0 8.959,41.07,0 8.976,41.082,0 8.991,41.097,0 9.006,41.122,0 9.025,41.129,0 9.094,41.135,0 9.108,41.139,0 9.136,41.16,0 9.142,41.153,0 9.158,41.169,0 9.164,41.184,0 9.163,41.225,0 9.172,41.243,0 9.191,41.251,0 9.213,41.256,0 9.231,41.262,0 9.233,41.253,0 9.239,41.249,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.435,41.217,0 9.395,41.211,0 9.377,41.213,0 9.373,41.222,0 9.373,41.23,0 9.378,41.234,0 9.385,41.237,0 9.392,41.241,0 9.396,41.248,0 9.398,41.256,0 9.402,41.258,0 9.408,41.258,0 9.414,41.262,0 9.422,41.261,0 9.427,41.254,0 9.431,41.246,0 9.43,41.238,0 9.429,41.229,0 9.431,41.225,0 9.434,41.221,0 9.435,41.217,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.316,42.341,0 10.313,42.324,0 10.294,42.328,0 10.297,42.345,0 10.306,42.352,0 10.316,42.341,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.922,42.334,0 10.909,42.325,0 10.874,42.36,0 10.862,42.366,0 10.871,42.376,0 10.877,42.387,0 10.884,42.392,0 10.896,42.386,0 10.907,42.378,0 10.919,42.356,0 10.931,42.346,0 10.926,42.339,0 10.922,42.334,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.095,42.577,0 10.086,42.572,0 10.072,42.573,0 10.059,42.576,0 10.05,42.582,0 10.053,42.589,0 
10.063,42.592,0 10.073,42.6,0 10.08,42.614,0 10.084,42.615,0 10.088,42.604,0 10.092,42.596,0 10.096,42.591,0 10.098,42.588,0 10.098,42.584,0 10.095,42.577,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.431,42.816,0 10.437,42.804,0 10.431,42.787,0 10.421,42.776,0 10.407,42.769,0 10.389,42.763,0 10.408,42.757,0 10.426,42.741,0 10.431,42.722,0 10.416,42.709,0 10.411,42.718,0 10.404,42.719,0 10.394,42.718,0 10.382,42.722,0 10.378,42.728,0 10.368,42.746,0 10.365,42.75,0 10.352,42.755,0 10.338,42.765,0 10.326,42.765,0 10.314,42.743,0 10.305,42.76,0 10.266,42.744,0 10.246,42.757,0 10.241,42.742,0 10.236,42.736,0 10.23,42.735,0 10.148,42.737,0 10.125,42.743,0 10.107,42.757,0 10.102,42.784,0 10.112,42.801,0 10.134,42.812,0 10.159,42.817,0 10.18,42.819,0 10.19,42.817,0 10.213,42.808,0 10.225,42.804,0 10.243,42.803,0 10.266,42.804,0 10.266,42.809,0 10.265,42.81,0 10.263,42.81,0 10.26,42.812,0 10.273,42.819,0 10.273,42.826,0 10.273,42.827,0 10.29,42.825,0 10.327,42.826,0 10.323,42.811,0 10.333,42.806,0 10.348,42.806,0 10.355,42.808,0 10.359,42.817,0 10.366,42.823,0 10.375,42.827,0 10.382,42.832,0 10.393,42.858,0 10.401,42.869,0 10.413,42.873,0 10.422,42.871,0 10.432,42.864,0 10.439,42.855,0 10.444,42.845,0 10.437,42.838,0 10.432,42.828,0 10.431,42.816,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.844,43.06,0 9.848,43.058,0 9.854,43.059,0 9.843,43.035,0 9.828,43.019,0 9.81,43.017,0 9.793,43.037,0 9.812,43.071,0 9.827,43.081,0 9.841,43.065,0 9.842,43.063,0 9.844,43.06,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.122,46.972,0 12.128,46.949,0 12.135,46.937,0 12.142,46.928,0 12.142,46.919,0 12.127,46.909,0 12.137,46.906,0 12.161,46.903,0 12.172,46.899,0 12.184,46.891,0 12.189,46.885,0 12.195,46.88,0 12.209,46.877,0 12.251,46.876,0 12.267,46.868,0 12.276,46.846,0 12.276,46.834,0 
12.273,46.827,0 12.27,46.82,0 12.267,46.808,0 12.267,46.795,0 12.269,46.789,0 12.275,46.785,0 12.284,46.78,0 12.305,46.774,0 12.326,46.772,0 12.343,46.765,0 12.351,46.743,0 12.37,46.711,0 12.405,46.69,0 12.446,46.679,0 12.5,46.672,0 12.531,46.658,0 12.547,46.652,0 12.562,46.651,0 12.62,46.656,0 12.67,46.653,0 12.679,46.65,0 12.697,46.641,0 12.707,46.638,0 12.716,46.638,0 12.732,46.642,0 12.74,46.643,0 12.774,46.635,0 12.83,46.61,0 13.065,46.598,0 13.146,46.585,0 13.21,46.558,0 13.231,46.552,0 13.271,46.551,0 13.373,46.566,0 13.417,46.56,0 13.478,46.564,0 13.485,46.562,0 13.499,46.551,0 13.507,46.547,0 13.549,46.546,0 13.67,46.519,0 13.685,46.518,0 13.701,46.52,0 13.701,46.512,0 13.699,46.505,0 13.695,46.499,0 13.69,46.493,0 13.688,46.468,0 13.677,46.452,0 13.659,46.445,0 13.634,46.446,0 13.6,46.443,0 13.576,46.427,0 13.554,46.406,0 13.53,46.388,0 13.484,46.371,0 13.46,46.359,0 13.447,46.355,0 13.434,46.354,0 13.423,46.345,0 13.41,46.324,0 13.391,46.302,0 13.365,46.29,0 13.373,46.28,0 13.379,46.268,0 13.385,46.243,0 13.385,46.243,0 13.385,46.243,0 13.398,46.231,0 13.402,46.217,0 13.41,46.208,0 13.437,46.211,0 13.423,46.229,0 13.438,46.225,0 13.468,46.223,0 13.482,46.218,0 13.51,46.214,0 13.529,46.205,0 13.559,46.184,0 13.584,46.181,0 13.614,46.184,0 13.637,46.18,0 13.645,46.162,0 13.616,46.125,0 13.505,46.066,0 13.482,46.045,0 13.49,46.039,0 13.493,46.032,0 13.49,46.026,0 13.482,46.018,0 13.477,46.016,0 13.462,46.006,0 13.475,45.996,0 13.479,45.993,0 13.48,45.992,0 13.481,45.991,0 13.482,45.99,0 13.482,45.989,0 13.509,45.967,0 13.539,45.969,0 13.572,45.98,0 13.606,45.985,0 13.623,45.966,0 13.608,45.927,0 13.569,45.865,0 13.566,45.83,0 13.581,45.809,0 13.609,45.799,0 13.644,45.796,0 13.66,45.792,0 13.709,45.765,0 13.779,45.743,0 13.858,45.649,0 13.869,45.641,0 13.884,45.635,0 13.893,45.635,0 13.895,45.632,0 13.887,45.619,0 13.848,45.585,0 13.801,45.581,0 13.761,45.596,0 13.712,45.593,0 13.719,45.6,0 13.731,45.613,0 13.757,45.613,0 13.787,45.611,0 13.809,45.614,0 
13.796,45.617,0 13.787,45.624,0 13.778,45.635,0 13.74,45.649,0 13.758,45.655,0 13.754,45.672,0 13.74,45.691,0 13.727,45.703,0 13.648,45.762,0 13.63,45.772,0 13.575,45.789,0 13.552,45.792,0 13.535,45.782,0 13.525,45.76,0 13.529,45.74,0 13.555,45.737,0 13.519,45.725,0 13.514,45.721,0 13.508,45.714,0 13.481,45.71,0 13.47,45.707,0 13.452,45.694,0 13.429,45.681,0 13.402,45.675,0 13.377,45.683,0 13.392,45.686,0 13.41,45.691,0 13.425,45.698,0 13.432,45.707,0 13.423,45.724,0 13.382,45.73,0 13.37,45.744,0 13.352,45.74,0 13.255,45.756,0 13.246,45.759,0 13.222,45.776,0 13.216,45.779,0 13.206,45.778,0 13.17,45.768,0 13.158,45.754,0 13.15,45.751,0 13.14,45.755,0 13.132,45.769,0 13.12,45.772,0 13.111,45.767,0 13.109,45.758,0 13.112,45.749,0 13.124,45.744,0 13.124,45.737,0 13.101,45.736,0 13.081,45.727,0 13.07,45.713,0 13.076,45.697,0 13.092,45.689,0 13.112,45.691,0 13.15,45.703,0 13.139,45.689,0 13.104,45.669,0 13.096,45.652,0 13.086,45.642,0 13.061,45.636,0 12.982,45.635,0 12.944,45.628,0 12.781,45.553,0 12.612,45.496,0 12.513,45.47,0 12.497,45.46,0 12.488,45.456,0 12.452,45.45,0 12.424,45.438,0 12.411,45.436,0 12.419,45.451,0 12.43,45.464,0 12.436,45.475,0 12.431,45.484,0 12.441,45.483,0 12.448,45.484,0 12.452,45.489,0 12.452,45.498,0 12.459,45.498,0 12.463,45.489,0 12.468,45.485,0 12.472,45.486,0 12.479,45.491,0 12.466,45.504,0 12.477,45.503,0 12.488,45.504,0 12.498,45.506,0 12.5,45.504,0 12.501,45.506,0 12.504,45.503,0 12.507,45.499,0 12.507,45.498,0 12.504,45.498,0 12.493,45.498,0 12.493,45.491,0 12.516,45.492,0 12.521,45.505,0 12.522,45.519,0 12.531,45.525,0 12.549,45.527,0 12.563,45.531,0 12.574,45.54,0 12.582,45.553,0 12.57,45.549,0 12.545,45.536,0 12.538,45.536,0 12.519,45.55,0 12.511,45.559,0 12.507,45.573,0 12.486,45.565,0 12.459,45.548,0 12.443,45.53,0 12.452,45.518,0 12.452,45.512,0 12.435,45.512,0 12.418,45.523,0 12.411,45.518,0 12.404,45.518,0 12.397,45.539,0 12.385,45.523,0 12.391,45.514,0 12.425,45.504,0 12.425,45.498,0 12.412,45.493,0 12.394,45.491,0 
12.381,45.494,0 12.384,45.504,0 12.351,45.505,0 12.31,45.489,0 12.273,45.463,0 12.253,45.436,0 12.253,45.43,0 12.259,45.43,0 12.251,45.42,0 12.247,45.411,0 12.249,45.402,0 12.259,45.395,0 12.25,45.385,0 12.248,45.378,0 12.249,45.371,0 12.246,45.361,0 12.238,45.358,0 12.229,45.357,0 12.224,45.354,0 12.233,45.34,0 12.221,45.327,0 12.217,45.316,0 12.209,45.309,0 12.188,45.306,0 12.175,45.31,0 12.164,45.316,0 12.155,45.313,0 12.15,45.292,0 12.16,45.283,0 12.169,45.262,0 12.181,45.258,0 12.192,45.263,0 12.2,45.274,0 12.203,45.288,0 12.198,45.299,0 12.218,45.294,0 12.222,45.283,0 12.221,45.269,0 12.225,45.251,0 12.214,45.248,0 12.212,45.243,0 12.216,45.237,0 12.225,45.23,0 12.222,45.216,0 12.231,45.204,0 12.248,45.197,0 12.267,45.196,0 12.264,45.2,0 12.263,45.201,0 12.259,45.203,0 12.274,45.211,0 12.296,45.226,0 12.308,45.23,0 12.299,45.215,0 12.305,45.201,0 12.316,45.186,0 12.322,45.172,0 12.322,45.139,0 12.329,45.101,0 12.319,45.103,0 12.308,45.108,0 12.309,45.114,0 12.308,45.124,0 12.308,45.128,0 12.298,45.106,0 12.297,45.088,0 12.307,45.078,0 12.329,45.08,0 12.326,45.083,0 12.324,45.086,0 12.322,45.093,0 12.341,45.081,0 12.354,45.067,0 12.364,45.052,0 12.377,45.039,0 12.377,45.032,0 12.369,45.031,0 12.365,45.029,0 12.361,45.027,0 12.356,45.024,0 12.369,45.011,0 12.384,45.026,0 12.387,45.039,0 12.381,45.051,0 12.369,45.065,0 12.384,45.056,0 12.402,45.05,0 12.414,45.043,0 12.411,45.032,0 12.427,45.02,0 12.435,45.015,0 12.445,45.011,0 12.465,44.992,0 12.487,44.976,0 12.5,44.983,0 12.497,44.984,0 12.49,44.983,0 12.487,44.983,0 12.487,44.991,0 12.503,44.991,0 12.517,44.987,0 12.528,44.98,0 12.535,44.97,0 12.534,44.961,0 12.524,44.95,0 12.528,44.943,0 12.519,44.934,0 12.516,44.928,0 12.513,44.922,0 12.507,44.922,0 12.5,44.921,0 12.495,44.91,0 12.493,44.878,0 12.488,44.862,0 12.475,44.845,0 12.445,44.82,0 12.444,44.825,0 12.439,44.835,0 12.433,44.846,0 12.425,44.854,0 12.44,44.877,0 12.444,44.89,0 12.439,44.901,0 12.427,44.905,0 12.416,44.9,0 12.407,44.891,0 12.404,44.884,0 
12.393,44.868,0 12.392,44.859,0 12.417,44.851,0 12.416,44.843,0 12.409,44.836,0 12.397,44.833,0 12.397,44.826,0 12.404,44.825,0 12.417,44.821,0 12.425,44.82,0 12.417,44.803,0 12.398,44.794,0 12.376,44.792,0 12.358,44.804,0 12.347,44.815,0 12.322,44.833,0 12.304,44.843,0 12.293,44.843,0 12.267,44.826,0 12.267,44.82,0 12.281,44.82,0 12.254,44.751,0 12.247,44.711,0 12.253,44.668,0 12.266,44.636,0 12.276,44.62,0 12.284,44.614,0 12.286,44.602,0 12.281,44.532,0 12.284,44.487,0 12.315,44.387,0 12.319,44.361,0 12.322,44.353,0 12.326,44.348,0 12.34,44.334,0 12.343,44.329,0 12.345,44.308,0 12.351,44.288,0 12.369,44.25,0 12.391,44.222,0 12.418,44.195,0 12.459,44.166,0 12.479,44.139,0 12.511,44.114,0 12.548,44.093,0 12.575,44.085,0 12.632,44.03,0 12.662,44.008,0 12.692,43.99,0 12.711,43.983,0 12.757,43.972,0 12.804,43.967,0 12.823,43.958,0 12.863,43.935,0 12.929,43.916,0 12.939,43.904,0 12.948,43.897,0 13.254,43.703,0 13.371,43.65,0 13.39,43.644,0 13.4,43.635,0 13.447,43.623,0 13.474,43.612,0 13.484,43.616,0 13.491,43.623,0 13.497,43.627,0 13.5,43.628,0 13.502,43.63,0 13.505,43.633,0 13.511,43.633,0 13.517,43.631,0 13.52,43.627,0 13.522,43.622,0 13.525,43.62,0 13.544,43.613,0 13.558,43.596,0 13.57,43.58,0 13.579,43.573,0 13.599,43.569,0 13.616,43.56,0 13.625,43.547,0 13.618,43.531,0 13.761,43.264,0 13.777,43.243,0 13.781,43.236,0 13.787,43.2,0 13.791,43.192,0 13.803,43.178,0 13.835,43.127,0 13.849,43.092,0 13.866,43.007,0 13.945,42.798,0 13.981,42.73,0 14.002,42.698,0 14.064,42.625,0 14.069,42.609,0 14.076,42.599,0 14.221,42.47,0 14.285,42.428,0 14.357,42.393,0 14.388,42.373,0 14.43,42.321,0 14.561,42.225,0 14.596,42.208,0 14.654,42.191,0 14.694,42.185,0 14.71,42.175,0 14.718,42.16,0 14.723,42.119,0 14.73,42.099,0 14.741,42.084,0 14.758,42.079,0 14.781,42.075,0 14.8,42.066,0 14.836,42.044,0 14.871,42.032,0 14.953,42.021,0 14.994,42.01,0 15.008,42.001,0 15.035,41.974,0 15.046,41.969,0 15.064,41.964,0 15.105,41.942,0 15.124,41.934,0 15.166,41.927,0 15.282,41.928,0 
15.401,41.908,0 15.447,41.907,0 15.612,41.928,0 15.775,41.921,0 16.028,41.944,0 16.112,41.928,0 16.112,41.926,0 16.141,41.92,0 16.161,41.892,0 16.18,41.893,0 16.177,41.877,0 16.184,41.858,0 16.193,41.821,0 16.194,41.808,0 16.193,41.791,0 16.185,41.779,0 16.167,41.763,0 16.146,41.749,0 16.128,41.742,0 16.108,41.737,0 16.09,41.726,0 16.064,41.701,0 16.028,41.68,0 15.926,41.64,0 15.901,41.614,0 15.892,41.577,0 15.897,41.536,0 15.912,41.503,0 15.934,41.479,0 15.962,41.459,0 16.022,41.428,0 16.086,41.412,0 16.101,41.403,0 16.115,41.393,0 16.302,41.328,0 16.461,41.262,0 16.521,41.25,0 16.539,41.239,0 16.555,41.227,0 16.594,41.207,0 16.831,41.146,0 16.852,41.133,0 16.859,41.133,0 16.859,41.14,0 16.865,41.14,0 16.886,41.124,0 17.058,41.082,0 17.204,41.021,0 17.277,40.98,0 17.311,40.955,0 17.348,40.912,0 17.362,40.906,0 17.378,40.902,0 17.414,40.881,0 17.476,40.83,0 17.493,40.824,0 17.513,40.82,0 17.549,40.802,0 17.635,40.785,0 17.646,40.78,0 17.749,40.747,0 17.844,40.694,0 17.922,40.683,0 17.956,40.67,0 17.956,40.647,0 17.967,40.647,0 17.993,40.653,0 18.008,40.65,0 18.012,40.644,0 18.012,40.635,0 18.016,40.625,0 18.04,40.608,0 18.044,40.602,0 18.038,40.557,0 18.12,40.504,0 18.212,40.464,0 18.232,40.461,0 18.239,40.457,0 18.259,40.43,0 18.271,40.421,0 18.304,40.4,0 18.33,40.366,0 18.344,40.351,0 18.362,40.345,0 18.371,40.338,0 18.438,40.268,0 18.501,40.152,0 18.505,40.146,0 18.51,40.142,0 18.517,40.139,0 18.512,40.127,0 18.514,40.12,0 18.518,40.114,0 18.517,40.104,0 18.509,40.094,0 18.492,40.084,0 18.484,40.055,0 18.471,40.043,0 18.435,40.022,0 18.412,39.979,0 18.408,39.968,0 18.405,39.947,0 18.395,39.925,0 18.393,39.916,0 18.4,39.89,0 18.401,39.878,0 18.387,39.825,0 18.39,39.817,0 18.384,39.814,0 18.374,39.8,0 18.369,39.796,0 18.347,39.798,0 18.339,39.8,0 18.331,39.803,0 18.283,39.833,0 18.266,39.837,0 18.225,39.837,0 18.212,39.839,0 18.187,39.852,0 18.162,39.86,0 18.131,39.883,0 18.095,39.903,0 18.082,39.906,0 18.072,39.911,0 18.008,39.986,0 17.996,39.995,0 
17.996,40.002,0 18.012,40.003,0 18.021,40.01,0 18.023,40.021,0 18.016,40.036,0 18.006,40.045,0 17.979,40.051,0 17.968,40.057,0 18.003,40.074,0 18.012,40.096,0 17.998,40.12,0 17.968,40.146,0 17.941,40.163,0 17.927,40.176,0 17.92,40.191,0 17.92,40.21,0 17.917,40.227,0 17.912,40.24,0 17.9,40.249,0 17.913,40.249,0 17.913,40.255,0 17.864,40.285,0 17.848,40.29,0 17.513,40.303,0 17.494,40.307,0 17.441,40.331,0 17.431,40.331,0 17.41,40.33,0 17.4,40.331,0 17.393,40.335,0 17.375,40.348,0 17.369,40.351,0 17.352,40.355,0 17.297,40.379,0 17.241,40.395,0 17.213,40.406,0 17.201,40.42,0 17.224,40.428,0 17.244,40.441,0 17.248,40.457,0 17.228,40.474,0 17.248,40.48,0 17.296,40.473,0 17.317,40.482,0 17.324,40.498,0 17.305,40.499,0 17.262,40.488,0 17.264,40.491,0 17.269,40.496,0 17.248,40.503,0 17.23,40.497,0 17.211,40.487,0 17.191,40.482,0 17.182,40.485,0 17.177,40.493,0 17.172,40.502,0 17.167,40.509,0 17.157,40.512,0 17.134,40.512,0 17.125,40.515,0 17.05,40.519,0 16.977,40.492,0 16.913,40.445,0 16.783,40.301,0 16.762,40.269,0 16.738,40.211,0 16.731,40.2,0 16.716,40.193,0 16.68,40.146,0 16.625,40.108,0 16.605,40.084,0 16.597,40.046,0 16.6,40.034,0 16.614,39.996,0 16.632,39.966,0 16.622,39.953,0 16.606,39.943,0 16.59,39.92,0 16.543,39.885,0 16.509,39.837,0 16.492,39.805,0 16.49,39.775,0 16.503,39.747,0 16.529,39.721,0 16.529,39.714,0 16.516,39.689,0 16.546,39.661,0 16.592,39.636,0 16.625,39.625,0 16.75,39.62,0 16.783,39.611,0 16.799,39.603,0 16.817,39.591,0 16.831,39.576,0 16.838,39.56,0 16.847,39.552,0 16.906,39.529,0 16.954,39.499,0 16.971,39.495,0 16.996,39.492,0 17.012,39.486,0 17.024,39.475,0 17.036,39.461,0 17.058,39.441,0 17.089,39.422,0 17.125,39.409,0 17.159,39.406,0 17.123,39.338,0 17.115,39.283,0 17.115,39.269,0 17.118,39.256,0 17.125,39.244,0 17.143,39.222,0 17.146,39.21,0 17.141,39.179,0 17.123,39.121,0 17.125,39.091,0 17.148,39.054,0 17.152,39.046,0 17.159,39.04,0 17.193,39.031,0 17.207,39.029,0 17.187,39.019,0 17.177,39.012,0 17.173,39.005,0 17.172,38.966,0 
17.173,38.96,0 17.139,38.936,0 17.136,38.932,0 17.128,38.929,0 17.119,38.919,0 17.105,38.899,0 17.096,38.919,0 17.071,38.923,0 17.043,38.916,0 17.023,38.906,0 16.997,38.929,0 16.982,38.937,0 16.958,38.94,0 16.936,38.938,0 16.839,38.918,0 16.728,38.879,0 16.688,38.856,0 16.68,38.847,0 16.671,38.84,0 16.611,38.816,0 16.586,38.798,0 16.575,38.785,0 16.564,38.756,0 16.551,38.741,0 16.539,38.723,0 16.535,38.7,0 16.547,38.693,0 16.55,38.69,0 16.549,38.672,0 16.559,38.596,0 16.578,38.528,0 16.578,38.503,0 16.57,38.429,0 16.562,38.416,0 16.523,38.387,0 16.509,38.371,0 16.498,38.369,0 16.468,38.348,0 16.436,38.34,0 16.34,38.301,0 16.307,38.277,0 16.17,38.143,0 16.152,38.111,0 16.126,38.005,0 16.112,37.973,0 16.102,37.96,0 16.091,37.949,0 16.078,37.94,0 16.064,37.932,0 16.016,37.924,0 16.002,37.919,0 15.943,37.933,0 15.762,37.925,0 15.736,37.931,0 15.709,37.941,0 15.685,37.953,0 15.666,37.967,0 15.646,37.988,0 15.636,38.009,0 15.639,38.027,0 15.659,38.042,0 15.633,38.074,0 15.625,38.092,0 15.628,38.107,0 15.642,38.126,0 15.648,38.143,0 15.647,38.162,0 15.639,38.186,0 15.633,38.22,0 15.651,38.241,0 15.685,38.253,0 15.787,38.278,0 15.796,38.285,0 15.799,38.291,0 15.813,38.3,0 15.817,38.306,0 15.83,38.351,0 15.905,38.474,0 15.918,38.517,0 15.916,38.55,0 15.901,38.578,0 15.871,38.604,0 15.864,38.608,0 15.851,38.613,0 15.845,38.618,0 15.836,38.628,0 15.834,38.634,0 15.836,38.639,0 15.837,38.649,0 15.845,38.66,0 15.864,38.668,0 15.905,38.679,0 15.969,38.712,0 16.003,38.725,0 16.049,38.728,0 16.121,38.721,0 16.137,38.724,0 16.153,38.731,0 16.18,38.748,0 16.201,38.776,0 16.216,38.814,0 16.222,38.856,0 16.221,38.899,0 16.215,38.919,0 16.205,38.934,0 16.19,38.943,0 16.169,38.947,0 16.155,38.955,0 16.14,38.974,0 16.084,39.075,0 16.043,39.31,0 16.032,39.345,0 15.955,39.489,0 15.934,39.513,0 15.905,39.536,0 15.877,39.551,0 15.868,39.564,0 15.865,39.588,0 15.851,39.615,0 15.837,39.652,0 15.816,39.679,0 15.807,39.695,0 15.789,39.796,0 15.789,39.79,0 15.784,39.81,0 15.779,39.82,0 
15.772,39.824,0 15.77,39.83,0 15.783,39.868,0 15.775,39.891,0 15.742,39.929,0 15.735,39.943,0 15.729,39.964,0 15.714,39.981,0 15.679,40.009,0 15.652,40.043,0 15.631,40.057,0 15.625,40.065,0 15.625,40.078,0 15.611,40.073,0 15.536,40.078,0 15.51,40.07,0 15.493,40.059,0 15.46,40.029,0 15.425,40.004,0 15.405,39.999,0 15.377,40.002,0 15.354,40.012,0 15.315,40.034,0 15.303,40.036,0 15.294,40.032,0 15.284,40.03,0 15.273,40.028,0 15.262,40.029,0 15.262,40.036,0 15.28,40.047,0 15.264,40.074,0 15.234,40.1,0 15.21,40.112,0 15.191,40.119,0 15.128,40.169,0 15.113,40.175,0 15.096,40.173,0 15.066,40.166,0 15.048,40.169,0 15.035,40.175,0 15.015,40.194,0 14.974,40.223,0 14.967,40.224,0 14.959,40.231,0 14.923,40.238,0 14.912,40.241,0 14.907,40.258,0 14.932,40.285,0 14.94,40.307,0 14.933,40.324,0 14.933,40.334,0 14.943,40.338,0 14.954,40.34,0 14.965,40.345,0 14.973,40.352,0 14.98,40.359,0 14.99,40.394,0 14.976,40.431,0 14.889,40.573,0 14.862,40.607,0 14.836,40.632,0 14.81,40.653,0 14.783,40.67,0 14.753,40.676,0 14.72,40.667,0 14.691,40.649,0 14.679,40.646,0 14.626,40.649,0 14.614,40.646,0 14.572,40.617,0 14.545,40.613,0 14.517,40.62,0 14.487,40.632,0 14.472,40.624,0 14.423,40.615,0 14.402,40.602,0 14.356,40.583,0 14.343,40.57,0 14.331,40.584,0 14.329,40.605,0 14.338,40.624,0 14.36,40.632,0 14.38,40.634,0 14.388,40.637,0 14.395,40.65,0 14.403,40.657,0 14.471,40.699,0 14.48,40.711,0 14.475,40.729,0 14.461,40.744,0 14.443,40.755,0 14.426,40.762,0 14.415,40.765,0 14.399,40.767,0 14.391,40.77,0 14.385,40.774,0 14.372,40.787,0 14.367,40.79,0 14.349,40.797,0 14.313,40.828,0 14.295,40.839,0 14.276,40.84,0 14.249,40.837,0 14.224,40.831,0 14.213,40.821,0 14.204,40.801,0 14.182,40.8,0 14.112,40.829,0 14.096,40.834,0 14.083,40.831,0 14.077,40.822,0 14.078,40.81,0 14.082,40.797,0 14.083,40.783,0 14.075,40.788,0 14.041,40.798,0 14.053,40.837,0 14.044,40.875,0 13.966,40.996,0 13.931,41.014,0 13.918,41.023,0 13.915,41.033,0 13.913,41.054,0 13.911,41.064,0 13.885,41.104,0 13.786,41.203,0 
13.722,41.252,0 13.709,41.256,0 13.679,41.25,0 13.664,41.25,0 13.657,41.259,0 13.595,41.253,0 13.564,41.238,0 13.576,41.208,0 13.544,41.206,0 13.535,41.208,0 13.526,41.215,0 13.52,41.225,0 13.515,41.229,0 13.508,41.221,0 13.5,41.221,0 13.481,41.239,0 13.325,41.295,0 13.286,41.295,0 13.205,41.284,0 13.187,41.278,0 13.152,41.26,0 13.115,41.251,0 13.091,41.226,0 13.069,41.221,0 13.045,41.227,0 13.037,41.24,0 13.034,41.257,0 13.024,41.273,0 13.013,41.286,0 12.993,41.315,0 12.98,41.331,0 12.924,41.379,0 12.894,41.399,0 12.863,41.413,0 12.842,41.418,0 12.764,41.421,0 12.749,41.423,0 12.679,41.458,0 12.655,41.465,0 12.643,41.458,0 12.636,41.447,0 12.62,41.459,0 12.546,41.544,0 12.449,41.63,0 12.343,41.702,0 12.328,41.711,0 12.301,41.717,0 12.286,41.727,0 12.277,41.729,0 12.247,41.733,0 12.24,41.736,0 12.224,41.75,0 12.216,41.768,0 12.212,41.787,0 12.212,41.808,0 12.207,41.827,0 12.195,41.847,0 12.171,41.879,0 12.148,41.903,0 12.05,41.96,0 12.039,41.965,0 12.03,41.973,0 12.027,41.986,0 12.021,41.993,0 11.993,41.996,0 11.983,42,0 11.97,42.011,0 11.953,42.022,0 11.935,42.031,0 11.917,42.038,0 11.84,42.036,0 11.828,42.034,0 11.823,42.047,0 11.81,42.066,0 11.794,42.084,0 11.78,42.092,0 11.772,42.106,0 11.751,42.128,0 11.746,42.136,0 11.744,42.152,0 11.737,42.169,0 11.683,42.252,0 11.659,42.279,0 11.54,42.349,0 11.49,42.359,0 11.421,42.386,0 11.397,42.393,0 11.397,42.4,0 11.387,42.404,0 11.377,42.407,0 11.366,42.408,0 11.355,42.407,0 11.363,42.4,0 11.334,42.4,0 11.26,42.421,0 11.246,42.422,0 11.228,42.422,0 11.212,42.419,0 11.205,42.411,0 11.201,42.395,0 11.187,42.379,0 11.185,42.366,0 11.175,42.369,0 11.165,42.369,0 11.158,42.368,0 11.157,42.366,0 11.148,42.371,0 11.135,42.384,0 11.107,42.391,0 11.095,42.402,0 11.087,42.418,0 11.081,42.435,0 11.1,42.443,0 11.123,42.446,0 11.167,42.448,0 11.175,42.458,0 11.184,42.48,0 11.19,42.504,0 11.188,42.521,0 11.167,42.546,0 11.159,42.564,0 11.149,42.563,0 11.138,42.559,0 11.129,42.558,0 11.117,42.572,0 11.108,42.591,0 11.098,42.607,0 
11.081,42.612,0 11.078,42.632,0 11.054,42.647,0 11.006,42.668,0 11.001,42.68,0 10.996,42.696,0 10.99,42.71,0 10.982,42.716,0 10.973,42.72,0 10.944,42.743,0 10.891,42.764,0 10.732,42.804,0 10.756,42.819,0 10.766,42.835,0 10.767,42.854,0 10.766,42.877,0 10.769,42.884,0 10.775,42.888,0 10.778,42.894,0 10.774,42.908,0 10.764,42.918,0 10.751,42.925,0 10.682,42.949,0 10.633,42.958,0 10.584,42.959,0 10.54,42.949,0 10.544,42.939,0 10.547,42.935,0 10.519,42.925,0 10.5,42.94,0 10.478,42.99,0 10.503,43.005,0 10.518,43.024,0 10.54,43.079,0 10.536,43.091,0 10.536,43.112,0 10.54,43.134,0 10.547,43.147,0 10.539,43.164,0 10.535,43.185,0 10.533,43.226,0 10.529,43.246,0 10.517,43.267,0 10.438,43.388,0 10.374,43.453,0 10.36,43.465,0 10.327,43.477,0 10.318,43.492,0 10.295,43.568,0 10.265,43.809,0 10.252,43.846,0 10.211,43.92,0 10.181,43.955,0 10.137,43.978,0 10.106,44.016,0 10.091,44.025,0 10.073,44.029,0 10.036,44.048,0 10.015,44.052,0 9.999,44.058,0 9.989,44.06,0 9.985,44.055,0 9.981,44.05,0 9.973,44.045,0 9.963,44.044,0 9.954,44.048,0 9.938,44.06,0 9.905,44.08,0 9.888,44.093,0 9.877,44.088,0 9.845,44.108,0 9.827,44.107,0 9.834,44.1,0 9.829,44.098,0 9.825,44.095,0 9.82,44.093,0 9.825,44.085,0 9.831,44.079,0 9.839,44.075,0 9.848,44.072,0 9.848,44.066,0 9.842,44.063,0 9.839,44.06,0 9.834,44.052,0 9.847,44.046,0 9.843,44.041,0 9.833,44.042,0 9.827,44.055,0 9.82,44.063,0 9.772,44.079,0 9.722,44.113,0 9.71,44.118,0 9.683,44.136,0 9.673,44.141,0 9.644,44.142,0 9.632,44.144,0 9.622,44.148,0 9.587,44.178,0 9.581,44.179,0 9.573,44.191,0 9.557,44.2,0 9.512,44.215,0 9.5,44.222,0 9.49,44.231,0 9.485,44.244,0 9.473,44.24,0 9.454,44.237,0 9.437,44.239,0 9.43,44.247,0 9.423,44.257,0 9.375,44.272,0 9.368,44.294,0 9.263,44.336,0 9.231,44.353,0 9.222,44.344,0 9.214,44.333,0 9.21,44.321,0 9.211,44.305,0 9.166,44.318,0 9.147,44.328,0 9.149,44.34,0 9.131,44.363,0 9.103,44.374,0 9.002,44.387,0 8.953,44.4,0 8.924,44.411,0 8.915,44.409,0 8.869,44.409,0 8.846,44.413,0 8.838,44.417,0 8.828,44.428,0 
8.763,44.432,0 8.738,44.429,0 8.725,44.424,0 8.696,44.406,0 8.686,44.398,0 8.679,44.394,0 8.671,44.394,0 8.663,44.395,0 8.656,44.394,0 8.594,44.363,0 8.577,44.36,0 8.565,44.357,0 8.541,44.34,0 8.467,44.304,0 8.445,44.284,0 8.45,44.264,0 8.44,44.253,0 8.437,44.247,0 8.436,44.24,0 8.433,44.238,0 8.418,44.23,0 8.412,44.227,0 8.407,44.215,0 8.409,44.204,0 8.409,44.193,0 8.395,44.182,0 8.37,44.173,0 8.314,44.16,0 8.285,44.148,0 8.27,44.138,0 8.257,44.128,0 8.234,44.103,0 8.231,44.096,0 8.232,44.08,0 8.231,44.072,0 8.224,44.057,0 8.217,44.045,0 8.17,44.006,0 8.153,43.983,0 8.168,43.962,0 8.168,43.956,0 8.145,43.952,0 8.116,43.927,0 8.09,43.92,0 8.082,43.915,0 8.076,43.909,0 8.073,43.904,0 8.068,43.896,0 8.056,43.892,0 8.032,43.887,0 7.96,43.853,0 7.786,43.822,0 7.737,43.798,0 7.695,43.791,0 7.573,43.791,0 7.545,43.784,0 7.532,43.784,0 7.524,43.789,0 7.513,43.792,0 7.503,43.792,0 7.483,43.84,0 7.478,43.866,0 7.493,43.886,0 7.537,43.921,0 7.557,43.944,0 7.609,43.976,0 7.631,43.994,0 7.639,44.005,0 7.647,44.027,0 7.653,44.04,0 7.664,44.049,0 7.679,44.057,0 7.69,44.067,0 7.692,44.085,0 7.676,44.109,0 7.654,44.125,0 7.642,44.144,0 7.656,44.176,0 7.625,44.18,0 7.584,44.161,0 7.555,44.159,0 7.381,44.123,0 7.341,44.124,0 7.331,44.125,0 7.322,44.132,0 7.316,44.14,0 7.309,44.147,0 7.296,44.151,0 7.27,44.154,0 7.251,44.16,0 7.145,44.207,0 7.105,44.218,0 7.046,44.24,0 7.033,44.243,0 7.02,44.242,0 7.008,44.239,0 6.996,44.238,0 6.983,44.242,0 6.973,44.249,0 6.969,44.258,0 6.966,44.268,0 6.959,44.277,0 6.95,44.285,0 6.93,44.295,0 6.921,44.302,0 6.916,44.31,0 6.904,44.33,0 6.896,44.34,0 6.874,44.358,0 6.87,44.363,0 6.866,44.372,0 6.866,44.377,0 6.869,44.383,0 6.877,44.414,0 6.884,44.423,0 6.918,44.436,0 6.892,44.452,0 6.861,44.475,0 6.839,44.503,0 6.836,44.534,0 6.846,44.547,0 6.897,44.575,0 6.932,44.618,0 6.946,44.625,0 6.934,44.647,0 6.941,44.667,0 6.96,44.683,0 6.983,44.692,0 7.001,44.692,0 7.037,44.685,0 7.055,44.685,0 7.049,44.698,0 7.019,44.739,0 7.015,44.747,0 7.01,44.772,0 
6.998,44.794,0 6.999,44.795,0 7.004,44.811,0 7.006,44.812,0 7.006,44.816,0 7.007,44.819,0 7.007,44.822,0 7.005,44.828,0 7.001,44.833,0 6.983,44.847,0 6.933,44.862,0 6.915,44.863,0 6.866,44.856,0 6.847,44.859,0 6.778,44.888,0 6.745,44.908,0 6.728,44.929,0 6.73,44.985,0 6.723,45.013,0 6.697,45.027,0 6.662,45.029,0 6.652,45.036,0 6.64,45.05,0 6.637,45.059,0 6.638,45.067,0 6.637,45.074,0 6.62,45.084,0 6.603,45.103,0 6.615,45.115,0 6.633,45.126,0 6.667,45.14,0 6.676,45.141,0 6.694,45.14,0 6.702,45.141,0 6.711,45.145,0 6.729,45.155,0 6.736,45.157,0 6.771,45.153,0 6.808,45.139,0 6.844,45.13,0 6.877,45.141,0 6.879,45.147,0 6.873,45.152,0 6.868,45.157,0 6.873,45.166,0 6.881,45.168,0 6.905,45.169,0 6.914,45.17,0 6.928,45.18,0 6.946,45.201,0 6.959,45.21,0 6.994,45.221,0 7.03,45.228,0 7.038,45.226,0 7.05,45.215,0 7.055,45.214,0 7.062,45.219,0 7.081,45.243,0 7.108,45.259,0 7.108,45.275,0 7.098,45.295,0 7.093,45.324,0 7.098,45.33,0 7.13,45.357,0 7.151,45.383,0 7.16,45.398,0 7.161,45.411,0 7.153,45.415,0 7.11,45.428,0 7.097,45.435,0 7.089,45.447,0 7.082,45.459,0 7.072,45.47,0 7.028,45.493,0 6.983,45.511,0 6.975,45.526,0 6.97,45.567,0 6.966,45.574,0 6.955,45.586,0 6.953,45.594,0 6.956,45.603,0 6.967,45.62,0 6.969,45.626,0 6.963,45.641,0 6.951,45.647,0 6.919,45.653,0 6.905,45.66,0 6.883,45.676,0 6.869,45.679,0 6.843,45.683,0 6.816,45.697,0 6.796,45.718,0 6.785,45.76,0 6.782,45.777,0 6.783,45.795,0 6.788,45.812,0 6.801,45.826,0 6.816,45.833,0 6.846,45.836,0 6.846,45.838,0 6.849,45.842,0 6.853,45.847,0 6.858,45.849,0 6.862,45.849,0 6.87,45.845,0 6.873,45.845,0 6.88,45.846,0 6.905,45.845,0 6.926,45.85,0 6.949,45.858,0 6.969,45.87,0 6.983,45.886,0 6.989,45.899,0 6.997,45.911,0 7.008,45.921,0 7.022,45.925,0 7.067,45.89,0 7.09,45.881,0 7.121,45.876,0 7.154,45.877,0 7.184,45.88,0 7.245,45.898,0 7.274,45.91,0 7.287,45.913,0 7.362,45.908,0 7.394,45.916,0 7.453,45.946,0 7.483,45.955,0 7.504,45.957,0 7.515,45.967,0 7.524,45.978,0 7.541,45.984,0 7.643,45.966,0 7.659,45.96,0 7.674,45.95,0 
7.693,45.931,0 7.694,45.929,0 7.706,45.926,0 7.715,45.927,0 7.722,45.93,0 7.732,45.93,0 7.78,45.918,0 7.808,45.918,0 7.825,45.915,0 7.831,45.914,0 7.844,45.919,0 7.846,45.923,0 7.845,45.928,0 7.848,45.938,0 7.872,45.969,0 7.898,45.982,0 7.969,45.993,0 7.979,45.995,0 7.986,45.999,0 7.998,46.011,0 7.999,46.013,0 8.009,46.028,0 8.011,46.03,0 8.016,46.058,0 8.016,46.069,0 8.018,46.081,0 8.025,46.091,0 8.035,46.097,0 8.056,46.098,0 8.067,46.101,0 8.111,46.127,0 8.132,46.159,0 8.13,46.196,0 8.1,46.236,0 8.077,46.25,0 8.073,46.254,0 8.077,46.262,0 8.087,46.272,0 8.107,46.286,0 8.128,46.292,0 8.172,46.299,0 8.193,46.309,0 8.242,46.354,0 8.27,46.364,0 8.282,46.37,0 8.291,46.378,0 8.297,46.388,0 8.297,46.398,0 8.29,46.401,0 8.287,46.405,0 8.295,46.418,0 8.316,46.434,0 8.343,46.444,0 8.399,46.452,0 8.428,46.449,0 8.442,46.435,0 8.446,46.412,0 8.446,46.382,0 8.443,46.353,0 8.427,46.302,0 8.423,46.276,0 8.427,46.251,0 8.438,46.235,0 8.457,46.225,0 8.483,46.218,0 8.51,46.208,0 8.539,46.188,0 8.602,46.123,0 8.612,46.119,0 8.631,46.115,0 8.677,46.096,0 8.695,46.095,0 8.702,46.098,0 8.718,46.108,0 8.724,46.11,0 8.732,46.107,0 8.739,46.098,0 8.747,46.094,0 8.763,46.093,0 8.794,46.093,0 8.809,46.09,0 8.834,46.066,0 8.82,46.043,0 8.791,46.019,0 8.773,45.991,0 8.77,45.986,0 8.768,45.983,0 8.785,45.982,0 8.8,45.979,0 8.858,45.957,0 8.864,45.953,0 8.871,45.947,0 8.881,45.931,0 8.898,45.91,0 8.907,45.896,0 8.912,45.883,0 8.914,45.866,0 8.91,45.854,0 8.904,45.842,0 8.9,45.826,0 8.94,45.835,0 8.972,45.825,0 9.002,45.821,0 9.034,45.848,0 9.059,45.882,0 9.063,45.899,0 9.052,45.916,0 9.042,45.92,0 9.021,45.923,0 9.011,45.927,0 9.002,45.936,0 8.993,45.954,0 8.983,45.962,0 8.981,45.964,0 8.98,45.967,0 8.981,45.969,0 8.983,45.972,0 9.016,45.993,0 8.998,46.028,0 9.002,46.039,0 9.028,46.053,0 9.05,46.058,0 9.059,46.062,0 9.067,46.071,0 9.07,46.083,0 9.068,46.106,0 9.072,46.119,0 9.091,46.138,0 9.163,46.172,0 9.171,46.183,0 9.176,46.194,0 9.181,46.204,0 9.192,46.21,0 9.204,46.214,0 9.216,46.221,0 
9.225,46.231,0 9.24,46.267,0 9.269,46.309,0 9.275,46.331,0 9.274,46.344,0 9.26,46.38,0 9.26,46.394,0 9.263,46.407,0 9.261,46.417,0 9.248,46.423,0 9.238,46.437,0 9.246,46.461,0 9.263,46.485,0 9.282,46.497,0 9.331,46.502,0 9.351,46.498,0 9.352,46.485,0 9.377,46.469,0 9.385,46.466,0 9.395,46.469,0 9.4,46.475,0 9.404,46.483,0 9.411,46.489,0 9.427,46.497,0 9.435,46.498,0 9.438,46.492,0 9.444,46.396,0 9.442,46.381,0 9.444,46.375,0 9.452,46.37,0 9.474,46.362,0 9.483,46.357,0 9.503,46.321,0 9.515,46.309,0 9.536,46.299,0 9.56,46.293,0 9.674,46.292,0 9.693,46.297,0 9.708,46.312,0 9.709,46.32,0 9.707,46.331,0 9.709,46.342,0 9.72,46.351,0 9.731,46.351,0 9.755,46.341,0 9.768,46.339,0 9.789,46.343,0 9.855,46.367,0 9.899,46.372,0 9.918,46.371,0 9.939,46.367,0 9.964,46.356,0 9.971,46.34,0 9.971,46.32,0 9.978,46.298,0 9.992,46.284,0 10.032,46.26,0 10.042,46.243,0 10.043,46.22,0 10.076,46.22,0 10.118,46.231,0 10.146,46.243,0 10.159,46.262,0 10.146,46.28,0 10.105,46.309,0 10.096,46.321,0 10.092,46.329,0 10.092,46.338,0 10.097,46.352,0 10.105,46.361,0 10.126,46.374,0 10.133,46.381,0 10.141,46.403,0 10.133,46.414,0 10.116,46.419,0 10.071,46.425,0 10.042,46.433,0 10.026,46.446,0 10.044,46.467,0 10.035,46.471,0 10.03,46.477,0 10.028,46.484,0 10.027,46.493,0 10.031,46.504,0 10.031,46.526,0 10.033,46.533,0 10.041,46.542,0 10.063,46.557,0 10.071,46.564,0 10.083,46.597,0 10.088,46.604,0 10.097,46.608,0 10.192,46.627,0 10.218,46.627,0 10.234,46.618,0 10.236,46.607,0 10.23,46.586,0 10.235,46.575,0 10.276,46.566,0 10.284,46.561,0 10.289,46.556,0 10.295,46.551,0 10.307,46.547,0 10.319,46.546,0 10.354,46.548,0 10.426,46.535,0 10.444,46.538,0 10.458,46.554,0 10.466,46.578,0 10.467,46.604,0 10.459,46.624,0 10.438,46.636,0 10.396,46.639,0 10.378,46.653,0 10.369,46.672,0 10.374,46.682,0 10.385,46.689,0 10.394,46.701,0 10.397,46.715,0 10.396,46.726,0 10.4,46.736,0 10.417,46.743,0 10.429,46.756,0 10.426,46.769,0 10.419,46.784,0 10.417,46.799,0 10.439,46.817,0 10.445,46.823,0 10.449,46.832,0 
10.454,46.864,0 10.486,46.846,0 10.528,46.843,0 10.629,46.862,0 10.647,46.864,0 10.662,46.861,0 10.739,46.83,0 10.749,46.819,0 10.744,46.813,0 10.722,46.8,0 10.717,46.795,0 10.723,46.786,0 10.734,46.786,0 10.755,46.791,0 10.766,46.788,0 10.795,46.777,0 10.805,46.777,0 10.824,46.78,0 10.834,46.78,0 10.843,46.777,0 10.86,46.767,0 10.87,46.764,0 10.88,46.765,0 10.914,46.772,0 10.931,46.774,0 10.966,46.772,0 10.983,46.768,0 10.997,46.769,0 11.011,46.779,0 11.033,46.806,0 11.037,46.808,0 11.049,46.812,0 11.053,46.815,0 11.055,46.82,0 11.053,46.83,0 11.054,46.834,0 11.073,46.865,0 11.084,46.9,0 11.092,46.912,0 11.157,46.957,0 11.174,46.964,0 11.244,46.979,0 11.314,46.987,0 11.349,46.982,0 11.381,46.972,0 11.411,46.97,0 11.445,46.993,0 11.445,46.993,0 11.453,47.001,0 11.462,47.006,0 11.472,47.007,0 11.489,47.004,0 11.496,47.002,0 11.502,46.998,0 11.507,46.993,0 11.515,46.989,0 11.524,46.988,0 11.534,46.99,0 11.543,46.993,0 11.543,46.993,0 11.544,46.993,0 11.544,46.993,0 11.573,46.999,0 11.596,47,0 11.648,46.993,0 11.648,46.993,0 11.65,46.993,0 11.657,46.993,0 11.665,46.993,0 11.684,46.992,0 11.716,46.975,0 11.735,46.971,0 11.746,46.972,0 11.766,46.983,0 11.777,46.988,0 11.823,46.993,0 11.857,47.012,0 11.9,47.028,0 11.944,47.038,0 12.015,47.04,0 12.116,47.077,0 12.181,47.085,0 12.204,47.08,0 12.204,47.053,0 12.182,47.034,0 12.122,47.011,0 12.111,46.993,0 12.118,46.983,0 12.122,46.972,0 </coordinates></LinearRing></outerBoundaryIs><innerBoundaryIs><LinearRing><coordinates>12.4,43.903,0 12.429,43.892,0 12.461,43.895,0 12.479,43.917,0 12.478,43.92,0 12.478,43.923,0 12.48,43.926,0 12.483,43.929,0 12.49,43.939,0 12.492,43.956,0 12.489,43.973,0 12.482,43.983,0 12.453,43.979,0 12.421,43.967,0 12.396,43.948,0 12.386,43.925,0 12.4,43.903,0 </coordinates></LinearRing></innerBoundaryIs><innerBoundaryIs><LinearRing><coordinates>12.444,41.902,0 12.449,41.9,0 12.455,41.9,0 12.458,41.902,0 12.455,41.908,0 12.447,41.907,0 12.444,41.902,0 
</coordinates></LinearRing></innerBoundaryIs></Polygon></MultiGeometry>
</Placemark> </kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertTrue(
isinstance(list(k.features())[0].geometry, MultiPolygon))
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
    def test_atom(self):
        # Placeholder: atom attributes (author/link) on Features are not
        # exercised yet.  TODO(review): add real assertions or skip
        # explicitly so the gap is visible in test reports.
        pass
def test_schema(self):
doc = """<Schema name="TrailHeadType" id="TrailHeadTypeId">
<SimpleField type="string" name="TrailHeadName">
<displayName><![CDATA[<b>Trail Head Name</b>]]></displayName>
</SimpleField>
<SimpleField type="double" name="TrailLength">
<displayName><![CDATA[<i>The length in miles</i>]]></displayName>
</SimpleField>
<SimpleField type="int" name="ElevationGain">
<displayName><![CDATA[<i>change in altitude</i>]]></displayName>
</SimpleField>
</Schema> """
s = kml.Schema(ns='', id='default')
s.from_string(doc)
self.assertEqual(len(list(s.simple_fields)), 3)
self.assertEqual(list(s.simple_fields)[0]['type'], 'string')
self.assertEqual(list(s.simple_fields)[1]['type'], 'double')
self.assertEqual(list(s.simple_fields)[2]['type'], 'int')
self.assertEqual(list(s.simple_fields)[0]['name'], 'TrailHeadName')
self.assertEqual(list(s.simple_fields)[1]['name'], 'TrailLength')
self.assertEqual(list(s.simple_fields)[2]['name'], 'ElevationGain')
self.assertEqual(list(s.simple_fields)[0][
'displayName'
], '<b>Trail Head Name</b>')
self.assertEqual(list(s.simple_fields)[1][
'displayName'
], '<i>The length in miles</i>')
self.assertEqual(list(s.simple_fields)[2][
'displayName'
], '<i>change in altitude</i>')
s1 = kml.Schema(ns='', id='default')
s1.from_string(s.to_string())
self.assertEqual(len(list(s1.simple_fields)), 3)
self.assertEqual(list(s1.simple_fields)[0]['type'], 'string')
self.assertEqual(list(s1.simple_fields)[1]['name'], 'TrailLength')
self.assertEqual(list(s1.simple_fields)[2][
'displayName'
], '<i>change in altitude</i>')
self.assertEqual(s.to_string(), s1.to_string())
doc1 = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
%s
</Document>
</kml>""" % doc
k = kml.KML()
k.from_string(doc1)
d = list(k.features())[0]
s2 = list(d.schemata())[0]
s.ns = config.NS
self.assertEqual(s.to_string(), s2.to_string())
k1 = kml.KML()
k1.from_string(k.to_string())
self.assertTrue('Schema' in k1.to_string())
self.assertTrue('SimpleField' in k1.to_string())
self.assertEqual(k1.to_string(), k.to_string())
def test_schema_data(self):
doc = """<SchemaData schemaUrl="#TrailHeadTypeId">
<SimpleData name="TrailHeadName">Pi in the sky</SimpleData>
<SimpleData name="TrailLength">3.14159</SimpleData>
<SimpleData name="ElevationGain">10</SimpleData>
</SchemaData>"""
sd = kml.SchemaData(ns='', schema_url='#default')
sd.from_string(doc)
self.assertEqual(sd.schema_url, '#TrailHeadTypeId')
self.assertEqual(
sd.data[0], {'name': 'TrailHeadName',
'value': 'Pi in the sky'})
self.assertEqual(
sd.data[1], {'name': 'TrailLength',
'value': '3.14159'})
self.assertEqual(sd.data[2], {'name': 'ElevationGain', 'value': '10'})
sd1 = kml.SchemaData(ns='', schema_url='#default')
sd1.from_string(sd.to_string())
self.assertEqual(sd1.schema_url, '#TrailHeadTypeId')
self.assertEqual(sd.to_string(), sd1.to_string())
def test_snippet(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<Snippet maxLines="2" >Short Desc</Snippet>
</Placemark> </kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(list(k.features())[0].snippet['text'], 'Short Desc')
self.assertEqual(list(k.features())[0].snippet['maxLines'], 2)
list(k.features())[0]._snippet['maxLines'] = 3
self.assertEqual(list(k.features())[0].snippet['maxLines'], 3)
self.assertTrue('maxLines="3"' in k.to_string())
list(k.features())[0].snippet = {'text': 'Annother Snippet'}
self.assertFalse('maxLines' in k.to_string())
self.assertTrue('Annother Snippet' in k.to_string())
list(k.features())[0].snippet = 'Diffrent Snippet'
self.assertFalse('maxLines' in k.to_string())
self.assertTrue('Diffrent Snippet' in k.to_string())
def test_from_wrong_string(self):
doc = kml.KML()
self.assertRaises(TypeError, doc.from_string, '<xml></xml>')
def test_address(self):
doc = kml.Document()
doc.from_string("""
<kml:Document xmlns:kml="http://www.opengis.net/kml/2.2" id="pm-id">
<kml:name>pm-name</kml:name>
<kml:description>pm-description</kml:description>
<kml:visibility>1</kml:visibility>
<kml:address>1600 Amphitheatre Parkway, Mountain View, CA 94043, USA</kml:address>
</kml:Document>
""")
doc2 = kml.Document()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_phone_number(self):
doc = kml.Document()
doc.from_string("""
<kml:Document xmlns:kml="http://www.opengis.net/kml/2.2" id="pm-id">
<kml:name>pm-name</kml:name>
<kml:description>pm-description</kml:description>
<kml:visibility>1</kml:visibility>
<kml:phoneNumber>+1 234 567 8901</kml:phoneNumber>
</kml:Document>
""")
doc2 = kml.Document()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_groundoverlay(self):
doc = kml.KML()
doc.from_string(
"""
<kml xmlns="http://www.opengis.net/kml/2.2">
<Folder>
<name>Ground Overlays</name>
<description>Examples of ground overlays</description>
<GroundOverlay>
<name>Large-scale overlay on terrain</name>
<description>Overlay shows Mount Etna erupting
on July 13th, 2001.</description>
<Icon>
<href>http://developers.google.com/kml/documentation/images/etna.jpg</href>
</Icon>
<LatLonBox>
<north>37.91904192681665</north>
<south>37.46543388598137</south>
<east>15.35832653742206</east>
<west>14.60128369746704</west>
<rotation>-0.1556640799496235</rotation>
</LatLonBox>
</GroundOverlay>
</Folder>
</kml>
""")
doc2 = kml.KML()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_linarring_placemark(self):
doc = kml.KML()
doc.from_string( """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<LinearRing>
<coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,0.0</coordinates>
</LinearRing>
</Placemark> </kml>""")
doc2 = kml.KML()
doc2.from_string(doc.to_string())
self.assertTrue(
isinstance(list(doc.features())[0].geometry, LinearRing))
self.assertEqual(doc.to_string(), doc2.to_string())
class StyleTestCase(unittest.TestCase):
    """Programmatic construction of styles and style URLs."""

    def test_styleurl(self):
        """styleUrl accepts both a plain string and a StyleUrl object and
        round-trips through serialisation."""
        f = kml.Document()
        f.styleUrl = '#somestyle'
        self.assertEqual(f.styleUrl, '#somestyle')
        # The string is wrapped in a StyleUrl object internally.
        self.assertTrue(isinstance(f._styleUrl, styles.StyleUrl))
        s = styles.StyleUrl(config.NS, url='#otherstyle')
        f.styleUrl = s
        self.assertTrue(isinstance(f._styleUrl, styles.StyleUrl))
        self.assertEqual(f.styleUrl, '#otherstyle')
        f2 = kml.Document()
        f2.from_string(f.to_string())
        self.assertEqual(f.to_string(), f2.to_string())

    def test_style(self):
        """A Style wrapping a LineStyle round-trips; prettyprinted output
        parses back to the same document."""
        lstyle = styles.LineStyle(color='red', width=2.0)
        style = styles.Style(styles=[lstyle])
        f = kml.Document(styles=[style])
        f2 = kml.Document()
        f2.from_string(f.to_string(prettyprint=True))
        self.assertEqual(f.to_string(), f2.to_string())

    def test_polystyle_fill(self):
        # Was a stub that asserted nothing.  A default-constructed PolyStyle
        # serialises with <fill>1</fill> (see the expected document in
        # StyleUsageTestCase), so the default attribute value is 1.
        style = styles.PolyStyle()
        self.assertEqual(style.fill, 1)

    def test_polystyle_outline(self):
        # Was a stub that asserted nothing; the default outline is likewise 1.
        style = styles.PolyStyle()
        self.assertEqual(style.outline, 1)
class StyleUsageTestCase(unittest.TestCase):
    """Styles attached to a container serialise identically whether passed
    to the constructor, appended afterwards, or parsed from XML."""

    def test_create_document_style(self):
        """Document + PolyStyle: all three construction paths agree."""
        style = styles.Style(styles=[styles.PolyStyle(color='7f000000')])
        via_ctor = kml.Document(styles=[style])
        via_append = kml.Document()
        via_append.append_style(style)
        expected = """
<kml:Document xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:visibility>1</kml:visibility>
<kml:Style>
<kml:PolyStyle>
<kml:color>7f000000</kml:color>
<kml:fill>1</kml:fill>
<kml:outline>1</kml:outline>
</kml:PolyStyle>
</kml:Style>
</kml:Document>
"""
        parsed = kml.Document()
        parsed.from_string(expected)
        # Pairwise equality of all three serialisations.
        self.assertEqual(via_ctor.to_string(), via_append.to_string())
        self.assertEqual(via_append.to_string(), parsed.to_string())
        self.assertEqual(via_ctor.to_string(), parsed.to_string())

    def test_create_placemark_style(self):
        """Placemark + PolyStyle: all three construction paths agree."""
        style = styles.Style(styles=[styles.PolyStyle(color='7f000000')])
        via_ctor = kml.Placemark(styles=[style])
        via_append = kml.Placemark()
        via_append.append_style(style)
        expected = """
<kml:Placemark xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:visibility>1</kml:visibility>
<kml:Style>
<kml:PolyStyle>
<kml:color>7f000000</kml:color>
<kml:fill>1</kml:fill>
<kml:outline>1</kml:outline>
</kml:PolyStyle>
</kml:Style>
</kml:Placemark>
"""
        parsed = kml.Placemark()
        parsed.from_string(expected)
        self.assertEqual(via_ctor.to_string(), via_append.to_string())
        self.assertEqual(via_append.to_string(), parsed.to_string())
        self.assertEqual(via_ctor.to_string(), parsed.to_string())
class StyleFromStringTestCase(unittest.TestCase):
    """Parse KML style elements from XML fixtures and round-trip them.

    Every test follows the same pattern: parse an XML string, assert on
    the resulting object graph, then re-parse the serialised output and
    check it serialises identically (a stable round-trip).
    """
    def test_styleurl(self):
        """A Document-level <styleUrl> is exposed as Feature.styleUrl."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>Document.kml</name>
          <open>1</open>
          <styleUrl>#default</styleUrl>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(list(k.features())[0].styleUrl, '#default')
        # Round-trip: re-parsing the serialised form must be stable.
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_balloonstyle(self):
        """<BalloonStyle> fields (bgColor, textColor, text, displayMode)
        are parsed, including CDATA text with substitution entities."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>Document.kml</name>
          <Style id="exampleBalloonStyle">
            <BalloonStyle>
              <!-- a background color for the balloon -->
              <bgColor>ffffffbb</bgColor>
              <!-- styling of the balloon text -->
              <textColor>ff000000</textColor>
              <text><![CDATA[
              <b><font color="#CC0000" size="+3">$[name]</font></b>
              <br/><br/>
              <font face="Courier">$[description]</font>
              <br/><br/>
              Extra text that will appear in the description balloon
              <br/><br/>
              <!-- insert the to/from hyperlinks -->
              $[geDirections]
              ]]></text>
              <!-- kml:displayModeEnum -->
              <displayMode>default</displayMode>
            </BalloonStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        # The substyle nested inside the document's first Style element.
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.BalloonStyle))
        self.assertEqual(style.bgColor, 'ffffffbb')
        self.assertEqual(style.textColor, 'ff000000')
        self.assertEqual(style.displayMode, 'default')
        # The CDATA payload keeps the KML entity placeholders verbatim.
        self.assertTrue('$[geDirections]' in style.text)
        self.assertTrue('$[description]' in style.text)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k2.to_string(), k.to_string())
    def test_balloonstyle_old_color(self):
        """The legacy <color> element is accepted as BalloonStyle.bgColor."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>Document.kml</name>
          <Style id="exampleBalloonStyle">
            <BalloonStyle>
              <!-- a background color for the balloon -->
              <color>ffffffbb</color>
            </BalloonStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.BalloonStyle))
        self.assertEqual(style.bgColor, 'ffffffbb')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k2.to_string(), k.to_string())
    def test_labelstyle(self):
        """<LabelStyle> parses its color; colorMode defaults to None."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
            <name>Document.kml</name>
            <open>1</open>
            <Style id="exampleStyleDocument">
              <LabelStyle>
                <color>ff0000cc</color>
              </LabelStyle>
            </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.LabelStyle))
        self.assertEqual(style.color, 'ff0000cc')
        self.assertEqual(style.colorMode, None)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_iconstyle(self):
        """<IconStyle> parses color, colorMode, scale, heading and the
        nested <Icon><href>."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
           <Style id="randomColorIcon">
              <IconStyle>
                 <color>ff00ff00</color>
                 <colorMode>random</colorMode>
                 <scale>1.1</scale>
                 <heading>0</heading>
                 <Icon>
                    <href>http://maps.google.com/icon21.png</href>
                 </Icon>
              </IconStyle>
           </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list((k.features()))), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.IconStyle))
        self.assertEqual(style.color, 'ff00ff00')
        # Numeric fields are converted from their string representation.
        self.assertEqual(style.scale, 1.1)
        self.assertEqual(style.colorMode, 'random')
        self.assertEqual(style.heading, 0.0)
        self.assertEqual(style.icon_href, 'http://maps.google.com/icon21.png')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_linestyle(self):
        """<LineStyle> parses color and integer width."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>LineStyle.kml</name>
          <open>1</open>
          <Style id="linestyleExample">
            <LineStyle>
              <color>7f0000ff</color>
              <width>4</width>
            </LineStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.LineStyle))
        self.assertEqual(style.color, '7f0000ff')
        self.assertEqual(style.width, 4)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_polystyle(self):
        """<PolyStyle> parses color and colorMode."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>PolygonStyle.kml</name>
          <open>1</open>
          <Style id="examplePolyStyle">
            <PolyStyle>
              <color>ff0000cc</color>
              <colorMode>random</colorMode>
            </PolyStyle>
          </Style>
        </Document>
        </kml>"""
        # XXX fill and outline
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.color, 'ff0000cc')
        self.assertEqual(style.colorMode, 'random')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_polystyle_float_fill(self):
        """A float-valued <fill> ("0.0") is coerced to the int 0."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>PolygonStyle.kml</name>
          <open>1</open>
          <Style id="examplePolyStyle">
            <PolyStyle>
              <fill>0.0</fill>
            </PolyStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.fill, 0)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_polystyle_float_outline(self):
        """A float-valued <outline> ("0.0") is coerced to the int 0."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>PolygonStyle.kml</name>
          <open>1</open>
          <Style id="examplePolyStyle">
            <PolyStyle>
              <outline>0.0</outline>
            </PolyStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.outline, 0)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_styles(self):
        """A <Style> with four substyles yields four parsed substyles."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <!-- Begin Style Definitions -->
          <Style id="myDefaultStyles">
            <IconStyle>
              <color>a1ff00ff</color>
              <scale>1.399999976158142</scale>
              <Icon>
                <href>http://myserver.com/icon.jpg</href>
              </Icon>
            </IconStyle>
            <LabelStyle>
              <color>7fffaaff</color>
              <scale>1.5</scale>
            </LabelStyle>
            <LineStyle>
              <color>ff0000ff</color>
              <width>15</width>
            </LineStyle>
            <PolyStyle>
              <color>7f7faaaa</color>
              <colorMode>random</colorMode>
            </PolyStyle>
          </Style>
          <!-- End Style Definitions -->
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())
        self.assertEqual(len(style), 4)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_stylemapurl(self):
        """A <StyleMap> of two styleUrl pairs exposes normal/highlight
        as StyleUrl objects."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <StyleMap id="styleMapExample">
            <Pair>
              <key>normal</key>
              <styleUrl>#normalState</styleUrl>
            </Pair>
            <Pair>
              <key>highlight</key>
              <styleUrl>#highlightState</styleUrl>
            </Pair>
          </StyleMap>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(
                list(list(k.features())[0].styles())[0], styles.StyleMap))
        sm = list(list(list(k.features())[0].styles()))[0]
        self.assertTrue(isinstance(sm.normal, styles.StyleUrl))
        self.assertEqual(sm.normal.url, '#normalState')
        self.assertTrue(isinstance(sm.highlight, styles.StyleUrl))
        self.assertEqual(sm.highlight.url, '#highlightState')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_stylemapstyles(self):
        """A <StyleMap> with inline <Style> pairs exposes normal/highlight
        as full Style objects with their substyles."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <StyleMap id="styleMapExample">
            <Pair>
              <key>normal</key>
              <Style id="exampleStyleDocument">
                <LabelStyle>
                  <color>ff0000cc</color>
                </LabelStyle>
              </Style>
            </Pair>
            <Pair>
              <key>highlight</key>
              <Style id="examplePolyStyle">
                <PolyStyle>
                  <color>ff0000cc</color>
                  <colorMode>random</colorMode>
                </PolyStyle>
                <LineStyle>
                  <color>ff0000ff</color>
                  <width>15</width>
                </LineStyle>
              </Style>
            </Pair>
          </StyleMap>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(
                list(list(k.features())[0].styles())[0], styles.StyleMap))
        sm = list(list(list(k.features())[0].styles()))[0]
        self.assertTrue(isinstance(sm.normal, styles.Style))
        self.assertEqual(len(list(sm.normal.styles())), 1)
        self.assertTrue(
            isinstance(list(sm.normal.styles())[0], styles.LabelStyle))
        self.assertTrue(isinstance(sm.highlight, styles.Style))
        # NOTE(review): the next assertion duplicates the previous one.
        self.assertTrue(isinstance(sm.highlight, styles.Style))
        self.assertEqual(len(list(sm.highlight.styles())), 2)
        self.assertTrue(
            isinstance(list(sm.highlight.styles())[0], styles.LineStyle))
        self.assertTrue(
            isinstance(list(sm.highlight.styles())[1], styles.PolyStyle))
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())
    def test_get_style_by_url(self):
        """Document.get_style_by_url resolves the fragment part of a URL
        to the matching Style or StyleMap, ignoring host/path."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
        <Document>
          <name>Document.kml</name>
          <open>1</open>
          <Style id="exampleStyleDocument">
            <LabelStyle>
              <color>ff0000cc</color>
            </LabelStyle>
          </Style>
          <StyleMap id="styleMapExample">
            <Pair>
              <key>normal</key>
              <styleUrl>#normalState</styleUrl>
            </Pair>
            <Pair>
              <key>highlight</key>
              <styleUrl>#highlightState</styleUrl>
            </Pair>
          </StyleMap>
          <Style id="linestyleExample">
            <LineStyle>
              <color>7f0000ff</color>
              <width>4</width>
            </LineStyle>
          </Style>
        </Document>
        </kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        document = list(k.features())[0]
        # An absolute URL with a fragment resolves by the fragment alone.
        style = document.get_style_by_url(
            'http://localhost:8080/somepath#exampleStyleDocument')
        self.assertTrue(isinstance(list(style.styles())[0], styles.LabelStyle))
        style = document.get_style_by_url('somepath#linestyleExample')
        self.assertTrue(isinstance(list(style.styles())[0], styles.LineStyle))
        style = document.get_style_by_url('#styleMapExample')
        self.assertTrue(isinstance(style, styles.StyleMap))
class DateTimeTestCase(unittest.TestCase):
    """TimeStamp/TimeSpan construction, resolution handling, parsing, and
    their interaction with Feature-level time properties."""
    def test_timestamp(self):
        """A datetime is stored with 'dateTime' resolution, a date with
        'date' resolution, and both serialise inside <TimeStamp><when>."""
        now = datetime.datetime.now()
        ts = kml.TimeStamp(timestamp=now)
        # timestamp is a [value, resolution] pair.
        self.assertEqual(ts.timestamp, [now, 'dateTime'])
        self.assertTrue('TimeStamp>' in str(ts.to_string()))
        self.assertTrue('when>' in str(ts.to_string()))
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        y2k = datetime.date(2000, 1, 1)
        ts = kml.TimeStamp(timestamp=y2k)
        self.assertEqual(ts.timestamp, [y2k, 'date'])
        self.assertTrue('2000-01-01' in str(ts.to_string()))
    def test_timestamp_resolution(self):
        """Mutating the resolution element truncates the serialised value;
        a None timestamp makes to_string raise TypeError."""
        now = datetime.datetime.now()
        ts = kml.TimeStamp(timestamp=now)
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        ts.timestamp[1] = 'date'
        self.assertTrue(now.date().isoformat() in str(ts.to_string()))
        self.assertFalse(now.isoformat() in str(ts.to_string()))
        year = str(now.year)
        ym = now.strftime('%Y-%m')
        ts.timestamp[1] = 'gYearMonth'
        self.assertTrue(ym in str(ts.to_string()))
        self.assertFalse(now.date().isoformat() in str(ts.to_string()))
        ts.timestamp[1] = 'gYear'
        self.assertTrue(year in str(ts.to_string()))
        self.assertFalse(ym in str(ts.to_string()))
        ts.timestamp = None
        self.assertRaises(TypeError, ts.to_string)
    def test_timespan(self):
        """TimeSpan serialises begin/end; clearing end is allowed but a
        TimeSpan with neither boundary raises ValueError."""
        now = datetime.datetime.now()
        y2k = datetime.datetime(2000, 1, 1)
        ts = kml.TimeSpan(end=now, begin=y2k)
        self.assertEqual(ts.end, [now, 'dateTime'])
        self.assertEqual(ts.begin, [y2k, 'dateTime'])
        self.assertTrue('TimeSpan>' in str(ts.to_string()))
        self.assertTrue('begin>' in str(ts.to_string()))
        self.assertTrue('end>' in str(ts.to_string()))
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        self.assertTrue(y2k.isoformat() in str(ts.to_string()))
        ts.end = None
        self.assertFalse(now.isoformat() in str(ts.to_string()))
        self.assertTrue(y2k.isoformat() in str(ts.to_string()))
        ts.begin = None
        self.assertRaises(ValueError, ts.to_string)
    def test_feature_timestamp(self):
        """Feature.timeStamp accepts datetime, date, or None and drives
        the presence of <TimeStamp><when> in the output."""
        now = datetime.datetime.now()
        f = kml.Document()
        f.timeStamp = now
        self.assertEqual(f.timeStamp, now)
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('TimeStamp>' in str(f.to_string()))
        self.assertTrue('when>' in str(f.to_string()))
        f.timeStamp = now.date()
        self.assertTrue(now.date().isoformat() in str(f.to_string()))
        self.assertFalse(now.isoformat() in str(f.to_string()))
        f.timeStamp = None
        self.assertFalse('TimeStamp>' in str(f.to_string()))
    def test_feature_timespan(self):
        """Feature.begin/end drive <TimeSpan>; clearing both removes it."""
        now = datetime.datetime.now()
        y2k = datetime.date(2000, 1, 1)
        f = kml.Document()
        f.begin = y2k
        f.end = now
        self.assertEqual(f.begin, y2k)
        self.assertEqual(f.end, now)
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        f.end = None
        self.assertFalse(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertFalse('end>' in str(f.to_string()))
        f.begin = None
        self.assertFalse('TimeSpan>' in str(f.to_string()))
    def test_feature_timespan_stamp(self):
        """Setting a timestamp replaces a timespan and vice versa; having
        both at once is a serialisation error."""
        now = datetime.datetime.now()
        y2k = datetime.date(2000, 1, 1)
        f = kml.Document()
        f.begin = y2k
        f.end = now
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        self.assertFalse('TimeStamp>' in str(f.to_string()))
        self.assertFalse('when>' in str(f.to_string()))
        # when we set a timestamp an existing timespan will be deleted
        f.timeStamp = now
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('TimeStamp>' in str(f.to_string()))
        self.assertTrue('when>' in str(f.to_string()))
        self.assertFalse('2000-01-01' in str(f.to_string()))
        self.assertFalse('TimeSpan>' in str(f.to_string()))
        self.assertFalse('begin>' in str(f.to_string()))
        self.assertFalse('end>' in str(f.to_string()))
        # when we set a timespan an existing timestamp will be deleted
        f.end = y2k
        self.assertFalse(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertFalse('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        self.assertFalse('TimeStamp>' in str(f.to_string()))
        self.assertFalse('when>' in str(f.to_string()))
        # We manipulate our Feature so it has timespan and stamp
        ts = kml.TimeStamp(timestamp=now)
        f._time_stamp = ts
        # this raises an exception as only either timespan or timestamp
        # are allowed not both
        self.assertRaises(ValueError, f.to_string)
    def test_read_timestamp(self):
        """Parsing <when> infers the resolution (gYear, gYearMonth, date,
        dateTime) from the string format, including timezone offsets."""
        ts = kml.TimeStamp(ns='')
        doc = """
        <TimeStamp>
          <when>1997</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'gYear')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 1, 1, 0, 0))
        doc = """
        <TimeStamp>
          <when>1997-07</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'gYearMonth')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 7, 1, 0, 0))
        doc = """
        <TimeStamp>
          <when>199808</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        # A compact YYYYMM string is also accepted as gYearMonth.
        self.assertEqual(ts.timestamp[1], 'gYearMonth')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1998, 8, 1, 0, 0))
        doc = """
        <TimeStamp>
          <when>1997-07-16</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'date')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 7, 16, 0, 0))
        # dateTime (YYYY-MM-DDThh:mm:ssZ)
        # Here, T is the separator between the calendar and the hourly notation
        # of time, and Z indicates UTC. (Seconds are required.)
        doc = """
        <TimeStamp>
          <when>1997-07-16T07:30:15Z</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'dateTime')
        self.assertEqual(ts.timestamp[0], datetime.datetime(
            1997, 7, 16, 7, 30, 15,
            tzinfo=tzutc()))
        doc = """
        <TimeStamp>
          <when>1997-07-16T10:30:15+03:00</when>
        </TimeStamp>
        """
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'dateTime')
        self.assertEqual(ts.timestamp[0], datetime.datetime(
            1997, 7, 16, 10, 30, 15,
            tzinfo=tzoffset(None, 10800)))
    def test_read_timespan(self):
        """Parsing a TimeSpan keeps distinct resolutions for begin/end."""
        ts = kml.TimeSpan(ns='')
        doc = """
        <TimeSpan>
            <begin>1876-08-01</begin>
            <end>1997-07-16T07:30:15Z</end>
        </TimeSpan>
        """
        ts.from_string(doc)
        self.assertEqual(ts.begin[1], 'date')
        self.assertEqual(ts.begin[0], datetime.datetime(1876, 8, 1, 0, 0))
        self.assertEqual(ts.end[1], 'dateTime')
        self.assertEqual(ts.end[0], datetime.datetime(
            1997, 7, 16, 7, 30, 15,
            tzinfo=tzutc()))
    def test_featurefromstring(self):
        """A Document containing both TimeStamp and TimeSpan parses
        without raising (validity is only enforced on serialisation)."""
        d = kml.Document(ns='')
        doc = """<Document>
          <name>Document.kml</name>
          <open>1</open>
          <TimeStamp>
            <when>1997-07-16T10:30:15+03:00</when>
          </TimeStamp>
          <TimeSpan>
            <begin>1876-08-01</begin>
            <end>1997-07-16T07:30:15Z</end>
          </TimeSpan>
        </Document>"""
        d.from_string(doc)
class AtomTestCase(unittest.TestCase):
    """Serialisation and parsing of Atom ``Author`` and ``Link`` elements."""

    def test_author(self):
        """An Author serialises name/uri/email and round-trips; an e-mail
        address without a domain part is dropped from the output."""
        author = atom.Author(name="Christian Ledermann")
        self.assertEqual(author.name, "Christian Ledermann")
        author.uri = 'http://iwlearn.net'
        author.email = 'christian@gmail.com'
        serialized = str(author.to_string())
        for fragment in (
                "Christian Ledermann",
                'http://iwlearn.net',
                'christian@gmail.com',
                'name>',
                'uri>',
                'email>'):
            self.assertIn(fragment, serialized)
        # print (a.to_string())
        # An invalid address is not serialised at all.
        author.email = 'christian'
        self.assertNotIn('email>', str(author.to_string()))
        reparsed = atom.Author()
        reparsed.from_string(author.to_string())
        self.assertEqual(author.to_string(), reparsed.to_string())

    def test_link(self):
        """A Link serialises all attributes in the Atom namespace and
        refuses to serialise without an href."""
        link = atom.Link(href="http://localhost/", rel="alternate")
        self.assertEqual(link.href, "http://localhost/")
        self.assertEqual(link.rel, "alternate")
        link.title = "Title"
        link.type = "text/html"
        link.hreflang = 'en'
        link.length = "4096"
        serialized = str(link.to_string())
        for fragment in (
                'href="http://localhost/"',
                'rel="alternate"',
                'title="Title"',
                'hreflang="en"',
                'type="text/html"',
                'length="4096"',
                'link',
                '="http://www.w3.org/2005/Atom"'):
            self.assertIn(fragment, serialized)
        reparsed = atom.Link()
        reparsed.from_string(link.to_string())
        self.assertEqual(link.to_string(), reparsed.to_string())
        # Serialising a link without an href is an error.
        link.href = None
        self.assertRaises(ValueError, link.to_string)
class SetGeometryTestCase(unittest.TestCase):
    """Serialisation of Geometry objects built from shapely-style
    geometries (Point, LineString, Polygon, Multi*, collections)."""
    def test_altitude_mode(self):
        """Only clampToGround/relativeToGround/absolute serialise; other
        values trigger an AssertionError, and the default emits nothing."""
        geom = Geometry()
        geom.geometry = Point(0, 1)
        self.assertEqual(geom.altitude_mode, None)
        self.assertFalse('altitudeMode' in str(geom.to_string()))
        geom.altitude_mode = 'unknown'
        self.assertRaises(AssertionError, geom.to_string)
        # The gx: seafloor modes are not supported by this serialiser.
        geom.altitude_mode = 'clampToSeaFloor'
        self.assertRaises(AssertionError, geom.to_string)
        geom.altitude_mode = 'relativeToSeaFloor'
        self.assertRaises(AssertionError, geom.to_string)
        geom.altitude_mode = 'clampToGround'
        # clampToGround is the KML default, so it is omitted.
        self.assertFalse('altitudeMode' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertTrue(
            'altitudeMode>relativeToGround</' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertTrue('altitudeMode>absolute</' in str(geom.to_string()))
    def test_extrude(self):
        """<extrude> is only written when True and the altitude mode is
        not clampToGround."""
        geom = Geometry()
        self.assertEqual(geom.extrude, False)
        geom.geometry = Point(0, 1)
        geom.extrude = False
        self.assertFalse('extrude' in str(geom.to_string()))
        geom.extrude = True
        geom.altitude_mode = 'clampToGround'
        self.assertFalse('extrude' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertTrue('extrude>1</' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertTrue('extrude>1</' in str(geom.to_string()))
    def test_tesselate(self):
        """<tessellate> is only written for a LineString with tessellate
        set and altitude mode clampToGround."""
        geom = Geometry()
        self.assertEqual(geom.tessellate, False)
        geom.geometry = LineString([(0, 0), (1, 1)])
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'clampToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.tessellate = True
        geom.altitude_mode = None
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'clampToGround'
        self.assertTrue('tessellate>1</' in str(geom.to_string()))
        # for geometries != LineString tesselate is ignored
        geom.geometry = Point(0, 1)
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.geometry = Polygon([(0, 0), (1, 0), (1, 1), (0, 0)])
        self.assertFalse('tessellate' in str(geom.to_string()))
    def test_point(self):
        """A Point (object or __geo_interface__ dict) serialises with
        six-decimal coordinates."""
        p = Point(0, 1)
        g = Geometry(geometry=p)
        self.assertEqual(g.geometry, p)
        g = Geometry(geometry=p.__geo_interface__)
        self.assertEqual(g.geometry.__geo_interface__, p.__geo_interface__)
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))
    def test_linestring(self):
        """A LineString serialises space-separated coordinates and
        round-trips through from_string."""
        l = LineString([(0, 0), (1, 1)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,1.000000</' in
            str(g.to_string()))
        g2 = Geometry()
        g2.from_string(g.to_string())
        self.assertEqual(g.to_string(), g2.to_string())
    def test_linearring(self):
        """A LinearRing serialises a closed coordinate sequence."""
        l = LinearRing([(0, 0), (1, 0), (1, 1), (0, 0)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
    def test_polygon(self):
        """A Polygon serialises outerBoundaryIs always and
        innerBoundaryIs only when it has holes."""
        # without holes
        l = Polygon([(0, 0), (1, 0), (1, 1), (0, 0)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertFalse('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        # with holes
        p = Polygon(
            [(-1, -1), (2, -1), (2, 2), (-1, -1)], [[(0, 0), (1, 0), (1, 1),
                                                     (0, 0)]], )
        g = Geometry(geometry=p)
        self.assertEqual(g.geometry, p)
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertTrue('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</'
            in str(g.to_string()))
    def test_multipoint(self):
        """A MultiPoint serialises as a MultiGeometry of Points."""
        p0 = Point(0, 1)
        p1 = Point(1, 1)
        g = Geometry(geometry=MultiPoint([p0, p1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))
        self.assertTrue(
            'coordinates>1.000000,1.000000</' in str(g.to_string()))
    def test_multilinestring(self):
        """A MultiLineString serialises as a MultiGeometry of
        LineStrings."""
        l0 = LineString([(0, 0), (1, 0)])
        l1 = LineString([(0, 1), (1, 1)])
        g = Geometry(geometry=MultiLineString([l0, l1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000</' in
            str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000 1.000000,1.000000</' in
            str(g.to_string()))
    def test_multipolygon(self):
        """A MultiPolygon serialises each member, preserving holes."""
        # with holes
        p0 = Polygon(
            [(-1, -1), (2, -1), (2, 2), (-1, -1)], [[(0, 0), (1, 0), (1, 1),
                                                     (0, 0)]])
        # without holes
        p1 = Polygon([(3, 0), (4, 0), (4, 1), (3, 0)])
        g = Geometry(geometry=MultiPolygon([p0, p1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertTrue('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</'
            in str(g.to_string()))
    def test_geometrycollection(self):
        """A heterogeneous GeometryCollection serialises each member
        inside one MultiGeometry."""
        po = Polygon([(3, 0), (4, 0), (4, 1), (3, 0)])
        lr = LinearRing([(0, -1), (1, -1), (1, 1), (0, -1)])
        ls = LineString([(0, 0), (1, 1)])
        p = Point(0, 1)
        # geo_if = {'type': 'GeometryCollection', 'geometries': [
        #     po.__geo_interface__, p.__geo_interface__,
        #     ls.__geo_interface__, lr.__geo_interface__]}
        g = Geometry(geometry=GeometryCollection([po, p, ls, lr]))
        # g1 = Geometry(geometry=as_shape(geo_if))
        # self.assertEqual(g1.__geo_interface__, g.__geo_interface__)
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertFalse('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,1.000000</' in
            str(g.to_string()))
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))
class GetGeometryTestCase(unittest.TestCase):
    """Parsing KML geometry XML back into Geometry objects."""
    def test_altitude_mode(self):
        """<kml:altitudeMode> is parsed into Geometry.altitude_mode."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:coordinates>0.000000,1.000000</kml:coordinates>
          <kml:altitudeMode>clampToGround</kml:altitudeMode>
        </kml:Point>"""
        g = Geometry()
        self.assertEqual(g.altitude_mode, None)
        g.from_string(doc)
        self.assertEqual(g.altitude_mode, 'clampToGround')
    def test_extrude(self):
        """<kml:extrude>1</kml:extrude> is parsed to extrude=True."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:coordinates>0.000000,1.000000</kml:coordinates>
          <kml:extrude>1</kml:extrude>
        </kml:Point>"""
        g = Geometry()
        self.assertEqual(g.extrude, False)
        g.from_string(doc)
        self.assertEqual(g.extrude, True)
    def test_tesselate(self):
        """<kml:tessellate>1</kml:tessellate> is parsed to
        tessellate=True."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:coordinates>0.000000,1.000000</kml:coordinates>
          <kml:tessellate>1</kml:tessellate>
        </kml:Point>"""
        g = Geometry()
        self.assertEqual(g.tessellate, False)
        g.from_string(doc)
        self.assertEqual(g.tessellate, True)
    def test_point(self):
        """A <kml:Point> parses to a Point __geo_interface__."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:coordinates>0.000000,1.000000</kml:coordinates>
        </kml:Point>"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__,
            {'type': 'Point',
             'coordinates': (0.0, 1.0)})
    def test_linestring(self):
        """A <kml:LineString> parses to a LineString __geo_interface__."""
        doc = """<kml:LineString xmlns:kml="http://www.opengis.net/kml/2.2">
            <kml:coordinates>0.000000,0.000000 1.000000,1.000000</kml:coordinates>
        </kml:LineString>"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__,
            {'type': 'LineString',
             'coordinates': ((0.0, 0.0), (1.0, 1.0))})
    def test_linearring(self):
        """A <kml:LinearRing> parses to a LinearRing __geo_interface__."""
        doc = """<kml:LinearRing xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
        </kml:LinearRing>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'LinearRing',
                'coordinates': ((0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0))
            })
    def test_polygon(self):
        """A <kml:Polygon> parses outer and (optionally) inner boundary
        rings into the coordinates tuple."""
        doc = """<kml:Polygon xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:outerBoundaryIs>
            <kml:LinearRing>
              <kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
            </kml:LinearRing>
          </kml:outerBoundaryIs>
        </kml:Polygon>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'Polygon',
                'coordinates': ((
                    (0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)
                ), )
            })
        doc = """<kml:Polygon xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:outerBoundaryIs>
            <kml:LinearRing>
              <kml:coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</kml:coordinates>
            </kml:LinearRing>
          </kml:outerBoundaryIs>
          <kml:innerBoundaryIs>
            <kml:LinearRing>
              <kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
            </kml:LinearRing>
          </kml:innerBoundaryIs>
        </kml:Polygon>
        """
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'Polygon',
                'coordinates': (
                    ((-1.0, -1.0), (2.0, -1.0), (2.0, 2.0),
                     (-1.0, -1.0)), ((0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
                                     (0.0, 0.0)),
                )
            })
    def test_multipoint(self):
        """A MultiGeometry of Points parses to a two-member geometry."""
        doc = """
        <kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:Point>
            <kml:coordinates>0.000000,1.000000</kml:coordinates>
          </kml:Point>
          <kml:Point>
            <kml:coordinates>1.000000,1.000000</kml:coordinates>
          </kml:Point>
        </kml:MultiGeometry>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)
    def test_multilinestring(self):
        """A MultiGeometry of LineStrings parses to a two-member
        geometry."""
        doc = """
        <kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:LineString>
            <kml:coordinates>0.000000,0.000000 1.000000,0.000000</kml:coordinates>
          </kml:LineString>
          <kml:LineString>
            <kml:coordinates>0.000000,1.000000 1.000000,1.000000</kml:coordinates>
          </kml:LineString>
        </kml:MultiGeometry>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)
    def test_multipolygon(self):
        """A MultiGeometry of Polygons (with and without holes) parses to
        a two-member geometry."""
        doc = """
        <kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:Polygon>
            <kml:outerBoundaryIs>
              <kml:LinearRing>
                <kml:coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</kml:coordinates>
              </kml:LinearRing>
            </kml:outerBoundaryIs>
            <kml:innerBoundaryIs>
              <kml:LinearRing>
                <kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
              </kml:LinearRing>
            </kml:innerBoundaryIs>
          </kml:Polygon>
          <kml:Polygon>
            <kml:outerBoundaryIs>
              <kml:LinearRing>
                <kml:coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</kml:coordinates>
              </kml:LinearRing>
            </kml:outerBoundaryIs>
          </kml:Polygon>
        </kml:MultiGeometry>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)
    def test_geometrycollection(self):
        """A mixed MultiGeometry parses to a GeometryCollection, keeping
        every member; a homogeneous one of LinearRings also reports the
        GeometryCollection geom_type."""
        doc = """
        <kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:Polygon>
            <kml:outerBoundaryIs>
              <kml:LinearRing>
                <kml:coordinates>3,0 4,0 4,1 3,0</kml:coordinates>
              </kml:LinearRing>
            </kml:outerBoundaryIs>
          </kml:Polygon>
          <kml:Point>
            <kml:coordinates>0.000000,1.000000</kml:coordinates>
          </kml:Point>
          <kml:LineString>
            <kml:coordinates>0.000000,0.000000 1.000000,1.000000</kml:coordinates>
          </kml:LineString>
          <kml:LinearRing>
            <kml:coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,1.0 0.0,0.0</kml:coordinates>
          </kml:LinearRing>
        </kml:MultiGeometry>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 4)
        doc = """
        <kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
          <kml:LinearRing>
            <kml:coordinates>3.0,0.0 4.0,0.0 4.0,1.0 3.0,0.0</kml:coordinates>
          </kml:LinearRing>
          <kml:LinearRing>
            <kml:coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,0.0</kml:coordinates>
          </kml:LinearRing>
        </kml:MultiGeometry>
        """
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)
        self.assertEqual(g.geometry.geom_type, 'GeometryCollection')
class Force3DTestCase(unittest.TestCase):
    """Effect of the ``config.FORCE3D`` flag on 2D geometry output."""

    def setUp(self):
        config.FORCE3D = False

    def tearDown(self):
        # Important: Set FORCE3D back to False!
        config.FORCE3D = False

    def test3d(self):
        """With FORCE3D enabled a 2D polygon serialises like its 3D twin."""
        config.FORCE3D = True
        ns = ''
        flat = kml.Placemark(ns, 'id', 'name', 'description')
        flat.geometry = Polygon([(0, 0), (1, 1), (1, 0)])
        spatial = kml.Placemark(ns, 'id', 'name', 'description')
        spatial.geometry = Polygon([(0, 0, 0), (1, 1, 0), (1, 0, 0)])
        self.assertEqual(flat.to_string(), spatial.to_string())

    def testno3d(self):
        """With FORCE3D disabled 2D and 3D polygons serialise differently."""
        config.FORCE3D = False
        ns = ''
        flat = kml.Placemark(ns, 'id', 'name', 'description')
        flat.geometry = Polygon([(0, 0), (1, 1), (1, 0)])
        spatial = kml.Placemark(ns, 'id', 'name', 'description')
        spatial.geometry = Polygon([(0, 0, 0), (1, 1, 0), (1, 0, 0)])
        self.assertNotEqual(flat.to_string(), spatial.to_string())
class BaseFeatureTestCase(unittest.TestCase):
    """Validation of the address and phoneNumber properties on
    ``kml._Feature``: strings and None are accepted, other types raise."""

    def test_address_string(self):
        """A plain string is stored unchanged as the address."""
        feature = kml._Feature()
        address = '1600 Amphitheatre Parkway, Mountain View, CA 94043, USA'
        feature.address = address
        self.assertEqual(feature.address, address)

    def test_address_none(self):
        """None clears the address."""
        feature = kml._Feature()
        feature.address = None
        self.assertEqual(feature.address, None)

    def test_address_value_error(self):
        """A non-string address is rejected with ValueError."""
        feature = kml._Feature()
        with self.assertRaises(ValueError):
            feature.address = 123

    def test_phone_number_string(self):
        """A plain string is stored unchanged as the phone number."""
        feature = kml._Feature()
        feature.phoneNumber = '+1-234-567-8901'
        self.assertEqual(feature.phoneNumber, '+1-234-567-8901')

    def test_phone_number_none(self):
        """None clears the phone number."""
        feature = kml._Feature()
        feature.phoneNumber = None
        self.assertEqual(feature.phoneNumber, None)

    def test_phone_number_value_error(self):
        """A non-string phone number is rejected with ValueError."""
        feature = kml._Feature()
        with self.assertRaises(ValueError):
            feature.phoneNumber = 123
class BaseOverlayTestCase(unittest.TestCase):
    """Tests for the ``color``, ``drawOrder`` and ``icon`` overlay properties."""

    @staticmethod
    def _overlay():
        """Create a fresh overlay fixture."""
        return kml._Overlay(name='An Overlay')

    def test_color_string(self):
        overlay = self._overlay()
        overlay.color = '00010203'
        self.assertEqual(overlay.color, '00010203')

    def test_color_none(self):
        overlay = self._overlay()
        overlay.color = '00010203'
        self.assertEqual(overlay.color, '00010203')
        overlay.color = None
        self.assertEqual(overlay.color, None)

    def test_color_value_error(self):
        overlay = self._overlay()
        with self.assertRaises(ValueError):
            overlay.color = object()

    def test_draw_order_string(self):
        overlay = self._overlay()
        overlay.drawOrder = '1'
        self.assertEqual(overlay.drawOrder, '1')

    def test_draw_order_int(self):
        # Integers are coerced to their string form.
        overlay = self._overlay()
        overlay.drawOrder = 1
        self.assertEqual(overlay.drawOrder, '1')

    def test_draw_order_none(self):
        overlay = self._overlay()
        overlay.drawOrder = '1'
        self.assertEqual(overlay.drawOrder, '1')
        overlay.drawOrder = None
        self.assertEqual(overlay.drawOrder, None)

    def test_draw_order_value_error(self):
        overlay = self._overlay()
        with self.assertRaises(ValueError):
            overlay.drawOrder = object()

    def test_icon_without_tag(self):
        # Bare URLs get wrapped in an <href> element.
        overlay = self._overlay()
        overlay.icon = 'http://example.com/'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_open_tag(self):
        overlay = self._overlay()
        overlay.icon = '<href>http://example.com/'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_close_tag(self):
        overlay = self._overlay()
        overlay.icon = 'http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_tag(self):
        overlay = self._overlay()
        overlay.icon = '<href>http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_to_none(self):
        overlay = self._overlay()
        overlay.icon = '<href>http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')
        overlay.icon = None
        self.assertEqual(overlay.icon, None)

    def test_icon_raise_exception(self):
        overlay = self._overlay()
        with self.assertRaises(ValueError):
            overlay.icon = 12345
class GroundOverlayTestCase(unittest.TestCase):
    """Tests for kml.GroundOverlay attribute coercion and validation.

    Exercises the altitude / altitudeMode properties and the latLonBox
    bounds (north/south/east/west/rotation), checking that numeric values
    are stored as strings and that invalid values raise ValueError.
    """

    def setUp(self):
        # Fresh overlay for every test.
        self.g = kml.GroundOverlay()

    def test_altitude_int(self):
        # Integers are coerced to their string form.
        self.g.altitude = 123
        self.assertEqual(self.g.altitude, '123')

    def test_altitude_float(self):
        self.g.altitude = 123.4
        self.assertEqual(self.g.altitude, '123.4')

    def test_altitude_string(self):
        self.g.altitude = '123'
        self.assertEqual(self.g.altitude, '123')

    def test_altitude_value_error(self):
        # Non-numeric, non-string values are rejected.
        with self.assertRaises(ValueError):
            self.g.altitude = object()

    def test_altitude_none(self):
        # Assigning None clears a previously set altitude.
        self.g.altitude = '123'
        self.assertEqual(self.g.altitude, '123')
        self.g.altitude = None
        self.assertEqual(self.g.altitude, None)

    def test_altitude_mode_default(self):
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_error(self):
        # Unrecognized values fall back to the default mode.
        self.g.altitudeMode = ''
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_clamp(self):
        self.g.altitudeMode = 'clampToGround'
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_absolute(self):
        self.g.altitudeMode = 'absolute'
        self.assertEqual(self.g.altitudeMode, 'absolute')

    def test_latlonbox_function(self):
        # latLonBox(north, south, east, west, rotation) sets all bounds.
        self.g.latLonBox(10, 20, 30, 40, 50)
        self.assertEqual(self.g.north, '10')
        self.assertEqual(self.g.south, '20')
        self.assertEqual(self.g.east, '30')
        self.assertEqual(self.g.west, '40')
        self.assertEqual(self.g.rotation, '50')

    def test_latlonbox_string(self):
        self.g.north = '10'
        self.g.south = '20'
        self.g.east = '30'
        self.g.west = '40'
        self.g.rotation = '50'
        self.assertEqual(self.g.north, '10')
        self.assertEqual(self.g.south, '20')
        self.assertEqual(self.g.east, '30')
        self.assertEqual(self.g.west, '40')
        self.assertEqual(self.g.rotation, '50')

    def test_latlonbox_int(self):
        # Integer bounds are stored as strings.
        self.g.north = 10
        self.g.south = 20
        self.g.east = 30
        self.g.west = 40
        self.g.rotation = 50
        self.assertEqual(self.g.north, '10')
        self.assertEqual(self.g.south, '20')
        self.assertEqual(self.g.east, '30')
        self.assertEqual(self.g.west, '40')
        self.assertEqual(self.g.rotation, '50')

    def test_latlonbox_float(self):
        # Float bounds keep their decimal representation.
        self.g.north = 10.0
        self.g.south = 20.0
        self.g.east = 30.0
        self.g.west = 40.0
        self.g.rotation = 50.0
        self.assertEqual(self.g.north, '10.0')
        self.assertEqual(self.g.south, '20.0')
        self.assertEqual(self.g.east, '30.0')
        self.assertEqual(self.g.west, '40.0')
        self.assertEqual(self.g.rotation, '50.0')

    def test_latlonbox_value_error(self):
        # Each bound rejects non-coercible values and stays unset.
        with self.assertRaises(ValueError):
            self.g.north = object()
        with self.assertRaises(ValueError):
            self.g.south = object()
        with self.assertRaises(ValueError):
            self.g.east = object()
        with self.assertRaises(ValueError):
            self.g.west = object()
        with self.assertRaises(ValueError):
            self.g.rotation = object()
        self.assertEqual(self.g.north, None)
        self.assertEqual(self.g.south, None)
        self.assertEqual(self.g.east, None)
        self.assertEqual(self.g.west, None)
        self.assertEqual(self.g.rotation, None)

    def test_latlonbox_empty_string(self):
        # Empty strings are accepted verbatim (no validation of content).
        self.g.north = ''
        self.g.south = ''
        self.g.east = ''
        self.g.west = ''
        self.g.rotation = ''
        self.assertEqual(self.g.north, '')
        self.assertEqual(self.g.south, '')
        self.assertEqual(self.g.east, '')
        self.assertEqual(self.g.west, '')
        self.assertEqual(self.g.rotation, '')

    def test_latlonbox_none(self):
        self.g.north = None
        self.g.south = None
        self.g.east = None
        self.g.west = None
        self.g.rotation = None
        self.assertEqual(self.g.north, None)
        self.assertEqual(self.g.south, None)
        self.assertEqual(self.g.east, None)
        self.assertEqual(self.g.west, None)
        self.assertEqual(self.g.rotation, None)
class GroundOverlayStringTestCase(unittest.TestCase):
    """Tests for kml.GroundOverlay KML serialization.

    Each test builds an overlay programmatically and compares its
    ``to_string()`` output against an overlay parsed from the expected
    KML fragment, so serialization and parsing are checked together.
    """

    def test_default_to_string(self):
        g = kml.GroundOverlay()
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_to_string(self):
        g = kml.GroundOverlay()
        g.icon = 'http://example.com'
        g.drawOrder = 1
        g.color = '00010203'
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:color>00010203</kml:color>'
            '<kml:drawOrder>1</kml:drawOrder>'
            '<kml:icon><href>http://example.com</href></kml:icon>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_from_int(self):
        # Setting an altitude also emits the (default) altitudeMode.
        g = kml.GroundOverlay()
        g.altitude = 123
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123</kml:altitude>'
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_from_float(self):
        g = kml.GroundOverlay()
        g.altitude = 123.4
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123.4</kml:altitude>'
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_from_string(self):
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123.4</kml:altitude>'
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_mode_absolute(self):
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 'absolute'
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123.4</kml:altitude>'
            '<kml:altitudeMode>absolute</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_mode_unknown_string(self):
        # Unknown altitudeMode strings serialize as the default mode.
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 'unknown string'
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123.4</kml:altitude>'
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_altitude_mode_value(self):
        # Non-string altitudeMode values also fall back to the default.
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 1234
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:altitude>123.4</kml:altitude>'
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_latlonbox_no_rotation(self):
        # Omitted rotation defaults to 0 in the serialized latLonBox.
        g = kml.GroundOverlay()
        g.latLonBox(10, 20, 30, 40)
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:latLonBox>'
            '<kml:north>10</kml:north>'
            '<kml:south>20</kml:south>'
            '<kml:east>30</kml:east>'
            '<kml:west>40</kml:west>'
            '<kml:rotation>0</kml:rotation>'
            '</kml:latLonBox>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_latlonbox_rotation(self):
        g = kml.GroundOverlay()
        g.latLonBox(10, 20, 30, 40, 50)
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:latLonBox>'
            '<kml:north>10</kml:north>'
            '<kml:south>20</kml:south>'
            '<kml:east>30</kml:east>'
            '<kml:west>40</kml:west>'
            '<kml:rotation>50</kml:rotation>'
            '</kml:latLonBox>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())

    def test_latlonbox_nswer(self):
        # Setting bounds attribute-by-attribute serializes the same way
        # as calling latLonBox().
        g = kml.GroundOverlay()
        g.north = 10
        g.south = 20
        g.east = 30
        g.west = 40
        g.rotation = 50
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            '<kml:latLonBox>'
            '<kml:north>10</kml:north>'
            '<kml:south>20</kml:south>'
            '<kml:east>30</kml:east>'
            '<kml:west>40</kml:west>'
            '<kml:rotation>50</kml:rotation>'
            '</kml:latLonBox>'
            '</kml:GroundOverlay>')
        self.assertEqual(g.to_string(), expected.to_string())
def test_suite():
    """Assemble and return the TestSuite for this module.

    Fix: BaseFeatureTestCase and GroundOverlayStringTestCase are defined
    in this module but were never added to the suite, so their tests were
    silently skipped when running via ``test_suite()``.
    """
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(BaseClassesTestCase))
    suite.addTest(unittest.makeSuite(BuildKmlTestCase))
    suite.addTest(unittest.makeSuite(KmlFromStringTestCase))
    suite.addTest(unittest.makeSuite(StyleTestCase))
    suite.addTest(unittest.makeSuite(StyleFromStringTestCase))
    suite.addTest(unittest.makeSuite(DateTimeTestCase))
    suite.addTest(unittest.makeSuite(AtomTestCase))
    suite.addTest(unittest.makeSuite(SetGeometryTestCase))
    suite.addTest(unittest.makeSuite(GetGeometryTestCase))
    suite.addTest(unittest.makeSuite(Force3DTestCase))
    suite.addTest(unittest.makeSuite(BaseFeatureTestCase))
    suite.addTest(unittest.makeSuite(BaseOverlayTestCase))
    suite.addTest(unittest.makeSuite(GroundOverlayTestCase))
    suite.addTest(unittest.makeSuite(GroundOverlayStringTestCase))
    return suite
if __name__ == '__main__':
    # Discover and run all TestCase classes when executed as a script.
    unittest.main()
| 58.187669 | 53,811 | 0.602939 |
# Prefer the unittest2 backport (needed for assertRaises-as-context-manager
# on older Pythons), falling back to the stdlib unittest.
# Fix: narrowed the bare `except:` to `except ImportError:` so that only a
# missing unittest2 triggers the fallback; a bare except would also swallow
# SystemExit/KeyboardInterrupt and unrelated errors raised during import.
try:
    import unittest2 as unittest
except ImportError:
    import unittest
from fastkml import kml
from fastkml import styles
from fastkml import base
from fastkml import atom
from fastkml import config
from fastkml import gx
import datetime
from dateutil.tz import tzutc, tzoffset
from fastkml.config import etree
from fastkml.geometry import Point, LineString, Polygon
from fastkml.geometry import MultiPoint, MultiLineString, MultiPolygon
from fastkml.geometry import LinearRing, GeometryCollection
from fastkml.geometry import Geometry
class BaseClassesTestCase(unittest.TestCase):
    """Tests for the abstract/private base classes of fastkml.

    These classes (``base._BaseObject``, ``kml._Feature``,
    ``kml._Container``, ``kml._Overlay``) are not meant to be
    instantiated by users; the tests pin down their default state and
    the NotImplementedError/TypeError contracts subclasses rely on.
    """

    def test_base_object(self):
        bo = base._BaseObject(id='id0')
        self.assertEqual(bo.id, 'id0')
        self.assertEqual(bo.ns, config.NS)
        self.assertEqual(bo.targetId, None)
        self.assertEqual(bo.__name__, None)
        bo.targetId = 'target'
        self.assertEqual(bo.targetId, 'target')
        bo.ns = ''
        bo.id = None
        self.assertEqual(bo.id, None)
        self.assertEqual(bo.ns, '')
        # Abstract: serialization is left to subclasses.
        self.assertRaises(NotImplementedError, bo.etree_element)
        element = etree.Element(config.NS + 'Base')
        # from_element requires an element argument and a matching
        # __name__ / namespace before it will parse.
        self.assertRaises(TypeError, bo.from_element)
        self.assertRaises(TypeError, bo.from_element, element)
        bo.__name__ = 'NotABaseObject'
        self.assertRaises(TypeError, bo.from_element, element)
        bo.__name__ = 'Base'
        bo.ns = config.NS
        bo.from_element(element)
        self.assertEqual(bo.id, None)
        self.assertEqual(bo.ns, config.NS)
        self.assertFalse(bo.etree_element(), None)
        self.assertTrue(len(bo.to_string()) > 1)

    def test_feature(self):
        # A bare _Feature has all optional fields unset and cannot
        # serialize until __name__ is assigned.
        f = kml._Feature(name='A Feature')
        self.assertRaises(NotImplementedError, f.etree_element)
        self.assertEqual(f.name, 'A Feature')
        self.assertEqual(f.visibility, 1)
        self.assertEqual(f.isopen, 0)
        self.assertEqual(f._atom_author, None)
        self.assertEqual(f._atom_link, None)
        self.assertEqual(f.address, None)
        self.assertEqual(f._snippet, None)
        self.assertEqual(f.description, None)
        self.assertEqual(f._styleUrl, None)
        self.assertEqual(f._styles, [])
        self.assertEqual(f._time_span, None)
        self.assertEqual(f._time_stamp, None)
        f.__name__ = 'Feature'
        f.styleUrl = '#default'
        self.assertTrue('Feature>' in str(f.to_string()))
        self.assertTrue('#default' in str(f.to_string()))

    def test_container(self):
        # Containers accept children but remain abstract for serialization.
        f = kml._Container(name='A Container')
        p = kml.Placemark()
        f.append(p)
        self.assertRaises(NotImplementedError, f.etree_element)

    def test_overlay(self):
        o = kml._Overlay(name='An Overlay')
        self.assertEqual(o._color, None)
        self.assertEqual(o._drawOrder, None)
        self.assertEqual(o._icon, None)
        self.assertRaises(NotImplementedError, o.etree_element)

    def test_atom_link(self):
        # Namespace passed at construction is preserved.
        ns = '{http://www.opengis.net/kml/2.2}'
        l = atom.Link(ns=ns)
        self.assertEqual(l.ns, ns)

    def test_atom_person(self):
        ns = '{http://www.opengis.net/kml/2.2}'
        p = atom._Person(ns=ns)
        self.assertEqual(p.ns, ns)
class BuildKmlTestCase(unittest.TestCase):
    """Tests that build KML documents programmatically.

    Most tests construct an object tree, serialize it with
    ``to_string()``, re-parse the output with ``from_string()`` and
    assert the round trip is lossless.
    """

    def test_kml(self):
        # An empty KML document serializes to a bare <kml> root; the
        # exact prefix depends on the XML backend in use.
        k = kml.KML()
        self.assertEqual(len(list(k.features())), 0)
        if config.LXML:
            self.assertEqual(
                str(k.to_string())[:43],
                '<kml xmlns="http://www.opengis.net/kml/2.2"/>'[:43])
        else:
            if hasattr(etree, 'register_namespace'):
                self.assertEqual(str(k.to_string())[:51], '<kml:kml xmlns:kml="http://www.opengis.net/kml/2.2" />'[:51])
            else:
                # Older ElementTree generates ns0-style prefixes.
                self.assertEqual(str(k.to_string())[:51], '<ns0:kml xmlns:ns0="http://www.opengis.net/kml/2.2" />'[:51])
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_folder(self):
        # Folders may nest; only top-level folders count as features of k.
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML()
        f = kml.Folder(ns, 'id', 'name', 'description')
        nf = kml.Folder(ns, 'nested-id', 'nested-name', 'nested-description')
        f.append(nf)
        k.append(f)
        f2 = kml.Folder(ns, 'id2', 'name2', 'description2')
        k.append(f2)
        self.assertEqual(len(list(k.features())), 2)
        self.assertEqual(len(list(list(k.features())[0].features())), 1)
        k2 = kml.KML()
        s = k.to_string()
        k2.from_string(s)
        self.assertEqual(s, k2.to_string())

    def test_placemark(self):
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Point(0.0, 0.0, 0.0)
        p2 = kml.Placemark(ns, 'id2', 'name2', 'description2')
        p2.geometry = LineString([(0, 0, 0), (1, 1, 1)])
        k.append(p)
        k.append(p2)
        self.assertEqual(len(list(k.features())), 2)
        k2 = kml.KML()
        # prettyprint output must parse back to the same document.
        k2.from_string(k.to_string(prettyprint=True))
        self.assertEqual(k.to_string(), k2.to_string())

    def test_schema(self):
        ns = '{http://www.opengis.net/kml/2.2}'
        # A Schema requires an id.
        self.assertRaises(ValueError, kml.Schema, ns)
        s = kml.Schema(ns, 'some_id')
        self.assertEqual(len(list(s.simple_fields)), 0)
        s.append('int', 'Integer', 'An Integer')
        self.assertEqual(list(s.simple_fields)[0]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], 'An Integer')
        # Assigning None clears all fields.
        s.simple_fields = None
        self.assertEqual(len(list(s.simple_fields)), 0)
        # Fields must be dicts or (type, name[, displayName]) sequences.
        self.assertRaises(
            TypeError, s.append, ('none', 'Integer', 'An Integer'))
        self.assertRaises(
            TypeError, s.simple_fields, [('none', 'Integer', 'An Integer')])
        self.assertRaises(
            TypeError, s.simple_fields, ('int', 'Integer', 'An Integer'))
        fields = {
            'type': 'int',
            'name': 'Integer',
            'displayName': 'An Integer'
        }
        s.simple_fields = fields
        self.assertEqual(list(s.simple_fields)[0]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], 'An Integer')
        # A list mixing sequences and dicts is accepted; missing
        # displayName defaults to None.
        s.simple_fields = [['float', 'Float'], fields]
        self.assertEqual(list(s.simple_fields)[0]['type'], 'float')
        self.assertEqual(list(s.simple_fields)[0]['name'], 'Float')
        self.assertEqual(list(s.simple_fields)[0]['displayName'], None)
        self.assertEqual(list(s.simple_fields)[1]['type'], 'int')
        self.assertEqual(list(s.simple_fields)[1]['name'], 'Integer')
        self.assertEqual(list(s.simple_fields)[1]['displayName'], 'An Integer')

    def test_schema_data(self):
        ns = '{http://www.opengis.net/kml/2.2}'
        # SchemaData requires a non-empty schema URL.
        self.assertRaises(ValueError, kml.SchemaData, ns)
        self.assertRaises(ValueError, kml.SchemaData, ns, '')
        sd = kml.SchemaData(ns, '#default')
        sd.append_data('text', 'Some Text')
        self.assertEqual(len(sd.data), 1)
        sd.append_data(value=1, name='Integer')
        self.assertEqual(len(sd.data), 2)
        self.assertEqual(sd.data[0], {'value': 'Some Text', 'name': 'text'})
        self.assertEqual(sd.data[1], {'value': 1, 'name': 'Integer'})
        # Assigning to .data replaces all existing entries.
        data = (('text', 'Some new Text'), {'value': 2, 'name': 'Integer'})
        sd.data = data
        self.assertEqual(len(sd.data), 2)
        self.assertEqual(
            sd.data[0], {'value': 'Some new Text',
                         'name': 'text'})
        self.assertEqual(sd.data[1], {'value': 2, 'name': 'Integer'})

    def test_untyped_extended_data(self):
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Point(0.0, 0.0, 0.0)
        p.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(
                name='info',
                value='so much to see'), kml.UntypedExtendedDataElement(
                name='weather',
                display_name='Weather',
                value='blue skies')
        ])
        self.assertEqual(len(p.extended_data.elements), 2)
        k.append(p)
        k2 = kml.KML()
        k2.from_string(k.to_string(prettyprint=True))
        k.to_string()
        # Extended data must survive the serialize/parse round trip.
        extended_data = list(k2.features())[0].extended_data
        self.assertTrue(extended_data is not None)
        self.assertTrue(len(extended_data.elements), 2)
        self.assertEqual(extended_data.elements[0].name, 'info')
        self.assertEqual(extended_data.elements[0].value, 'so much to see')
        self.assertEqual(extended_data.elements[0].display_name, None)
        self.assertEqual(extended_data.elements[1].name, 'weather')
        self.assertEqual(extended_data.elements[1].value, 'blue skies')
        self.assertEqual(extended_data.elements[1].display_name, 'Weather')

    def test_untyped_extended_data_nested(self):
        # Extended data on a Document and on a nested Folder must not
        # interfere with one another.
        ns = '{http://www.opengis.net/kml/2.2}'
        k = kml.KML(ns=ns)
        d = kml.Document(ns, 'docid', 'doc name', 'doc description')
        d.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(name='type',
                                           value='Document')
        ])
        f = kml.Folder(ns, 'fid', 'f name', 'f description')
        f.extended_data = kml.UntypedExtendedData(elements=[
            kml.UntypedExtendedDataElement(name='type',
                                           value='Folder')
        ])
        k.append(d)
        d.append(f)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        document_data = list(k2.features())[0].extended_data
        folder_data = list(list(k2.features())[0].features())[0].extended_data
        self.assertEqual(document_data.elements[0].name, 'type')
        self.assertEqual(document_data.elements[0].value, 'Document')
        self.assertEqual(folder_data.elements[0].name, 'type')
        self.assertEqual(folder_data.elements[0].value, 'Folder')

    def test_document(self):
        # A document containing folders, nested folders and placemarks
        # with geometries round-trips losslessly.
        k = kml.KML()
        ns = '{http://www.opengis.net/kml/2.2}'
        d = kml.Document(ns, 'docid', 'doc name', 'doc description')
        f = kml.Folder(ns, 'fid', 'f name', 'f description')
        k.append(d)
        d.append(f)
        nf = kml.Folder(
            ns, 'nested-fid', 'nested f name', 'nested f description')
        f.append(nf)
        f2 = kml.Folder(ns, 'id2', 'name2', 'description2')
        d.append(f2)
        p = kml.Placemark(ns, 'id', 'name', 'description')
        p.geometry = Polygon([(0, 0, 0), (1, 1, 0), (1, 0, 1)])
        p2 = kml.Placemark(ns, 'id2', 'name2', 'description2')
        f2.append(p)
        nf.append(p2)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(len(list((list(k.features())[0].features()))), 2)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_author(self):
        # author may be set from a plain string or an atom.Author object;
        # assigning an Author replaces a previous string author.
        d = kml.Document()
        d.author = 'Christian Ledermann'
        self.assertTrue('Christian Ledermann' in str(d.to_string()))
        a = atom.Author(
            name='Nobody',
            uri='http://localhost',
            email='cl@donotreply.com')
        d.author = a
        self.assertEqual(d.author, 'Nobody')
        self.assertFalse('Christian Ledermann' in str(d.to_string()))
        self.assertTrue('Nobody' in str(d.to_string()))
        self.assertTrue('http://localhost' in str(d.to_string()))
        self.assertTrue('cl@donotreply.com' in str(d.to_string()))
        d2 = kml.Document()
        d2.from_string(d.to_string())
        self.assertEqual(d.to_string(), d2.to_string())
        d.author = None

    def test_link(self):
        # link may be set from a plain URL string or an atom.Link object.
        d = kml.Document()
        d.link = 'http://localhost'
        self.assertTrue('http://localhost' in str(d.to_string()))
        l = atom.Link(href='#here')
        d.link = l
        self.assertTrue('#here' in str(d.to_string()))
        self.assertRaises(TypeError, d.link, object)
        d2 = kml.Document()
        d2.from_string(d.to_string())
        self.assertEqual(d.to_string(), d2.to_string())
        d.link = None

    def test_address(self):
        address = '1600 Amphitheatre Parkway, Mountain View, CA 94043, USA'
        d = kml.Document()
        d.address = address
        self.assertTrue(address in str(d.to_string()))
        self.assertTrue('address>' in str(d.to_string()))

    def test_phone_number(self):
        phone = '+1 234 567 8901'
        d = kml.Document()
        d.phoneNumber = phone
        self.assertTrue(phone in str(d.to_string()))
        self.assertTrue('phoneNumber>' in str(d.to_string()))
class KmlFromStringTestCase(unittest.TestCase):
def test_document(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document targetId="someTargetId">
<name>Document.kml</name>
<open>1</open>
<Style id="exampleStyleDocument">
<LabelStyle>
<color>ff0000cc</color>
</LabelStyle>
</Style>
<Placemark>
<name>Document Feature 1</name>
<styleUrl>#exampleStyleDocument</styleUrl>
<Point>
<coordinates>-122.371,37.816,0</coordinates>
</Point>
</Placemark>
<Placemark targetId="someTargetId">
<name>Document Feature 2</name>
<styleUrl>#exampleStyleDocument</styleUrl>
<Point>
<coordinates>-122.370,37.817,0</coordinates>
</Point>
</Placemark>
</Document>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertEqual(len(list(list(k.features())[0].features())), 2)
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_document_booleans(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document targetId="someTargetId">
<name>Document.kml</name>
<visibility>true</visibility>
<open>1</open>
</Document>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(list(k.features())[0].visibility, 1)
self.assertEqual(list(k.features())[0].isopen, 1)
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document targetId="someTargetId">
<name>Document.kml</name>
<visibility>0</visibility>
<open>false</open>
</Document>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(list(k.features())[0].visibility, 0)
self.assertEqual(list(k.features())[0].isopen, 0)
def test_folders(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Folder>
<name>Folder.kml</name>
<open>1</open>
<description>
A folder is a container that can hold multiple other objects
</description>
<Placemark>
<name>Folder object 1 (Placemark)</name>
<Point>
<coordinates>-122.377588,37.830266,0</coordinates>
</Point>
</Placemark>
<Placemark>
<name>Folder object 2 (Polygon)</name>
<Polygon>
<outerBoundaryIs>
<LinearRing>
<coordinates>
-122.377830,37.830445,0
-122.377576,37.830631,0
-122.377840,37.830642,0
-122.377830,37.830445,0
</coordinates>
</LinearRing>
</outerBoundaryIs>
</Polygon>
</Placemark>
<Placemark>
<name>Folder object 3 (Path)</name>
<LineString>
<tessellate>1</tessellate>
<coordinates>
-122.378009,37.830128,0 -122.377885,37.830379,0
</coordinates>
</LineString>
</Placemark>
</Folder>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertEqual(len(list(list(k.features())[0].features())), 3)
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_placemark(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>Simple placemark</name>
<description>Attached to the ground. Intelligently places itself
at the height of the underlying terrain.</description>
<Point>
<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>
</Point>
</Placemark>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertEqual(list(k.features())[0].name, "Simple placemark")
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_extended_data(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>Simple placemark</name>
<description></description>
<Point>
<coordinates>-122.0822035425683,37.42228990140251,0</coordinates>
</Point>
<ExtendedData>
<Data name="holeNumber">
<displayName><![CDATA[
<b>This is hole </b>
]]></displayName>
<value>1</value>
</Data>
<Data name="holePar">
<displayName><![CDATA[
<i>The par for this hole is </i>
]]></displayName>
<value>4</value>
</Data>
<SchemaData schemaUrl="#TrailHeadTypeId">
<SimpleData name="TrailHeadName">Mount Everest</SimpleData>
<SimpleData name="TrailLength">347.45</SimpleData>
<SimpleData name="ElevationGain">10000</SimpleData>
</SchemaData>
</ExtendedData>
</Placemark>
</kml>"""
k = kml.KML()
k.from_string(doc)
extended_data = list(k.features())[0].extended_data
self.assertEqual(extended_data.elements[0].name, 'holeNumber')
self.assertEqual(extended_data.elements[0].value, '1')
self.assertTrue(
'<b>This is hole </b>' in extended_data.elements[0].display_name)
self.assertEqual(extended_data.elements[1].name, 'holePar')
self.assertEqual(extended_data.elements[1].value, '4')
self.assertTrue(
'<i>The par for this hole is </i>' in
extended_data.elements[1].display_name)
sd = extended_data.elements[2]
self.assertEqual(sd.data[0]['name'], 'TrailHeadName')
self.assertEqual(sd.data[1]['value'], '347.45')
def test_polygon(self):
doc = """
<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>South Africa</name>
<Polygon>
<outerBoundaryIs>
<LinearRing>
<coordinates>
31.521,-29.257,0
31.326,-29.402,0
30.902,-29.91,0
30.623,-30.424,0
30.056,-31.14,0
28.926,-32.172,0
28.22,-32.772,0
27.465,-33.227,0
26.419,-33.615,0
25.91,-33.667,0
25.781,-33.945,0
25.173,-33.797,0
24.678,-33.987,0
23.594,-33.794,0
22.988,-33.916,0
22.574,-33.864,0
21.543,-34.259,0
20.689,-34.417,0
20.071,-34.795,0
19.616,-34.819,0
19.193,-34.463,0
18.855,-34.444,0
18.425,-33.998,0
18.377,-34.137,0
18.244,-33.868,0
18.25,-33.281,0
17.925,-32.611,0
18.248,-32.429,0
18.222,-31.662,0
17.567,-30.726,0
17.064,-29.879,0
17.063,-29.876,0
16.345,-28.577,0
16.824,-28.082,0
17.219,-28.356,0
17.387,-28.784,0
17.836,-28.856,0
18.465,-29.045,0
19.002,-28.972,0
19.895,-28.461,0
19.896,-24.768,0
20.166,-24.918,0
20.759,-25.868,0
20.666,-26.477,0
20.89,-26.829,0
21.606,-26.727,0
22.106,-26.28,0
22.58,-25.979,0
22.824,-25.5,0
23.312,-25.269,0
23.734,-25.39,0
24.211,-25.67,0
25.025,-25.72,0
25.665,-25.487,0
25.766,-25.175,0
25.942,-24.696,0
26.486,-24.616,0
26.786,-24.241,0
27.119,-23.574,0
28.017,-22.828,0
29.432,-22.091,0
29.839,-22.102,0
30.323,-22.272,0
30.66,-22.152,0
31.191,-22.252,0
31.67,-23.659,0
31.931,-24.369,0
31.752,-25.484,0
31.838,-25.843,0
31.333,-25.66,0
31.044,-25.731,0
30.95,-26.023,0
30.677,-26.398,0
30.686,-26.744,0
31.283,-27.286,0
31.868,-27.178,0
32.072,-26.734,0
32.83,-26.742,0
32.58,-27.47,0
32.462,-28.301,0
32.203,-28.752,0
31.521,-29.257,0
</coordinates>
</LinearRing>
</outerBoundaryIs>
<innerBoundaryIs>
<LinearRing>
<coordinates>
28.978,-28.956,0
28.542,-28.648,0
28.074,-28.851,0
27.533,-29.243,0
26.999,-29.876,0
27.749,-30.645,0
28.107,-30.546,0
28.291,-30.226,0
28.848,-30.07,0
29.018,-29.744,0
29.325,-29.257,0
28.978,-28.956,0
</coordinates>
</LinearRing>
</innerBoundaryIs>
</Polygon>
</Placemark>
</kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertTrue(isinstance(list(k.features())[0].geometry, Polygon))
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_multipoints(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark id="feat_2">
<name>MultiPoint</name>
<styleUrl>#stylesel_9</styleUrl>
<MultiGeometry id="geom_0">
<Point id="geom_5">
<coordinates>16,-35,0.0</coordinates>
</Point>
<Point id="geom_6">
<coordinates>16,-33,0.0</coordinates>
</Point>
<Point id="geom_7">
<coordinates>16,-31,0.0</coordinates>
</Point>
<Point id="geom_8">
<coordinates>16,-29,0.0</coordinates>
</Point>
<Point id="geom_9">
<coordinates>16,-27,0.0</coordinates>
</Point>
<Point id="geom_10">
<coordinates>16,-25,0.0</coordinates>
</Point>
<Point id="geom_11">
<coordinates>16,-23,0.0</coordinates>
</Point>
<Point id="geom_12">
<coordinates>16,-21,0.0</coordinates>
</Point>
<Point id="geom_15">
<coordinates>18,-35,0.0</coordinates>
</Point>
<Point id="geom_16">
<coordinates>18,-33,0.0</coordinates>
</Point>
<Point id="geom_17">
<coordinates>18,-31,0.0</coordinates>
</Point>
<Point id="geom_18">
<coordinates>18,-29,0.0</coordinates>
</Point>
</MultiGeometry>
</Placemark></kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertTrue(isinstance(list(k.features())[0].geometry, MultiPoint))
self.assertEqual(len(list(k.features())[0].geometry.geoms), 12)
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_multilinestrings(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>Dnipro (Dnieper)</name>
<MultiGeometry>
<LineString><coordinates>33.54,46.831,0 33.606,46.869,0 33.662,46.957,0 33.739,47.05,0 33.859,47.149,0 33.976,47.307,0 33.998,47.411,0 34.155,47.49,0 34.448,47.542,0 34.712,47.553,0 34.946,47.521,0 35.088,47.528,0 35.138,47.573,0 35.149,47.657,0 35.106,47.842,0 </coordinates></LineString>
<LineString><coordinates>33.194,49.094,0 32.884,49.225,0 32.603,49.302,0 31.886,49.555,0 </coordinates></LineString>
<LineString><coordinates>31.44,50,0 31.48,49.933,0 31.486,49.871,0 31.467,49.754,0 </coordinates></LineString>
<LineString><coordinates>30.508,51.217,0 30.478,50.904,0 30.479,50.749,0 30.515,50.597,0 </coordinates></LineString>
</MultiGeometry>
</Placemark> </kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertTrue(
isinstance(list(k.features())[0].geometry, MultiLineString))
self.assertEqual(len(list(k.features())[0].geometry.geoms), 4)
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_multipolygon(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<name>Italy</name>
<MultiGeometry><Polygon><outerBoundaryIs><LinearRing><coordinates>12.621,35.492,0 12.611,35.489,0 12.603,35.491,0 12.598,35.494,0 12.594,35.494,0 12.556,35.508,0 12.536,35.513,0 12.526,35.517,0 12.534,35.522,0 12.556,35.521,0 12.567,35.519,0 12.613,35.515,0 12.621,35.513,0 12.624,35.512,0 12.622,35.51,0 12.621,35.508,0 12.624,35.502,0 12.621,35.492,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.873,35.852,0 12.857,35.852,0 12.851,35.856,0 12.846,35.863,0 12.847,35.868,0 12.854,35.871,0 12.86,35.872,0 12.867,35.872,0 12.874,35.866,0 12.877,35.856,0 12.873,35.852,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>11.981,36.827,0 11.988,36.824,0 11.994,36.825,0 12,36.836,0 12.038,36.806,0 12.052,36.79,0 12.054,36.767,0 12.031,36.741,0 11.997,36.745,0 11.962,36.765,0 11.938,36.789,0 11.934,36.795,0 11.926,36.812,0 11.923,36.828,0 11.935,36.836,0 11.939,36.837,0 11.947,36.841,0 11.952,36.843,0 11.958,36.84,0 11.968,36.831,0 11.972,36.829,0 11.981,36.827,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.322,37.94,0 12.337,37.933,0 12.355,37.927,0 12.369,37.925,0 12.358,37.914,0 12.343,37.913,0 12.327,37.918,0 12.315,37.925,0 12.3,37.919,0 12.288,37.921,0 12.279,37.929,0 12.274,37.939,0 12.288,37.938,0 12.298,37.941,0 12.306,37.945,0 12.315,37.946,0 12.322,37.94,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.078,37.96,0 12.079,37.95,0 12.065,37.951,0 12.048,37.961,0 12.037,37.974,0 12.03,37.984,0 12.036,37.991,0 12.054,37.992,0 12.065,37.986,0 12.072,37.968,0 12.078,37.96,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>15.643,38.262,0 15.635,38.261,0 15.625,38.261,0 15.584,38.24,0 15.57,38.227,0 15.564,38.214,0 15.56,38.2,0 15.576,38.2,0 15.527,38.137,0 15.501,38.085,0 
15.393,37.976,0 15.303,37.864,0 15.284,37.833,0 15.267,37.812,0 15.242,37.795,0 15.214,37.761,0 15.207,37.747,0 15.209,37.737,0 15.219,37.718,0 15.221,37.706,0 15.217,37.696,0 15.203,37.685,0 15.2,37.675,0 15.197,37.655,0 15.185,37.626,0 15.179,37.604,0 15.164,37.567,0 15.117,37.522,0 15.097,37.494,0 15.092,37.477,0 15.09,37.459,0 15.093,37.36,0 15.097,37.343,0 15.104,37.33,0 15.111,37.322,0 15.181,37.291,0 15.218,37.285,0 15.237,37.275,0 15.253,37.257,0 15.262,37.234,0 15.245,37.246,0 15.236,37.242,0 15.229,37.23,0 15.221,37.22,0 15.222,37.237,0 15.216,37.244,0 15.206,37.244,0 15.193,37.24,0 15.2,37.227,0 15.184,37.207,0 15.195,37.176,0 15.217,37.155,0 15.234,37.165,0 15.248,37.158,0 15.248,37.152,0 15.23,37.149,0 15.232,37.135,0 15.247,37.118,0 15.265,37.11,0 15.289,37.108,0 15.304,37.101,0 15.309,37.086,0 15.303,37.062,0 15.289,37.069,0 15.283,37.061,0 15.284,37.048,0 15.292,37.042,0 15.313,37.044,0 15.322,37.04,0 15.33,37.027,0 15.333,37.011,0 15.325,37.008,0 15.315,37.012,0 15.309,37.018,0 15.304,37.016,0 15.269,37,0 15.275,36.993,0 15.267,36.989,0 15.264,36.987,0 15.269,36.98,0 15.269,36.973,0 15.245,36.972,0 15.227,36.965,0 15.212,36.956,0 15.197,36.952,0 15.175,36.944,0 15.159,36.924,0 15.108,36.82,0 15.107,36.808,0 15.095,36.799,0 15.099,36.779,0 15.118,36.747,0 15.135,36.687,0 15.135,36.675,0 15.115,36.66,0 15.094,36.655,0 15.074,36.659,0 15.056,36.671,0 15.041,36.687,0 15.034,36.694,0 15.021,36.699,0 15.008,36.703,0 14.998,36.702,0 14.994,36.696,0 14.983,36.689,0 14.958,36.698,0 14.919,36.72,0 14.883,36.73,0 14.847,36.726,0 14.781,36.699,0 14.777,36.707,0 14.774,36.71,0 14.761,36.706,0 14.745,36.719,0 14.685,36.726,0 14.672,36.744,0 14.659,36.754,0 14.601,36.772,0 14.583,36.781,0 14.566,36.778,0 14.488,36.793,0 14.476,36.805,0 14.395,36.945,0 14.37,36.973,0 14.279,37.044,0 14.209,37.081,0 14.127,37.112,0 14.089,37.117,0 13.977,37.11,0 13.968,37.108,0 13.949,37.099,0 13.939,37.096,0 13.895,37.101,0 13.833,37.139,0 13.795,37.152,0 13.752,37.159,0 
13.716,37.171,0 13.684,37.189,0 13.599,37.256,0 13.57,37.273,0 13.535,37.282,0 13.489,37.288,0 13.453,37.299,0 13.422,37.314,0 13.373,37.346,0 13.33,37.366,0 13.312,37.381,0 13.303,37.386,0 13.29,37.389,0 13.279,37.393,0 13.254,37.432,0 13.248,37.436,0 13.226,37.446,0 13.215,37.458,0 13.207,37.464,0 13.195,37.466,0 13.19,37.469,0 13.18,37.484,0 13.175,37.487,0 13.052,37.5,0 13.037,37.495,0 13.027,37.493,0 13.017,37.497,0 13.011,37.507,0 13.005,37.527,0 13.001,37.535,0 12.975,37.557,0 12.943,37.568,0 12.863,37.576,0 12.781,37.574,0 12.698,37.563,0 12.66,37.565,0 12.637,37.582,0 12.595,37.638,0 12.578,37.652,0 12.564,37.658,0 12.524,37.658,0 12.507,37.665,0 12.49,37.682,0 12.475,37.703,0 12.466,37.72,0 12.461,37.734,0 12.46,37.748,0 12.457,37.76,0 12.449,37.771,0 12.437,37.783,0 12.428,37.797,0 12.428,37.809,0 12.445,37.816,0 12.447,37.812,0 12.461,37.819,0 12.466,37.823,0 12.464,37.825,0 12.471,37.853,0 12.473,37.854,0 12.478,37.872,0 12.479,37.881,0 12.477,37.886,0 12.468,37.897,0 12.466,37.906,0 12.465,37.913,0 12.465,37.914,0 12.468,37.916,0 12.491,37.954,0 12.497,37.98,0 12.503,37.997,0 12.505,38.011,0 12.493,38.021,0 12.524,38.031,0 12.55,38.055,0 12.577,38.072,0 12.609,38.062,0 12.639,38.079,0 12.652,38.091,0 12.657,38.107,0 12.663,38.116,0 12.677,38.116,0 12.692,38.112,0 12.705,38.111,0 12.726,38.126,0 12.725,38.15,0 12.72,38.175,0 12.732,38.193,0 12.738,38.181,0 12.75,38.182,0 12.761,38.181,0 12.767,38.162,0 12.791,38.117,0 12.819,38.078,0 12.829,38.07,0 12.858,38.058,0 12.869,38.051,0 12.87,38.042,0 12.902,38.028,0 12.945,38.033,0 13.028,38.062,0 13.062,38.083,0 13.07,38.091,0 13.072,38.095,0 13.07,38.101,0 13.069,38.114,0 13.067,38.123,0 13.057,38.133,0 13.055,38.142,0 13.09,38.166,0 13.084,38.174,0 13.09,38.183,0 13.102,38.19,0 13.113,38.193,0 13.123,38.191,0 13.158,38.179,0 13.18,38.176,0 13.208,38.176,0 13.231,38.184,0 13.239,38.207,0 13.255,38.202,0 13.267,38.205,0 13.278,38.21,0 13.297,38.214,0 13.311,38.219,0 13.319,38.22,0 13.324,38.218,0 
13.326,38.211,0 13.327,38.205,0 13.329,38.2,0 13.367,38.179,0 13.372,38.173,0 13.374,38.14,0 13.377,38.131,0 13.392,38.103,0 13.514,38.11,0 13.542,38.094,0 13.54,38.077,0 13.542,38.067,0 13.548,38.056,0 13.558,38.049,0 13.588,38.039,0 13.623,38.015,0 13.652,38.001,0 13.698,37.993,0 13.712,37.988,0 13.708,37.985,0 13.708,37.984,0 13.706,37.98,0 13.727,37.981,0 13.791,37.973,0 13.813,37.978,0 13.858,37.996,0 13.899,38.004,0 13.913,38.012,0 13.925,38.022,0 13.939,38.029,0 14.008,38.038,0 14.021,38.049,0 14.063,38.03,0 14.084,38.024,0 14.107,38.021,0 14.122,38.022,0 14.152,38.029,0 14.274,38.015,0 14.332,38.018,0 14.385,38.029,0 14.433,38.049,0 14.465,38.037,0 14.512,38.044,0 14.635,38.081,0 14.668,38.099,0 14.696,38.121,0 14.734,38.157,0 14.745,38.161,0 14.778,38.159,0 14.799,38.16,0 14.875,38.175,0 14.889,38.182,0 14.898,38.186,0 14.908,38.187,0 14.936,38.186,0 14.945,38.182,0 14.963,38.163,0 14.97,38.159,0 14.982,38.158,0 15.008,38.152,0 15.04,38.153,0 15.049,38.152,0 15.054,38.148,0 15.064,38.135,0 15.069,38.131,0 15.088,38.128,0 15.106,38.133,0 15.123,38.141,0 15.178,38.156,0 15.204,38.183,0 15.241,38.241,0 15.238,38.249,0 15.237,38.251,0 15.237,38.253,0 15.241,38.261,0 15.238,38.265,0 15.244,38.265,0 15.247,38.254,0 15.241,38.23,0 15.246,38.217,0 15.258,38.21,0 15.275,38.207,0 15.292,38.207,0 15.322,38.211,0 15.4,38.232,0 15.423,38.244,0 15.434,38.253,0 15.473,38.268,0 15.513,38.297,0 15.529,38.302,0 15.56,38.3,0 15.616,38.28,0 15.652,38.275,0 15.649,38.266,0 15.643,38.262,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.999,38.371,0 14.987,38.364,0 14.964,38.381,0 14.949,38.396,0 14.946,38.412,0 14.96,38.433,0 14.967,38.433,0 14.967,38.418,0 14.983,38.412,0 14.994,38.403,0 15.002,38.391,0 15.008,38.378,0 14.999,38.371,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.967,38.453,0 14.949,38.451,0 14.935,38.458,0 14.922,38.469,0 
14.908,38.474,0 14.9,38.481,0 14.901,38.498,0 14.91,38.515,0 14.925,38.522,0 14.958,38.522,0 14.967,38.516,0 14.96,38.502,0 14.966,38.497,0 14.975,38.49,0 14.98,38.487,0 14.98,38.481,0 14.953,38.481,0 14.958,38.469,0 14.962,38.465,0 14.967,38.461,0 14.967,38.453,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.361,38.539,0 14.346,38.535,0 14.343,38.547,0 14.357,38.551,0 14.361,38.539,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.864,38.549,0 14.862,38.539,0 14.824,38.552,0 14.794,38.571,0 14.815,38.584,0 14.852,38.585,0 14.867,38.581,0 14.877,38.569,0 14.873,38.565,0 14.869,38.56,0 14.864,38.549,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>14.585,38.557,0 14.574,38.557,0 14.552,38.562,0 14.544,38.575,0 14.543,38.587,0 14.546,38.588,0 14.564,38.585,0 14.576,38.577,0 14.58,38.566,0 14.585,38.561,0 14.585,38.557,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.177,38.693,0 13.165,38.691,0 13.153,38.695,0 13.153,38.702,0 13.158,38.71,0 13.169,38.717,0 13.186,38.718,0 13.196,38.711,0 13.197,38.708,0 13.177,38.693,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>15.225,38.777,0 15.217,38.773,0 15.206,38.775,0 15.187,38.789,0 15.187,38.793,0 15.194,38.798,0 15.204,38.802,0 15.209,38.806,0 15.212,38.81,0 15.219,38.812,0 15.228,38.81,0 15.235,38.808,0 15.239,38.804,0 15.237,38.796,0 15.232,38.789,0 15.23,38.783,0 15.225,38.777,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.361,39.118,0 8.386,39.105,0 8.418,39.106,0 8.445,39.102,0 8.457,39.073,0 8.459,39.068,0 8.464,39.065,0 8.47,39.065,0 8.477,39.07,0 8.478,39.07,0 8.48,39.072,0 8.484,39.07,0 8.465,39.056,0 8.46,39.05,0 8.464,39.042,0 8.455,39.028,0 
8.447,38.994,0 8.438,38.967,0 8.433,38.963,0 8.422,38.96,0 8.41,38.962,0 8.407,38.967,0 8.406,38.974,0 8.402,38.981,0 8.365,39.029,0 8.35,39.062,0 8.354,39.083,0 8.354,39.091,0 8.347,39.091,0 8.347,39.097,0 8.361,39.118,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.306,39.104,0 8.291,39.099,0 8.27,39.1,0 8.255,39.107,0 8.258,39.118,0 8.258,39.124,0 8.233,39.144,0 8.225,39.157,0 8.231,39.173,0 8.246,39.181,0 8.291,39.188,0 8.306,39.193,0 8.307,39.161,0 8.313,39.12,0 8.306,39.104,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.959,40.712,0 13.945,40.701,0 13.935,40.705,0 13.92,40.704,0 13.904,40.7,0 13.891,40.694,0 13.882,40.699,0 13.86,40.707,0 13.85,40.715,0 13.857,40.735,0 13.862,40.744,0 13.871,40.749,0 13.868,40.752,0 13.863,40.762,0 13.884,40.762,0 13.947,40.745,0 13.966,40.735,0 13.963,40.729,0 13.963,40.723,0 13.966,40.715,0 13.959,40.712,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>13.427,40.791,0 13.415,40.786,0 13.419,40.796,0 13.424,40.8,0 13.432,40.801,0 13.427,40.791,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>8.333,41.105,0 8.343,41.098,0 8.345,41.086,0 8.342,41.074,0 8.333,41.064,0 8.275,41.057,0 8.252,41.043,0 8.252,41.016,0 8.247,40.993,0 8.21,40.996,0 8.218,41.005,0 8.222,41.014,0 8.224,41.024,0 8.224,41.033,0 8.229,41.042,0 8.242,41.052,0 8.261,41.064,0 8.276,41.07,0 8.278,41.081,0 8.276,41.095,0 8.278,41.105,0 8.285,41.107,0 8.303,41.105,0 8.306,41.109,0 8.309,41.114,0 8.314,41.118,0 8.327,41.126,0 8.326,41.118,0 8.328,41.112,0 8.333,41.105,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.471,41.19,0 9.474,41.184,0 9.475,41.179,0 9.47,41.172,0 9.464,41.173,0 9.456,41.181,0 9.449,41.186,0 9.442,41.183,0 9.437,41.186,0 9.448,41.205,0 
9.443,41.211,0 9.446,41.22,0 9.454,41.234,0 9.46,41.242,0 9.468,41.241,0 9.475,41.236,0 9.478,41.228,0 9.48,41.224,0 9.479,41.217,0 9.471,41.19,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.239,41.249,0 9.247,41.248,0 9.258,41.249,0 9.269,41.236,0 9.268,41.202,0 9.279,41.195,0 9.275,41.199,0 9.274,41.205,0 9.275,41.212,0 9.279,41.221,0 9.286,41.221,0 9.29,41.209,0 9.289,41.205,0 9.286,41.201,0 9.286,41.195,0 9.3,41.196,0 9.306,41.198,0 9.313,41.201,0 9.317,41.196,0 9.334,41.187,0 9.336,41.211,0 9.353,41.207,0 9.389,41.181,0 9.389,41.187,0 9.397,41.184,0 9.405,41.181,0 9.413,41.181,0 9.423,41.181,0 9.423,41.174,0 9.417,41.171,0 9.415,41.168,0 9.413,41.164,0 9.409,41.16,0 9.421,41.156,0 9.427,41.149,0 9.433,41.14,0 9.443,41.133,0 9.438,41.125,0 9.437,41.115,0 9.443,41.092,0 9.455,41.112,0 9.461,41.12,0 9.471,41.126,0 9.467,41.13,0 9.466,41.134,0 9.463,41.137,0 9.457,41.14,0 9.47,41.146,0 9.482,41.145,0 9.495,41.142,0 9.509,41.14,0 9.514,41.143,0 9.519,41.148,0 9.524,41.15,0 9.533,41.14,0 9.525,41.133,0 9.535,41.128,0 9.541,41.123,0 9.547,41.121,0 9.553,41.126,0 9.56,41.126,0 9.562,41.122,0 9.562,41.121,0 9.564,41.121,0 9.567,41.119,0 9.566,41.107,0 9.563,41.097,0 9.557,41.088,0 9.546,41.077,0 9.544,41.082,0 9.541,41.087,0 9.54,41.092,0 9.522,41.031,0 9.512,41.016,0 9.533,41.016,0 9.525,41.03,0 9.544,41.037,0 9.555,41.034,0 9.558,41.025,0 9.553,41.009,0 9.558,41.009,0 9.559,41.011,0 9.559,41.013,0 9.56,41.016,0 9.566,41.011,0 9.569,41.009,0 9.574,41.009,0 9.589,41.02,0 9.616,41.019,0 9.645,41.011,0 9.663,41.002,0 9.652,40.991,0 9.637,40.992,0 9.62,40.999,0 9.605,41.002,0 9.588,40.996,0 9.583,40.98,0 9.579,40.962,0 9.567,40.948,0 9.572,40.935,0 9.558,40.931,0 9.512,40.934,0 9.512,40.929,0 9.513,40.928,0 9.505,40.927,0 9.512,40.915,0 9.521,40.915,0 9.53,40.919,0 9.54,40.92,0 9.55,40.917,0 9.568,40.908,0 9.574,40.906,0 9.593,40.91,0 9.608,40.918,0 9.623,40.924,0 9.643,40.92,0 9.638,40.911,0 9.632,40.905,0 
9.624,40.9,0 9.615,40.899,0 9.615,40.893,0 9.651,40.879,0 9.656,40.876,0 9.658,40.864,0 9.664,40.858,0 9.672,40.859,0 9.684,40.865,0 9.69,40.856,0 9.7,40.85,0 9.712,40.847,0 9.725,40.845,0 9.691,40.836,0 9.682,40.829,0 9.69,40.817,0 9.69,40.811,0 9.675,40.814,0 9.662,40.809,0 9.658,40.8,0 9.669,40.79,0 9.67,40.801,0 9.676,40.788,0 9.705,40.759,0 9.711,40.745,0 9.715,40.727,0 9.745,40.68,0 9.749,40.667,0 9.754,40.605,0 9.757,40.595,0 9.762,40.587,0 9.769,40.584,0 9.782,40.582,0 9.786,40.576,0 9.787,40.567,0 9.793,40.557,0 9.821,40.536,0 9.827,40.529,0 9.827,40.519,0 9.816,40.502,0 9.813,40.492,0 9.809,40.471,0 9.801,40.455,0 9.779,40.427,0 9.762,40.39,0 9.75,40.377,0 9.728,40.372,0 9.713,40.366,0 9.701,40.353,0 9.684,40.324,0 9.671,40.312,0 9.646,40.296,0 9.635,40.282,0 9.627,40.263,0 9.625,40.248,0 9.629,40.205,0 9.632,40.196,0 9.655,40.144,0 9.666,40.131,0 9.68,40.126,0 9.688,40.12,0 9.711,40.096,0 9.733,40.084,0 9.731,40.068,0 9.694,39.993,0 9.688,39.961,0 9.697,39.934,0 9.703,39.937,0 9.71,39.94,0 9.716,39.94,0 9.718,39.934,0 9.715,39.924,0 9.709,39.922,0 9.702,39.922,0 9.697,39.919,0 9.69,39.906,0 9.685,39.894,0 9.684,39.882,0 9.69,39.871,0 9.684,39.871,0 9.684,39.865,0 9.688,39.863,0 9.693,39.86,0 9.697,39.858,0 9.697,39.852,0 9.685,39.84,0 9.676,39.819,0 9.671,39.793,0 9.669,39.769,0 9.67,39.756,0 9.676,39.732,0 9.677,39.718,0 9.675,39.708,0 9.665,39.691,0 9.663,39.677,0 9.661,39.67,0 9.656,39.663,0 9.652,39.652,0 9.65,39.639,0 9.656,39.594,0 9.654,39.567,0 9.629,39.502,0 9.645,39.484,0 9.64,39.452,0 9.615,39.399,0 9.603,39.355,0 9.601,39.341,0 9.604,39.326,0 9.612,39.316,0 9.635,39.303,0 9.635,39.297,0 9.608,39.289,0 9.582,39.266,0 9.568,39.238,0 9.574,39.214,0 9.566,39.205,0 9.569,39.199,0 9.577,39.194,0 9.581,39.187,0 9.578,39.179,0 9.569,39.159,0 9.567,39.149,0 9.558,39.139,0 9.54,39.134,0 9.523,39.125,0 9.519,39.104,0 9.511,39.108,0 9.508,39.111,0 9.508,39.116,0 9.512,39.124,0 9.497,39.133,0 9.481,39.135,0 9.466,39.132,0 9.451,39.124,0 9.443,39.124,0 
9.439,39.133,0 9.429,39.138,0 9.409,39.146,0 9.384,39.169,0 9.378,39.173,0 9.368,39.177,0 9.346,39.196,0 9.337,39.201,0 9.327,39.203,0 9.313,39.208,0 9.3,39.214,0 9.293,39.221,0 9.286,39.214,0 9.272,39.22,0 9.253,39.225,0 9.217,39.228,0 9.198,39.221,0 9.182,39.207,0 9.17,39.193,0 9.167,39.187,0 9.137,39.194,0 9.114,39.211,0 9.073,39.248,0 9.064,39.243,0 9.056,39.247,0 9.048,39.256,0 9.039,39.262,0 9.025,39.265,0 9.015,39.264,0 9.013,39.26,0 9.026,39.256,0 9.026,39.248,0 9.022,39.24,0 9.027,39.236,0 9.036,39.232,0 9.038,39.227,0 9.039,39.228,0 9.051,39.225,0 9.075,39.23,0 9.08,39.224,0 9.08,39.216,0 9.08,39.212,0 9.039,39.179,0 9.027,39.165,0 9.019,39.146,0 9.017,39.124,0 9.019,39.104,0 9.025,39.086,0 9.033,39.07,0 9.038,39.063,0 9.044,39.058,0 9.046,39.051,0 9.03,39.03,0 9.019,38.995,0 9.026,38.995,0 9.016,38.989,0 9.013,38.99,0 9.005,38.995,0 8.997,38.983,0 8.895,38.902,0 8.889,38.9,0 8.878,38.899,0 8.873,38.896,0 8.862,38.882,0 8.854,38.878,0 8.842,38.88,0 8.828,38.889,0 8.806,38.906,0 8.806,38.885,0 8.791,38.904,0 8.767,38.92,0 8.74,38.93,0 8.717,38.932,0 8.695,38.925,0 8.669,38.91,0 8.652,38.891,0 8.656,38.871,0 8.641,38.864,0 8.635,38.871,0 8.643,38.89,0 8.634,38.895,0 8.616,38.896,0 8.6,38.899,0 8.6,38.906,0 8.616,38.923,0 8.616,38.947,0 8.604,38.965,0 8.581,38.96,0 8.573,39.013,0 8.56,39.057,0 8.553,39.057,0 8.545,39.051,0 8.521,39.061,0 8.505,39.063,0 8.51,39.068,0 8.519,39.083,0 8.505,39.091,0 8.483,39.08,0 8.483,39.084,0 8.478,39.09,0 8.474,39.107,0 8.466,39.119,0 8.455,39.125,0 8.443,39.118,0 8.439,39.128,0 8.439,39.153,0 8.436,39.166,0 8.429,39.173,0 8.419,39.177,0 8.413,39.175,0 8.416,39.166,0 8.41,39.169,0 8.406,39.174,0 8.403,39.181,0 8.402,39.19,0 8.399,39.201,0 8.393,39.204,0 8.386,39.204,0 8.381,39.207,0 8.373,39.222,0 8.372,39.23,0 8.377,39.238,0 8.427,39.283,0 8.433,39.302,0 8.416,39.323,0 8.418,39.339,0 8.383,39.359,0 8.375,39.379,0 8.379,39.388,0 8.396,39.404,0 8.402,39.412,0 8.406,39.427,0 8.404,39.436,0 8.39,39.462,0 8.387,39.465,0 
8.387,39.47,0 8.395,39.481,0 8.422,39.508,0 8.436,39.525,0 8.452,39.558,0 8.464,39.577,0 8.457,39.584,0 8.465,39.598,0 8.463,39.617,0 8.45,39.659,0 8.447,39.704,0 8.443,39.714,0 8.443,39.721,0 8.447,39.731,0 8.445,39.757,0 8.447,39.762,0 8.46,39.76,0 8.469,39.755,0 8.5,39.716,0 8.518,39.702,0 8.539,39.696,0 8.566,39.701,0 8.515,39.713,0 8.505,39.721,0 8.507,39.738,0 8.521,39.755,0 8.536,39.771,0 8.546,39.783,0 8.539,39.783,0 8.536,39.776,0 8.531,39.77,0 8.525,39.766,0 8.519,39.762,0 8.53,39.772,0 8.541,39.789,0 8.549,39.807,0 8.553,39.821,0 8.556,39.852,0 8.554,39.864,0 8.546,39.878,0 8.524,39.899,0 8.495,39.912,0 8.464,39.914,0 8.436,39.899,0 8.443,39.893,0 8.446,39.898,0 8.45,39.899,0 8.456,39.898,0 8.464,39.899,0 8.452,39.893,0 8.445,39.883,0 8.436,39.858,0 8.429,39.865,0 8.438,39.877,0 8.432,39.885,0 8.419,39.892,0 8.404,39.903,0 8.401,39.903,0 8.399,39.905,0 8.395,39.912,0 8.394,39.92,0 8.397,39.927,0 8.4,39.933,0 8.402,39.94,0 8.394,39.977,0 8.395,39.988,0 8.407,40.01,0 8.408,40.022,0 8.395,40.036,0 8.381,40.03,0 8.378,40.033,0 8.385,40.042,0 8.402,40.05,0 8.405,40.049,0 8.435,40.051,0 8.453,40.056,0 8.46,40.057,0 8.469,40.062,0 8.48,40.074,0 8.488,40.089,0 8.491,40.104,0 8.486,40.118,0 8.468,40.144,0 8.464,40.163,0 8.46,40.216,0 8.477,40.262,0 8.477,40.292,0 8.463,40.314,0 8.442,40.331,0 8.416,40.345,0 8.409,40.338,0 8.387,40.352,0 8.384,40.372,0 8.395,40.424,0 8.391,40.442,0 8.38,40.468,0 8.366,40.492,0 8.35,40.502,0 8.332,40.51,0 8.324,40.531,0 8.32,40.555,0 8.313,40.578,0 8.292,40.595,0 8.268,40.594,0 8.217,40.57,0 8.196,40.578,0 8.206,40.598,0 8.217,40.612,0 8.194,40.617,0 8.177,40.606,0 8.167,40.586,0 8.162,40.564,0 8.154,40.578,0 8.148,40.593,0 8.141,40.619,0 8.141,40.625,0 8.158,40.632,0 8.174,40.641,0 8.186,40.656,0 8.189,40.68,0 8.192,40.68,0 8.196,40.685,0 8.198,40.691,0 8.193,40.694,0 8.18,40.695,0 8.174,40.697,0 8.168,40.701,0 8.154,40.719,0 8.146,40.726,0 8.134,40.729,0 8.21,40.865,0 8.216,40.881,0 8.217,40.899,0 8.21,40.914,0 8.193,40.92,0 
8.179,40.928,0 8.183,40.945,0 8.194,40.963,0 8.203,40.975,0 8.21,40.975,0 8.213,40.963,0 8.221,40.962,0 8.229,40.962,0 8.237,40.955,0 8.236,40.946,0 8.232,40.934,0 8.23,40.921,0 8.234,40.91,0 8.278,40.865,0 8.311,40.85,0 8.422,40.839,0 8.478,40.826,0 8.501,40.824,0 8.521,40.827,0 8.599,40.853,0 8.619,40.866,0 8.635,40.881,0 8.641,40.896,0 8.71,40.92,0 8.734,40.921,0 8.752,40.919,0 8.765,40.914,0 8.823,40.947,0 8.84,40.961,0 8.876,41.008,0 8.889,41.016,0 8.887,41.02,0 8.887,41.021,0 8.886,41.022,0 8.882,41.023,0 8.914,41.032,0 8.923,41.037,0 8.93,41.043,0 8.941,41.061,0 8.947,41.064,0 8.959,41.07,0 8.976,41.082,0 8.991,41.097,0 9.006,41.122,0 9.025,41.129,0 9.094,41.135,0 9.108,41.139,0 9.136,41.16,0 9.142,41.153,0 9.158,41.169,0 9.164,41.184,0 9.163,41.225,0 9.172,41.243,0 9.191,41.251,0 9.213,41.256,0 9.231,41.262,0 9.233,41.253,0 9.239,41.249,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.435,41.217,0 9.395,41.211,0 9.377,41.213,0 9.373,41.222,0 9.373,41.23,0 9.378,41.234,0 9.385,41.237,0 9.392,41.241,0 9.396,41.248,0 9.398,41.256,0 9.402,41.258,0 9.408,41.258,0 9.414,41.262,0 9.422,41.261,0 9.427,41.254,0 9.431,41.246,0 9.43,41.238,0 9.429,41.229,0 9.431,41.225,0 9.434,41.221,0 9.435,41.217,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.316,42.341,0 10.313,42.324,0 10.294,42.328,0 10.297,42.345,0 10.306,42.352,0 10.316,42.341,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.922,42.334,0 10.909,42.325,0 10.874,42.36,0 10.862,42.366,0 10.871,42.376,0 10.877,42.387,0 10.884,42.392,0 10.896,42.386,0 10.907,42.378,0 10.919,42.356,0 10.931,42.346,0 10.926,42.339,0 10.922,42.334,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.095,42.577,0 10.086,42.572,0 10.072,42.573,0 10.059,42.576,0 10.05,42.582,0 10.053,42.589,0 
10.063,42.592,0 10.073,42.6,0 10.08,42.614,0 10.084,42.615,0 10.088,42.604,0 10.092,42.596,0 10.096,42.591,0 10.098,42.588,0 10.098,42.584,0 10.095,42.577,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>10.431,42.816,0 10.437,42.804,0 10.431,42.787,0 10.421,42.776,0 10.407,42.769,0 10.389,42.763,0 10.408,42.757,0 10.426,42.741,0 10.431,42.722,0 10.416,42.709,0 10.411,42.718,0 10.404,42.719,0 10.394,42.718,0 10.382,42.722,0 10.378,42.728,0 10.368,42.746,0 10.365,42.75,0 10.352,42.755,0 10.338,42.765,0 10.326,42.765,0 10.314,42.743,0 10.305,42.76,0 10.266,42.744,0 10.246,42.757,0 10.241,42.742,0 10.236,42.736,0 10.23,42.735,0 10.148,42.737,0 10.125,42.743,0 10.107,42.757,0 10.102,42.784,0 10.112,42.801,0 10.134,42.812,0 10.159,42.817,0 10.18,42.819,0 10.19,42.817,0 10.213,42.808,0 10.225,42.804,0 10.243,42.803,0 10.266,42.804,0 10.266,42.809,0 10.265,42.81,0 10.263,42.81,0 10.26,42.812,0 10.273,42.819,0 10.273,42.826,0 10.273,42.827,0 10.29,42.825,0 10.327,42.826,0 10.323,42.811,0 10.333,42.806,0 10.348,42.806,0 10.355,42.808,0 10.359,42.817,0 10.366,42.823,0 10.375,42.827,0 10.382,42.832,0 10.393,42.858,0 10.401,42.869,0 10.413,42.873,0 10.422,42.871,0 10.432,42.864,0 10.439,42.855,0 10.444,42.845,0 10.437,42.838,0 10.432,42.828,0 10.431,42.816,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>9.844,43.06,0 9.848,43.058,0 9.854,43.059,0 9.843,43.035,0 9.828,43.019,0 9.81,43.017,0 9.793,43.037,0 9.812,43.071,0 9.827,43.081,0 9.841,43.065,0 9.842,43.063,0 9.844,43.06,0 </coordinates></LinearRing></outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing><coordinates>12.122,46.972,0 12.128,46.949,0 12.135,46.937,0 12.142,46.928,0 12.142,46.919,0 12.127,46.909,0 12.137,46.906,0 12.161,46.903,0 12.172,46.899,0 12.184,46.891,0 12.189,46.885,0 12.195,46.88,0 12.209,46.877,0 12.251,46.876,0 12.267,46.868,0 12.276,46.846,0 12.276,46.834,0 
12.273,46.827,0 12.27,46.82,0 12.267,46.808,0 12.267,46.795,0 12.269,46.789,0 12.275,46.785,0 12.284,46.78,0 12.305,46.774,0 12.326,46.772,0 12.343,46.765,0 12.351,46.743,0 12.37,46.711,0 12.405,46.69,0 12.446,46.679,0 12.5,46.672,0 12.531,46.658,0 12.547,46.652,0 12.562,46.651,0 12.62,46.656,0 12.67,46.653,0 12.679,46.65,0 12.697,46.641,0 12.707,46.638,0 12.716,46.638,0 12.732,46.642,0 12.74,46.643,0 12.774,46.635,0 12.83,46.61,0 13.065,46.598,0 13.146,46.585,0 13.21,46.558,0 13.231,46.552,0 13.271,46.551,0 13.373,46.566,0 13.417,46.56,0 13.478,46.564,0 13.485,46.562,0 13.499,46.551,0 13.507,46.547,0 13.549,46.546,0 13.67,46.519,0 13.685,46.518,0 13.701,46.52,0 13.701,46.512,0 13.699,46.505,0 13.695,46.499,0 13.69,46.493,0 13.688,46.468,0 13.677,46.452,0 13.659,46.445,0 13.634,46.446,0 13.6,46.443,0 13.576,46.427,0 13.554,46.406,0 13.53,46.388,0 13.484,46.371,0 13.46,46.359,0 13.447,46.355,0 13.434,46.354,0 13.423,46.345,0 13.41,46.324,0 13.391,46.302,0 13.365,46.29,0 13.373,46.28,0 13.379,46.268,0 13.385,46.243,0 13.385,46.243,0 13.385,46.243,0 13.398,46.231,0 13.402,46.217,0 13.41,46.208,0 13.437,46.211,0 13.423,46.229,0 13.438,46.225,0 13.468,46.223,0 13.482,46.218,0 13.51,46.214,0 13.529,46.205,0 13.559,46.184,0 13.584,46.181,0 13.614,46.184,0 13.637,46.18,0 13.645,46.162,0 13.616,46.125,0 13.505,46.066,0 13.482,46.045,0 13.49,46.039,0 13.493,46.032,0 13.49,46.026,0 13.482,46.018,0 13.477,46.016,0 13.462,46.006,0 13.475,45.996,0 13.479,45.993,0 13.48,45.992,0 13.481,45.991,0 13.482,45.99,0 13.482,45.989,0 13.509,45.967,0 13.539,45.969,0 13.572,45.98,0 13.606,45.985,0 13.623,45.966,0 13.608,45.927,0 13.569,45.865,0 13.566,45.83,0 13.581,45.809,0 13.609,45.799,0 13.644,45.796,0 13.66,45.792,0 13.709,45.765,0 13.779,45.743,0 13.858,45.649,0 13.869,45.641,0 13.884,45.635,0 13.893,45.635,0 13.895,45.632,0 13.887,45.619,0 13.848,45.585,0 13.801,45.581,0 13.761,45.596,0 13.712,45.593,0 13.719,45.6,0 13.731,45.613,0 13.757,45.613,0 13.787,45.611,0 13.809,45.614,0 
13.796,45.617,0 13.787,45.624,0 13.778,45.635,0 13.74,45.649,0 13.758,45.655,0 13.754,45.672,0 13.74,45.691,0 13.727,45.703,0 13.648,45.762,0 13.63,45.772,0 13.575,45.789,0 13.552,45.792,0 13.535,45.782,0 13.525,45.76,0 13.529,45.74,0 13.555,45.737,0 13.519,45.725,0 13.514,45.721,0 13.508,45.714,0 13.481,45.71,0 13.47,45.707,0 13.452,45.694,0 13.429,45.681,0 13.402,45.675,0 13.377,45.683,0 13.392,45.686,0 13.41,45.691,0 13.425,45.698,0 13.432,45.707,0 13.423,45.724,0 13.382,45.73,0 13.37,45.744,0 13.352,45.74,0 13.255,45.756,0 13.246,45.759,0 13.222,45.776,0 13.216,45.779,0 13.206,45.778,0 13.17,45.768,0 13.158,45.754,0 13.15,45.751,0 13.14,45.755,0 13.132,45.769,0 13.12,45.772,0 13.111,45.767,0 13.109,45.758,0 13.112,45.749,0 13.124,45.744,0 13.124,45.737,0 13.101,45.736,0 13.081,45.727,0 13.07,45.713,0 13.076,45.697,0 13.092,45.689,0 13.112,45.691,0 13.15,45.703,0 13.139,45.689,0 13.104,45.669,0 13.096,45.652,0 13.086,45.642,0 13.061,45.636,0 12.982,45.635,0 12.944,45.628,0 12.781,45.553,0 12.612,45.496,0 12.513,45.47,0 12.497,45.46,0 12.488,45.456,0 12.452,45.45,0 12.424,45.438,0 12.411,45.436,0 12.419,45.451,0 12.43,45.464,0 12.436,45.475,0 12.431,45.484,0 12.441,45.483,0 12.448,45.484,0 12.452,45.489,0 12.452,45.498,0 12.459,45.498,0 12.463,45.489,0 12.468,45.485,0 12.472,45.486,0 12.479,45.491,0 12.466,45.504,0 12.477,45.503,0 12.488,45.504,0 12.498,45.506,0 12.5,45.504,0 12.501,45.506,0 12.504,45.503,0 12.507,45.499,0 12.507,45.498,0 12.504,45.498,0 12.493,45.498,0 12.493,45.491,0 12.516,45.492,0 12.521,45.505,0 12.522,45.519,0 12.531,45.525,0 12.549,45.527,0 12.563,45.531,0 12.574,45.54,0 12.582,45.553,0 12.57,45.549,0 12.545,45.536,0 12.538,45.536,0 12.519,45.55,0 12.511,45.559,0 12.507,45.573,0 12.486,45.565,0 12.459,45.548,0 12.443,45.53,0 12.452,45.518,0 12.452,45.512,0 12.435,45.512,0 12.418,45.523,0 12.411,45.518,0 12.404,45.518,0 12.397,45.539,0 12.385,45.523,0 12.391,45.514,0 12.425,45.504,0 12.425,45.498,0 12.412,45.493,0 12.394,45.491,0 
12.381,45.494,0 12.384,45.504,0 12.351,45.505,0 12.31,45.489,0 12.273,45.463,0 12.253,45.436,0 12.253,45.43,0 12.259,45.43,0 12.251,45.42,0 12.247,45.411,0 12.249,45.402,0 12.259,45.395,0 12.25,45.385,0 12.248,45.378,0 12.249,45.371,0 12.246,45.361,0 12.238,45.358,0 12.229,45.357,0 12.224,45.354,0 12.233,45.34,0 12.221,45.327,0 12.217,45.316,0 12.209,45.309,0 12.188,45.306,0 12.175,45.31,0 12.164,45.316,0 12.155,45.313,0 12.15,45.292,0 12.16,45.283,0 12.169,45.262,0 12.181,45.258,0 12.192,45.263,0 12.2,45.274,0 12.203,45.288,0 12.198,45.299,0 12.218,45.294,0 12.222,45.283,0 12.221,45.269,0 12.225,45.251,0 12.214,45.248,0 12.212,45.243,0 12.216,45.237,0 12.225,45.23,0 12.222,45.216,0 12.231,45.204,0 12.248,45.197,0 12.267,45.196,0 12.264,45.2,0 12.263,45.201,0 12.259,45.203,0 12.274,45.211,0 12.296,45.226,0 12.308,45.23,0 12.299,45.215,0 12.305,45.201,0 12.316,45.186,0 12.322,45.172,0 12.322,45.139,0 12.329,45.101,0 12.319,45.103,0 12.308,45.108,0 12.309,45.114,0 12.308,45.124,0 12.308,45.128,0 12.298,45.106,0 12.297,45.088,0 12.307,45.078,0 12.329,45.08,0 12.326,45.083,0 12.324,45.086,0 12.322,45.093,0 12.341,45.081,0 12.354,45.067,0 12.364,45.052,0 12.377,45.039,0 12.377,45.032,0 12.369,45.031,0 12.365,45.029,0 12.361,45.027,0 12.356,45.024,0 12.369,45.011,0 12.384,45.026,0 12.387,45.039,0 12.381,45.051,0 12.369,45.065,0 12.384,45.056,0 12.402,45.05,0 12.414,45.043,0 12.411,45.032,0 12.427,45.02,0 12.435,45.015,0 12.445,45.011,0 12.465,44.992,0 12.487,44.976,0 12.5,44.983,0 12.497,44.984,0 12.49,44.983,0 12.487,44.983,0 12.487,44.991,0 12.503,44.991,0 12.517,44.987,0 12.528,44.98,0 12.535,44.97,0 12.534,44.961,0 12.524,44.95,0 12.528,44.943,0 12.519,44.934,0 12.516,44.928,0 12.513,44.922,0 12.507,44.922,0 12.5,44.921,0 12.495,44.91,0 12.493,44.878,0 12.488,44.862,0 12.475,44.845,0 12.445,44.82,0 12.444,44.825,0 12.439,44.835,0 12.433,44.846,0 12.425,44.854,0 12.44,44.877,0 12.444,44.89,0 12.439,44.901,0 12.427,44.905,0 12.416,44.9,0 12.407,44.891,0 12.404,44.884,0 
12.393,44.868,0 12.392,44.859,0 12.417,44.851,0 12.416,44.843,0 12.409,44.836,0 12.397,44.833,0 12.397,44.826,0 12.404,44.825,0 12.417,44.821,0 12.425,44.82,0 12.417,44.803,0 12.398,44.794,0 12.376,44.792,0 12.358,44.804,0 12.347,44.815,0 12.322,44.833,0 12.304,44.843,0 12.293,44.843,0 12.267,44.826,0 12.267,44.82,0 12.281,44.82,0 12.254,44.751,0 12.247,44.711,0 12.253,44.668,0 12.266,44.636,0 12.276,44.62,0 12.284,44.614,0 12.286,44.602,0 12.281,44.532,0 12.284,44.487,0 12.315,44.387,0 12.319,44.361,0 12.322,44.353,0 12.326,44.348,0 12.34,44.334,0 12.343,44.329,0 12.345,44.308,0 12.351,44.288,0 12.369,44.25,0 12.391,44.222,0 12.418,44.195,0 12.459,44.166,0 12.479,44.139,0 12.511,44.114,0 12.548,44.093,0 12.575,44.085,0 12.632,44.03,0 12.662,44.008,0 12.692,43.99,0 12.711,43.983,0 12.757,43.972,0 12.804,43.967,0 12.823,43.958,0 12.863,43.935,0 12.929,43.916,0 12.939,43.904,0 12.948,43.897,0 13.254,43.703,0 13.371,43.65,0 13.39,43.644,0 13.4,43.635,0 13.447,43.623,0 13.474,43.612,0 13.484,43.616,0 13.491,43.623,0 13.497,43.627,0 13.5,43.628,0 13.502,43.63,0 13.505,43.633,0 13.511,43.633,0 13.517,43.631,0 13.52,43.627,0 13.522,43.622,0 13.525,43.62,0 13.544,43.613,0 13.558,43.596,0 13.57,43.58,0 13.579,43.573,0 13.599,43.569,0 13.616,43.56,0 13.625,43.547,0 13.618,43.531,0 13.761,43.264,0 13.777,43.243,0 13.781,43.236,0 13.787,43.2,0 13.791,43.192,0 13.803,43.178,0 13.835,43.127,0 13.849,43.092,0 13.866,43.007,0 13.945,42.798,0 13.981,42.73,0 14.002,42.698,0 14.064,42.625,0 14.069,42.609,0 14.076,42.599,0 14.221,42.47,0 14.285,42.428,0 14.357,42.393,0 14.388,42.373,0 14.43,42.321,0 14.561,42.225,0 14.596,42.208,0 14.654,42.191,0 14.694,42.185,0 14.71,42.175,0 14.718,42.16,0 14.723,42.119,0 14.73,42.099,0 14.741,42.084,0 14.758,42.079,0 14.781,42.075,0 14.8,42.066,0 14.836,42.044,0 14.871,42.032,0 14.953,42.021,0 14.994,42.01,0 15.008,42.001,0 15.035,41.974,0 15.046,41.969,0 15.064,41.964,0 15.105,41.942,0 15.124,41.934,0 15.166,41.927,0 15.282,41.928,0 
15.401,41.908,0 15.447,41.907,0 15.612,41.928,0 15.775,41.921,0 16.028,41.944,0 16.112,41.928,0 16.112,41.926,0 16.141,41.92,0 16.161,41.892,0 16.18,41.893,0 16.177,41.877,0 16.184,41.858,0 16.193,41.821,0 16.194,41.808,0 16.193,41.791,0 16.185,41.779,0 16.167,41.763,0 16.146,41.749,0 16.128,41.742,0 16.108,41.737,0 16.09,41.726,0 16.064,41.701,0 16.028,41.68,0 15.926,41.64,0 15.901,41.614,0 15.892,41.577,0 15.897,41.536,0 15.912,41.503,0 15.934,41.479,0 15.962,41.459,0 16.022,41.428,0 16.086,41.412,0 16.101,41.403,0 16.115,41.393,0 16.302,41.328,0 16.461,41.262,0 16.521,41.25,0 16.539,41.239,0 16.555,41.227,0 16.594,41.207,0 16.831,41.146,0 16.852,41.133,0 16.859,41.133,0 16.859,41.14,0 16.865,41.14,0 16.886,41.124,0 17.058,41.082,0 17.204,41.021,0 17.277,40.98,0 17.311,40.955,0 17.348,40.912,0 17.362,40.906,0 17.378,40.902,0 17.414,40.881,0 17.476,40.83,0 17.493,40.824,0 17.513,40.82,0 17.549,40.802,0 17.635,40.785,0 17.646,40.78,0 17.749,40.747,0 17.844,40.694,0 17.922,40.683,0 17.956,40.67,0 17.956,40.647,0 17.967,40.647,0 17.993,40.653,0 18.008,40.65,0 18.012,40.644,0 18.012,40.635,0 18.016,40.625,0 18.04,40.608,0 18.044,40.602,0 18.038,40.557,0 18.12,40.504,0 18.212,40.464,0 18.232,40.461,0 18.239,40.457,0 18.259,40.43,0 18.271,40.421,0 18.304,40.4,0 18.33,40.366,0 18.344,40.351,0 18.362,40.345,0 18.371,40.338,0 18.438,40.268,0 18.501,40.152,0 18.505,40.146,0 18.51,40.142,0 18.517,40.139,0 18.512,40.127,0 18.514,40.12,0 18.518,40.114,0 18.517,40.104,0 18.509,40.094,0 18.492,40.084,0 18.484,40.055,0 18.471,40.043,0 18.435,40.022,0 18.412,39.979,0 18.408,39.968,0 18.405,39.947,0 18.395,39.925,0 18.393,39.916,0 18.4,39.89,0 18.401,39.878,0 18.387,39.825,0 18.39,39.817,0 18.384,39.814,0 18.374,39.8,0 18.369,39.796,0 18.347,39.798,0 18.339,39.8,0 18.331,39.803,0 18.283,39.833,0 18.266,39.837,0 18.225,39.837,0 18.212,39.839,0 18.187,39.852,0 18.162,39.86,0 18.131,39.883,0 18.095,39.903,0 18.082,39.906,0 18.072,39.911,0 18.008,39.986,0 17.996,39.995,0 
17.996,40.002,0 18.012,40.003,0 18.021,40.01,0 18.023,40.021,0 18.016,40.036,0 18.006,40.045,0 17.979,40.051,0 17.968,40.057,0 18.003,40.074,0 18.012,40.096,0 17.998,40.12,0 17.968,40.146,0 17.941,40.163,0 17.927,40.176,0 17.92,40.191,0 17.92,40.21,0 17.917,40.227,0 17.912,40.24,0 17.9,40.249,0 17.913,40.249,0 17.913,40.255,0 17.864,40.285,0 17.848,40.29,0 17.513,40.303,0 17.494,40.307,0 17.441,40.331,0 17.431,40.331,0 17.41,40.33,0 17.4,40.331,0 17.393,40.335,0 17.375,40.348,0 17.369,40.351,0 17.352,40.355,0 17.297,40.379,0 17.241,40.395,0 17.213,40.406,0 17.201,40.42,0 17.224,40.428,0 17.244,40.441,0 17.248,40.457,0 17.228,40.474,0 17.248,40.48,0 17.296,40.473,0 17.317,40.482,0 17.324,40.498,0 17.305,40.499,0 17.262,40.488,0 17.264,40.491,0 17.269,40.496,0 17.248,40.503,0 17.23,40.497,0 17.211,40.487,0 17.191,40.482,0 17.182,40.485,0 17.177,40.493,0 17.172,40.502,0 17.167,40.509,0 17.157,40.512,0 17.134,40.512,0 17.125,40.515,0 17.05,40.519,0 16.977,40.492,0 16.913,40.445,0 16.783,40.301,0 16.762,40.269,0 16.738,40.211,0 16.731,40.2,0 16.716,40.193,0 16.68,40.146,0 16.625,40.108,0 16.605,40.084,0 16.597,40.046,0 16.6,40.034,0 16.614,39.996,0 16.632,39.966,0 16.622,39.953,0 16.606,39.943,0 16.59,39.92,0 16.543,39.885,0 16.509,39.837,0 16.492,39.805,0 16.49,39.775,0 16.503,39.747,0 16.529,39.721,0 16.529,39.714,0 16.516,39.689,0 16.546,39.661,0 16.592,39.636,0 16.625,39.625,0 16.75,39.62,0 16.783,39.611,0 16.799,39.603,0 16.817,39.591,0 16.831,39.576,0 16.838,39.56,0 16.847,39.552,0 16.906,39.529,0 16.954,39.499,0 16.971,39.495,0 16.996,39.492,0 17.012,39.486,0 17.024,39.475,0 17.036,39.461,0 17.058,39.441,0 17.089,39.422,0 17.125,39.409,0 17.159,39.406,0 17.123,39.338,0 17.115,39.283,0 17.115,39.269,0 17.118,39.256,0 17.125,39.244,0 17.143,39.222,0 17.146,39.21,0 17.141,39.179,0 17.123,39.121,0 17.125,39.091,0 17.148,39.054,0 17.152,39.046,0 17.159,39.04,0 17.193,39.031,0 17.207,39.029,0 17.187,39.019,0 17.177,39.012,0 17.173,39.005,0 17.172,38.966,0 
17.173,38.96,0 17.139,38.936,0 17.136,38.932,0 17.128,38.929,0 17.119,38.919,0 17.105,38.899,0 17.096,38.919,0 17.071,38.923,0 17.043,38.916,0 17.023,38.906,0 16.997,38.929,0 16.982,38.937,0 16.958,38.94,0 16.936,38.938,0 16.839,38.918,0 16.728,38.879,0 16.688,38.856,0 16.68,38.847,0 16.671,38.84,0 16.611,38.816,0 16.586,38.798,0 16.575,38.785,0 16.564,38.756,0 16.551,38.741,0 16.539,38.723,0 16.535,38.7,0 16.547,38.693,0 16.55,38.69,0 16.549,38.672,0 16.559,38.596,0 16.578,38.528,0 16.578,38.503,0 16.57,38.429,0 16.562,38.416,0 16.523,38.387,0 16.509,38.371,0 16.498,38.369,0 16.468,38.348,0 16.436,38.34,0 16.34,38.301,0 16.307,38.277,0 16.17,38.143,0 16.152,38.111,0 16.126,38.005,0 16.112,37.973,0 16.102,37.96,0 16.091,37.949,0 16.078,37.94,0 16.064,37.932,0 16.016,37.924,0 16.002,37.919,0 15.943,37.933,0 15.762,37.925,0 15.736,37.931,0 15.709,37.941,0 15.685,37.953,0 15.666,37.967,0 15.646,37.988,0 15.636,38.009,0 15.639,38.027,0 15.659,38.042,0 15.633,38.074,0 15.625,38.092,0 15.628,38.107,0 15.642,38.126,0 15.648,38.143,0 15.647,38.162,0 15.639,38.186,0 15.633,38.22,0 15.651,38.241,0 15.685,38.253,0 15.787,38.278,0 15.796,38.285,0 15.799,38.291,0 15.813,38.3,0 15.817,38.306,0 15.83,38.351,0 15.905,38.474,0 15.918,38.517,0 15.916,38.55,0 15.901,38.578,0 15.871,38.604,0 15.864,38.608,0 15.851,38.613,0 15.845,38.618,0 15.836,38.628,0 15.834,38.634,0 15.836,38.639,0 15.837,38.649,0 15.845,38.66,0 15.864,38.668,0 15.905,38.679,0 15.969,38.712,0 16.003,38.725,0 16.049,38.728,0 16.121,38.721,0 16.137,38.724,0 16.153,38.731,0 16.18,38.748,0 16.201,38.776,0 16.216,38.814,0 16.222,38.856,0 16.221,38.899,0 16.215,38.919,0 16.205,38.934,0 16.19,38.943,0 16.169,38.947,0 16.155,38.955,0 16.14,38.974,0 16.084,39.075,0 16.043,39.31,0 16.032,39.345,0 15.955,39.489,0 15.934,39.513,0 15.905,39.536,0 15.877,39.551,0 15.868,39.564,0 15.865,39.588,0 15.851,39.615,0 15.837,39.652,0 15.816,39.679,0 15.807,39.695,0 15.789,39.796,0 15.789,39.79,0 15.784,39.81,0 15.779,39.82,0 
15.772,39.824,0 15.77,39.83,0 15.783,39.868,0 15.775,39.891,0 15.742,39.929,0 15.735,39.943,0 15.729,39.964,0 15.714,39.981,0 15.679,40.009,0 15.652,40.043,0 15.631,40.057,0 15.625,40.065,0 15.625,40.078,0 15.611,40.073,0 15.536,40.078,0 15.51,40.07,0 15.493,40.059,0 15.46,40.029,0 15.425,40.004,0 15.405,39.999,0 15.377,40.002,0 15.354,40.012,0 15.315,40.034,0 15.303,40.036,0 15.294,40.032,0 15.284,40.03,0 15.273,40.028,0 15.262,40.029,0 15.262,40.036,0 15.28,40.047,0 15.264,40.074,0 15.234,40.1,0 15.21,40.112,0 15.191,40.119,0 15.128,40.169,0 15.113,40.175,0 15.096,40.173,0 15.066,40.166,0 15.048,40.169,0 15.035,40.175,0 15.015,40.194,0 14.974,40.223,0 14.967,40.224,0 14.959,40.231,0 14.923,40.238,0 14.912,40.241,0 14.907,40.258,0 14.932,40.285,0 14.94,40.307,0 14.933,40.324,0 14.933,40.334,0 14.943,40.338,0 14.954,40.34,0 14.965,40.345,0 14.973,40.352,0 14.98,40.359,0 14.99,40.394,0 14.976,40.431,0 14.889,40.573,0 14.862,40.607,0 14.836,40.632,0 14.81,40.653,0 14.783,40.67,0 14.753,40.676,0 14.72,40.667,0 14.691,40.649,0 14.679,40.646,0 14.626,40.649,0 14.614,40.646,0 14.572,40.617,0 14.545,40.613,0 14.517,40.62,0 14.487,40.632,0 14.472,40.624,0 14.423,40.615,0 14.402,40.602,0 14.356,40.583,0 14.343,40.57,0 14.331,40.584,0 14.329,40.605,0 14.338,40.624,0 14.36,40.632,0 14.38,40.634,0 14.388,40.637,0 14.395,40.65,0 14.403,40.657,0 14.471,40.699,0 14.48,40.711,0 14.475,40.729,0 14.461,40.744,0 14.443,40.755,0 14.426,40.762,0 14.415,40.765,0 14.399,40.767,0 14.391,40.77,0 14.385,40.774,0 14.372,40.787,0 14.367,40.79,0 14.349,40.797,0 14.313,40.828,0 14.295,40.839,0 14.276,40.84,0 14.249,40.837,0 14.224,40.831,0 14.213,40.821,0 14.204,40.801,0 14.182,40.8,0 14.112,40.829,0 14.096,40.834,0 14.083,40.831,0 14.077,40.822,0 14.078,40.81,0 14.082,40.797,0 14.083,40.783,0 14.075,40.788,0 14.041,40.798,0 14.053,40.837,0 14.044,40.875,0 13.966,40.996,0 13.931,41.014,0 13.918,41.023,0 13.915,41.033,0 13.913,41.054,0 13.911,41.064,0 13.885,41.104,0 13.786,41.203,0 
13.722,41.252,0 13.709,41.256,0 13.679,41.25,0 13.664,41.25,0 13.657,41.259,0 13.595,41.253,0 13.564,41.238,0 13.576,41.208,0 13.544,41.206,0 13.535,41.208,0 13.526,41.215,0 13.52,41.225,0 13.515,41.229,0 13.508,41.221,0 13.5,41.221,0 13.481,41.239,0 13.325,41.295,0 13.286,41.295,0 13.205,41.284,0 13.187,41.278,0 13.152,41.26,0 13.115,41.251,0 13.091,41.226,0 13.069,41.221,0 13.045,41.227,0 13.037,41.24,0 13.034,41.257,0 13.024,41.273,0 13.013,41.286,0 12.993,41.315,0 12.98,41.331,0 12.924,41.379,0 12.894,41.399,0 12.863,41.413,0 12.842,41.418,0 12.764,41.421,0 12.749,41.423,0 12.679,41.458,0 12.655,41.465,0 12.643,41.458,0 12.636,41.447,0 12.62,41.459,0 12.546,41.544,0 12.449,41.63,0 12.343,41.702,0 12.328,41.711,0 12.301,41.717,0 12.286,41.727,0 12.277,41.729,0 12.247,41.733,0 12.24,41.736,0 12.224,41.75,0 12.216,41.768,0 12.212,41.787,0 12.212,41.808,0 12.207,41.827,0 12.195,41.847,0 12.171,41.879,0 12.148,41.903,0 12.05,41.96,0 12.039,41.965,0 12.03,41.973,0 12.027,41.986,0 12.021,41.993,0 11.993,41.996,0 11.983,42,0 11.97,42.011,0 11.953,42.022,0 11.935,42.031,0 11.917,42.038,0 11.84,42.036,0 11.828,42.034,0 11.823,42.047,0 11.81,42.066,0 11.794,42.084,0 11.78,42.092,0 11.772,42.106,0 11.751,42.128,0 11.746,42.136,0 11.744,42.152,0 11.737,42.169,0 11.683,42.252,0 11.659,42.279,0 11.54,42.349,0 11.49,42.359,0 11.421,42.386,0 11.397,42.393,0 11.397,42.4,0 11.387,42.404,0 11.377,42.407,0 11.366,42.408,0 11.355,42.407,0 11.363,42.4,0 11.334,42.4,0 11.26,42.421,0 11.246,42.422,0 11.228,42.422,0 11.212,42.419,0 11.205,42.411,0 11.201,42.395,0 11.187,42.379,0 11.185,42.366,0 11.175,42.369,0 11.165,42.369,0 11.158,42.368,0 11.157,42.366,0 11.148,42.371,0 11.135,42.384,0 11.107,42.391,0 11.095,42.402,0 11.087,42.418,0 11.081,42.435,0 11.1,42.443,0 11.123,42.446,0 11.167,42.448,0 11.175,42.458,0 11.184,42.48,0 11.19,42.504,0 11.188,42.521,0 11.167,42.546,0 11.159,42.564,0 11.149,42.563,0 11.138,42.559,0 11.129,42.558,0 11.117,42.572,0 11.108,42.591,0 11.098,42.607,0 
11.081,42.612,0 11.078,42.632,0 11.054,42.647,0 11.006,42.668,0 11.001,42.68,0 10.996,42.696,0 10.99,42.71,0 10.982,42.716,0 10.973,42.72,0 10.944,42.743,0 10.891,42.764,0 10.732,42.804,0 10.756,42.819,0 10.766,42.835,0 10.767,42.854,0 10.766,42.877,0 10.769,42.884,0 10.775,42.888,0 10.778,42.894,0 10.774,42.908,0 10.764,42.918,0 10.751,42.925,0 10.682,42.949,0 10.633,42.958,0 10.584,42.959,0 10.54,42.949,0 10.544,42.939,0 10.547,42.935,0 10.519,42.925,0 10.5,42.94,0 10.478,42.99,0 10.503,43.005,0 10.518,43.024,0 10.54,43.079,0 10.536,43.091,0 10.536,43.112,0 10.54,43.134,0 10.547,43.147,0 10.539,43.164,0 10.535,43.185,0 10.533,43.226,0 10.529,43.246,0 10.517,43.267,0 10.438,43.388,0 10.374,43.453,0 10.36,43.465,0 10.327,43.477,0 10.318,43.492,0 10.295,43.568,0 10.265,43.809,0 10.252,43.846,0 10.211,43.92,0 10.181,43.955,0 10.137,43.978,0 10.106,44.016,0 10.091,44.025,0 10.073,44.029,0 10.036,44.048,0 10.015,44.052,0 9.999,44.058,0 9.989,44.06,0 9.985,44.055,0 9.981,44.05,0 9.973,44.045,0 9.963,44.044,0 9.954,44.048,0 9.938,44.06,0 9.905,44.08,0 9.888,44.093,0 9.877,44.088,0 9.845,44.108,0 9.827,44.107,0 9.834,44.1,0 9.829,44.098,0 9.825,44.095,0 9.82,44.093,0 9.825,44.085,0 9.831,44.079,0 9.839,44.075,0 9.848,44.072,0 9.848,44.066,0 9.842,44.063,0 9.839,44.06,0 9.834,44.052,0 9.847,44.046,0 9.843,44.041,0 9.833,44.042,0 9.827,44.055,0 9.82,44.063,0 9.772,44.079,0 9.722,44.113,0 9.71,44.118,0 9.683,44.136,0 9.673,44.141,0 9.644,44.142,0 9.632,44.144,0 9.622,44.148,0 9.587,44.178,0 9.581,44.179,0 9.573,44.191,0 9.557,44.2,0 9.512,44.215,0 9.5,44.222,0 9.49,44.231,0 9.485,44.244,0 9.473,44.24,0 9.454,44.237,0 9.437,44.239,0 9.43,44.247,0 9.423,44.257,0 9.375,44.272,0 9.368,44.294,0 9.263,44.336,0 9.231,44.353,0 9.222,44.344,0 9.214,44.333,0 9.21,44.321,0 9.211,44.305,0 9.166,44.318,0 9.147,44.328,0 9.149,44.34,0 9.131,44.363,0 9.103,44.374,0 9.002,44.387,0 8.953,44.4,0 8.924,44.411,0 8.915,44.409,0 8.869,44.409,0 8.846,44.413,0 8.838,44.417,0 8.828,44.428,0 
8.763,44.432,0 8.738,44.429,0 8.725,44.424,0 8.696,44.406,0 8.686,44.398,0 8.679,44.394,0 8.671,44.394,0 8.663,44.395,0 8.656,44.394,0 8.594,44.363,0 8.577,44.36,0 8.565,44.357,0 8.541,44.34,0 8.467,44.304,0 8.445,44.284,0 8.45,44.264,0 8.44,44.253,0 8.437,44.247,0 8.436,44.24,0 8.433,44.238,0 8.418,44.23,0 8.412,44.227,0 8.407,44.215,0 8.409,44.204,0 8.409,44.193,0 8.395,44.182,0 8.37,44.173,0 8.314,44.16,0 8.285,44.148,0 8.27,44.138,0 8.257,44.128,0 8.234,44.103,0 8.231,44.096,0 8.232,44.08,0 8.231,44.072,0 8.224,44.057,0 8.217,44.045,0 8.17,44.006,0 8.153,43.983,0 8.168,43.962,0 8.168,43.956,0 8.145,43.952,0 8.116,43.927,0 8.09,43.92,0 8.082,43.915,0 8.076,43.909,0 8.073,43.904,0 8.068,43.896,0 8.056,43.892,0 8.032,43.887,0 7.96,43.853,0 7.786,43.822,0 7.737,43.798,0 7.695,43.791,0 7.573,43.791,0 7.545,43.784,0 7.532,43.784,0 7.524,43.789,0 7.513,43.792,0 7.503,43.792,0 7.483,43.84,0 7.478,43.866,0 7.493,43.886,0 7.537,43.921,0 7.557,43.944,0 7.609,43.976,0 7.631,43.994,0 7.639,44.005,0 7.647,44.027,0 7.653,44.04,0 7.664,44.049,0 7.679,44.057,0 7.69,44.067,0 7.692,44.085,0 7.676,44.109,0 7.654,44.125,0 7.642,44.144,0 7.656,44.176,0 7.625,44.18,0 7.584,44.161,0 7.555,44.159,0 7.381,44.123,0 7.341,44.124,0 7.331,44.125,0 7.322,44.132,0 7.316,44.14,0 7.309,44.147,0 7.296,44.151,0 7.27,44.154,0 7.251,44.16,0 7.145,44.207,0 7.105,44.218,0 7.046,44.24,0 7.033,44.243,0 7.02,44.242,0 7.008,44.239,0 6.996,44.238,0 6.983,44.242,0 6.973,44.249,0 6.969,44.258,0 6.966,44.268,0 6.959,44.277,0 6.95,44.285,0 6.93,44.295,0 6.921,44.302,0 6.916,44.31,0 6.904,44.33,0 6.896,44.34,0 6.874,44.358,0 6.87,44.363,0 6.866,44.372,0 6.866,44.377,0 6.869,44.383,0 6.877,44.414,0 6.884,44.423,0 6.918,44.436,0 6.892,44.452,0 6.861,44.475,0 6.839,44.503,0 6.836,44.534,0 6.846,44.547,0 6.897,44.575,0 6.932,44.618,0 6.946,44.625,0 6.934,44.647,0 6.941,44.667,0 6.96,44.683,0 6.983,44.692,0 7.001,44.692,0 7.037,44.685,0 7.055,44.685,0 7.049,44.698,0 7.019,44.739,0 7.015,44.747,0 7.01,44.772,0 
6.998,44.794,0 6.999,44.795,0 7.004,44.811,0 7.006,44.812,0 7.006,44.816,0 7.007,44.819,0 7.007,44.822,0 7.005,44.828,0 7.001,44.833,0 6.983,44.847,0 6.933,44.862,0 6.915,44.863,0 6.866,44.856,0 6.847,44.859,0 6.778,44.888,0 6.745,44.908,0 6.728,44.929,0 6.73,44.985,0 6.723,45.013,0 6.697,45.027,0 6.662,45.029,0 6.652,45.036,0 6.64,45.05,0 6.637,45.059,0 6.638,45.067,0 6.637,45.074,0 6.62,45.084,0 6.603,45.103,0 6.615,45.115,0 6.633,45.126,0 6.667,45.14,0 6.676,45.141,0 6.694,45.14,0 6.702,45.141,0 6.711,45.145,0 6.729,45.155,0 6.736,45.157,0 6.771,45.153,0 6.808,45.139,0 6.844,45.13,0 6.877,45.141,0 6.879,45.147,0 6.873,45.152,0 6.868,45.157,0 6.873,45.166,0 6.881,45.168,0 6.905,45.169,0 6.914,45.17,0 6.928,45.18,0 6.946,45.201,0 6.959,45.21,0 6.994,45.221,0 7.03,45.228,0 7.038,45.226,0 7.05,45.215,0 7.055,45.214,0 7.062,45.219,0 7.081,45.243,0 7.108,45.259,0 7.108,45.275,0 7.098,45.295,0 7.093,45.324,0 7.098,45.33,0 7.13,45.357,0 7.151,45.383,0 7.16,45.398,0 7.161,45.411,0 7.153,45.415,0 7.11,45.428,0 7.097,45.435,0 7.089,45.447,0 7.082,45.459,0 7.072,45.47,0 7.028,45.493,0 6.983,45.511,0 6.975,45.526,0 6.97,45.567,0 6.966,45.574,0 6.955,45.586,0 6.953,45.594,0 6.956,45.603,0 6.967,45.62,0 6.969,45.626,0 6.963,45.641,0 6.951,45.647,0 6.919,45.653,0 6.905,45.66,0 6.883,45.676,0 6.869,45.679,0 6.843,45.683,0 6.816,45.697,0 6.796,45.718,0 6.785,45.76,0 6.782,45.777,0 6.783,45.795,0 6.788,45.812,0 6.801,45.826,0 6.816,45.833,0 6.846,45.836,0 6.846,45.838,0 6.849,45.842,0 6.853,45.847,0 6.858,45.849,0 6.862,45.849,0 6.87,45.845,0 6.873,45.845,0 6.88,45.846,0 6.905,45.845,0 6.926,45.85,0 6.949,45.858,0 6.969,45.87,0 6.983,45.886,0 6.989,45.899,0 6.997,45.911,0 7.008,45.921,0 7.022,45.925,0 7.067,45.89,0 7.09,45.881,0 7.121,45.876,0 7.154,45.877,0 7.184,45.88,0 7.245,45.898,0 7.274,45.91,0 7.287,45.913,0 7.362,45.908,0 7.394,45.916,0 7.453,45.946,0 7.483,45.955,0 7.504,45.957,0 7.515,45.967,0 7.524,45.978,0 7.541,45.984,0 7.643,45.966,0 7.659,45.96,0 7.674,45.95,0 
7.693,45.931,0 7.694,45.929,0 7.706,45.926,0 7.715,45.927,0 7.722,45.93,0 7.732,45.93,0 7.78,45.918,0 7.808,45.918,0 7.825,45.915,0 7.831,45.914,0 7.844,45.919,0 7.846,45.923,0 7.845,45.928,0 7.848,45.938,0 7.872,45.969,0 7.898,45.982,0 7.969,45.993,0 7.979,45.995,0 7.986,45.999,0 7.998,46.011,0 7.999,46.013,0 8.009,46.028,0 8.011,46.03,0 8.016,46.058,0 8.016,46.069,0 8.018,46.081,0 8.025,46.091,0 8.035,46.097,0 8.056,46.098,0 8.067,46.101,0 8.111,46.127,0 8.132,46.159,0 8.13,46.196,0 8.1,46.236,0 8.077,46.25,0 8.073,46.254,0 8.077,46.262,0 8.087,46.272,0 8.107,46.286,0 8.128,46.292,0 8.172,46.299,0 8.193,46.309,0 8.242,46.354,0 8.27,46.364,0 8.282,46.37,0 8.291,46.378,0 8.297,46.388,0 8.297,46.398,0 8.29,46.401,0 8.287,46.405,0 8.295,46.418,0 8.316,46.434,0 8.343,46.444,0 8.399,46.452,0 8.428,46.449,0 8.442,46.435,0 8.446,46.412,0 8.446,46.382,0 8.443,46.353,0 8.427,46.302,0 8.423,46.276,0 8.427,46.251,0 8.438,46.235,0 8.457,46.225,0 8.483,46.218,0 8.51,46.208,0 8.539,46.188,0 8.602,46.123,0 8.612,46.119,0 8.631,46.115,0 8.677,46.096,0 8.695,46.095,0 8.702,46.098,0 8.718,46.108,0 8.724,46.11,0 8.732,46.107,0 8.739,46.098,0 8.747,46.094,0 8.763,46.093,0 8.794,46.093,0 8.809,46.09,0 8.834,46.066,0 8.82,46.043,0 8.791,46.019,0 8.773,45.991,0 8.77,45.986,0 8.768,45.983,0 8.785,45.982,0 8.8,45.979,0 8.858,45.957,0 8.864,45.953,0 8.871,45.947,0 8.881,45.931,0 8.898,45.91,0 8.907,45.896,0 8.912,45.883,0 8.914,45.866,0 8.91,45.854,0 8.904,45.842,0 8.9,45.826,0 8.94,45.835,0 8.972,45.825,0 9.002,45.821,0 9.034,45.848,0 9.059,45.882,0 9.063,45.899,0 9.052,45.916,0 9.042,45.92,0 9.021,45.923,0 9.011,45.927,0 9.002,45.936,0 8.993,45.954,0 8.983,45.962,0 8.981,45.964,0 8.98,45.967,0 8.981,45.969,0 8.983,45.972,0 9.016,45.993,0 8.998,46.028,0 9.002,46.039,0 9.028,46.053,0 9.05,46.058,0 9.059,46.062,0 9.067,46.071,0 9.07,46.083,0 9.068,46.106,0 9.072,46.119,0 9.091,46.138,0 9.163,46.172,0 9.171,46.183,0 9.176,46.194,0 9.181,46.204,0 9.192,46.21,0 9.204,46.214,0 9.216,46.221,0 
9.225,46.231,0 9.24,46.267,0 9.269,46.309,0 9.275,46.331,0 9.274,46.344,0 9.26,46.38,0 9.26,46.394,0 9.263,46.407,0 9.261,46.417,0 9.248,46.423,0 9.238,46.437,0 9.246,46.461,0 9.263,46.485,0 9.282,46.497,0 9.331,46.502,0 9.351,46.498,0 9.352,46.485,0 9.377,46.469,0 9.385,46.466,0 9.395,46.469,0 9.4,46.475,0 9.404,46.483,0 9.411,46.489,0 9.427,46.497,0 9.435,46.498,0 9.438,46.492,0 9.444,46.396,0 9.442,46.381,0 9.444,46.375,0 9.452,46.37,0 9.474,46.362,0 9.483,46.357,0 9.503,46.321,0 9.515,46.309,0 9.536,46.299,0 9.56,46.293,0 9.674,46.292,0 9.693,46.297,0 9.708,46.312,0 9.709,46.32,0 9.707,46.331,0 9.709,46.342,0 9.72,46.351,0 9.731,46.351,0 9.755,46.341,0 9.768,46.339,0 9.789,46.343,0 9.855,46.367,0 9.899,46.372,0 9.918,46.371,0 9.939,46.367,0 9.964,46.356,0 9.971,46.34,0 9.971,46.32,0 9.978,46.298,0 9.992,46.284,0 10.032,46.26,0 10.042,46.243,0 10.043,46.22,0 10.076,46.22,0 10.118,46.231,0 10.146,46.243,0 10.159,46.262,0 10.146,46.28,0 10.105,46.309,0 10.096,46.321,0 10.092,46.329,0 10.092,46.338,0 10.097,46.352,0 10.105,46.361,0 10.126,46.374,0 10.133,46.381,0 10.141,46.403,0 10.133,46.414,0 10.116,46.419,0 10.071,46.425,0 10.042,46.433,0 10.026,46.446,0 10.044,46.467,0 10.035,46.471,0 10.03,46.477,0 10.028,46.484,0 10.027,46.493,0 10.031,46.504,0 10.031,46.526,0 10.033,46.533,0 10.041,46.542,0 10.063,46.557,0 10.071,46.564,0 10.083,46.597,0 10.088,46.604,0 10.097,46.608,0 10.192,46.627,0 10.218,46.627,0 10.234,46.618,0 10.236,46.607,0 10.23,46.586,0 10.235,46.575,0 10.276,46.566,0 10.284,46.561,0 10.289,46.556,0 10.295,46.551,0 10.307,46.547,0 10.319,46.546,0 10.354,46.548,0 10.426,46.535,0 10.444,46.538,0 10.458,46.554,0 10.466,46.578,0 10.467,46.604,0 10.459,46.624,0 10.438,46.636,0 10.396,46.639,0 10.378,46.653,0 10.369,46.672,0 10.374,46.682,0 10.385,46.689,0 10.394,46.701,0 10.397,46.715,0 10.396,46.726,0 10.4,46.736,0 10.417,46.743,0 10.429,46.756,0 10.426,46.769,0 10.419,46.784,0 10.417,46.799,0 10.439,46.817,0 10.445,46.823,0 10.449,46.832,0 
10.454,46.864,0 10.486,46.846,0 10.528,46.843,0 10.629,46.862,0 10.647,46.864,0 10.662,46.861,0 10.739,46.83,0 10.749,46.819,0 10.744,46.813,0 10.722,46.8,0 10.717,46.795,0 10.723,46.786,0 10.734,46.786,0 10.755,46.791,0 10.766,46.788,0 10.795,46.777,0 10.805,46.777,0 10.824,46.78,0 10.834,46.78,0 10.843,46.777,0 10.86,46.767,0 10.87,46.764,0 10.88,46.765,0 10.914,46.772,0 10.931,46.774,0 10.966,46.772,0 10.983,46.768,0 10.997,46.769,0 11.011,46.779,0 11.033,46.806,0 11.037,46.808,0 11.049,46.812,0 11.053,46.815,0 11.055,46.82,0 11.053,46.83,0 11.054,46.834,0 11.073,46.865,0 11.084,46.9,0 11.092,46.912,0 11.157,46.957,0 11.174,46.964,0 11.244,46.979,0 11.314,46.987,0 11.349,46.982,0 11.381,46.972,0 11.411,46.97,0 11.445,46.993,0 11.445,46.993,0 11.453,47.001,0 11.462,47.006,0 11.472,47.007,0 11.489,47.004,0 11.496,47.002,0 11.502,46.998,0 11.507,46.993,0 11.515,46.989,0 11.524,46.988,0 11.534,46.99,0 11.543,46.993,0 11.543,46.993,0 11.544,46.993,0 11.544,46.993,0 11.573,46.999,0 11.596,47,0 11.648,46.993,0 11.648,46.993,0 11.65,46.993,0 11.657,46.993,0 11.665,46.993,0 11.684,46.992,0 11.716,46.975,0 11.735,46.971,0 11.746,46.972,0 11.766,46.983,0 11.777,46.988,0 11.823,46.993,0 11.857,47.012,0 11.9,47.028,0 11.944,47.038,0 12.015,47.04,0 12.116,47.077,0 12.181,47.085,0 12.204,47.08,0 12.204,47.053,0 12.182,47.034,0 12.122,47.011,0 12.111,46.993,0 12.118,46.983,0 12.122,46.972,0 </coordinates></LinearRing></outerBoundaryIs><innerBoundaryIs><LinearRing><coordinates>12.4,43.903,0 12.429,43.892,0 12.461,43.895,0 12.479,43.917,0 12.478,43.92,0 12.478,43.923,0 12.48,43.926,0 12.483,43.929,0 12.49,43.939,0 12.492,43.956,0 12.489,43.973,0 12.482,43.983,0 12.453,43.979,0 12.421,43.967,0 12.396,43.948,0 12.386,43.925,0 12.4,43.903,0 </coordinates></LinearRing></innerBoundaryIs><innerBoundaryIs><LinearRing><coordinates>12.444,41.902,0 12.449,41.9,0 12.455,41.9,0 12.458,41.902,0 12.455,41.908,0 12.447,41.907,0 12.444,41.902,0 
</coordinates></LinearRing></innerBoundaryIs></Polygon></MultiGeometry>
</Placemark> </kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(len(list(k.features())), 1)
self.assertTrue(
isinstance(list(k.features())[0].geometry, MultiPolygon))
k2 = kml.KML()
k2.from_string(k.to_string())
self.assertEqual(k.to_string(), k2.to_string())
def test_atom(self):
pass
def test_schema(self):
doc = """<Schema name="TrailHeadType" id="TrailHeadTypeId">
<SimpleField type="string" name="TrailHeadName">
<displayName><![CDATA[<b>Trail Head Name</b>]]></displayName>
</SimpleField>
<SimpleField type="double" name="TrailLength">
<displayName><![CDATA[<i>The length in miles</i>]]></displayName>
</SimpleField>
<SimpleField type="int" name="ElevationGain">
<displayName><![CDATA[<i>change in altitude</i>]]></displayName>
</SimpleField>
</Schema> """
s = kml.Schema(ns='', id='default')
s.from_string(doc)
self.assertEqual(len(list(s.simple_fields)), 3)
self.assertEqual(list(s.simple_fields)[0]['type'], 'string')
self.assertEqual(list(s.simple_fields)[1]['type'], 'double')
self.assertEqual(list(s.simple_fields)[2]['type'], 'int')
self.assertEqual(list(s.simple_fields)[0]['name'], 'TrailHeadName')
self.assertEqual(list(s.simple_fields)[1]['name'], 'TrailLength')
self.assertEqual(list(s.simple_fields)[2]['name'], 'ElevationGain')
self.assertEqual(list(s.simple_fields)[0][
'displayName'
], '<b>Trail Head Name</b>')
self.assertEqual(list(s.simple_fields)[1][
'displayName'
], '<i>The length in miles</i>')
self.assertEqual(list(s.simple_fields)[2][
'displayName'
], '<i>change in altitude</i>')
s1 = kml.Schema(ns='', id='default')
s1.from_string(s.to_string())
self.assertEqual(len(list(s1.simple_fields)), 3)
self.assertEqual(list(s1.simple_fields)[0]['type'], 'string')
self.assertEqual(list(s1.simple_fields)[1]['name'], 'TrailLength')
self.assertEqual(list(s1.simple_fields)[2][
'displayName'
], '<i>change in altitude</i>')
self.assertEqual(s.to_string(), s1.to_string())
doc1 = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
%s
</Document>
</kml>""" % doc
k = kml.KML()
k.from_string(doc1)
d = list(k.features())[0]
s2 = list(d.schemata())[0]
s.ns = config.NS
self.assertEqual(s.to_string(), s2.to_string())
k1 = kml.KML()
k1.from_string(k.to_string())
self.assertTrue('Schema' in k1.to_string())
self.assertTrue('SimpleField' in k1.to_string())
self.assertEqual(k1.to_string(), k.to_string())
def test_schema_data(self):
doc = """<SchemaData schemaUrl="#TrailHeadTypeId">
<SimpleData name="TrailHeadName">Pi in the sky</SimpleData>
<SimpleData name="TrailLength">3.14159</SimpleData>
<SimpleData name="ElevationGain">10</SimpleData>
</SchemaData>"""
sd = kml.SchemaData(ns='', schema_url='#default')
sd.from_string(doc)
self.assertEqual(sd.schema_url, '#TrailHeadTypeId')
self.assertEqual(
sd.data[0], {'name': 'TrailHeadName',
'value': 'Pi in the sky'})
self.assertEqual(
sd.data[1], {'name': 'TrailLength',
'value': '3.14159'})
self.assertEqual(sd.data[2], {'name': 'ElevationGain', 'value': '10'})
sd1 = kml.SchemaData(ns='', schema_url='#default')
sd1.from_string(sd.to_string())
self.assertEqual(sd1.schema_url, '#TrailHeadTypeId')
self.assertEqual(sd.to_string(), sd1.to_string())
def test_snippet(self):
doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<Snippet maxLines="2" >Short Desc</Snippet>
</Placemark> </kml>"""
k = kml.KML()
k.from_string(doc)
self.assertEqual(list(k.features())[0].snippet['text'], 'Short Desc')
self.assertEqual(list(k.features())[0].snippet['maxLines'], 2)
list(k.features())[0]._snippet['maxLines'] = 3
self.assertEqual(list(k.features())[0].snippet['maxLines'], 3)
self.assertTrue('maxLines="3"' in k.to_string())
list(k.features())[0].snippet = {'text': 'Annother Snippet'}
self.assertFalse('maxLines' in k.to_string())
self.assertTrue('Annother Snippet' in k.to_string())
list(k.features())[0].snippet = 'Diffrent Snippet'
self.assertFalse('maxLines' in k.to_string())
self.assertTrue('Diffrent Snippet' in k.to_string())
def test_from_wrong_string(self):
doc = kml.KML()
self.assertRaises(TypeError, doc.from_string, '<xml></xml>')
def test_address(self):
doc = kml.Document()
doc.from_string("""
<kml:Document xmlns:kml="http://www.opengis.net/kml/2.2" id="pm-id">
<kml:name>pm-name</kml:name>
<kml:description>pm-description</kml:description>
<kml:visibility>1</kml:visibility>
<kml:address>1600 Amphitheatre Parkway, Mountain View, CA 94043, USA</kml:address>
</kml:Document>
""")
doc2 = kml.Document()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_phone_number(self):
doc = kml.Document()
doc.from_string("""
<kml:Document xmlns:kml="http://www.opengis.net/kml/2.2" id="pm-id">
<kml:name>pm-name</kml:name>
<kml:description>pm-description</kml:description>
<kml:visibility>1</kml:visibility>
<kml:phoneNumber>+1 234 567 8901</kml:phoneNumber>
</kml:Document>
""")
doc2 = kml.Document()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_groundoverlay(self):
doc = kml.KML()
doc.from_string(
"""
<kml xmlns="http://www.opengis.net/kml/2.2">
<Folder>
<name>Ground Overlays</name>
<description>Examples of ground overlays</description>
<GroundOverlay>
<name>Large-scale overlay on terrain</name>
<description>Overlay shows Mount Etna erupting
on July 13th, 2001.</description>
<Icon>
<href>http://developers.google.com/kml/documentation/images/etna.jpg</href>
</Icon>
<LatLonBox>
<north>37.91904192681665</north>
<south>37.46543388598137</south>
<east>15.35832653742206</east>
<west>14.60128369746704</west>
<rotation>-0.1556640799496235</rotation>
</LatLonBox>
</GroundOverlay>
</Folder>
</kml>
""")
doc2 = kml.KML()
doc2.from_string(doc.to_string())
self.assertEqual(doc.to_string(), doc2.to_string())
def test_linarring_placemark(self):
doc = kml.KML()
doc.from_string( """<kml xmlns="http://www.opengis.net/kml/2.2">
<Placemark>
<LinearRing>
<coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,0.0</coordinates>
</LinearRing>
</Placemark> </kml>""")
doc2 = kml.KML()
doc2.from_string(doc.to_string())
self.assertTrue(
isinstance(list(doc.features())[0].geometry, LinearRing))
self.assertEqual(doc.to_string(), doc2.to_string())
class StyleTestCase(unittest.TestCase):
    """Construction and serialization of style objects."""

    def test_styleurl(self):
        """styleUrl accepts both a plain string and a StyleUrl instance."""
        doc = kml.Document()
        doc.styleUrl = '#somestyle'
        self.assertEqual(doc.styleUrl, '#somestyle')
        # A string assignment is wrapped in a StyleUrl object internally.
        self.assertTrue(isinstance(doc._styleUrl, styles.StyleUrl))
        url_obj = styles.StyleUrl(config.NS, url='#otherstyle')
        doc.styleUrl = url_obj
        self.assertTrue(isinstance(doc._styleUrl, styles.StyleUrl))
        self.assertEqual(doc.styleUrl, '#otherstyle')
        rebuilt = kml.Document()
        rebuilt.from_string(doc.to_string())
        self.assertEqual(doc.to_string(), rebuilt.to_string())

    def test_style(self):
        """A Document holding a Style with a LineStyle round-trips."""
        line = styles.LineStyle(color='red', width=2.0)
        doc = kml.Document(styles=[styles.Style(styles=[line])])
        rebuilt = kml.Document()
        # prettyprint only changes whitespace; canonical output must match.
        rebuilt.from_string(doc.to_string(prettyprint=True))
        self.assertEqual(doc.to_string(), rebuilt.to_string())

    def test_polystyle_fill(self):
        # Placeholder: currently only checks PolyStyle construction.
        styles.PolyStyle()

    def test_polystyle_outline(self):
        # Placeholder: currently only checks PolyStyle construction.
        styles.PolyStyle()
class StyleUsageTestCase(unittest.TestCase):
    """Styles attached via constructor vs. append_style must be equivalent."""

    def test_create_document_style(self):
        """Document: constructor styles == appended styles == parsed XML."""
        style = styles.Style(styles=[styles.PolyStyle(color='7f000000')])
        via_ctor = kml.Document(styles=[style])
        via_append = kml.Document()
        via_append.append_style(style)
        expected = """
            <kml:Document xmlns:kml="http://www.opengis.net/kml/2.2">
              <kml:visibility>1</kml:visibility>
                <kml:Style>
                  <kml:PolyStyle>
                    <kml:color>7f000000</kml:color>
                    <kml:fill>1</kml:fill>
                    <kml:outline>1</kml:outline>
                  </kml:PolyStyle>
                </kml:Style>
            </kml:Document>
        """
        parsed = kml.Document()
        parsed.from_string(expected)
        self.assertEqual(via_ctor.to_string(), via_append.to_string())
        self.assertEqual(via_append.to_string(), parsed.to_string())
        self.assertEqual(via_ctor.to_string(), parsed.to_string())

    def test_create_placemark_style(self):
        """Placemark: constructor styles == appended styles == parsed XML."""
        style = styles.Style(styles=[styles.PolyStyle(color='7f000000')])
        via_ctor = kml.Placemark(styles=[style])
        via_append = kml.Placemark()
        via_append.append_style(style)
        expected = """
            <kml:Placemark xmlns:kml="http://www.opengis.net/kml/2.2">
              <kml:visibility>1</kml:visibility>
                <kml:Style>
                  <kml:PolyStyle>
                    <kml:color>7f000000</kml:color>
                    <kml:fill>1</kml:fill>
                    <kml:outline>1</kml:outline>
                  </kml:PolyStyle>
                </kml:Style>
            </kml:Placemark>
        """
        parsed = kml.Placemark()
        parsed.from_string(expected)
        self.assertEqual(via_ctor.to_string(), via_append.to_string())
        self.assertEqual(via_append.to_string(), parsed.to_string())
        self.assertEqual(via_ctor.to_string(), parsed.to_string())
class StyleFromStringTestCase(unittest.TestCase):
    """Parse KML style markup from XML strings and verify round-tripping.

    Each test feeds a KML document into ``kml.KML.from_string``, inspects the
    parsed style objects, and asserts that serialising and re-parsing yields
    an identical document (``to_string`` round-trip).
    """

    def test_styleurl(self):
        """A Document-level <styleUrl> is exposed via the styleUrl property."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Document.kml</name>
<open>1</open>
<styleUrl>#default</styleUrl>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertEqual(list(k.features())[0].styleUrl, '#default')
        # Round-trip: re-parsing the serialised output must reproduce it.
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_balloonstyle(self):
        """BalloonStyle fields (bgColor, textColor, text, displayMode) parse."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Document.kml</name>
<Style id="exampleBalloonStyle">
<BalloonStyle>
<!-- a background color for the balloon -->
<bgColor>ffffffbb</bgColor>
<!-- styling of the balloon text -->
<textColor>ff000000</textColor>
<text><![CDATA[
<b><font color="#CC0000" size="+3">$[name]</font></b>
<br/><br/>
<font face="Courier">$[description]</font>
<br/><br/>
Extra text that will appear in the description balloon
<br/><br/>
<!-- insert the to/from hyperlinks -->
$[geDirections]
]]></text>
<!-- kml:displayModeEnum -->
<displayMode>default</displayMode>
</BalloonStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        # First sub-style of the first Style of the first feature.
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.BalloonStyle))
        self.assertEqual(style.bgColor, 'ffffffbb')
        self.assertEqual(style.textColor, 'ff000000')
        self.assertEqual(style.displayMode, 'default')
        # The CDATA payload is preserved, including template placeholders.
        self.assertTrue('$[geDirections]' in style.text)
        self.assertTrue('$[description]' in style.text)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k2.to_string(), k.to_string())

    def test_balloonstyle_old_color(self):
        """The legacy <color> element is read as BalloonStyle.bgColor."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Document.kml</name>
<Style id="exampleBalloonStyle">
<BalloonStyle>
<!-- a background color for the balloon -->
<color>ffffffbb</color>
</BalloonStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.BalloonStyle))
        self.assertEqual(style.bgColor, 'ffffffbb')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k2.to_string(), k.to_string())

    def test_labelstyle(self):
        """LabelStyle color parses; colorMode defaults to None when absent."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Document.kml</name>
<open>1</open>
<Style id="exampleStyleDocument">
<LabelStyle>
<color>ff0000cc</color>
</LabelStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.LabelStyle))
        self.assertEqual(style.color, 'ff0000cc')
        self.assertEqual(style.colorMode, None)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_iconstyle(self):
        """IconStyle color, colorMode, scale, heading and Icon href parse."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<Style id="randomColorIcon">
<IconStyle>
<color>ff00ff00</color>
<colorMode>random</colorMode>
<scale>1.1</scale>
<heading>0</heading>
<Icon>
<href>http://maps.google.com/icon21.png</href>
</Icon>
</IconStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list((k.features()))), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.IconStyle))
        self.assertEqual(style.color, 'ff00ff00')
        # Numeric fields are converted: scale/heading become floats.
        self.assertEqual(style.scale, 1.1)
        self.assertEqual(style.colorMode, 'random')
        self.assertEqual(style.heading, 0.0)
        self.assertEqual(style.icon_href, 'http://maps.google.com/icon21.png')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_linestyle(self):
        """LineStyle color and width parse from a Style definition."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>LineStyle.kml</name>
<open>1</open>
<Style id="linestyleExample">
<LineStyle>
<color>7f0000ff</color>
<width>4</width>
</LineStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.LineStyle))
        self.assertEqual(style.color, '7f0000ff')
        self.assertEqual(style.width, 4)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_polystyle(self):
        """PolyStyle color and colorMode parse from a Style definition."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>PolygonStyle.kml</name>
<open>1</open>
<Style id="examplePolyStyle">
<PolyStyle>
<color>ff0000cc</color>
<colorMode>random</colorMode>
</PolyStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.color, 'ff0000cc')
        self.assertEqual(style.colorMode, 'random')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_polystyle_float_fill(self):
        """A float-valued <fill> ("0.0") is coerced to the integer 0."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>PolygonStyle.kml</name>
<open>1</open>
<Style id="examplePolyStyle">
<PolyStyle>
<fill>0.0</fill>
</PolyStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.fill, 0)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_polystyle_float_outline(self):
        """A float-valued <outline> ("0.0") is coerced to the integer 0."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>PolygonStyle.kml</name>
<open>1</open>
<Style id="examplePolyStyle">
<PolyStyle>
<outline>0.0</outline>
</PolyStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        style = list(list(list(k.features())[0].styles())[0].styles())[0]
        self.assertTrue(isinstance(style, styles.PolyStyle))
        self.assertEqual(style.outline, 0)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_styles(self):
        """A Style holding four sub-styles parses all of them."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<!-- Begin Style Definitions -->
<Style id="myDefaultStyles">
<IconStyle>
<color>a1ff00ff</color>
<scale>1.399999976158142</scale>
<Icon>
<href>http://myserver.com/icon.jpg</href>
</Icon>
</IconStyle>
<LabelStyle>
<color>7fffaaff</color>
<scale>1.5</scale>
</LabelStyle>
<LineStyle>
<color>ff0000ff</color>
<width>15</width>
</LineStyle>
<PolyStyle>
<color>7f7faaaa</color>
<colorMode>random</colorMode>
</PolyStyle>
</Style>
<!-- End Style Definitions -->
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(list(list(k.features())[0].styles())[0], styles.Style))
        style = list(list(list(k.features())[0].styles())[0].styles())
        self.assertEqual(len(style), 4)
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_stylemapurl(self):
        """A StyleMap of styleUrl pairs yields StyleUrl normal/highlight."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<StyleMap id="styleMapExample">
<Pair>
<key>normal</key>
<styleUrl>#normalState</styleUrl>
</Pair>
<Pair>
<key>highlight</key>
<styleUrl>#highlightState</styleUrl>
</Pair>
</StyleMap>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(
                list(list(k.features())[0].styles())[0], styles.StyleMap))
        sm = list(list(list(k.features())[0].styles()))[0]
        self.assertTrue(isinstance(sm.normal, styles.StyleUrl))
        self.assertEqual(sm.normal.url, '#normalState')
        self.assertTrue(isinstance(sm.highlight, styles.StyleUrl))
        self.assertEqual(sm.highlight.url, '#highlightState')
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_stylemapstyles(self):
        """A StyleMap with inline Style pairs yields Style normal/highlight."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<StyleMap id="styleMapExample">
<Pair>
<key>normal</key>
<Style id="exampleStyleDocument">
<LabelStyle>
<color>ff0000cc</color>
</LabelStyle>
</Style>
</Pair>
<Pair>
<key>highlight</key>
<Style id="examplePolyStyle">
<PolyStyle>
<color>ff0000cc</color>
<colorMode>random</colorMode>
</PolyStyle>
<LineStyle>
<color>ff0000ff</color>
<width>15</width>
</LineStyle>
</Style>
</Pair>
</StyleMap>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        self.assertTrue(
            isinstance(
                list(list(k.features())[0].styles())[0], styles.StyleMap))
        sm = list(list(list(k.features())[0].styles()))[0]
        self.assertTrue(isinstance(sm.normal, styles.Style))
        self.assertEqual(len(list(sm.normal.styles())), 1)
        self.assertTrue(
            isinstance(list(sm.normal.styles())[0], styles.LabelStyle))
        self.assertTrue(isinstance(sm.highlight, styles.Style))
        self.assertTrue(isinstance(sm.highlight, styles.Style))
        self.assertEqual(len(list(sm.highlight.styles())), 2)
        self.assertTrue(
            isinstance(list(sm.highlight.styles())[0], styles.LineStyle))
        self.assertTrue(
            isinstance(list(sm.highlight.styles())[1], styles.PolyStyle))
        k2 = kml.KML()
        k2.from_string(k.to_string())
        self.assertEqual(k.to_string(), k2.to_string())

    def test_get_style_by_url(self):
        """get_style_by_url resolves the fragment of a URL to a local style."""
        doc = """<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>Document.kml</name>
<open>1</open>
<Style id="exampleStyleDocument">
<LabelStyle>
<color>ff0000cc</color>
</LabelStyle>
</Style>
<StyleMap id="styleMapExample">
<Pair>
<key>normal</key>
<styleUrl>#normalState</styleUrl>
</Pair>
<Pair>
<key>highlight</key>
<styleUrl>#highlightState</styleUrl>
</Pair>
</StyleMap>
<Style id="linestyleExample">
<LineStyle>
<color>7f0000ff</color>
<width>4</width>
</LineStyle>
</Style>
</Document>
</kml>"""
        k = kml.KML()
        k.from_string(doc)
        self.assertEqual(len(list(k.features())), 1)
        document = list(k.features())[0]
        # Only the fragment part of the URL is used for the lookup, so a
        # full URL, a relative path, and a bare fragment all resolve.
        style = document.get_style_by_url(
            'http://localhost:8080/somepath#exampleStyleDocument')
        self.assertTrue(isinstance(list(style.styles())[0], styles.LabelStyle))
        style = document.get_style_by_url('somepath#linestyleExample')
        self.assertTrue(isinstance(list(style.styles())[0], styles.LineStyle))
        style = document.get_style_by_url('#styleMapExample')
        self.assertTrue(isinstance(style, styles.StyleMap))
class DateTimeTestCase(unittest.TestCase):
    """Tests for KML TimeStamp/TimeSpan handling and feature time fields.

    Timestamps are stored as two-element lists ``[value, resolution]`` where
    resolution is one of the XML Schema date types ('dateTime', 'date',
    'gYearMonth', 'gYear'); the resolution controls serialisation detail.
    """

    def test_timestamp(self):
        """A datetime serialises as 'dateTime', a date as 'date'."""
        now = datetime.datetime.now()
        ts = kml.TimeStamp(timestamp=now)
        self.assertEqual(ts.timestamp, [now, 'dateTime'])
        self.assertTrue('TimeStamp>' in str(ts.to_string()))
        self.assertTrue('when>' in str(ts.to_string()))
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        y2k = datetime.date(2000, 1, 1)
        ts = kml.TimeStamp(timestamp=y2k)
        self.assertEqual(ts.timestamp, [y2k, 'date'])
        self.assertTrue('2000-01-01' in str(ts.to_string()))

    def test_timestamp_resolution(self):
        """Lowering the resolution field coarsens the serialised timestamp."""
        now = datetime.datetime.now()
        ts = kml.TimeStamp(timestamp=now)
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        ts.timestamp[1] = 'date'
        self.assertTrue(now.date().isoformat() in str(ts.to_string()))
        self.assertFalse(now.isoformat() in str(ts.to_string()))
        year = str(now.year)
        ym = now.strftime('%Y-%m')
        ts.timestamp[1] = 'gYearMonth'
        self.assertTrue(ym in str(ts.to_string()))
        self.assertFalse(now.date().isoformat() in str(ts.to_string()))
        ts.timestamp[1] = 'gYear'
        self.assertTrue(year in str(ts.to_string()))
        self.assertFalse(ym in str(ts.to_string()))
        # Serialising without a timestamp is an error.
        ts.timestamp = None
        self.assertRaises(TypeError, ts.to_string)

    def test_timespan(self):
        """TimeSpan serialises begin/end; at least one bound is required."""
        now = datetime.datetime.now()
        y2k = datetime.datetime(2000, 1, 1)
        ts = kml.TimeSpan(end=now, begin=y2k)
        self.assertEqual(ts.end, [now, 'dateTime'])
        self.assertEqual(ts.begin, [y2k, 'dateTime'])
        self.assertTrue('TimeSpan>' in str(ts.to_string()))
        self.assertTrue('begin>' in str(ts.to_string()))
        self.assertTrue('end>' in str(ts.to_string()))
        self.assertTrue(now.isoformat() in str(ts.to_string()))
        self.assertTrue(y2k.isoformat() in str(ts.to_string()))
        ts.end = None
        self.assertFalse(now.isoformat() in str(ts.to_string()))
        self.assertTrue(y2k.isoformat() in str(ts.to_string()))
        # With both bounds cleared, serialisation must fail.
        ts.begin = None
        self.assertRaises(ValueError, ts.to_string)

    def test_feature_timestamp(self):
        """Setting/clearing timeStamp on a Document updates its serialisation."""
        now = datetime.datetime.now()
        f = kml.Document()
        f.timeStamp = now
        self.assertEqual(f.timeStamp, now)
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('TimeStamp>' in str(f.to_string()))
        self.assertTrue('when>' in str(f.to_string()))
        f.timeStamp = now.date()
        self.assertTrue(now.date().isoformat() in str(f.to_string()))
        self.assertFalse(now.isoformat() in str(f.to_string()))
        f.timeStamp = None
        self.assertFalse('TimeStamp>' in str(f.to_string()))

    def test_feature_timespan(self):
        """Setting/clearing begin/end on a Document updates its TimeSpan."""
        now = datetime.datetime.now()
        y2k = datetime.date(2000, 1, 1)
        f = kml.Document()
        f.begin = y2k
        f.end = now
        self.assertEqual(f.begin, y2k)
        self.assertEqual(f.end, now)
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        # A TimeSpan with only a begin bound still serialises.
        f.end = None
        self.assertFalse(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertFalse('end>' in str(f.to_string()))
        # Clearing both bounds removes the TimeSpan entirely.
        f.begin = None
        self.assertFalse('TimeSpan>' in str(f.to_string()))

    def test_feature_timespan_stamp(self):
        """timeStamp and begin/end are mutually exclusive on a feature."""
        now = datetime.datetime.now()
        y2k = datetime.date(2000, 1, 1)
        f = kml.Document()
        f.begin = y2k
        f.end = now
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertTrue('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        self.assertFalse('TimeStamp>' in str(f.to_string()))
        self.assertFalse('when>' in str(f.to_string()))
        # Assigning a timeStamp replaces an existing TimeSpan.
        f.timeStamp = now
        self.assertTrue(now.isoformat() in str(f.to_string()))
        self.assertTrue('TimeStamp>' in str(f.to_string()))
        self.assertTrue('when>' in str(f.to_string()))
        self.assertFalse('2000-01-01' in str(f.to_string()))
        self.assertFalse('TimeSpan>' in str(f.to_string()))
        self.assertFalse('begin>' in str(f.to_string()))
        self.assertFalse('end>' in str(f.to_string()))
        # Assigning an end bound replaces the timeStamp again.
        f.end = y2k
        self.assertFalse(now.isoformat() in str(f.to_string()))
        self.assertTrue('2000-01-01' in str(f.to_string()))
        self.assertTrue('TimeSpan>' in str(f.to_string()))
        self.assertFalse('begin>' in str(f.to_string()))
        self.assertTrue('end>' in str(f.to_string()))
        self.assertFalse('TimeStamp>' in str(f.to_string()))
        self.assertFalse('when>' in str(f.to_string()))
        # Forcing both via the private attribute must raise on serialisation.
        ts = kml.TimeStamp(timestamp=now)
        f._time_stamp = ts
        self.assertRaises(ValueError, f.to_string)

    def test_read_timestamp(self):
        """Parsing <when> infers the resolution from the text format."""
        ts = kml.TimeStamp(ns='')
        doc = """
<TimeStamp>
<when>1997</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'gYear')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 1, 1, 0, 0))
        doc = """
<TimeStamp>
<when>1997-07</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'gYearMonth')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 7, 1, 0, 0))
        # A compact 6-digit value is also read as year+month.
        doc = """
<TimeStamp>
<when>199808</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'gYearMonth')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1998, 8, 1, 0, 0))
        doc = """
<TimeStamp>
<when>1997-07-16</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'date')
        self.assertEqual(ts.timestamp[0], datetime.datetime(1997, 7, 16, 0, 0))
        # Full dateTime with explicit UTC ('Z') suffix.
        doc = """
<TimeStamp>
<when>1997-07-16T07:30:15Z</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'dateTime')
        self.assertEqual(ts.timestamp[0], datetime.datetime(
            1997, 7, 16, 7, 30, 15,
            tzinfo=tzutc()))
        # Full dateTime with a +03:00 offset (10800 seconds).
        doc = """
<TimeStamp>
<when>1997-07-16T10:30:15+03:00</when>
</TimeStamp>
"""
        ts.from_string(doc)
        self.assertEqual(ts.timestamp[1], 'dateTime')
        self.assertEqual(ts.timestamp[0], datetime.datetime(
            1997, 7, 16, 10, 30, 15,
            tzinfo=tzoffset(None, 10800)))

    def test_read_timespan(self):
        """Parsing a TimeSpan keeps independent resolutions for begin/end."""
        ts = kml.TimeSpan(ns='')
        doc = """
<TimeSpan>
<begin>1876-08-01</begin>
<end>1997-07-16T07:30:15Z</end>
</TimeSpan>
"""
        ts.from_string(doc)
        self.assertEqual(ts.begin[1], 'date')
        self.assertEqual(ts.begin[0], datetime.datetime(1876, 8, 1, 0, 0))
        self.assertEqual(ts.end[1], 'dateTime')
        self.assertEqual(ts.end[0], datetime.datetime(
            1997, 7, 16, 7, 30, 15,
            tzinfo=tzutc()))

    def test_featurefromstring(self):
        """A Document containing both TimeStamp and TimeSpan parses."""
        d = kml.Document(ns='')
        doc = """<Document>
<name>Document.kml</name>
<open>1</open>
<TimeStamp>
<when>1997-07-16T10:30:15+03:00</when>
</TimeStamp>
<TimeSpan>
<begin>1876-08-01</begin>
<end>1997-07-16T07:30:15Z</end>
</TimeSpan>
</Document>"""
        d.from_string(doc)
class AtomTestCase(unittest.TestCase):
    """Exercise serialisation, parsing and validation of Atom elements."""

    def test_author(self):
        """Author serialises name/uri/email and round-trips through XML."""
        author = atom.Author(name="Christian Ledermann")
        self.assertEqual(author.name, "Christian Ledermann")
        author.uri = 'http://iwlearn.net'
        author.email = 'christian@gmail.com'
        serialized = str(author.to_string())
        self.assertIn("Christian Ledermann", serialized)
        self.assertIn('http://iwlearn.net', serialized)
        self.assertIn('christian@gmail.com', serialized)
        self.assertIn('name>', serialized)
        self.assertIn('uri>', serialized)
        self.assertIn('email>', serialized)
        # This value is not accepted as an e-mail address, so no <email>
        # element is written out.
        author.email = 'christian'
        self.assertNotIn('email>', str(author.to_string()))
        parsed = atom.Author()
        parsed.from_string(author.to_string())
        self.assertEqual(author.to_string(), parsed.to_string())

    def test_link(self):
        """Link writes all attributes, round-trips, and requires an href."""
        link = atom.Link(href="http://localhost/", rel="alternate")
        self.assertEqual(link.href, "http://localhost/")
        self.assertEqual(link.rel, "alternate")
        link.title = "Title"
        link.type = "text/html"
        link.hreflang = 'en'
        link.length = "4096"
        serialized = str(link.to_string())
        self.assertIn('href="http://localhost/"', serialized)
        self.assertIn('rel="alternate"', serialized)
        self.assertIn('title="Title"', serialized)
        self.assertIn('hreflang="en"', serialized)
        self.assertIn('type="text/html"', serialized)
        self.assertIn('length="4096"', serialized)
        self.assertIn('link', serialized)
        # The Atom namespace declaration must be present.
        self.assertIn('="http://www.w3.org/2005/Atom"', serialized)
        parsed = atom.Link()
        parsed.from_string(link.to_string())
        self.assertEqual(link.to_string(), parsed.to_string())
        # Serialising without an href is an error.
        link.href = None
        self.assertRaises(ValueError, link.to_string)
class SetGeometryTestCase(unittest.TestCase):
    """Tests for serialising geometries assigned to a fastkml Geometry.

    Covers altitudeMode/extrude/tessellate flags and every supported
    geometry type (Point through GeometryCollection).
    """

    def test_altitude_mode(self):
        """Only the three KML altitude modes serialise; others assert."""
        geom = Geometry()
        geom.geometry = Point(0, 1)
        self.assertEqual(geom.altitude_mode, None)
        self.assertFalse('altitudeMode' in str(geom.to_string()))
        # Invalid and gx-extension modes are rejected at serialisation time.
        geom.altitude_mode = 'unknown'
        self.assertRaises(AssertionError, geom.to_string)
        geom.altitude_mode = 'clampToSeaFloor'
        self.assertRaises(AssertionError, geom.to_string)
        geom.altitude_mode = 'relativeToSeaFloor'
        self.assertRaises(AssertionError, geom.to_string)
        # clampToGround is the default, so it is omitted from the output.
        geom.altitude_mode = 'clampToGround'
        self.assertFalse('altitudeMode' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertTrue(
            'altitudeMode>relativeToGround</' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertTrue('altitudeMode>absolute</' in str(geom.to_string()))

    def test_extrude(self):
        """<extrude> is only written for non-clamped altitude modes."""
        geom = Geometry()
        self.assertEqual(geom.extrude, False)
        geom.geometry = Point(0, 1)
        geom.extrude = False
        self.assertFalse('extrude' in str(geom.to_string()))
        geom.extrude = True
        # extrude has no effect when clamped to the ground.
        geom.altitude_mode = 'clampToGround'
        self.assertFalse('extrude' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertTrue('extrude>1</' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertTrue('extrude>1</' in str(geom.to_string()))

    def test_tesselate(self):
        """<tessellate> is only written for clamped LineStrings."""
        geom = Geometry()
        self.assertEqual(geom.tessellate, False)
        geom.geometry = LineString([(0, 0), (1, 1)])
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'clampToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'relativeToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.tessellate = True
        geom.altitude_mode = None
        self.assertFalse('tessellate' in str(geom.to_string()))
        # tessellate is only honoured together with clampToGround.
        geom.altitude_mode = 'relativeToGround'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'absolute'
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.altitude_mode = 'clampToGround'
        self.assertTrue('tessellate>1</' in str(geom.to_string()))
        # Points and Polygons never carry a tessellate element.
        geom.geometry = Point(0, 1)
        self.assertFalse('tessellate' in str(geom.to_string()))
        geom.geometry = Polygon([(0, 0), (1, 0), (1, 1), (0, 0)])
        self.assertFalse('tessellate' in str(geom.to_string()))

    def test_point(self):
        """A Point (object or __geo_interface__ dict) serialises to KML."""
        p = Point(0, 1)
        g = Geometry(geometry=p)
        self.assertEqual(g.geometry, p)
        g = Geometry(geometry=p.__geo_interface__)
        self.assertEqual(g.geometry.__geo_interface__, p.__geo_interface__)
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))

    def test_linestring(self):
        """A LineString serialises and round-trips through from_string."""
        l = LineString([(0, 0), (1, 1)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,1.000000</' in
            str(g.to_string()))
        g2 = Geometry()
        g2.from_string(g.to_string())
        self.assertEqual(g.to_string(), g2.to_string())

    def test_linearring(self):
        """A LinearRing serialises with its closing coordinate."""
        l = LinearRing([(0, 0), (1, 0), (1, 1), (0, 0)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))

    def test_polygon(self):
        """Polygons serialise outer and (when present) inner boundaries."""
        l = Polygon([(0, 0), (1, 0), (1, 1), (0, 0)])
        g = Geometry(geometry=l)
        self.assertEqual(g.geometry, l)
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertFalse('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        # A polygon with a hole gets an innerBoundaryIs element as well.
        p = Polygon(
            [(-1, -1), (2, -1), (2, 2), (-1, -1)], [[(0, 0), (1, 0), (1, 1),
                                                     (0, 0)]], )
        g = Geometry(geometry=p)
        self.assertEqual(g.geometry, p)
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertTrue('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</'
            in str(g.to_string()))

    def test_multipoint(self):
        """A MultiPoint serialises as a MultiGeometry of Points."""
        p0 = Point(0, 1)
        p1 = Point(1, 1)
        g = Geometry(geometry=MultiPoint([p0, p1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))
        self.assertTrue(
            'coordinates>1.000000,1.000000</' in str(g.to_string()))

    def test_multilinestring(self):
        """A MultiLineString serialises as a MultiGeometry of LineStrings."""
        l0 = LineString([(0, 0), (1, 0)])
        l1 = LineString([(0, 1), (1, 1)])
        g = Geometry(geometry=MultiLineString([l0, l1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000</' in
            str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000 1.000000,1.000000</' in
            str(g.to_string()))

    def test_multipolygon(self):
        """A MultiPolygon serialises each member with its boundaries."""
        p0 = Polygon(
            [(-1, -1), (2, -1), (2, 2), (-1, -1)], [[(0, 0), (1, 0), (1, 1),
                                                     (0, 0)]])
        p1 = Polygon([(3, 0), (4, 0), (4, 1), (3, 0)])
        g = Geometry(geometry=MultiPolygon([p0, p1]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertTrue('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</'
            in str(g.to_string()))
        self.assertTrue(
            'coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</'
            in str(g.to_string()))

    def test_geometrycollection(self):
        """A mixed GeometryCollection serialises as one MultiGeometry."""
        po = Polygon([(3, 0), (4, 0), (4, 1), (3, 0)])
        lr = LinearRing([(0, -1), (1, -1), (1, 1), (0, -1)])
        ls = LineString([(0, 0), (1, 1)])
        p = Point(0, 1)
        g = Geometry(geometry=GeometryCollection([po, p, ls, lr]))
        self.assertTrue('MultiGeometry' in str(g.to_string()))
        self.assertTrue('Polygon' in str(g.to_string()))
        self.assertTrue('outerBoundaryIs' in str(g.to_string()))
        self.assertFalse('innerBoundaryIs' in str(g.to_string()))
        self.assertTrue('LinearRing' in str(g.to_string()))
        self.assertTrue(
            'coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</'
            in str(g.to_string()))
        self.assertTrue('LineString' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,0.000000 1.000000,1.000000</' in
            str(g.to_string()))
        self.assertTrue('Point' in str(g.to_string()))
        self.assertTrue(
            'coordinates>0.000000,1.000000</' in str(g.to_string()))
class GetGeometryTestCase(unittest.TestCase):
    """Tests for parsing KML geometry XML into fastkml Geometry objects.

    Each test parses a KML fragment with ``Geometry.from_string`` and checks
    the resulting flags or the ``__geo_interface__`` of the parsed geometry.
    """

    def test_altitude_mode(self):
        """<altitudeMode> inside a Point is read into altitude_mode."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,1.000000</kml:coordinates>
<kml:altitudeMode>clampToGround</kml:altitudeMode>
</kml:Point>"""
        g = Geometry()
        self.assertEqual(g.altitude_mode, None)
        g.from_string(doc)
        self.assertEqual(g.altitude_mode, 'clampToGround')

    def test_extrude(self):
        """<extrude>1</extrude> is read as the boolean True."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,1.000000</kml:coordinates>
<kml:extrude>1</kml:extrude>
</kml:Point>"""
        g = Geometry()
        self.assertEqual(g.extrude, False)
        g.from_string(doc)
        self.assertEqual(g.extrude, True)

    def test_tesselate(self):
        """<tessellate>1</tessellate> is read as the boolean True."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,1.000000</kml:coordinates>
<kml:tessellate>1</kml:tessellate>
</kml:Point>"""
        g = Geometry()
        self.assertEqual(g.tessellate, False)
        g.from_string(doc)
        self.assertEqual(g.tessellate, True)

    def test_point(self):
        """A kml:Point parses into a geometry with a Point geo interface."""
        doc = """<kml:Point xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,1.000000</kml:coordinates>
</kml:Point>"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__,
            {'type': 'Point',
             'coordinates': (0.0, 1.0)})

    def test_linestring(self):
        """A kml:LineString parses into a LineString geometry."""
        doc = """<kml:LineString xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,0.000000 1.000000,1.000000</kml:coordinates>
</kml:LineString>"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__,
            {'type': 'LineString',
             'coordinates': ((0.0, 0.0), (1.0, 1.0))})

    def test_linearring(self):
        """A kml:LinearRing parses into a closed LinearRing geometry."""
        doc = """<kml:LinearRing xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
</kml:LinearRing>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'LinearRing',
                'coordinates': ((0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0))
            })

    def test_polygon(self):
        """kml:Polygon parses with outer and optional inner boundaries."""
        doc = """<kml:Polygon xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:outerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
</kml:LinearRing>
</kml:outerBoundaryIs>
</kml:Polygon>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'Polygon',
                'coordinates': ((
                    (0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 0.0)
                ), )
            })
        # A polygon with a hole: the interior ring follows the exterior one.
        doc = """<kml:Polygon xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:outerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</kml:coordinates>
</kml:LinearRing>
</kml:outerBoundaryIs>
<kml:innerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
</kml:LinearRing>
</kml:innerBoundaryIs>
</kml:Polygon>
"""
        g.from_string(doc)
        self.assertEqual(
            g.geometry.__geo_interface__, {
                'type': 'Polygon',
                'coordinates': (
                    ((-1.0, -1.0), (2.0, -1.0), (2.0, 2.0),
                     (-1.0, -1.0)), ((0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
                                     (0.0, 0.0)),
                )
            })

    def test_multipoint(self):
        """A MultiGeometry of Points parses into a 2-element geometry."""
        doc = """
<kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:Point>
<kml:coordinates>0.000000,1.000000</kml:coordinates>
</kml:Point>
<kml:Point>
<kml:coordinates>1.000000,1.000000</kml:coordinates>
</kml:Point>
</kml:MultiGeometry>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)

    def test_multilinestring(self):
        """A MultiGeometry of LineStrings parses into a 2-element geometry."""
        doc = """
<kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:LineString>
<kml:coordinates>0.000000,0.000000 1.000000,0.000000</kml:coordinates>
</kml:LineString>
<kml:LineString>
<kml:coordinates>0.000000,1.000000 1.000000,1.000000</kml:coordinates>
</kml:LineString>
</kml:MultiGeometry>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)

    def test_multipolygon(self):
        """A MultiGeometry of Polygons parses into a 2-element geometry."""
        doc = """
<kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:Polygon>
<kml:outerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>-1.000000,-1.000000 2.000000,-1.000000 2.000000,2.000000 -1.000000,-1.000000</kml:coordinates>
</kml:LinearRing>
</kml:outerBoundaryIs>
<kml:innerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>0.000000,0.000000 1.000000,0.000000 1.000000,1.000000 0.000000,0.000000</kml:coordinates>
</kml:LinearRing>
</kml:innerBoundaryIs>
</kml:Polygon>
<kml:Polygon>
<kml:outerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>3.000000,0.000000 4.000000,0.000000 4.000000,1.000000 3.000000,0.000000</kml:coordinates>
</kml:LinearRing>
</kml:outerBoundaryIs>
</kml:Polygon>
</kml:MultiGeometry>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)

    def test_geometrycollection(self):
        """Mixed MultiGeometry contents parse into a GeometryCollection."""
        doc = """
<kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:Polygon>
<kml:outerBoundaryIs>
<kml:LinearRing>
<kml:coordinates>3,0 4,0 4,1 3,0</kml:coordinates>
</kml:LinearRing>
</kml:outerBoundaryIs>
</kml:Polygon>
<kml:Point>
<kml:coordinates>0.000000,1.000000</kml:coordinates>
</kml:Point>
<kml:LineString>
<kml:coordinates>0.000000,0.000000 1.000000,1.000000</kml:coordinates>
</kml:LineString>
<kml:LinearRing>
<kml:coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,1.0 0.0,0.0</kml:coordinates>
</kml:LinearRing>
</kml:MultiGeometry>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 4)
        # Even a collection of only LinearRings stays a GeometryCollection.
        doc = """
<kml:MultiGeometry xmlns:kml="http://www.opengis.net/kml/2.2">
<kml:LinearRing>
<kml:coordinates>3.0,0.0 4.0,0.0 4.0,1.0 3.0,0.0</kml:coordinates>
</kml:LinearRing>
<kml:LinearRing>
<kml:coordinates>0.0,0.0 1.0,0.0 1.0,1.0 0.0,0.0</kml:coordinates>
</kml:LinearRing>
</kml:MultiGeometry>
"""
        g = Geometry()
        g.from_string(doc)
        self.assertEqual(len(g.geometry), 2)
        self.assertEqual(g.geometry.geom_type, 'GeometryCollection')
class Force3DTestCase(unittest.TestCase):
    """Check the effect of the global ``config.FORCE3D`` switch."""

    def setUp(self):
        # Start each test from a known state.
        config.FORCE3D = False

    def tearDown(self):
        # Never leak the flag into other test cases.
        config.FORCE3D = False

    def _placemark_pair(self):
        """Return placemarks holding a 2D polygon and its z=0 3D twin."""
        ns = ''
        flat = kml.Placemark(ns, 'id', 'name', 'description')
        flat.geometry = Polygon([(0, 0), (1, 1), (1, 0)])
        spatial = kml.Placemark(ns, 'id', 'name', 'description')
        spatial.geometry = Polygon([(0, 0, 0), (1, 1, 0), (1, 0, 0)])
        return flat, spatial

    def test3d(self):
        """With FORCE3D enabled, 2D output matches the z=0 3D output."""
        config.FORCE3D = True
        flat, spatial = self._placemark_pair()
        self.assertEqual(flat.to_string(), spatial.to_string())

    def testno3d(self):
        """With FORCE3D disabled, 2D and 3D serialisations differ."""
        config.FORCE3D = False
        flat, spatial = self._placemark_pair()
        self.assertNotEqual(flat.to_string(), spatial.to_string())
class BaseFeatureTestCase(unittest.TestCase):
    """Validate the address and phoneNumber properties of kml._Feature."""

    def test_address_string(self):
        """A plain string address is stored verbatim."""
        feature = kml._Feature()
        address = '1600 Amphitheatre Parkway, Mountain View, CA 94043, USA'
        feature.address = address
        self.assertEqual(feature.address, address)

    def test_address_none(self):
        """Assigning None leaves the address unset."""
        feature = kml._Feature()
        feature.address = None
        self.assertIsNone(feature.address)

    def test_address_value_error(self):
        """A non-string address is rejected with ValueError."""
        feature = kml._Feature()
        with self.assertRaises(ValueError):
            feature.address = 123

    def test_phone_number_string(self):
        """A plain string phone number is stored verbatim."""
        feature = kml._Feature()
        feature.phoneNumber = '+1-234-567-8901'
        self.assertEqual(feature.phoneNumber, '+1-234-567-8901')

    def test_phone_number_none(self):
        """Assigning None leaves the phone number unset."""
        feature = kml._Feature()
        feature.phoneNumber = None
        self.assertIsNone(feature.phoneNumber)

    def test_phone_number_value_error(self):
        """A non-string phone number is rejected with ValueError."""
        feature = kml._Feature()
        with self.assertRaises(ValueError):
            feature.phoneNumber = 123
class BaseOverlayTestCase(unittest.TestCase):
    """Validate color, drawOrder and icon handling on kml._Overlay."""

    def test_color_string(self):
        """An aabbggrr colour string is stored verbatim."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.color = '00010203'
        self.assertEqual(overlay.color, '00010203')

    def test_color_none(self):
        """Assigning None clears a previously-set colour."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.color = '00010203'
        self.assertEqual(overlay.color, '00010203')
        overlay.color = None
        self.assertIsNone(overlay.color)

    def test_color_value_error(self):
        """A non-string colour is rejected with ValueError."""
        overlay = kml._Overlay(name='An Overlay')
        with self.assertRaises(ValueError):
            overlay.color = object()

    def test_draw_order_string(self):
        """A string draw order is stored as-is."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.drawOrder = '1'
        self.assertEqual(overlay.drawOrder, '1')

    def test_draw_order_int(self):
        """An integer draw order is converted to its string form."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.drawOrder = 1
        self.assertEqual(overlay.drawOrder, '1')

    def test_draw_order_none(self):
        """Assigning None clears a previously-set draw order."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.drawOrder = '1'
        self.assertEqual(overlay.drawOrder, '1')
        overlay.drawOrder = None
        self.assertIsNone(overlay.drawOrder)

    def test_draw_order_value_error(self):
        """An unconvertible draw order is rejected with ValueError."""
        overlay = kml._Overlay(name='An Overlay')
        with self.assertRaises(ValueError):
            overlay.drawOrder = object()

    def test_icon_without_tag(self):
        """A bare URL is wrapped in an <href> element."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.icon = 'http://example.com/'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_open_tag(self):
        """A URL with only an opening tag gets the closing tag appended."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.icon = '<href>http://example.com/'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_close_tag(self):
        """A URL with only a closing tag gets the opening tag prepended."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.icon = 'http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_with_tag(self):
        """A fully-tagged value is stored unchanged."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.icon = '<href>http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')

    def test_icon_to_none(self):
        """Assigning None clears a previously-set icon."""
        overlay = kml._Overlay(name='An Overlay')
        overlay.icon = '<href>http://example.com/</href>'
        self.assertEqual(overlay.icon, '<href>http://example.com/</href>')
        overlay.icon = None
        self.assertIsNone(overlay.icon)

    def test_icon_raise_exception(self):
        """A non-string icon value is rejected with ValueError."""
        overlay = kml._Overlay(name='An Overlay')
        with self.assertRaises(ValueError):
            overlay.icon = 12345
class GroundOverlayTestCase(unittest.TestCase):
    """Exercise altitude and latLonBox attribute handling of kml.GroundOverlay."""

    def setUp(self):
        self.g = kml.GroundOverlay()

    def _set_box(self, north, south, east, west, rotation):
        # Assign every latLonBox attribute individually.
        self.g.north = north
        self.g.south = south
        self.g.east = east
        self.g.west = west
        self.g.rotation = rotation

    def _assert_box(self, north, south, east, west, rotation):
        # Check every latLonBox attribute against the expected values.
        self.assertEqual(self.g.north, north)
        self.assertEqual(self.g.south, south)
        self.assertEqual(self.g.east, east)
        self.assertEqual(self.g.west, west)
        self.assertEqual(self.g.rotation, rotation)

    def test_altitude_int(self):
        self.g.altitude = 123
        self.assertEqual(self.g.altitude, '123')

    def test_altitude_float(self):
        self.g.altitude = 123.4
        self.assertEqual(self.g.altitude, '123.4')

    def test_altitude_string(self):
        self.g.altitude = '123'
        self.assertEqual(self.g.altitude, '123')

    def test_altitude_value_error(self):
        with self.assertRaises(ValueError):
            self.g.altitude = object()

    def test_altitude_none(self):
        self.g.altitude = '123'
        self.assertEqual(self.g.altitude, '123')
        self.g.altitude = None
        self.assertIsNone(self.g.altitude)

    def test_altitude_mode_default(self):
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_error(self):
        # An invalid mode falls back to the default.
        self.g.altitudeMode = ''
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_clamp(self):
        self.g.altitudeMode = 'clampToGround'
        self.assertEqual(self.g.altitudeMode, 'clampToGround')

    def test_altitude_mode_absolute(self):
        self.g.altitudeMode = 'absolute'
        self.assertEqual(self.g.altitudeMode, 'absolute')

    def test_latlonbox_function(self):
        self.g.latLonBox(10, 20, 30, 40, 50)
        self._assert_box('10', '20', '30', '40', '50')

    def test_latlonbox_string(self):
        self._set_box('10', '20', '30', '40', '50')
        self._assert_box('10', '20', '30', '40', '50')

    def test_latlonbox_int(self):
        self._set_box(10, 20, 30, 40, 50)
        self._assert_box('10', '20', '30', '40', '50')

    def test_latlonbox_float(self):
        self._set_box(10.0, 20.0, 30.0, 40.0, 50.0)
        self._assert_box('10.0', '20.0', '30.0', '40.0', '50.0')

    def test_latlonbox_value_error(self):
        # Every attribute rejects non-numeric, non-string values and stays unset.
        for attribute in ('north', 'south', 'east', 'west', 'rotation'):
            with self.assertRaises(ValueError):
                setattr(self.g, attribute, object())
        self._assert_box(None, None, None, None, None)

    def test_latlonbox_empty_string(self):
        self._set_box('', '', '', '', '')
        self._assert_box('', '', '', '', '')

    def test_latlonbox_none(self):
        self._set_box(None, None, None, None, None)
        self._assert_box(None, None, None, None, None)
class GroundOverlayStringTestCase(unittest.TestCase):
    """Compare GroundOverlay serialization against reference KML documents."""

    # Reference latLonBox fragment; filled in with north/south/east/west/rotation.
    _BOX_TEMPLATE = (
        '<kml:latLonBox>'
        '<kml:north>{}</kml:north>'
        '<kml:south>{}</kml:south>'
        '<kml:east>{}</kml:east>'
        '<kml:west>{}</kml:west>'
        '<kml:rotation>{}</kml:rotation>'
        '</kml:latLonBox>')

    def _assert_serializes_to(self, overlay, *fragments):
        # Build the reference element from the given KML body fragments and
        # check that the overlay serializes to exactly the same string.
        expected = kml.GroundOverlay()
        expected.from_string(
            '<kml:GroundOverlay xmlns:kml="http://www.opengis.net/kml/2.2">'
            '<kml:visibility>1</kml:visibility>'
            + ''.join(fragments) +
            '</kml:GroundOverlay>')
        self.assertEqual(overlay.to_string(), expected.to_string())

    def test_default_to_string(self):
        g = kml.GroundOverlay()
        self._assert_serializes_to(g)

    def test_to_string(self):
        g = kml.GroundOverlay()
        g.icon = 'http://example.com'
        g.drawOrder = 1
        g.color = '00010203'
        self._assert_serializes_to(
            g,
            '<kml:color>00010203</kml:color>',
            '<kml:drawOrder>1</kml:drawOrder>',
            '<kml:icon><href>http://example.com</href></kml:icon>')

    def test_altitude_from_int(self):
        g = kml.GroundOverlay()
        g.altitude = 123
        self._assert_serializes_to(
            g,
            '<kml:altitude>123</kml:altitude>',
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>')

    def test_altitude_from_float(self):
        g = kml.GroundOverlay()
        g.altitude = 123.4
        self._assert_serializes_to(
            g,
            '<kml:altitude>123.4</kml:altitude>',
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>')

    def test_altitude_from_string(self):
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        self._assert_serializes_to(
            g,
            '<kml:altitude>123.4</kml:altitude>',
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>')

    def test_altitude_mode_absolute(self):
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 'absolute'
        self._assert_serializes_to(
            g,
            '<kml:altitude>123.4</kml:altitude>',
            '<kml:altitudeMode>absolute</kml:altitudeMode>')

    def test_altitude_mode_unknown_string(self):
        # An unrecognized mode string serializes as the default mode.
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 'unknown string'
        self._assert_serializes_to(
            g,
            '<kml:altitude>123.4</kml:altitude>',
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>')

    def test_altitude_mode_value(self):
        # A non-string mode also serializes as the default mode.
        g = kml.GroundOverlay()
        g.altitude = '123.4'
        g.altitudeMode = 1234
        self._assert_serializes_to(
            g,
            '<kml:altitude>123.4</kml:altitude>',
            '<kml:altitudeMode>clampToGround</kml:altitudeMode>')

    def test_latlonbox_no_rotation(self):
        # Omitting rotation defaults it to 0 in the output.
        g = kml.GroundOverlay()
        g.latLonBox(10, 20, 30, 40)
        self._assert_serializes_to(
            g, self._BOX_TEMPLATE.format(10, 20, 30, 40, 0))

    def test_latlonbox_rotation(self):
        g = kml.GroundOverlay()
        g.latLonBox(10, 20, 30, 40, 50)
        self._assert_serializes_to(
            g, self._BOX_TEMPLATE.format(10, 20, 30, 40, 50))

    def test_latlonbox_nswer(self):
        # Setting the attributes individually matches the latLonBox() call.
        g = kml.GroundOverlay()
        g.north = 10
        g.south = 20
        g.east = 30
        g.west = 40
        g.rotation = 50
        self._assert_serializes_to(
            g, self._BOX_TEMPLATE.format(10, 20, 30, 40, 50))
def test_suite():
    """Collect every test case defined in this module into a single suite."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(BaseClassesTestCase))
    suite.addTest(unittest.makeSuite(BuildKmlTestCase))
    suite.addTest(unittest.makeSuite(KmlFromStringTestCase))
    suite.addTest(unittest.makeSuite(StyleTestCase))
    suite.addTest(unittest.makeSuite(StyleFromStringTestCase))
    suite.addTest(unittest.makeSuite(DateTimeTestCase))
    suite.addTest(unittest.makeSuite(AtomTestCase))
    suite.addTest(unittest.makeSuite(SetGeometryTestCase))
    suite.addTest(unittest.makeSuite(GetGeometryTestCase))
    suite.addTest(unittest.makeSuite(Force3DTestCase))
    suite.addTest(unittest.makeSuite(BaseFeatureTestCase))
    suite.addTest(unittest.makeSuite(BaseOverlayTestCase))
    suite.addTest(unittest.makeSuite(GroundOverlayTestCase))
    # BUG fix: BaseFeatureTestCase and GroundOverlayStringTestCase are defined
    # in this module but were never added, so test_suite() silently skipped them.
    suite.addTest(unittest.makeSuite(GroundOverlayStringTestCase))
    return suite
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| true | true |
f71b8c248dafde29a7c3bf37462e5c2f5296a920 | 461 | py | Python | pyapi/utils/db.py | dockerian/py-api | 777db7d5dacf3ecf29a991f50d2ac78bb5bef66a | [
"Apache-2.0"
] | null | null | null | pyapi/utils/db.py | dockerian/py-api | 777db7d5dacf3ecf29a991f50d2ac78bb5bef66a | [
"Apache-2.0"
] | 6 | 2019-12-26T16:51:55.000Z | 2022-03-21T22:16:45.000Z | pyapi/utils/db.py | dockerian/pyapi | 777db7d5dacf3ecf29a991f50d2ac78bb5bef66a | [
"Apache-2.0"
] | null | null | null | """
# db module - database adapter functions
"""
from datetime import timezone

from sqlalchemy import DateTime, TypeDecorator
# pylint: disable=abstract-method
class DateTimeUtc(TypeDecorator):
    '''
    DateTime column type whose results are returned as offset-aware
    datetimes in UTC.
    '''
    impl = DateTime

    # pylint: disable=unused-argument
    def process_result_value(self, value, dialect):
        """
        Attach the UTC time zone to a datetime coming back from the database.

        Returns None unchanged so NULL columns do not raise.
        """
        # BUG fix: the original called pytz.utc but pytz was never imported,
        # raising NameError at runtime; the stdlib timezone.utc is the
        # equivalent UTC tzinfo and needs no third-party dependency.
        if value is None:
            return None
        return value.replace(tzinfo=timezone.utc)
from sqlalchemy import DateTime, TypeDecorator
class DateTimeUtc(TypeDecorator):
impl = DateTime
def process_result_value(self, value, dialect):
return value.replace(tzinfo=pytz.utc)
| true | true |
f71b8c325f2c4b1fda3cadbcc6909025b1010728 | 2,157 | py | Python | flask_rebar/swagger_generation/swagger_words.py | jsonau/flask-rebar | 22b82596e60bcb537c69dba03ed7155176a9aca1 | [
"MIT"
] | null | null | null | flask_rebar/swagger_generation/swagger_words.py | jsonau/flask-rebar | 22b82596e60bcb537c69dba03ed7155176a9aca1 | [
"MIT"
] | null | null | null | flask_rebar/swagger_generation/swagger_words.py | jsonau/flask-rebar | 22b82596e60bcb537c69dba03ed7155176a9aca1 | [
"MIT"
] | null | null | null | """
Swagger Words
~~~~~~~~~~~~~
Python friendly aliases to reserved Swagger words.
:copyright: Copyright 2018 PlanGrid, Inc., see AUTHORS.
:license: MIT, see LICENSE for details.
"""
from __future__ import unicode_literals
# Python-friendly aliases for every reserved Swagger/OpenAPI keyword used by
# the generator.  Names that would shadow a Python builtin or keyword get a
# trailing underscore (float_, format_, in_, object_, type_).
additional_properties = "additionalProperties"
all_of = "allOf"
allow_empty_value = "allowEmptyValue"
any_of = "anyOf"
api_key = "apiKey"
array = "array"
basic = "basic"
binary = "binary"
body = "body"
boolean = "boolean"
byte = "byte"
collection_format = "collectionFormat"
components = "components"
consumes = "consumes"
content = "content"
csv = "csv"
date = "date"
date_time = "date-time"
default = "default"
definitions = "definitions"
description = "description"
double = "double"
enum = "enum"
example = "example"
external_docs = "externalDocs"
exclusive_maximum = "exclusiveMaximum"
exclusive_minimum = "exclusiveMinimum"
explode = "explode"
float_ = "float"
form = "form"
format_ = "format"
header = "header"
host = "host"
in_ = "in"
info = "info"
integer = "integer"
int32 = "int32"
int64 = "int64"
items = "items"
max_items = "maxItems"
max_length = "maxLength"
max_properties = "maxProperties"
maximum = "maximum"
min_items = "minItems"
min_length = "minLength"
min_properties = "minProperties"
minimum = "minimum"
multi = "multi"
multiple_of = "multipleOf"
name = "name"
null = "null"
# NOTE: "x-nullable" is a vendor extension, not a core Swagger 2.0 keyword.
nullable = "x-nullable"
number = "number"
oauth2 = "oauth2"
object_ = "object"
one_of = "oneOf"
openapi = "openapi"
operation_id = "operationId"
parameters = "parameters"
password = "password"
path = "path"
paths = "paths"
pattern = "pattern"
produces = "produces"
properties = "properties"
query = "query"
ref = "$ref"
request_body = "requestBody"
required = "required"
responses = "responses"
schema = "schema"
schemas = "schemas"
schemes = "schemes"
security = "security"
security_definitions = "securityDefinitions"
security_schemes = "securitySchemes"
servers = "servers"
simple = "simple"
string = "string"
style = "style"
summary = "summary"
swagger = "swagger"
tags = "tags"
title = "title"
type_ = "type"
unique_items = "uniqueItems"
url = "url"
uuid = "uuid"
variables = "variables"
version = "version"
additional_properties = "additionalProperties"
all_of = "allOf"
allow_empty_value = "allowEmptyValue"
any_of = "anyOf"
api_key = "apiKey"
array = "array"
basic = "basic"
binary = "binary"
body = "body"
boolean = "boolean"
byte = "byte"
collection_format = "collectionFormat"
components = "components"
consumes = "consumes"
content = "content"
csv = "csv"
date = "date"
date_time = "date-time"
default = "default"
definitions = "definitions"
description = "description"
double = "double"
enum = "enum"
example = "example"
external_docs = "externalDocs"
exclusive_maximum = "exclusiveMaximum"
exclusive_minimum = "exclusiveMinimum"
explode = "explode"
float_ = "float"
form = "form"
format_ = "format"
header = "header"
host = "host"
in_ = "in"
info = "info"
integer = "integer"
int32 = "int32"
int64 = "int64"
items = "items"
max_items = "maxItems"
max_length = "maxLength"
max_properties = "maxProperties"
maximum = "maximum"
min_items = "minItems"
min_length = "minLength"
min_properties = "minProperties"
minimum = "minimum"
multi = "multi"
multiple_of = "multipleOf"
name = "name"
null = "null"
nullable = "x-nullable"
number = "number"
oauth2 = "oauth2"
object_ = "object"
one_of = "oneOf"
openapi = "openapi"
operation_id = "operationId"
parameters = "parameters"
password = "password"
path = "path"
paths = "paths"
pattern = "pattern"
produces = "produces"
properties = "properties"
query = "query"
ref = "$ref"
request_body = "requestBody"
required = "required"
responses = "responses"
schema = "schema"
schemas = "schemas"
schemes = "schemes"
security = "security"
security_definitions = "securityDefinitions"
security_schemes = "securitySchemes"
servers = "servers"
simple = "simple"
string = "string"
style = "style"
summary = "summary"
swagger = "swagger"
tags = "tags"
title = "title"
type_ = "type"
unique_items = "uniqueItems"
url = "url"
uuid = "uuid"
variables = "variables"
version = "version"
| true | true |
f71b8c4522567898a2c8dbed743a740b05b28ad7 | 1,035 | py | Python | oslo_ovsdb_frontend/impl/native/helpers.py | salv-orlando/oslo_ovsdb_frontend | 07845187467a9e8ad00f02f597e0e1277f28c637 | [
"Apache-2.0"
] | null | null | null | oslo_ovsdb_frontend/impl/native/helpers.py | salv-orlando/oslo_ovsdb_frontend | 07845187467a9e8ad00f02f597e0e1277f28c637 | [
"Apache-2.0"
] | null | null | null | oslo_ovsdb_frontend/impl/native/helpers.py | salv-orlando/oslo_ovsdb_frontend | 07845187467a9e8ad00f02f597e0e1277f28c637 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def _connection_to_manager_uri(conn_uri):
proto, addr = conn_uri.split(':', 1)
if ':' in addr:
ip, port = addr.split(':', 1)
return 'p%s:%s:%s' % (proto, port, ip)
else:
return 'p%s:%s' % (proto, addr)
def enable_connection_uri(conn_uri, execute_func):
    """Register the passive form of *conn_uri* as an OVS manager target.

    Runs 'ovs-vsctl set-manager <target>' through the supplied execute_func.
    """
    target = _connection_to_manager_uri(conn_uri)
    cmd = ['ovs-vsctl', 'set-manager', target]
    execute_func(cmd, run_as_root=True)
| 36.964286 | 78 | 0.686957 |
def _connection_to_manager_uri(conn_uri):
proto, addr = conn_uri.split(':', 1)
if ':' in addr:
ip, port = addr.split(':', 1)
return 'p%s:%s:%s' % (proto, port, ip)
else:
return 'p%s:%s' % (proto, addr)
def enable_connection_uri(conn_uri, execute_func):
manager_uri = _connection_to_manager_uri(conn_uri)
execute_func(['ovs-vsctl', 'set-manager', manager_uri], run_as_root=True)
| true | true |
f71b8d9b0f6513f05ab15b6e9fc0dbe880661dcb | 940 | py | Python | rapidenv/misc/set_activate_alias.py | innoviz-sw-infra/rapid-env | acc5e1e461af42b5fbb7024c0b79d4315c206fe2 | [
"MIT"
] | 1 | 2021-02-15T20:55:49.000Z | 2021-02-15T20:55:49.000Z | rapidenv/misc/set_activate_alias.py | innoviz-sw-infra/rapid-env | acc5e1e461af42b5fbb7024c0b79d4315c206fe2 | [
"MIT"
] | null | null | null | rapidenv/misc/set_activate_alias.py | innoviz-sw-infra/rapid-env | acc5e1e461af42b5fbb7024c0b79d4315c206fe2 | [
"MIT"
] | null | null | null | import sys
from pathlib import Path
def mainwin32():
    """Append a PowerShell 'activate' alias for .\\venv to the given profile.

    Expects the profile path as the first command-line argument.  Creates the
    profile file (and any missing parent directories) and is idempotent: a
    second run detects the existing alias and does nothing.
    """
    if len(sys.argv) < 2:
        # Plain string: the original used a pointless f-string with no fields.
        print('to use run: python set_activate_alias.py $profile')
        return
    profile = Path(sys.argv[1])
    # exist_ok avoids the race between the original exists() check and mkdir().
    profile.parent.mkdir(parents=True, exist_ok=True)
    # touch() replaces the open(..., "a")/write("") dance for file creation.
    profile.touch(exist_ok=True)
    txt = profile.read_text()
    insert = r"Set-Alias -Name activate -Value .\venv\Scripts\activate"
    if insert in txt:
        print(f'alias already set in "{profile}".')
        return
    # Append rather than rewrite so existing profile content is preserved.
    with open(profile, "a") as f:
        f.write(insert + "\n")
def main():
    """Dispatch to the platform-specific implementation (Windows only)."""
    if sys.platform == "win32":
        mainwin32()
    else:
        # BUG fix: corrected the user-facing typo "plafrom" -> "platform".
        print("platform not supported")
if __name__ == "__main__":
main() | 22.926829 | 71 | 0.575532 | import sys
from pathlib import Path
def mainwin32():
if len(sys.argv) < 2:
print(f'to use run: python set_activate_alias.py $profile')
return
profile = sys.argv[1]
profile = Path(profile)
if not profile.parent.exists():
profile.parent.mkdir(parents=True)
if not profile.exists():
with open(profile, "a") as f:
f.write("")
with open(profile, 'r') as f:
txt = f.read()
insert = r"Set-Alias -Name activate -Value .\venv\Scripts\activate"
if txt.find(insert) != -1:
print(f'alias already set in "{profile}".')
return
with open(profile, "a") as f:
f.write(insert + "\n")
def main():
if sys.platform == "win32":
mainwin32()
else:
print("plafrom not supported")
if __name__ == "__main__":
main() | true | true |
f71b8dec12d6719d1ccd4adbb31f7a450c33383c | 1,268 | py | Python | apps/accounts/forms.py | cloudartisan/dojomaster | 9d5efa0345c659636f8d8b556302d0d7bb2055a8 | [
"MIT"
] | 1 | 2019-02-21T14:47:31.000Z | 2019-02-21T14:47:31.000Z | apps/accounts/forms.py | cloudartisan/dojomaster | 9d5efa0345c659636f8d8b556302d0d7bb2055a8 | [
"MIT"
] | null | null | null | apps/accounts/forms.py | cloudartisan/dojomaster | 9d5efa0345c659636f8d8b556302d0d7bb2055a8 | [
"MIT"
] | null | null | null | from django import forms
from .models import UserAccount
class UserCreationForm(forms.ModelForm):
    """
    A form for creating new users. Includes all the required
    fields, plus a repeated password.
    """
    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)

    class Meta:
        model = UserAccount
        fields = ('email',)

    def clean_password2(self):
        """Validate that both password entries are present and identical."""
        pw1 = self.cleaned_data.get("password1")
        pw2 = self.cleaned_data.get("password2")
        mismatch = pw1 and pw2 and pw1 != pw2
        if mismatch:
            raise forms.ValidationError("Passwords don't match")
        return pw2

    def save(self, commit=True):
        """Persist the account with the password stored as a hash."""
        account = super(UserCreationForm, self).save(commit=False)
        account.set_password(self.cleaned_data["password1"])
        if commit:
            account.save()
        return account
class UserChangeForm(forms.ModelForm):
    """
    A form for updating users. Includes all the fields on the user.
    """
    # NOTE(review): 'fields = ()' exposes no editable fields, which appears to
    # contradict the docstring above — confirm whether fields were meant to be
    # listed here.
    class Meta:
        model = UserAccount
        fields = ()
| 29.488372 | 90 | 0.662461 | from django import forms
from .models import UserAccount
class UserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = UserAccount
fields = ('email',)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = UserAccount
fields = ()
| true | true |
f71b8eacdcd41ec7c42144254a210d3c2c2d6f9a | 568 | py | Python | ahrs/filters/__init__.py | ethan-jiang-1/ahrs | e1725267b0009a8a573f99dbf8d06e8481407ab6 | [
"MIT"
] | 184 | 2019-09-06T07:58:52.000Z | 2022-03-31T04:27:09.000Z | ahrs/filters/__init__.py | geoKinga/ahrs | 87f9210cfcf6c545d86ae8588a93f012020164ee | [
"MIT"
] | 48 | 2019-11-13T15:42:46.000Z | 2022-03-31T23:53:53.000Z | ahrs/filters/__init__.py | geoKinga/ahrs | 87f9210cfcf6c545d86ae8588a93f012020164ee | [
"MIT"
] | 34 | 2019-12-19T16:22:00.000Z | 2022-03-14T09:51:50.000Z | # -*- coding: utf-8 -*-
"""
Attitude Estimators
===================
These are the most common attitude filters.
"""
from .angular import AngularRate
from .aqua import AQUA
from .complementary import Complementary
from .davenport import Davenport
from .ekf import EKF
from .famc import FAMC
from .flae import FLAE
from .fourati import Fourati
from .fqa import FQA
from .tilt import Tilt
from .madgwick import Madgwick
from .mahony import Mahony
from .oleq import OLEQ
from .quest import QUEST
from .roleq import ROLEQ
from .saam import SAAM
from .triad import TRIAD
| 21.037037 | 43 | 0.753521 |
from .angular import AngularRate
from .aqua import AQUA
from .complementary import Complementary
from .davenport import Davenport
from .ekf import EKF
from .famc import FAMC
from .flae import FLAE
from .fourati import Fourati
from .fqa import FQA
from .tilt import Tilt
from .madgwick import Madgwick
from .mahony import Mahony
from .oleq import OLEQ
from .quest import QUEST
from .roleq import ROLEQ
from .saam import SAAM
from .triad import TRIAD
| true | true |
f71b8eb018fd43deeb30f1cc3852fc3278cb539b | 1,196 | py | Python | Exercicios/ex059.py | MateusBarboza99/Python-03- | 9c6df88aaa8ba83d385b92722ed1df5873df3a77 | [
"MIT"
] | null | null | null | Exercicios/ex059.py | MateusBarboza99/Python-03- | 9c6df88aaa8ba83d385b92722ed1df5873df3a77 | [
"MIT"
] | null | null | null | Exercicios/ex059.py | MateusBarboza99/Python-03- | 9c6df88aaa8ba83d385b92722ed1df5873df3a77 | [
"MIT"
] | null | null | null | from time import sleep
# Read the two operands once up front; menu option 4 lets the user replace them.
valor1 = int(input('Digite Primeiro valor: '))
valor2 = int(input('Digite segundo valor: '))
opção = 0
# Keep showing the menu until the user picks option 5 (exit).
while opção != 5:
    print(''' [ 1 ] SOMAR
    [ 2 ] MULTIPLICAR
    [ 3 ] MAIOR
    [ 4 ] NOVOS NÚMEROS
    [ 5 ] SAIR DO PROGRAMA''')
    opção = int(input('Qual opção você deseja ? '))
    if opção == 1:
        # Sum of the two operands.
        total = valor1 + valor2
        print('A soma entre {} + {} é igual a {}'.format(valor1, valor2, total))
    elif opção == 2:
        # Product of the two operands.
        produto = valor1 * valor2
        print('Multiplicando {} x {} é igual a {}'.format(valor1, valor2, produto))
    elif opção == 3:
        # Report the larger of the two operands.
        if valor1 > valor2:
            maior = valor1
        else:
            maior = valor2
        print('O Maior número entre {} e {} foi o {}'.format(valor1, valor2, maior))
    elif opção == 4:
        # Replace both operands with fresh user input.
        print('Por favor Informe os número novamente: ')
        valor1 = int(input('Digite Primeiro valor: '))
        valor2 = int(input('Digite segundo valor: '))
    elif opção == 5:
        print('Finalizando.......')
        sleep(4)
    else:
        # Any other value: warn and loop back to the menu.
        print('Opção Invalida! Tente Novamente!! ')
    # Visual separator between menu rounds.
    print('=-=' * 10)
    sleep(2)
print('Fim do Programa! Volte sempre!!!')
| 32.324324 | 88 | 0.553512 | from time import sleep
valor1 = int(input('Digite Primeiro valor: '))
valor2 = int(input('Digite segundo valor: '))
opção = 0
while opção != 5:
print(''' [ 1 ] SOMAR
[ 2 ] MULTIPLICAR
[ 3 ] MAIOR
[ 4 ] NOVOS NÚMEROS
[ 5 ] SAIR DO PROGRAMA''')
opção = int(input('Qual opção você deseja ? '))
if opção == 1:
total = valor1 + valor2
print('A soma entre {} + {} é igual a {}'.format(valor1, valor2, total))
elif opção == 2:
produto = valor1 * valor2
print('Multiplicando {} x {} é igual a {}'.format(valor1, valor2, produto))
elif opção == 3:
if valor1 > valor2:
maior = valor1
else:
maior = valor2
print('O Maior número entre {} e {} foi o {}'.format(valor1, valor2, maior))
elif opção == 4:
print('Por favor Informe os número novamente: ')
valor1 = int(input('Digite Primeiro valor: '))
valor2 = int(input('Digite segundo valor: '))
elif opção == 5:
print('Finalizando.......')
sleep(4)
else:
print('Opção Invalida! Tente Novamente!! ')
print('=-=' * 10)
sleep(2)
print('Fim do Programa! Volte sempre!!!')
| true | true |
f71b8f02bb638c288dc1d7f5c04c314106795526 | 2,828 | py | Python | tests/providers/amazon/aws/operators/test_step_function_start_execution.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 15,947 | 2019-01-05T13:51:02.000Z | 2022-03-31T23:33:16.000Z | tests/providers/amazon/aws/operators/test_step_function_start_execution.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 14,603 | 2019-01-05T09:43:19.000Z | 2022-03-31T23:11:59.000Z | tests/providers/amazon/aws/operators/test_step_function_start_execution.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 8,429 | 2019-01-05T19:45:47.000Z | 2022-03-31T22:13:01.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest import mock
from unittest.mock import MagicMock
from airflow.providers.amazon.aws.operators.step_function_start_execution import (
StepFunctionStartExecutionOperator,
)
# Fixture values shared by every test case below.
TASK_ID = 'step_function_start_execution_task'
STATE_MACHINE_ARN = 'arn:aws:states:us-east-1:000000000000:stateMachine:pseudo-state-machine'
NAME = 'NAME'
INPUT = '{}'
AWS_CONN_ID = 'aws_non_default'
REGION_NAME = 'us-west-2'
class TestStepFunctionStartExecutionOperator(unittest.TestCase):
    """Unit tests for StepFunctionStartExecutionOperator."""

    def setUp(self):
        self.mock_context = MagicMock()

    @staticmethod
    def _build_operator():
        # Every test constructs the operator with the same fixture values.
        return StepFunctionStartExecutionOperator(
            task_id=TASK_ID,
            state_machine_arn=STATE_MACHINE_ARN,
            name=NAME,
            state_machine_input=INPUT,
            aws_conn_id=AWS_CONN_ID,
            region_name=REGION_NAME,
        )

    def test_init(self):
        # The constructor must store every argument unchanged.
        operator = self._build_operator()
        assert operator.task_id == TASK_ID
        assert operator.state_machine_arn == STATE_MACHINE_ARN
        assert operator.name == NAME
        assert operator.input == INPUT
        assert operator.aws_conn_id == AWS_CONN_ID
        assert operator.region_name == REGION_NAME

    @mock.patch('airflow.providers.amazon.aws.operators.step_function_start_execution.StepFunctionHook')
    def test_execute(self, mock_hook):
        # execute() must return whatever the hook's start_execution yields.
        execution_arn = (
            'arn:aws:states:us-east-1:123456789012:execution:'
            'pseudo-state-machine:020f5b16-b1a1-4149-946f-92dd32d97934'
        )
        mock_hook.return_value.start_execution.return_value = execution_arn
        operator = self._build_operator()
        assert operator.execute(self.mock_context) == execution_arn
| 33.666667 | 104 | 0.703324 |
import unittest
from unittest import mock
from unittest.mock import MagicMock
from airflow.providers.amazon.aws.operators.step_function_start_execution import (
StepFunctionStartExecutionOperator,
)
TASK_ID = 'step_function_start_execution_task'
STATE_MACHINE_ARN = 'arn:aws:states:us-east-1:000000000000:stateMachine:pseudo-state-machine'
NAME = 'NAME'
INPUT = '{}'
AWS_CONN_ID = 'aws_non_default'
REGION_NAME = 'us-west-2'
class TestStepFunctionStartExecutionOperator(unittest.TestCase):
def setUp(self):
self.mock_context = MagicMock()
def test_init(self):
operator = StepFunctionStartExecutionOperator(
task_id=TASK_ID,
state_machine_arn=STATE_MACHINE_ARN,
name=NAME,
state_machine_input=INPUT,
aws_conn_id=AWS_CONN_ID,
region_name=REGION_NAME,
)
assert TASK_ID == operator.task_id
assert STATE_MACHINE_ARN == operator.state_machine_arn
assert NAME == operator.name
assert INPUT == operator.input
assert AWS_CONN_ID == operator.aws_conn_id
assert REGION_NAME == operator.region_name
@mock.patch('airflow.providers.amazon.aws.operators.step_function_start_execution.StepFunctionHook')
def test_execute(self, mock_hook):
hook_response = (
'arn:aws:states:us-east-1:123456789012:execution:'
'pseudo-state-machine:020f5b16-b1a1-4149-946f-92dd32d97934'
)
hook_instance = mock_hook.return_value
hook_instance.start_execution.return_value = hook_response
operator = StepFunctionStartExecutionOperator(
task_id=TASK_ID,
state_machine_arn=STATE_MACHINE_ARN,
name=NAME,
state_machine_input=INPUT,
aws_conn_id=AWS_CONN_ID,
region_name=REGION_NAME,
)
result = operator.execute(self.mock_context)
assert hook_response == result
| true | true |
f71b8fffbe1ae2ccf4e9b4742ebf89d95ffbb7f6 | 39 | py | Python | tfcv/modeling/modules/attention/__init__.py | xingzhaolee/tfcv | 27b6a4e8e93cf9b5fecedd6c259118f64b74e263 | [
"MIT"
] | null | null | null | tfcv/modeling/modules/attention/__init__.py | xingzhaolee/tfcv | 27b6a4e8e93cf9b5fecedd6c259118f64b74e263 | [
"MIT"
] | null | null | null | tfcv/modeling/modules/attention/__init__.py | xingzhaolee/tfcv | 27b6a4e8e93cf9b5fecedd6c259118f64b74e263 | [
"MIT"
] | null | null | null | from .bam import *
from .cbam import *
| 13 | 19 | 0.692308 | from .bam import *
from .cbam import *
| true | true |
f71b90fd99c79c76b18913c0621289947d10b94f | 2,923 | py | Python | pkg/distro/packaging_test.py | psigen/rules_pkg | b20c45f292be6c74d2f0d829ba02c83dbe271195 | [
"Apache-2.0"
] | null | null | null | pkg/distro/packaging_test.py | psigen/rules_pkg | b20c45f292be6c74d2f0d829ba02c83dbe271195 | [
"Apache-2.0"
] | null | null | null | pkg/distro/packaging_test.py | psigen/rules_pkg | b20c45f292be6c74d2f0d829ba02c83dbe271195 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that the rules_pkg distribution is usable."""
import os
import subprocess
import unittest
from bazel_tools.tools.python.runfiles import runfiles
from releasing import release_tools
from distro import release_version
_VERBOSE = True
class PackagingTest(unittest.TestCase):
"""Test the distribution packaging."""
def setUp(self):
self.data_files = runfiles.Create()
self.repo = 'rules_pkg'
self.version = release_version.RELEASE_VERSION
def testBuild(self):
# Set up a fresh Bazel workspace using the currently build repo.
tempdir = os.path.join(os.environ['TEST_TMPDIR'], 'build')
if not os.path.exists(tempdir):
os.makedirs(tempdir)
with open(os.path.join(tempdir, 'WORKSPACE'), 'w') as workspace:
file_name = release_tools.package_basename(self.repo, self.version)
local_path = runfiles.Create().Rlocation(
os.path.join('rules_pkg', 'distro', file_name))
sha256 = release_tools.get_package_sha256(local_path)
workspace_content = '\n'.join((
'workspace(name = "test_rules_pkg_packaging")',
release_tools.workspace_content(
'file://%s' % local_path, self.repo, sha256,
deps_method='rules_pkg_dependencies'
)
))
workspace.write(workspace_content)
if _VERBOSE:
print('=== WORKSPACE ===')
print(workspace_content)
# We do a little dance of renaming *.tmpl to *, mostly so that we do not
# have a BUILD file in testdata, which would create a package boundary.
def CopyTestFile(source_name, dest_name):
source_path = self.data_files.Rlocation(
os.path.join('rules_pkg', 'distro', 'testdata', source_name))
with open(source_path) as inp:
with open(os.path.join(tempdir, dest_name), 'w') as out:
content = inp.read()
out.write(content)
CopyTestFile('BUILD.tmpl', 'BUILD')
os.chdir(tempdir)
build_result = subprocess.check_output(['bazel', 'build', ':dummy_tar'])
if _VERBOSE:
print('=== Build Result ===')
print(build_result)
# TODO(aiuto): Find tar in a disciplined way
content = subprocess.check_output(
['tar', 'tzf', 'bazel-bin/dummy_tar.tar.gz'])
self.assertEqual(b'./\n./BUILD\n', content)
if __name__ == '__main__':
unittest.main()
| 34.797619 | 76 | 0.689702 |
import os
import subprocess
import unittest
from bazel_tools.tools.python.runfiles import runfiles
from releasing import release_tools
from distro import release_version
_VERBOSE = True
class PackagingTest(unittest.TestCase):
def setUp(self):
self.data_files = runfiles.Create()
self.repo = 'rules_pkg'
self.version = release_version.RELEASE_VERSION
def testBuild(self):
tempdir = os.path.join(os.environ['TEST_TMPDIR'], 'build')
if not os.path.exists(tempdir):
os.makedirs(tempdir)
with open(os.path.join(tempdir, 'WORKSPACE'), 'w') as workspace:
file_name = release_tools.package_basename(self.repo, self.version)
local_path = runfiles.Create().Rlocation(
os.path.join('rules_pkg', 'distro', file_name))
sha256 = release_tools.get_package_sha256(local_path)
workspace_content = '\n'.join((
'workspace(name = "test_rules_pkg_packaging")',
release_tools.workspace_content(
'file://%s' % local_path, self.repo, sha256,
deps_method='rules_pkg_dependencies'
)
))
workspace.write(workspace_content)
if _VERBOSE:
print('=== WORKSPACE ===')
print(workspace_content)
def CopyTestFile(source_name, dest_name):
source_path = self.data_files.Rlocation(
os.path.join('rules_pkg', 'distro', 'testdata', source_name))
with open(source_path) as inp:
with open(os.path.join(tempdir, dest_name), 'w') as out:
content = inp.read()
out.write(content)
CopyTestFile('BUILD.tmpl', 'BUILD')
os.chdir(tempdir)
build_result = subprocess.check_output(['bazel', 'build', ':dummy_tar'])
if _VERBOSE:
print('=== Build Result ===')
print(build_result)
content = subprocess.check_output(
['tar', 'tzf', 'bazel-bin/dummy_tar.tar.gz'])
self.assertEqual(b'./\n./BUILD\n', content)
if __name__ == '__main__':
unittest.main()
| true | true |
f71b9174bdbad3aa8de9187fea7cf060e68be521 | 277,501 | py | Python | sdk/eventgrid/azure-eventgrid/azure/eventgrid/_generated/models/_models.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | sdk/eventgrid/azure-eventgrid/azure/eventgrid/_generated/models/_models.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | sdk/eventgrid/azure-eventgrid/azure/eventgrid/_generated/models/_models.py | mtin/azure-sdk-for-python | 08d7f8f76d1c9eca230cbcecb3c42eb92817bcb8 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class AcsChatEventBaseProperties(msrest.serialization.Model):
"""Schema of common properties of all chat events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatEventBaseProperties, self).__init__(**kwargs)
self.recipient_communication_identifier = kwargs.get('recipient_communication_identifier', None)
self.transaction_id = kwargs.get('transaction_id', None)
self.thread_id = kwargs.get('thread_id', None)
class AcsChatEventInThreadBaseProperties(msrest.serialization.Model):
"""Schema of common properties of all thread-level chat events.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatEventInThreadBaseProperties, self).__init__(**kwargs)
self.transaction_id = kwargs.get('transaction_id', None)
self.thread_id = kwargs.get('thread_id', None)
class AcsChatMessageEventBaseProperties(AcsChatEventBaseProperties):
"""Schema of common properties of all chat message events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEventBaseProperties, self).__init__(**kwargs)
self.message_id = kwargs.get('message_id', None)
self.sender_communication_identifier = kwargs.get('sender_communication_identifier', None)
self.sender_display_name = kwargs.get('sender_display_name', None)
self.compose_time = kwargs.get('compose_time', None)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', None)
class AcsChatMessageDeletedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageDeleted event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param delete_time: The time at which the message was deleted.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageDeletedEventData, self).__init__(**kwargs)
self.delete_time = kwargs.get('delete_time', None)
class AcsChatMessageEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
"""Schema of common properties of all thread-level chat message events.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEventInThreadBaseProperties, self).__init__(**kwargs)
self.message_id = kwargs.get('message_id', None)
self.sender_communication_identifier = kwargs.get('sender_communication_identifier', None)
self.sender_display_name = kwargs.get('sender_display_name', None)
self.compose_time = kwargs.get('compose_time', None)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', None)
class AcsChatMessageDeletedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageDeletedInThread event.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param delete_time: The time at which the message was deleted.
:type delete_time: ~datetime.datetime
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageDeletedInThreadEventData, self).__init__(**kwargs)
self.delete_time = kwargs.get('delete_time', None)
class AcsChatMessageEditedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageEdited event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
:param edit_time: The time at which the message was edited.
:type edit_time: ~datetime.datetime
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEditedEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
self.edit_time = kwargs.get('edit_time', None)
class AcsChatMessageEditedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageEditedInThread event.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
:param edit_time: The time at which the message was edited.
:type edit_time: ~datetime.datetime
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageEditedInThreadEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
self.edit_time = kwargs.get('edit_time', None)
class AcsChatMessageReceivedEventData(AcsChatMessageEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageReceived event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageReceivedEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
class AcsChatMessageReceivedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatMessageReceivedInThread event.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param message_id: The chat message id.
:type message_id: str
:param sender_communication_identifier: The communication identifier of the sender.
:type sender_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param sender_display_name: The display name of the sender.
:type sender_display_name: str
:param compose_time: The original compose time of the message.
:type compose_time: ~datetime.datetime
:param type: The type of the message.
:type type: str
:param version: The version of the message.
:type version: long
:param message_body: The body of the chat message.
:type message_body: str
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'long'},
'message_body': {'key': 'messageBody', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AcsChatMessageReceivedInThreadEventData, self).__init__(**kwargs)
self.message_body = kwargs.get('message_body', None)
class AcsChatParticipantAddedToThreadEventData(AcsChatEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadParticipantAdded event.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param time: The time at which the user was added to the thread.
:type time: ~datetime.datetime
:param added_by_communication_identifier: The communication identifier of the user who added
the user.
:type added_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_added: The details of the user who was added.
:type participant_added: ~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantAddedToThreadEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier', None)
self.participant_added = kwargs.get('participant_added', None)
self.version = kwargs.get('version', None)
class AcsChatThreadEventBaseProperties(AcsChatEventBaseProperties):
"""Schema of common properties of all chat thread events.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatThreadEventBaseProperties, self).__init__(**kwargs)
self.create_time = kwargs.get('create_time', None)
self.version = kwargs.get('version', None)
class AcsChatParticipantAddedToThreadWithUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatParticipantAddedToThreadWithUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param time: The time at which the user was added to the thread.
:type time: ~datetime.datetime
:param added_by_communication_identifier: The communication identifier of the user who added
the user.
:type added_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_added: The details of the user who was added.
:type participant_added: ~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'time': {'key': 'time', 'type': 'iso-8601'},
'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantAddedToThreadWithUserEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier', None)
self.participant_added = kwargs.get('participant_added', None)
class AcsChatParticipantRemovedFromThreadEventData(AcsChatEventInThreadBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatThreadParticipantRemoved event.
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param time: The time at which the user was removed to the thread.
:type time: ~datetime.datetime
:param removed_by_communication_identifier: The communication identifier of the user who
removed the user.
:type removed_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_removed: The details of the user who was removed.
:type participant_removed:
~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
:param version: The version of the thread.
:type version: long
"""
_attribute_map = {
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
'version': {'key': 'version', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantRemovedFromThreadEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier', None)
self.participant_removed = kwargs.get('participant_removed', None)
self.version = kwargs.get('version', None)
class AcsChatParticipantRemovedFromThreadWithUserEventData(AcsChatThreadEventBaseProperties):
"""Schema of the Data property of an EventGridEvent for a Microsoft.Communication.ChatParticipantRemovedFromThreadWithUser event.
:param recipient_communication_identifier: The communication identifier of the target user.
:type recipient_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param transaction_id: The transaction id will be used as co-relation vector.
:type transaction_id: str
:param thread_id: The chat thread id.
:type thread_id: str
:param create_time: The original creation time of the thread.
:type create_time: ~datetime.datetime
:param version: The version of the thread.
:type version: long
:param time: The time at which the user was removed to the thread.
:type time: ~datetime.datetime
:param removed_by_communication_identifier: The communication identifier of the user who
removed the user.
:type removed_by_communication_identifier:
~event_grid_publisher_client.models.CommunicationIdentifierModel
:param participant_removed: The details of the user who was removed.
:type participant_removed:
~event_grid_publisher_client.models.AcsChatThreadParticipantProperties
"""
_attribute_map = {
'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'transaction_id': {'key': 'transactionId', 'type': 'str'},
'thread_id': {'key': 'threadId', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'iso-8601'},
'version': {'key': 'version', 'type': 'long'},
'time': {'key': 'time', 'type': 'iso-8601'},
'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
}
def __init__(
self,
**kwargs
):
super(AcsChatParticipantRemovedFromThreadWithUserEventData, self).__init__(**kwargs)
self.time = kwargs.get('time', None)
self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier', None)
self.participant_removed = kwargs.get('participant_removed', None)
class AcsChatThreadEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
    """Base schema shared by every chat-thread event delivered in a thread context.

    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    """

    _attribute_map = {
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadEventInThreadBaseProperties, self).__init__(**kwargs)
        self.create_time = kwargs.get("create_time")
        self.version = kwargs.get("version")
class AcsChatThreadCreatedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadCreated Event Grid event.

    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param created_by_communication_identifier: Communication identifier of the
     user that created the thread.
    :type created_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param properties: Properties of the thread.
    :type properties: dict[str, object]
    :param participants: Properties of each participant in the thread.
    :type participants:
     list[~event_grid_publisher_client.models.AcsChatThreadParticipantProperties]
    """

    _attribute_map = {
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "created_by_communication_identifier": {"key": "createdByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "properties": {"key": "properties", "type": "{object}"},
        "participants": {"key": "participants", "type": "[AcsChatThreadParticipantProperties]"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadCreatedEventData, self).__init__(**kwargs)
        self.created_by_communication_identifier = kwargs.get("created_by_communication_identifier")
        self.properties = kwargs.get("properties")
        self.participants = kwargs.get("participants")
class AcsChatThreadCreatedWithUserEventData(AcsChatThreadEventBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadCreatedWithUser Event Grid event.

    :param recipient_communication_identifier: Communication identifier of the
     target user.
    :type recipient_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param created_by_communication_identifier: Communication identifier of the
     user that created the thread.
    :type created_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param properties: Properties of the thread.
    :type properties: dict[str, object]
    :param participants: Properties of each participant in the thread.
    :type participants:
     list[~event_grid_publisher_client.models.AcsChatThreadParticipantProperties]
    """

    _attribute_map = {
        "recipient_communication_identifier": {"key": "recipientCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "created_by_communication_identifier": {"key": "createdByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "properties": {"key": "properties", "type": "{object}"},
        "participants": {"key": "participants", "type": "[AcsChatThreadParticipantProperties]"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadCreatedWithUserEventData, self).__init__(**kwargs)
        self.created_by_communication_identifier = kwargs.get("created_by_communication_identifier")
        self.properties = kwargs.get("properties")
        self.participants = kwargs.get("participants")
class AcsChatThreadDeletedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadDeleted Event Grid event.

    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param deleted_by_communication_identifier: Communication identifier of the
     user that deleted the thread.
    :type deleted_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param delete_time: Time at which the thread was deleted.
    :type delete_time: ~datetime.datetime
    """

    _attribute_map = {
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "deleted_by_communication_identifier": {"key": "deletedByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "delete_time": {"key": "deleteTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadDeletedEventData, self).__init__(**kwargs)
        self.deleted_by_communication_identifier = kwargs.get("deleted_by_communication_identifier")
        self.delete_time = kwargs.get("delete_time")
class AcsChatThreadParticipantProperties(msrest.serialization.Model):
    """Properties describing one participant of a chat thread.

    :param display_name: Display name of the user.
    :type display_name: str
    :param participant_communication_identifier: Communication identifier of
     the user.
    :type participant_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    """

    _attribute_map = {
        "display_name": {"key": "displayName", "type": "str"},
        "participant_communication_identifier": {"key": "participantCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadParticipantProperties, self).__init__(**kwargs)
        self.display_name = kwargs.get("display_name")
        self.participant_communication_identifier = kwargs.get("participant_communication_identifier")
class AcsChatThreadPropertiesUpdatedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadPropertiesUpdated Event Grid event.

    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param edited_by_communication_identifier: Communication identifier of the
     user that updated the thread properties.
    :type edited_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param edit_time: Time at which the thread properties were updated.
    :type edit_time: ~datetime.datetime
    :param properties: The updated thread properties.
    :type properties: dict[str, object]
    """

    _attribute_map = {
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "edited_by_communication_identifier": {"key": "editedByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "edit_time": {"key": "editTime", "type": "iso-8601"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadPropertiesUpdatedEventData, self).__init__(**kwargs)
        self.edited_by_communication_identifier = kwargs.get("edited_by_communication_identifier")
        self.edit_time = kwargs.get("edit_time")
        self.properties = kwargs.get("properties")
class AcsChatThreadPropertiesUpdatedPerUserEventData(AcsChatThreadEventBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadPropertiesUpdatedPerUser Event Grid event.

    :param recipient_communication_identifier: Communication identifier of the
     target user.
    :type recipient_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param edited_by_communication_identifier: Communication identifier of the
     user that updated the thread properties.
    :type edited_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param edit_time: Time at which the thread properties were updated.
    :type edit_time: ~datetime.datetime
    :param properties: The updated thread properties.
    :type properties: dict[str, object]
    """

    _attribute_map = {
        "recipient_communication_identifier": {"key": "recipientCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "edited_by_communication_identifier": {"key": "editedByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "edit_time": {"key": "editTime", "type": "iso-8601"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadPropertiesUpdatedPerUserEventData, self).__init__(**kwargs)
        self.edited_by_communication_identifier = kwargs.get("edited_by_communication_identifier")
        self.edit_time = kwargs.get("edit_time")
        self.properties = kwargs.get("properties")
class AcsChatThreadWithUserDeletedEventData(AcsChatThreadEventBaseProperties):
    """Data payload of a Microsoft.Communication.ChatThreadWithUserDeleted Event Grid event.

    :param recipient_communication_identifier: Communication identifier of the
     target user.
    :type recipient_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param transaction_id: Transaction id, used as a co-relation vector.
    :type transaction_id: str
    :param thread_id: Id of the chat thread.
    :type thread_id: str
    :param create_time: Original creation time of the thread.
    :type create_time: ~datetime.datetime
    :param version: Version of the thread.
    :type version: long
    :param deleted_by_communication_identifier: Communication identifier of the
     user that deleted the thread.
    :type deleted_by_communication_identifier:
     ~event_grid_publisher_client.models.CommunicationIdentifierModel
    :param delete_time: Time at which the thread was deleted.
    :type delete_time: ~datetime.datetime
    """

    _attribute_map = {
        "recipient_communication_identifier": {"key": "recipientCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "transaction_id": {"key": "transactionId", "type": "str"},
        "thread_id": {"key": "threadId", "type": "str"},
        "create_time": {"key": "createTime", "type": "iso-8601"},
        "version": {"key": "version", "type": "long"},
        "deleted_by_communication_identifier": {"key": "deletedByCommunicationIdentifier", "type": "CommunicationIdentifierModel"},
        "delete_time": {"key": "deleteTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        super(AcsChatThreadWithUserDeletedEventData, self).__init__(**kwargs)
        self.deleted_by_communication_identifier = kwargs.get("deleted_by_communication_identifier")
        self.delete_time = kwargs.get("delete_time")
class AcsRecordingChunkInfoProperties(msrest.serialization.Model):
    """Properties of a single recording chunk.

    :param document_id: Document id of the recording chunk.
    :type document_id: str
    :param index: Index of the recording chunk.
    :type index: long
    :param end_reason: Reason the recording chunk ended.
    :type end_reason: str
    """

    _attribute_map = {
        "document_id": {"key": "documentId", "type": "str"},
        "index": {"key": "index", "type": "long"},
        "end_reason": {"key": "endReason", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AcsRecordingChunkInfoProperties, self).__init__(**kwargs)
        self.document_id = kwargs.get("document_id")
        self.index = kwargs.get("index")
        self.end_reason = kwargs.get("end_reason")
class AcsRecordingFileStatusUpdatedEventData(msrest.serialization.Model):
    """Data payload of a Microsoft.Communication.RecordingFileStatusUpdated Event Grid event.

    :param recording_storage_info: Details of the recording storage
     information.
    :type recording_storage_info:
     ~event_grid_publisher_client.models.AcsRecordingStorageInfoProperties
    :param recording_start_time: Time at which the recording started.
    :type recording_start_time: ~datetime.datetime
    :param recording_duration_ms: Duration of the recording in milliseconds.
    :type recording_duration_ms: long
    :param session_end_reason: Reason the recording session ended.
    :type session_end_reason: str
    """

    _attribute_map = {
        "recording_storage_info": {"key": "recordingStorageInfo", "type": "AcsRecordingStorageInfoProperties"},
        "recording_start_time": {"key": "recordingStartTime", "type": "iso-8601"},
        "recording_duration_ms": {"key": "recordingDurationMs", "type": "long"},
        "session_end_reason": {"key": "sessionEndReason", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AcsRecordingFileStatusUpdatedEventData, self).__init__(**kwargs)
        self.recording_storage_info = kwargs.get("recording_storage_info")
        self.recording_start_time = kwargs.get("recording_start_time")
        self.recording_duration_ms = kwargs.get("recording_duration_ms")
        self.session_end_reason = kwargs.get("session_end_reason")
class AcsRecordingStorageInfoProperties(msrest.serialization.Model):
    """Storage information for a recording.

    :param recording_chunks: Details of each recording chunk.
    :type recording_chunks:
     list[~event_grid_publisher_client.models.AcsRecordingChunkInfoProperties]
    """

    _attribute_map = {
        "recording_chunks": {"key": "recordingChunks", "type": "[AcsRecordingChunkInfoProperties]"},
    }

    def __init__(self, **kwargs):
        super(AcsRecordingStorageInfoProperties, self).__init__(**kwargs)
        self.recording_chunks = kwargs.get("recording_chunks")
class AcsSmsDeliveryAttemptProperties(msrest.serialization.Model):
    """Details of one SMS delivery attempt.

    :param timestamp: Timestamp of the delivery attempt.
    :type timestamp: ~datetime.datetime
    :param segments_succeeded: Count of segments delivered successfully.
    :type segments_succeeded: int
    :param segments_failed: Count of segments whose delivery failed.
    :type segments_failed: int
    """

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "segments_succeeded": {"key": "segmentsSucceeded", "type": "int"},
        "segments_failed": {"key": "segmentsFailed", "type": "int"},
    }

    def __init__(self, **kwargs):
        super(AcsSmsDeliveryAttemptProperties, self).__init__(**kwargs)
        self.timestamp = kwargs.get("timestamp")
        self.segments_succeeded = kwargs.get("segments_succeeded")
        self.segments_failed = kwargs.get("segments_failed")
class AcsSmsEventBaseProperties(msrest.serialization.Model):
    """Base schema shared by all SMS events.

    :param message_id: Identity of the SMS message.
    :type message_id: str
    :param from_property: Identity of the SMS sender.
    :type from_property: str
    :param to: Identity of the SMS receiver.
    :type to: str
    """

    _attribute_map = {
        "message_id": {"key": "messageId", "type": "str"},
        "from_property": {"key": "from", "type": "str"},
        "to": {"key": "to", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AcsSmsEventBaseProperties, self).__init__(**kwargs)
        self.message_id = kwargs.get("message_id")
        self.from_property = kwargs.get("from_property")
        self.to = kwargs.get("to")
class AcsSmsDeliveryReportReceivedEventData(AcsSmsEventBaseProperties):
    """Data payload of a Microsoft.Communication.SMSDeliveryReportReceived Event Grid event.

    :param message_id: Identity of the SMS message.
    :type message_id: str
    :param from_property: Identity of the SMS sender.
    :type from_property: str
    :param to: Identity of the SMS receiver.
    :type to: str
    :param delivery_status: Status of the delivery.
    :type delivery_status: str
    :param delivery_status_details: Details about the delivery status.
    :type delivery_status_details: str
    :param delivery_attempts: Details of each delivery attempt made.
    :type delivery_attempts:
     list[~event_grid_publisher_client.models.AcsSmsDeliveryAttemptProperties]
    :param received_timestamp: Time at which the delivery report was received.
    :type received_timestamp: ~datetime.datetime
    :param tag: Customer content.
    :type tag: str
    """

    _attribute_map = {
        "message_id": {"key": "messageId", "type": "str"},
        "from_property": {"key": "from", "type": "str"},
        "to": {"key": "to", "type": "str"},
        "delivery_status": {"key": "deliveryStatus", "type": "str"},
        "delivery_status_details": {"key": "deliveryStatusDetails", "type": "str"},
        "delivery_attempts": {"key": "deliveryAttempts", "type": "[AcsSmsDeliveryAttemptProperties]"},
        "received_timestamp": {"key": "receivedTimestamp", "type": "iso-8601"},
        "tag": {"key": "tag", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AcsSmsDeliveryReportReceivedEventData, self).__init__(**kwargs)
        self.delivery_status = kwargs.get("delivery_status")
        self.delivery_status_details = kwargs.get("delivery_status_details")
        self.delivery_attempts = kwargs.get("delivery_attempts")
        self.received_timestamp = kwargs.get("received_timestamp")
        self.tag = kwargs.get("tag")
class AcsSmsReceivedEventData(AcsSmsEventBaseProperties):
    """Data payload of a Microsoft.Communication.SMSReceived Event Grid event.

    :param message_id: Identity of the SMS message.
    :type message_id: str
    :param from_property: Identity of the SMS sender.
    :type from_property: str
    :param to: Identity of the SMS receiver.
    :type to: str
    :param message: Content of the SMS.
    :type message: str
    :param received_timestamp: Time at which the SMS was received.
    :type received_timestamp: ~datetime.datetime
    """

    _attribute_map = {
        "message_id": {"key": "messageId", "type": "str"},
        "from_property": {"key": "from", "type": "str"},
        "to": {"key": "to", "type": "str"},
        "message": {"key": "message", "type": "str"},
        "received_timestamp": {"key": "receivedTimestamp", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        super(AcsSmsReceivedEventData, self).__init__(**kwargs)
        self.message = kwargs.get("message")
        self.received_timestamp = kwargs.get("received_timestamp")
class AppConfigurationKeyValueDeletedEventData(msrest.serialization.Model):
    """Data payload of a Microsoft.AppConfiguration.KeyValueDeleted Event Grid event.

    :param key: Key identifying the deleted key-value.
    :type key: str
    :param label: Label, if any, identifying the deleted key-value.
    :type label: str
    :param etag: Etag of the deleted key-value.
    :type etag: str
    :param sync_token: Sync token representing the server state after the
     event.
    :type sync_token: str
    """

    _attribute_map = {
        "key": {"key": "key", "type": "str"},
        "label": {"key": "label", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "sync_token": {"key": "syncToken", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AppConfigurationKeyValueDeletedEventData, self).__init__(**kwargs)
        self.key = kwargs.get("key")
        self.label = kwargs.get("label")
        self.etag = kwargs.get("etag")
        self.sync_token = kwargs.get("sync_token")
class AppConfigurationKeyValueModifiedEventData(msrest.serialization.Model):
    """Data payload of a Microsoft.AppConfiguration.KeyValueModified Event Grid event.

    :param key: Key identifying the modified key-value.
    :type key: str
    :param label: Label, if any, identifying the modified key-value.
    :type label: str
    :param etag: Etag representing the new state of the key-value.
    :type etag: str
    :param sync_token: Sync token representing the server state after the
     event.
    :type sync_token: str
    """

    _attribute_map = {
        "key": {"key": "key", "type": "str"},
        "label": {"key": "label", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "sync_token": {"key": "syncToken", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AppConfigurationKeyValueModifiedEventData, self).__init__(**kwargs)
        self.key = kwargs.get("key")
        self.label = kwargs.get("label")
        self.etag = kwargs.get("etag")
        self.sync_token = kwargs.get("sync_token")
class AppEventTypeDetail(msrest.serialization.Model):
    """Detail of an action performed on the app.

    :param action: Type of action of the operation. Possible values include:
     "Restarted", "Stopped", "ChangedAppSettings", "Started", "Completed",
     "Failed".
    :type action: str or ~event_grid_publisher_client.models.AppAction
    """

    _attribute_map = {
        "action": {"key": "action", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AppEventTypeDetail, self).__init__(**kwargs)
        self.action = kwargs.get("action")
class AppServicePlanEventTypeDetail(msrest.serialization.Model):
    """Detail of an action performed on the app service plan.

    :param stamp_kind: Kind of environment hosting the app service plan.
     Possible values include: "Public", "AseV1", "AseV2".
    :type stamp_kind: str or ~event_grid_publisher_client.models.StampKind
    :param action: Type of action on the app service plan. Possible values
     include: "Updated".
    :type action: str or ~event_grid_publisher_client.models.AppServicePlanAction
    :param status: Asynchronous status of the operation on the app service
     plan. Possible values include: "Started", "Completed", "Failed".
    :type status: str or ~event_grid_publisher_client.models.AsyncStatus
    """

    _attribute_map = {
        "stamp_kind": {"key": "stampKind", "type": "str"},
        "action": {"key": "action", "type": "str"},
        "status": {"key": "status", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(AppServicePlanEventTypeDetail, self).__init__(**kwargs)
        self.stamp_kind = kwargs.get("stamp_kind")
        self.action = kwargs.get("action")
        self.status = kwargs.get("status")
class CloudEvent(msrest.serialization.Model):
    """An event published to an Event Grid topic in the CloudEvents 1.0 schema.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized into this collection.
    :type additional_properties: dict[str, object]
    :param id: Required. Identifier for the event; the (id, source) pair must
     be unique per distinct event.
    :type id: str
    :param source: Required. Context in which the event happened; the
     (id, source) pair must be unique per distinct event.
    :type source: str
    :param data: Event data specific to the event type.
    :type data: object
    :param data_base64: Event data specific to the event type, base64-encoded.
    :type data_base64: bytearray
    :param type: Required. Type of event related to the originating
     occurrence.
    :type type: str
    :param time: Time (UTC) the event was generated, in RFC3339 format.
    :type time: ~datetime.datetime
    :param specversion: Required. Version of the CloudEvents specification the
     event uses.
    :type specversion: str
    :param dataschema: Schema that the data adheres to.
    :type dataschema: str
    :param datacontenttype: Content type of the data value.
    :type datacontenttype: str
    :param subject: Subject of the event in the context of the event producer
     (identified by source).
    :type subject: str
    """

    _validation = {
        "id": {"required": True},
        "source": {"required": True},
        "type": {"required": True},
        "specversion": {"required": True},
    }

    _attribute_map = {
        "additional_properties": {"key": "", "type": "{object}"},
        "id": {"key": "id", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "data": {"key": "data", "type": "object"},
        "data_base64": {"key": "data_base64", "type": "bytearray"},
        "type": {"key": "type", "type": "str"},
        "time": {"key": "time", "type": "iso-8601"},
        "specversion": {"key": "specversion", "type": "str"},
        "dataschema": {"key": "dataschema", "type": "str"},
        "datacontenttype": {"key": "datacontenttype", "type": "str"},
        "subject": {"key": "subject", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(CloudEvent, self).__init__(**kwargs)
        self.additional_properties = kwargs.get("additional_properties")
        # Required fields are indexed directly so a missing value raises KeyError.
        self.id = kwargs["id"]
        self.source = kwargs["source"]
        self.data = kwargs.get("data")
        self.data_base64 = kwargs.get("data_base64")
        self.type = kwargs["type"]
        self.time = kwargs.get("time")
        self.specversion = kwargs["specversion"]
        self.dataschema = kwargs.get("dataschema")
        self.datacontenttype = kwargs.get("datacontenttype")
        self.subject = kwargs.get("subject")
class CommunicationIdentifierModel(msrest.serialization.Model):
    """Identifies a participant in Azure Communication Services — for example a phone number or an Azure communication user. Interpret this model as a union: apart from rawId, at most one further property may be set.

    :param raw_id: Raw id of the identifier. Optional in requests, required in
     responses.
    :type raw_id: str
    :param communication_user: The communication user.
    :type communication_user: ~event_grid_publisher_client.models.CommunicationUserIdentifierModel
    :param phone_number: The phone number.
    :type phone_number: ~event_grid_publisher_client.models.PhoneNumberIdentifierModel
    :param microsoft_teams_user: The Microsoft Teams user.
    :type microsoft_teams_user:
     ~event_grid_publisher_client.models.MicrosoftTeamsUserIdentifierModel
    """

    _attribute_map = {
        "raw_id": {"key": "rawId", "type": "str"},
        "communication_user": {"key": "communicationUser", "type": "CommunicationUserIdentifierModel"},
        "phone_number": {"key": "phoneNumber", "type": "PhoneNumberIdentifierModel"},
        "microsoft_teams_user": {"key": "microsoftTeamsUser", "type": "MicrosoftTeamsUserIdentifierModel"},
    }

    def __init__(self, **kwargs):
        super(CommunicationIdentifierModel, self).__init__(**kwargs)
        self.raw_id = kwargs.get("raw_id")
        self.communication_user = kwargs.get("communication_user")
        self.phone_number = kwargs.get("phone_number")
        self.microsoft_teams_user = kwargs.get("microsoft_teams_user")
class CommunicationUserIdentifierModel(msrest.serialization.Model):
    """A user created within an Azure Communication Services resource.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. Id of the communication user.
    :type id: str
    """

    _validation = {
        "id": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(CommunicationUserIdentifierModel, self).__init__(**kwargs)
        # Required field: direct indexing raises KeyError when absent.
        self.id = kwargs["id"]
class ContainerRegistryArtifactEventData(msrest.serialization.Model):
    """Content of a container-registry artifact event request message.

    :param id: The event id.
    :type id: str
    :param timestamp: Time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: Action that encompasses the provided event.
    :type action: str
    :param target: Target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "action": {"key": "action", "type": "str"},
        "target": {"key": "target", "type": "ContainerRegistryArtifactEventTarget"},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryArtifactEventData, self).__init__(**kwargs)
        self.id = kwargs.get("id")
        self.timestamp = kwargs.get("timestamp")
        self.action = kwargs.get("action")
        self.target = kwargs.get("target")
class ContainerRegistryArtifactEventTarget(msrest.serialization.Model):
    """Target artifact of a container-registry event.

    :param media_type: MIME type of the artifact.
    :type media_type: str
    :param size: Size of the artifact in bytes.
    :type size: long
    :param digest: Digest of the artifact.
    :type digest: str
    :param repository: Repository name of the artifact.
    :type repository: str
    :param tag: Tag of the artifact.
    :type tag: str
    :param name: Name of the artifact.
    :type name: str
    :param version: Version of the artifact.
    :type version: str
    """

    _attribute_map = {
        "media_type": {"key": "mediaType", "type": "str"},
        "size": {"key": "size", "type": "long"},
        "digest": {"key": "digest", "type": "str"},
        "repository": {"key": "repository", "type": "str"},
        "tag": {"key": "tag", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "version": {"key": "version", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryArtifactEventTarget, self).__init__(**kwargs)
        self.media_type = kwargs.get("media_type")
        self.size = kwargs.get("size")
        self.digest = kwargs.get("digest")
        self.repository = kwargs.get("repository")
        self.tag = kwargs.get("tag")
        self.name = kwargs.get("name")
        self.version = kwargs.get("version")
class ContainerRegistryChartDeletedEventData(ContainerRegistryArtifactEventData):
    """Data payload of a Microsoft.ContainerRegistry.ChartDeleted Event Grid event.

    :param id: The event id.
    :type id: str
    :param timestamp: Time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: Action that encompasses the provided event.
    :type action: str
    :param target: Target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "action": {"key": "action", "type": "str"},
        "target": {"key": "target", "type": "ContainerRegistryArtifactEventTarget"},
    }

    def __init__(self, **kwargs):
        # All attributes are populated by the base class.
        super(ContainerRegistryChartDeletedEventData, self).__init__(**kwargs)
class ContainerRegistryChartPushedEventData(ContainerRegistryArtifactEventData):
    """Data payload of a Microsoft.ContainerRegistry.ChartPushed Event Grid event.

    :param id: The event id.
    :type id: str
    :param timestamp: Time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: Action that encompasses the provided event.
    :type action: str
    :param target: Target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryArtifactEventTarget
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "action": {"key": "action", "type": "str"},
        "target": {"key": "target", "type": "ContainerRegistryArtifactEventTarget"},
    }

    def __init__(self, **kwargs):
        # All attributes are populated by the base class.
        super(ContainerRegistryChartPushedEventData, self).__init__(**kwargs)
class ContainerRegistryEventActor(msrest.serialization.Model):
    """The agent that initiated the event; for most situations this comes from
    the authorization context of the request.

    :param name: The subject or username associated with the request context
     that generated the event.
    :type name: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryEventActor, self).__init__(**kwargs)
        # Optional field; absent key yields None.
        self.name = kwargs.get('name')
class ContainerRegistryEventData(msrest.serialization.Model):
    """The content of a container-registry event request message.

    :param id: The event ID.
    :type id: str
    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: The action that encompasses the provided event.
    :type action: str
    :param target: The target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
    :param request: The request that generated the event.
    :type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
    :param actor: The agent that initiated the event. For most situations, this could be from the
     authorization context of the request.
    :type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
    :param source: The registry node that generated the event. Put differently, while the actor
     initiates the event, the source generates it.
    :type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryEventData, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('id', 'timestamp', 'action', 'target',
                      'request', 'actor', 'source'):
            setattr(self, _name, kwargs.get(_name))
class ContainerRegistryEventRequest(msrest.serialization.Model):
    """Details of the HTTP request that generated a registry event.

    :param id: The ID of the request that initiated the event.
    :type id: str
    :param addr: The IP or hostname and possibly port of the client connection that initiated the
     event. This is the RemoteAddr from the standard http request.
    :type addr: str
    :param host: The externally accessible hostname of the registry instance, as specified by the
     http host header on incoming requests.
    :type host: str
    :param method: The request method that generated the event.
    :type method: str
    :param useragent: The user agent header of the request.
    :type useragent: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'addr': {'key': 'addr', 'type': 'str'},
        'host': {'key': 'host', 'type': 'str'},
        'method': {'key': 'method', 'type': 'str'},
        'useragent': {'key': 'useragent', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryEventRequest, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('id', 'addr', 'host', 'method', 'useragent'):
            setattr(self, _name, kwargs.get(_name))
class ContainerRegistryEventSource(msrest.serialization.Model):
    """The registry node that generated the event. Put differently, while the
    actor initiates the event, the source generates it.

    :param addr: The IP or hostname and the port of the registry node that generated the event.
     Generally, this will be resolved by os.Hostname() along with the running port.
    :type addr: str
    :param instance_id: The running instance of an application. Changes after each restart.
    :type instance_id: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'addr': {'key': 'addr', 'type': 'str'},
        'instance_id': {'key': 'instanceID', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryEventSource, self).__init__(**kwargs)
        # Optional fields; absent keys yield None.
        self.addr = kwargs.get('addr')
        self.instance_id = kwargs.get('instance_id')
class ContainerRegistryEventTarget(msrest.serialization.Model):
    """The object (blob, manifest, tag, ...) that a registry event refers to.

    :param media_type: The MIME type of the referenced object.
    :type media_type: str
    :param size: The number of bytes of the content. Same as Length field.
    :type size: long
    :param digest: The digest of the content, as defined by the Registry V2 HTTP API Specification.
    :type digest: str
    :param length: The number of bytes of the content. Same as Size field.
    :type length: long
    :param repository: The repository name.
    :type repository: str
    :param url: The direct URL to the content.
    :type url: str
    :param tag: The tag name.
    :type tag: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'size': {'key': 'size', 'type': 'long'},
        'digest': {'key': 'digest', 'type': 'str'},
        'length': {'key': 'length', 'type': 'long'},
        'repository': {'key': 'repository', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'tag': {'key': 'tag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ContainerRegistryEventTarget, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('media_type', 'size', 'digest', 'length',
                      'repository', 'url', 'tag'):
            setattr(self, _name, kwargs.get(_name))
class ContainerRegistryImageDeletedEventData(ContainerRegistryEventData):
    """Payload carried in the Data property of an EventGridEvent for the
    Microsoft.ContainerRegistry.ImageDeleted event type.

    All fields are inherited from :class:`ContainerRegistryEventData`; this
    subclass only pins the concrete event kind.

    :param id: The event ID.
    :type id: str
    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: The action that encompasses the provided event.
    :type action: str
    :param target: The target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
    :param request: The request that generated the event.
    :type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
    :param actor: The agent that initiated the event. For most situations, this could be from the
     authorization context of the request.
    :type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
    :param source: The registry node that generated the event. Put differently, while the actor
     initiates the event, the source generates it.
    :type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(ContainerRegistryImageDeletedEventData, self).__init__(**kwargs)
class ContainerRegistryImagePushedEventData(ContainerRegistryEventData):
    """Payload carried in the Data property of an EventGridEvent for the
    Microsoft.ContainerRegistry.ImagePushed event type.

    All fields are inherited from :class:`ContainerRegistryEventData`; this
    subclass only pins the concrete event kind.

    :param id: The event ID.
    :type id: str
    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param action: The action that encompasses the provided event.
    :type action: str
    :param target: The target of the event.
    :type target: ~event_grid_publisher_client.models.ContainerRegistryEventTarget
    :param request: The request that generated the event.
    :type request: ~event_grid_publisher_client.models.ContainerRegistryEventRequest
    :param actor: The agent that initiated the event. For most situations, this could be from the
     authorization context of the request.
    :type actor: ~event_grid_publisher_client.models.ContainerRegistryEventActor
    :param source: The registry node that generated the event. Put differently, while the actor
     initiates the event, the source generates it.
    :type source: ~event_grid_publisher_client.models.ContainerRegistryEventSource
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(ContainerRegistryImagePushedEventData, self).__init__(**kwargs)
class DeviceConnectionStateEventInfo(msrest.serialization.Model):
    """Information about a device connection state event.

    :param sequence_number: Sequence number is string representation of a hexadecimal number.
     string compare can be used to identify the larger number because both in ASCII and HEX numbers
     come after alphabets. If you are converting the string to hex, then the number is a 256 bit
     number.
    :type sequence_number: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'sequence_number': {'key': 'sequenceNumber', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeviceConnectionStateEventInfo, self).__init__(**kwargs)
        # Optional field; absent key yields None.
        self.sequence_number = kwargs.get('sequence_number')
class DeviceConnectionStateEventProperties(msrest.serialization.Model):
    """Data carried by a device connection state event
    (DeviceConnected, DeviceDisconnected).

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param module_id: The unique identifier of the module. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type module_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param device_connection_state_event_info: Information about the device connection state event.
    :type device_connection_state_event_info:
     ~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
    }

    def __init__(self, **kwargs):
        super(DeviceConnectionStateEventProperties, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('device_id', 'module_id', 'hub_name',
                      'device_connection_state_event_info'):
            setattr(self, _name, kwargs.get(_name))
class DeviceLifeCycleEventProperties(msrest.serialization.Model):
    """Data carried by a device life cycle event (DeviceCreated, DeviceDeleted).

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param twin: Information about the device twin, which is the cloud representation of
     application device metadata.
    :type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
    }

    def __init__(self, **kwargs):
        super(DeviceLifeCycleEventProperties, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('device_id', 'hub_name', 'twin'):
            setattr(self, _name, kwargs.get(_name))
class DeviceTelemetryEventProperties(msrest.serialization.Model):
    """Data carried by a device telemetry event (DeviceTelemetry).

    :param body: The content of the message from the device.
    :type body: object
    :param properties: Application properties are user-defined strings that can be added to the
     message. These fields are optional.
    :type properties: dict[str, str]
    :param system_properties: System properties help identify contents and source of the messages.
    :type system_properties: dict[str, str]
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'body': {'key': 'body', 'type': 'object'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'system_properties': {'key': 'systemProperties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(DeviceTelemetryEventProperties, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('body', 'properties', 'system_properties'):
            setattr(self, _name, kwargs.get(_name))
class DeviceTwinInfo(msrest.serialization.Model):
    """Information about the device twin, which is the cloud representation of
    application device metadata.

    :param authentication_type: Authentication type used for this device: either SAS, SelfSigned,
     or CertificateAuthority.
    :type authentication_type: str
    :param cloud_to_device_message_count: Count of cloud to device messages sent to this device.
    :type cloud_to_device_message_count: float
    :param connection_state: Whether the device is connected or disconnected.
    :type connection_state: str
    :param device_id: The unique identifier of the device twin.
    :type device_id: str
    :param etag: A piece of information that describes the content of the device twin. Each etag is
     guaranteed to be unique per device twin.
    :type etag: str
    :param last_activity_time: The ISO8601 timestamp of the last activity.
    :type last_activity_time: str
    :param properties: Properties JSON element.
    :type properties: ~event_grid_publisher_client.models.DeviceTwinInfoProperties
    :param status: Whether the device twin is enabled or disabled.
    :type status: str
    :param status_update_time: The ISO8601 timestamp of the last device twin status update.
    :type status_update_time: str
    :param version: An integer that is incremented by one each time the device twin is updated.
    :type version: float
    :param x509_thumbprint: The thumbprint is a unique value for the x509 certificate, commonly
     used to find a particular certificate in a certificate store. The thumbprint is dynamically
     generated using the SHA1 algorithm, and does not physically exist in the certificate.
    :type x509_thumbprint: ~event_grid_publisher_client.models.DeviceTwinInfoX509Thumbprint
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'cloud_to_device_message_count': {'key': 'cloudToDeviceMessageCount', 'type': 'float'},
        'connection_state': {'key': 'connectionState', 'type': 'str'},
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'DeviceTwinInfoProperties'},
        'status': {'key': 'status', 'type': 'str'},
        'status_update_time': {'key': 'statusUpdateTime', 'type': 'str'},
        'version': {'key': 'version', 'type': 'float'},
        'x509_thumbprint': {'key': 'x509Thumbprint', 'type': 'DeviceTwinInfoX509Thumbprint'},
    }

    def __init__(self, **kwargs):
        super(DeviceTwinInfo, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('authentication_type', 'cloud_to_device_message_count',
                      'connection_state', 'device_id', 'etag',
                      'last_activity_time', 'properties', 'status',
                      'status_update_time', 'version', 'x509_thumbprint'):
            setattr(self, _name, kwargs.get(_name))
class DeviceTwinInfoProperties(msrest.serialization.Model):
    """Properties JSON element of a device twin.

    :param desired: A portion of the properties that can be written only by the application back-
     end, and read by the device.
    :type desired: ~event_grid_publisher_client.models.DeviceTwinProperties
    :param reported: A portion of the properties that can be written only by the device, and read
     by the application back-end.
    :type reported: ~event_grid_publisher_client.models.DeviceTwinProperties
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'desired': {'key': 'desired', 'type': 'DeviceTwinProperties'},
        'reported': {'key': 'reported', 'type': 'DeviceTwinProperties'},
    }

    def __init__(self, **kwargs):
        super(DeviceTwinInfoProperties, self).__init__(**kwargs)
        # Optional fields; absent keys yield None.
        self.desired = kwargs.get('desired')
        self.reported = kwargs.get('reported')
class DeviceTwinInfoX509Thumbprint(msrest.serialization.Model):
    """The thumbprint is a unique value for the x509 certificate, commonly used to find a
    particular certificate in a certificate store. The thumbprint is dynamically generated
    using the SHA1 algorithm, and does not physically exist in the certificate.

    :param primary_thumbprint: Primary thumbprint for the x509 certificate.
    :type primary_thumbprint: str
    :param secondary_thumbprint: Secondary thumbprint for the x509 certificate.
    :type secondary_thumbprint: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'primary_thumbprint': {'key': 'primaryThumbprint', 'type': 'str'},
        'secondary_thumbprint': {'key': 'secondaryThumbprint', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeviceTwinInfoX509Thumbprint, self).__init__(**kwargs)
        # Optional fields; absent keys yield None.
        self.primary_thumbprint = kwargs.get('primary_thumbprint')
        self.secondary_thumbprint = kwargs.get('secondary_thumbprint')
class DeviceTwinMetadata(msrest.serialization.Model):
    """Metadata information for the properties JSON document.

    :param last_updated: The ISO8601 timestamp of the last time the properties were updated.
    :type last_updated: str
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'last_updated': {'key': 'lastUpdated', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeviceTwinMetadata, self).__init__(**kwargs)
        # Optional field; absent key yields None.
        self.last_updated = kwargs.get('last_updated')
class DeviceTwinProperties(msrest.serialization.Model):
    """A portion of the properties that can be written only by the application back-end,
    and read by the device.

    :param metadata: Metadata information for the properties JSON document.
    :type metadata: ~event_grid_publisher_client.models.DeviceTwinMetadata
    :param version: Version of device twin properties.
    :type version: float
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'metadata': {'key': 'metadata', 'type': 'DeviceTwinMetadata'},
        'version': {'key': 'version', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(DeviceTwinProperties, self).__init__(**kwargs)
        # Optional fields; absent keys yield None.
        self.metadata = kwargs.get('metadata')
        self.version = kwargs.get('version')
class EventGridEvent(msrest.serialization.Model):
    """An event published to an Event Grid topic using the EventGrid Schema.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. An unique identifier for the event.
    :type id: str
    :param topic: The resource path of the event source.
    :type topic: str
    :param subject: Required. A resource path relative to the topic path.
    :type subject: str
    :param data: Required. Event data specific to the event type.
    :type data: object
    :param event_type: Required. The type of the event that occurred.
    :type event_type: str
    :param event_time: Required. The time (in UTC) the event was generated.
    :type event_time: ~datetime.datetime
    :ivar metadata_version: The schema version of the event metadata.
    :vartype metadata_version: str
    :param data_version: Required. The schema version of the data object.
    :type data_version: str
    """

    # Validation rules enforced by msrest on serialization.
    _validation = {
        'id': {'required': True},
        'subject': {'required': True},
        'data': {'required': True},
        'event_type': {'required': True},
        'event_time': {'required': True},
        'metadata_version': {'readonly': True},
        'data_version': {'required': True},
    }

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'topic': {'key': 'topic', 'type': 'str'},
        'subject': {'key': 'subject', 'type': 'str'},
        'data': {'key': 'data', 'type': 'object'},
        'event_type': {'key': 'eventType', 'type': 'str'},
        'event_time': {'key': 'eventTime', 'type': 'iso-8601'},
        'metadata_version': {'key': 'metadataVersion', 'type': 'str'},
        'data_version': {'key': 'dataVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EventGridEvent, self).__init__(**kwargs)
        # Required fields use item access so a missing key raises KeyError.
        self.id = kwargs['id']
        self.topic = kwargs.get('topic')  # optional
        self.subject = kwargs['subject']
        self.data = kwargs['data']
        self.event_type = kwargs['event_type']
        self.event_time = kwargs['event_time']
        # Read-only; populated by the service, never taken from caller kwargs.
        self.metadata_version = None
        self.data_version = kwargs['data_version']
class EventHubCaptureFileCreatedEventData(msrest.serialization.Model):
    """Payload carried in the Data property of an EventGridEvent for the
    Microsoft.EventHub.CaptureFileCreated event type.

    :param fileurl: The path to the capture file.
    :type fileurl: str
    :param file_type: The file type of the capture file.
    :type file_type: str
    :param partition_id: The shard ID.
    :type partition_id: str
    :param size_in_bytes: The file size.
    :type size_in_bytes: int
    :param event_count: The number of events in the file.
    :type event_count: int
    :param first_sequence_number: The smallest sequence number from the queue.
    :type first_sequence_number: int
    :param last_sequence_number: The last sequence number from the queue.
    :type last_sequence_number: int
    :param first_enqueue_time: The first time from the queue.
    :type first_enqueue_time: ~datetime.datetime
    :param last_enqueue_time: The last time from the queue.
    :type last_enqueue_time: ~datetime.datetime
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'fileurl': {'key': 'fileurl', 'type': 'str'},
        'file_type': {'key': 'fileType', 'type': 'str'},
        'partition_id': {'key': 'partitionId', 'type': 'str'},
        'size_in_bytes': {'key': 'sizeInBytes', 'type': 'int'},
        'event_count': {'key': 'eventCount', 'type': 'int'},
        'first_sequence_number': {'key': 'firstSequenceNumber', 'type': 'int'},
        'last_sequence_number': {'key': 'lastSequenceNumber', 'type': 'int'},
        'first_enqueue_time': {'key': 'firstEnqueueTime', 'type': 'iso-8601'},
        'last_enqueue_time': {'key': 'lastEnqueueTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(EventHubCaptureFileCreatedEventData, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('fileurl', 'file_type', 'partition_id', 'size_in_bytes',
                      'event_count', 'first_sequence_number',
                      'last_sequence_number', 'first_enqueue_time',
                      'last_enqueue_time'):
            setattr(self, _name, kwargs.get(_name))
class IotHubDeviceConnectedEventData(DeviceConnectionStateEventProperties):
    """Event data for the Microsoft.Devices.DeviceConnected event type.

    All fields are inherited from :class:`DeviceConnectionStateEventProperties`;
    this subclass only pins the concrete event kind.

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param module_id: The unique identifier of the module. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type module_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param device_connection_state_event_info: Information about the device connection state event.
    :type device_connection_state_event_info:
     ~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(IotHubDeviceConnectedEventData, self).__init__(**kwargs)
class IotHubDeviceCreatedEventData(DeviceLifeCycleEventProperties):
    """Event data for the Microsoft.Devices.DeviceCreated event type.

    All fields are inherited from :class:`DeviceLifeCycleEventProperties`;
    this subclass only pins the concrete event kind.

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param twin: Information about the device twin, which is the cloud representation of
     application device metadata.
    :type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(IotHubDeviceCreatedEventData, self).__init__(**kwargs)
class IotHubDeviceDeletedEventData(DeviceLifeCycleEventProperties):
    """Event data for the Microsoft.Devices.DeviceDeleted event type.

    All fields are inherited from :class:`DeviceLifeCycleEventProperties`;
    this subclass only pins the concrete event kind.

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param twin: Information about the device twin, which is the cloud representation of
     application device metadata.
    :type twin: ~event_grid_publisher_client.models.DeviceTwinInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(IotHubDeviceDeletedEventData, self).__init__(**kwargs)
class IotHubDeviceDisconnectedEventData(DeviceConnectionStateEventProperties):
    """Event data for the Microsoft.Devices.DeviceDisconnected event type.

    All fields are inherited from :class:`DeviceConnectionStateEventProperties`;
    this subclass only pins the concrete event kind.

    :param device_id: The unique identifier of the device. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type device_id: str
    :param module_id: The unique identifier of the module. This case-sensitive string can be up to
     128 characters long, and supports ASCII 7-bit alphanumeric characters plus the following
     special characters: - : . + % _ # * ? ! ( ) , = @ ; $ '.
    :type module_id: str
    :param hub_name: Name of the IoT Hub where the device was created or deleted.
    :type hub_name: str
    :param device_connection_state_event_info: Information about the device connection state event.
    :type device_connection_state_event_info:
     ~event_grid_publisher_client.models.DeviceConnectionStateEventInfo
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(IotHubDeviceDisconnectedEventData, self).__init__(**kwargs)
class IotHubDeviceTelemetryEventData(DeviceTelemetryEventProperties):
    """Event data for the Microsoft.Devices.DeviceTelemetry event type.

    All fields are inherited from :class:`DeviceTelemetryEventProperties`;
    this subclass only pins the concrete event kind.

    :param body: The content of the message from the device.
    :type body: object
    :param properties: Application properties are user-defined strings that can be added to the
     message. These fields are optional.
    :type properties: dict[str, str]
    :param system_properties: System properties help identify contents and source of the messages.
    :type system_properties: dict[str, str]
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'body': {'key': 'body', 'type': 'object'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'system_properties': {'key': 'systemProperties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # All attribute population is handled by the base class.
        super(IotHubDeviceTelemetryEventData, self).__init__(**kwargs)
class KeyVaultAccessPolicyChangedEventData(msrest.serialization.Model):
    """Payload carried in the Data property of an EventGridEvent for the
    Microsoft.KeyVault.VaultAccessPolicyChanged event type.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultAccessPolicyChangedEventData, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('id', 'vault_name', 'object_type', 'object_name',
                      'version', 'nbf', 'exp'):
            setattr(self, _name, kwargs.get(_name))
class KeyVaultCertificateExpiredEventData(msrest.serialization.Model):
    """Payload carried in the Data property of an EventGridEvent for the
    Microsoft.KeyVault.CertificateExpired event type.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultCertificateExpiredEventData, self).__init__(**kwargs)
        # Every field is optional; assign in declaration order, defaulting to None.
        for _name in ('id', 'vault_name', 'object_type', 'object_name',
                      'version', 'nbf', 'exp'):
            setattr(self, _name, kwargs.get(_name))
class KeyVaultCertificateNearExpiryEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.CertificateNearExpiry event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultCertificateNearExpiryEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultCertificateNewVersionCreatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.CertificateNewVersionCreated event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultCertificateNewVersionCreatedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultKeyExpiredEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.KeyExpired event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultKeyExpiredEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultKeyNearExpiryEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.KeyNearExpiry event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultKeyNearExpiryEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultKeyNewVersionCreatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.KeyNewVersionCreated event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultKeyNewVersionCreatedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultSecretExpiredEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.SecretExpired event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultSecretExpiredEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultSecretNearExpiryEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.SecretNearExpiry event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultSecretNearExpiryEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class KeyVaultSecretNewVersionCreatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.KeyVault.SecretNewVersionCreated event.

    :param id: The id of the object that triggered this event.
    :type id: str
    :param vault_name: Key vault name of the object that triggered this event.
    :type vault_name: str
    :param object_type: The type of the object that triggered this event.
    :type object_type: str
    :param object_name: The name of the object that triggered this event.
    :type object_name: str
    :param version: The version of the object that triggered this event.
    :type version: str
    :param nbf: Not before date of the object that triggered this event.
    :type nbf: float
    :param exp: The expiration date of the object that triggered this event.
    :type exp: float
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'vault_name': {'key': 'vaultName', 'type': 'str'},
        'object_type': {'key': 'objectType', 'type': 'str'},
        'object_name': {'key': 'objectName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'nbf': {'key': 'nbf', 'type': 'float'},
        'exp': {'key': 'exp', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultSecretNewVersionCreatedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('id', 'vault_name', 'object_type', 'object_name',
                     'version', 'nbf', 'exp'):
            setattr(self, attr, kwargs.get(attr))
class MachineLearningServicesDatasetDriftDetectedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.MachineLearningServices.DatasetDriftDetected event.

    :param data_drift_id: The ID of the data drift monitor that triggered the event.
    :type data_drift_id: str
    :param data_drift_name: The name of the data drift monitor that triggered the event.
    :type data_drift_name: str
    :param run_id: The ID of the Run that detected data drift.
    :type run_id: str
    :param base_dataset_id: The ID of the base Dataset used to detect drift.
    :type base_dataset_id: str
    :param target_dataset_id: The ID of the target Dataset used to detect drift.
    :type target_dataset_id: str
    :param drift_coefficient: The coefficient result that triggered the event.
    :type drift_coefficient: float
    :param start_time: The start time of the target dataset time series that resulted in drift
     detection.
    :type start_time: ~datetime.datetime
    :param end_time: The end time of the target dataset time series that resulted in drift
     detection.
    :type end_time: ~datetime.datetime
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'data_drift_id': {'key': 'dataDriftId', 'type': 'str'},
        'data_drift_name': {'key': 'dataDriftName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'base_dataset_id': {'key': 'baseDatasetId', 'type': 'str'},
        'target_dataset_id': {'key': 'targetDatasetId', 'type': 'str'},
        'drift_coefficient': {'key': 'driftCoefficient', 'type': 'float'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(MachineLearningServicesDatasetDriftDetectedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('data_drift_id', 'data_drift_name', 'run_id',
                     'base_dataset_id', 'target_dataset_id',
                     'drift_coefficient', 'start_time', 'end_time'):
            setattr(self, attr, kwargs.get(attr))
class MachineLearningServicesModelDeployedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.MachineLearningServices.ModelDeployed event.

    :param service_name: The name of the deployed service.
    :type service_name: str
    :param service_compute_type: The compute type (e.g. ACI, AKS) of the deployed service.
    :type service_compute_type: str
    :param model_ids: A common separated list of model IDs. The IDs of the models deployed in the
     service.
    :type model_ids: str
    :param service_tags: The tags of the deployed service.
    :type service_tags: object
    :param service_properties: The properties of the deployed service.
    :type service_properties: object
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'service_name': {'key': 'serviceName', 'type': 'str'},
        'service_compute_type': {'key': 'serviceComputeType', 'type': 'str'},
        'model_ids': {'key': 'modelIds', 'type': 'str'},
        'service_tags': {'key': 'serviceTags', 'type': 'object'},
        'service_properties': {'key': 'serviceProperties', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(MachineLearningServicesModelDeployedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('service_name', 'service_compute_type', 'model_ids',
                     'service_tags', 'service_properties'):
            setattr(self, attr, kwargs.get(attr))
class MachineLearningServicesModelRegisteredEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.MachineLearningServices.ModelRegistered event.

    :param model_name: The name of the model that was registered.
    :type model_name: str
    :param model_version: The version of the model that was registered.
    :type model_version: str
    :param model_tags: The tags of the model that was registered.
    :type model_tags: object
    :param model_properties: The properties of the model that was registered.
    :type model_properties: object
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'model_name': {'key': 'modelName', 'type': 'str'},
        'model_version': {'key': 'modelVersion', 'type': 'str'},
        'model_tags': {'key': 'modelTags', 'type': 'object'},
        'model_properties': {'key': 'modelProperties', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(MachineLearningServicesModelRegisteredEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('model_name', 'model_version', 'model_tags',
                     'model_properties'):
            setattr(self, attr, kwargs.get(attr))
class MachineLearningServicesRunCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.MachineLearningServices.RunCompleted event.

    :param experiment_id: The ID of the experiment that the run belongs to.
    :type experiment_id: str
    :param experiment_name: The name of the experiment that the run belongs to.
    :type experiment_name: str
    :param run_id: The ID of the Run that was completed.
    :type run_id: str
    :param run_type: The Run Type of the completed Run.
    :type run_type: str
    :param run_tags: The tags of the completed Run.
    :type run_tags: object
    :param run_properties: The properties of the completed Run.
    :type run_properties: object
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'run_tags': {'key': 'runTags', 'type': 'object'},
        'run_properties': {'key': 'runProperties', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(MachineLearningServicesRunCompletedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('experiment_id', 'experiment_name', 'run_id',
                     'run_type', 'run_tags', 'run_properties'):
            setattr(self, attr, kwargs.get(attr))
class MachineLearningServicesRunStatusChangedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.MachineLearningServices.RunStatusChanged event.

    :param experiment_id: The ID of the experiment that the Machine Learning Run belongs to.
    :type experiment_id: str
    :param experiment_name: The name of the experiment that the Machine Learning Run belongs to.
    :type experiment_name: str
    :param run_id: The ID of the Machine Learning Run.
    :type run_id: str
    :param run_type: The Run Type of the Machine Learning Run.
    :type run_type: str
    :param run_tags: The tags of the Machine Learning Run.
    :type run_tags: object
    :param run_properties: The properties of the Machine Learning Run.
    :type run_properties: object
    :param run_status: The status of the Machine Learning Run.
    :type run_status: str
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'experiment_id': {'key': 'experimentId', 'type': 'str'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'run_type': {'key': 'runType', 'type': 'str'},
        'run_tags': {'key': 'runTags', 'type': 'object'},
        'run_properties': {'key': 'runProperties', 'type': 'object'},
        'run_status': {'key': 'runStatus', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(MachineLearningServicesRunStatusChangedEventData, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('experiment_id', 'experiment_name', 'run_id', 'run_type',
                     'run_tags', 'run_properties', 'run_status'):
            setattr(self, attr, kwargs.get(attr))
class MapsGeofenceEventProperties(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Geofence event
    (GeofenceEntered, GeofenceExited, GeofenceResult).

    :param expired_geofence_geometry_id: Lists of the geometry ID of the geofence which is expired
     relative to the user time in the request.
    :type expired_geofence_geometry_id: list[str]
    :param geometries: Lists the fence geometries that either fully contain the coordinate position
     or have an overlap with the searchBuffer around the fence.
    :type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
    :param invalid_period_geofence_geometry_id: Lists of the geometry ID of the geofence which is
     in invalid period relative to the user time in the request.
    :type invalid_period_geofence_geometry_id: list[str]
    :param is_event_published: True if at least one event is published to the Azure Maps event
     subscriber, false if no event is published to the Azure Maps event subscriber.
    :type is_event_published: bool
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
        'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
        'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
        'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(MapsGeofenceEventProperties, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('expired_geofence_geometry_id', 'geometries',
                     'invalid_period_geofence_geometry_id',
                     'is_event_published'):
            setattr(self, attr, kwargs.get(attr))
class MapsGeofenceEnteredEventData(MapsGeofenceEventProperties):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Maps.GeofenceEntered event.

    :param expired_geofence_geometry_id: Lists of the geometry ID of the geofence which is expired
     relative to the user time in the request.
    :type expired_geofence_geometry_id: list[str]
    :param geometries: Lists the fence geometries that either fully contain the coordinate position
     or have an overlap with the searchBuffer around the fence.
    :type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
    :param invalid_period_geofence_geometry_id: Lists of the geometry ID of the geofence which is
     in invalid period relative to the user time in the request.
    :type invalid_period_geofence_geometry_id: list[str]
    :param is_event_published: True if at least one event is published to the Azure Maps event
     subscriber, false if no event is published to the Azure Maps event subscriber.
    :type is_event_published: bool
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
        'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
        'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
        'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        # All attributes are inherited; delegate entirely to the base class.
        super(MapsGeofenceEnteredEventData, self).__init__(**kwargs)
class MapsGeofenceExitedEventData(MapsGeofenceEventProperties):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Maps.GeofenceExited event.

    :param expired_geofence_geometry_id: Lists of the geometry ID of the geofence which is expired
     relative to the user time in the request.
    :type expired_geofence_geometry_id: list[str]
    :param geometries: Lists the fence geometries that either fully contain the coordinate position
     or have an overlap with the searchBuffer around the fence.
    :type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
    :param invalid_period_geofence_geometry_id: Lists of the geometry ID of the geofence which is
     in invalid period relative to the user time in the request.
    :type invalid_period_geofence_geometry_id: list[str]
    :param is_event_published: True if at least one event is published to the Azure Maps event
     subscriber, false if no event is published to the Azure Maps event subscriber.
    :type is_event_published: bool
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
        'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
        'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
        'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        # All attributes are inherited; delegate entirely to the base class.
        super(MapsGeofenceExitedEventData, self).__init__(**kwargs)
class MapsGeofenceGeometry(msrest.serialization.Model):
    """The geofence geometry.

    :param device_id: ID of the device.
    :type device_id: str
    :param distance: Distance from the coordinate to the closest border of the geofence. Positive
     means the coordinate is outside of the geofence. If the coordinate is outside of the geofence,
     but more than the value of searchBuffer away from the closest geofence border, then the value
     is 999. Negative means the coordinate is inside of the geofence. If the coordinate is inside
     the polygon, but more than the value of searchBuffer away from the closest geofencing
     border,then the value is -999. A value of 999 means that there is great confidence the
     coordinate is well outside the geofence. A value of -999 means that there is great confidence
     the coordinate is well within the geofence.
    :type distance: float
    :param geometry_id: The unique ID for the geofence geometry.
    :type geometry_id: str
    :param nearest_lat: Latitude of the nearest point of the geometry.
    :type nearest_lat: float
    :param nearest_lon: Longitude of the nearest point of the geometry.
    :type nearest_lon: float
    :param ud_id: The unique id returned from user upload service when uploading a geofence. Will
     not be included in geofencing post API.
    :type ud_id: str
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'distance': {'key': 'distance', 'type': 'float'},
        'geometry_id': {'key': 'geometryId', 'type': 'str'},
        'nearest_lat': {'key': 'nearestLat', 'type': 'float'},
        'nearest_lon': {'key': 'nearestLon', 'type': 'float'},
        'ud_id': {'key': 'udId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(MapsGeofenceGeometry, self).__init__(**kwargs)
        # Copy each declared field off kwargs; missing keys become None.
        for attr in ('device_id', 'distance', 'geometry_id',
                     'nearest_lat', 'nearest_lon', 'ud_id'):
            setattr(self, attr, kwargs.get(attr))
class MapsGeofenceResultEventData(MapsGeofenceEventProperties):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Maps.GeofenceResult event.

    :param expired_geofence_geometry_id: Lists of the geometry ID of the geofence which is expired
     relative to the user time in the request.
    :type expired_geofence_geometry_id: list[str]
    :param geometries: Lists the fence geometries that either fully contain the coordinate position
     or have an overlap with the searchBuffer around the fence.
    :type geometries: list[~event_grid_publisher_client.models.MapsGeofenceGeometry]
    :param invalid_period_geofence_geometry_id: Lists of the geometry ID of the geofence which is
     in invalid period relative to the user time in the request.
    :type invalid_period_geofence_geometry_id: list[str]
    :param is_event_published: True if at least one event is published to the Azure Maps event
     subscriber, false if no event is published to the Azure Maps event subscriber.
    :type is_event_published: bool
    """

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'expired_geofence_geometry_id': {'key': 'expiredGeofenceGeometryId', 'type': '[str]'},
        'geometries': {'key': 'geometries', 'type': '[MapsGeofenceGeometry]'},
        'invalid_period_geofence_geometry_id': {'key': 'invalidPeriodGeofenceGeometryId', 'type': '[str]'},
        'is_event_published': {'key': 'isEventPublished', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        # All attributes are inherited; delegate entirely to the base class.
        super(MapsGeofenceResultEventData, self).__init__(**kwargs)
class MediaJobStateChangeEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Media.JobStateChange event.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    """

    # Server-populated fields must never be sent back on serialization.
    _validation = {
        'previous_state': {'readonly': True},
        'state': {'readonly': True},
    }

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(MediaJobStateChangeEventData, self).__init__(**kwargs)
        # Readonly fields are initialized to None, never taken from kwargs.
        self.previous_state = self.state = None
        self.correlation_data = kwargs.get('correlation_data')
class MediaJobCanceledEventData(MediaJobStateChangeEventData):
    """Job canceled event data. Schema of the data property of an EventGridEvent
    for a Microsoft.Media.JobCanceled event.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    :param outputs: Gets the Job outputs.
    :type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
    """

    # Server-populated fields must never be sent back on serialization.
    _validation = {
        'previous_state': {'readonly': True},
        'state': {'readonly': True},
    }

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
        'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
    }

    def __init__(self, **kwargs):
        # The base class handles previous_state/state/correlation_data.
        super(MediaJobCanceledEventData, self).__init__(**kwargs)
        self.outputs = kwargs.get('outputs')
class MediaJobCancelingEventData(MediaJobStateChangeEventData):
    """Job canceling event data. Schema of the data property of an EventGridEvent
    for a Microsoft.Media.JobCanceling event.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    """

    # Server-populated fields must never be sent back on serialization.
    _validation = {
        'previous_state': {'readonly': True},
        'state': {'readonly': True},
    }

    # Python attribute name -> wire key / type used by the msrest serializer.
    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # All attributes are inherited; delegate entirely to the base class.
        super(MediaJobCancelingEventData, self).__init__(**kwargs)
class MediaJobError(msrest.serialization.Model):
    """Describes an error reported for a Job output.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: Error code describing the error. Possible values include: "ServiceError",
     "ServiceTransientError", "DownloadNotAccessible", "DownloadTransientError",
     "UploadNotAccessible", "UploadTransientError", "ConfigurationUnsupported",
     "ContentMalformed", "ContentUnsupported".
    :vartype code: str or ~event_grid_publisher_client.models.MediaJobErrorCode
    :ivar message: A human-readable language-dependent representation of the error.
    :vartype message: str
    :ivar category: Helps with categorization of errors. Possible values include: "Service",
     "Download", "Upload", "Configuration", "Content".
    :vartype category: str or ~event_grid_publisher_client.models.MediaJobErrorCategory
    :ivar retry: Indicates that it may be possible to retry the Job. If retry is unsuccessful,
     please contact Azure support via Azure Portal. Possible values include: "DoNotRetry",
     "MayRetry".
    :vartype retry: str or ~event_grid_publisher_client.models.MediaJobRetry
    :ivar details: An array of details about specific errors that led to this reported error.
    :vartype details: list[~event_grid_publisher_client.models.MediaJobErrorDetail]
    """

    # Every field is populated by the service on deserialization.
    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'category': {'readonly': True},
        'retry': {'readonly': True},
        'details': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'category': {'key': 'category', 'type': 'str'},
        'retry': {'key': 'retry', 'type': 'str'},
        'details': {'key': 'details', 'type': '[MediaJobErrorDetail]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None and are filled in by the deserializer.
        self.code = self.message = self.category = self.retry = self.details = None
class MediaJobErrorDetail(msrest.serialization.Model):
    """A single detail entry attached to a :class:`MediaJobError`.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: Code describing the error detail.
    :vartype code: str
    :ivar message: A human-readable representation of the error.
    :vartype message: str
    """

    _validation = {'code': {'readonly': True}, 'message': {'readonly': True}}

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Both fields are read-only; the service supplies them.
        self.code = self.message = None
class MediaJobErroredEventData(MediaJobStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobErrored event (a Job entered the error state).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    :param outputs: Gets the Job outputs.
    :type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
    """

    _validation = {'previous_state': {'readonly': True}, 'state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
        'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Per-output detail for the errored job; optional on construction.
        self.outputs = kwargs.get('outputs')
class MediaJobFinishedEventData(MediaJobStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobFinished event (a Job completed successfully).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    :param outputs: Gets the Job outputs.
    :type outputs: list[~event_grid_publisher_client.models.MediaJobOutput]
    """

    _validation = {'previous_state': {'readonly': True}, 'state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
        'outputs': {'key': 'outputs', 'type': '[MediaJobOutput]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Per-output detail for the finished job; optional on construction.
        self.outputs = kwargs.get('outputs')
class MediaJobOutput(msrest.serialization.Model):
    """The event data for a Job output.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: MediaJobOutputAsset.

    All required parameters must be populated in order to send to Azure.

    :param odata_type: The discriminator for derived types.Constant filled by server.
    :type odata_type: str
    :param error: Gets the Job output error.
    :type error: ~event_grid_publisher_client.models.MediaJobError
    :param label: Gets the Job output label.
    :type label: str
    :param progress: Required. Gets the Job output progress.
    :type progress: long
    :param state: Required. Gets the Job output state. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :type state: str or ~event_grid_publisher_client.models.MediaJobState
    """

    _validation = {'progress': {'required': True}, 'state': {'required': True}}

    _attribute_map = {
        'odata_type': {'key': '@odata\\.type', 'type': 'str'},
        'error': {'key': 'error', 'type': 'MediaJobError'},
        'label': {'key': 'label', 'type': 'str'},
        'progress': {'key': 'progress', 'type': 'long'},
        'state': {'key': 'state', 'type': 'str'},
    }

    # Polymorphic dispatch: the wire-level @odata.type value selects the subclass.
    _subtype_map = {
        'odata_type': {'#Microsoft.Media.JobOutputAsset': 'MediaJobOutputAsset'}
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Discriminator is left unset on the base class; subclasses pin it.
        self.odata_type = None  # type: Optional[str]
        self.error = kwargs.get('error')
        self.label = kwargs.get('label')
        # 'progress' and 'state' are required: a missing key raises KeyError.
        self.progress = kwargs['progress']
        self.state = kwargs['state']
class MediaJobOutputAsset(MediaJobOutput):
    """The event data for a Job output that produces an asset.

    All required parameters must be populated in order to send to Azure.

    :param odata_type: The discriminator for derived types.Constant filled by server.
    :type odata_type: str
    :param error: Gets the Job output error.
    :type error: ~event_grid_publisher_client.models.MediaJobError
    :param label: Gets the Job output label.
    :type label: str
    :param progress: Required. Gets the Job output progress.
    :type progress: long
    :param state: Required. Gets the Job output state. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :type state: str or ~event_grid_publisher_client.models.MediaJobState
    :param asset_name: Gets the Job output asset name.
    :type asset_name: str
    """

    _validation = {'progress': {'required': True}, 'state': {'required': True}}

    _attribute_map = {
        'odata_type': {'key': '@odata\\.type', 'type': 'str'},
        'error': {'key': 'error', 'type': 'MediaJobError'},
        'label': {'key': 'label', 'type': 'str'},
        'progress': {'key': 'progress', 'type': 'long'},
        'state': {'key': 'state', 'type': 'str'},
        'asset_name': {'key': 'assetName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Pin the polymorphic discriminator for this concrete subtype.
        self.odata_type = '#Microsoft.Media.JobOutputAsset'  # type: str
        self.asset_name = kwargs.get('asset_name')
class MediaJobOutputStateChangeEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Media.JobOutputStateChange event; base payload shared by the
    per-state job-output event classes.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # previous_state is read-only and filled in by the deserializer.
        self.previous_state = None
        self.output = kwargs.get('output')
        self.job_correlation_data = kwargs.get('job_correlation_data')
class MediaJobOutputCanceledEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputCanceled event (a Job output was canceled).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobOutputCancelingEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputCanceling event (a Job output began cancellation).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobOutputErroredEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputErrored event (a Job output entered the error state).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobOutputFinishedEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputFinished event (a Job output completed successfully).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobOutputProcessingEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputProcessing event (a Job output began processing).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobOutputProgressEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Media.JobOutputProgress event (periodic progress for a Job output).

    :param label: Gets the Job output label.
    :type label: str
    :param progress: Gets the Job output progress.
    :type progress: long
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _attribute_map = {
        'label': {'key': 'label', 'type': 'str'},
        'progress': {'key': 'progress', 'type': 'long'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All three fields are optional keyword arguments.
        self.label = kwargs.get('label')
        self.progress = kwargs.get('progress')
        self.job_correlation_data = kwargs.get('job_correlation_data')
class MediaJobOutputScheduledEventData(MediaJobOutputStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobOutputScheduled event (a Job output was scheduled).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :param output: Gets the output.
    :type output: ~event_grid_publisher_client.models.MediaJobOutput
    :param job_correlation_data: Gets the Job correlation data.
    :type job_correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'output': {'key': 'output', 'type': 'MediaJobOutput'},
        'job_correlation_data': {'key': 'jobCorrelationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # No additional fields beyond the shared output-state-change payload.
        super().__init__(**kwargs)
class MediaJobProcessingEventData(MediaJobStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobProcessing event (a Job began processing).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}, 'state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # Adds no fields of its own beyond the shared state-change payload.
        super().__init__(**kwargs)
class MediaJobScheduledEventData(MediaJobStateChangeEventData):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.JobScheduled event (a Job was scheduled to run).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar previous_state: The previous state of the Job. Possible values include: "Canceled",
     "Canceling", "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype previous_state: str or ~event_grid_publisher_client.models.MediaJobState
    :ivar state: The new state of the Job. Possible values include: "Canceled", "Canceling",
     "Error", "Finished", "Processing", "Queued", "Scheduled".
    :vartype state: str or ~event_grid_publisher_client.models.MediaJobState
    :param correlation_data: Gets the Job correlation data.
    :type correlation_data: dict[str, str]
    """

    _validation = {'previous_state': {'readonly': True}, 'state': {'readonly': True}}

    _attribute_map = {
        'previous_state': {'key': 'previousState', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'correlation_data': {'key': 'correlationData', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        # Adds no fields of its own beyond the shared state-change payload.
        super().__init__(**kwargs)
class MediaLiveEventConnectionRejectedEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventConnectionRejected event (an encoder's connection
    attempt was rejected by the live event).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar ingest_url: Gets the ingest URL provided by the live event.
    :vartype ingest_url: str
    :ivar stream_id: Gets the stream Id.
    :vartype stream_id: str
    :ivar encoder_ip: Gets the remote IP.
    :vartype encoder_ip: str
    :ivar encoder_port: Gets the remote port.
    :vartype encoder_port: str
    :ivar result_code: Gets the result code.
    :vartype result_code: str
    """

    # Every field is service-populated.
    _validation = {
        'ingest_url': {'readonly': True},
        'stream_id': {'readonly': True},
        'encoder_ip': {'readonly': True},
        'encoder_port': {'readonly': True},
        'result_code': {'readonly': True},
    }

    _attribute_map = {
        'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
        'stream_id': {'key': 'streamId', 'type': 'str'},
        'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
        'encoder_port': {'key': 'encoderPort', 'type': 'str'},
        'result_code': {'key': 'resultCode', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.ingest_url = self.stream_id = None
        self.encoder_ip = self.encoder_port = None
        self.result_code = None
class MediaLiveEventEncoderConnectedEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventEncoderConnected event (an encoder connected to
    the live event).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar ingest_url: Gets the ingest URL provided by the live event.
    :vartype ingest_url: str
    :ivar stream_id: Gets the stream Id.
    :vartype stream_id: str
    :ivar encoder_ip: Gets the remote IP.
    :vartype encoder_ip: str
    :ivar encoder_port: Gets the remote port.
    :vartype encoder_port: str
    """

    # Every field is service-populated.
    _validation = {
        'ingest_url': {'readonly': True},
        'stream_id': {'readonly': True},
        'encoder_ip': {'readonly': True},
        'encoder_port': {'readonly': True},
    }

    _attribute_map = {
        'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
        'stream_id': {'key': 'streamId', 'type': 'str'},
        'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
        'encoder_port': {'key': 'encoderPort', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.ingest_url = self.stream_id = None
        self.encoder_ip = self.encoder_port = None
class MediaLiveEventEncoderDisconnectedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Media.LiveEventEncoderDisconnected event (an encoder disconnected
    from the live event).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar ingest_url: Gets the ingest URL provided by the live event.
    :vartype ingest_url: str
    :ivar stream_id: Gets the stream Id.
    :vartype stream_id: str
    :ivar encoder_ip: Gets the remote IP.
    :vartype encoder_ip: str
    :ivar encoder_port: Gets the remote port.
    :vartype encoder_port: str
    :ivar result_code: Gets the result code.
    :vartype result_code: str
    """

    # Every field is service-populated.
    _validation = {
        'ingest_url': {'readonly': True},
        'stream_id': {'readonly': True},
        'encoder_ip': {'readonly': True},
        'encoder_port': {'readonly': True},
        'result_code': {'readonly': True},
    }

    _attribute_map = {
        'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
        'stream_id': {'key': 'streamId', 'type': 'str'},
        'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
        'encoder_port': {'key': 'encoderPort', 'type': 'str'},
        'result_code': {'key': 'resultCode', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.ingest_url = self.stream_id = None
        self.encoder_ip = self.encoder_port = None
        self.result_code = None
class MediaLiveEventIncomingDataChunkDroppedEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventIncomingDataChunkDropped event (an incoming data
    chunk was dropped by the live event).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar timestamp: Gets the timestamp of the data chunk dropped.
    :vartype timestamp: str
    :ivar track_type: Gets the type of the track (Audio / Video).
    :vartype track_type: str
    :ivar bitrate: Gets the bitrate of the track.
    :vartype bitrate: long
    :ivar timescale: Gets the timescale of the Timestamp.
    :vartype timescale: str
    :ivar result_code: Gets the result code for fragment drop operation.
    :vartype result_code: str
    :ivar track_name: Gets the name of the track for which fragment is dropped.
    :vartype track_name: str
    """

    # Every field is service-populated.
    _validation = {
        'timestamp': {'readonly': True},
        'track_type': {'readonly': True},
        'bitrate': {'readonly': True},
        'timescale': {'readonly': True},
        'result_code': {'readonly': True},
        'track_name': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'str'},
        'track_type': {'key': 'trackType', 'type': 'str'},
        'bitrate': {'key': 'bitrate', 'type': 'long'},
        'timescale': {'key': 'timescale', 'type': 'str'},
        'result_code': {'key': 'resultCode', 'type': 'str'},
        'track_name': {'key': 'trackName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.timestamp = self.track_type = self.bitrate = None
        self.timescale = self.result_code = self.track_name = None
class MediaLiveEventIncomingStreamReceivedEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventIncomingStreamReceived event (the live event
    received the first data chunk of an incoming stream).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar ingest_url: Gets the ingest URL provided by the live event.
    :vartype ingest_url: str
    :ivar track_type: Gets the type of the track (Audio / Video).
    :vartype track_type: str
    :ivar track_name: Gets the track name.
    :vartype track_name: str
    :ivar bitrate: Gets the bitrate of the track.
    :vartype bitrate: long
    :ivar encoder_ip: Gets the remote IP.
    :vartype encoder_ip: str
    :ivar encoder_port: Gets the remote port.
    :vartype encoder_port: str
    :ivar timestamp: Gets the first timestamp of the data chunk received.
    :vartype timestamp: str
    :ivar duration: Gets the duration of the first data chunk.
    :vartype duration: str
    :ivar timescale: Gets the timescale in which timestamp is represented.
    :vartype timescale: str
    """

    # Every field is service-populated.
    _validation = {
        'ingest_url': {'readonly': True},
        'track_type': {'readonly': True},
        'track_name': {'readonly': True},
        'bitrate': {'readonly': True},
        'encoder_ip': {'readonly': True},
        'encoder_port': {'readonly': True},
        'timestamp': {'readonly': True},
        'duration': {'readonly': True},
        'timescale': {'readonly': True},
    }

    _attribute_map = {
        'ingest_url': {'key': 'ingestUrl', 'type': 'str'},
        'track_type': {'key': 'trackType', 'type': 'str'},
        'track_name': {'key': 'trackName', 'type': 'str'},
        'bitrate': {'key': 'bitrate', 'type': 'long'},
        'encoder_ip': {'key': 'encoderIp', 'type': 'str'},
        'encoder_port': {'key': 'encoderPort', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'str'},
        'duration': {'key': 'duration', 'type': 'str'},
        'timescale': {'key': 'timescale', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.ingest_url = self.track_type = self.track_name = None
        self.bitrate = self.encoder_ip = self.encoder_port = None
        self.timestamp = self.duration = self.timescale = None
class MediaLiveEventIncomingStreamsOutOfSyncEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventIncomingStreamsOutOfSync event (the incoming
    audio/video streams have drifted out of sync).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar min_last_timestamp: Gets the minimum last timestamp received.
    :vartype min_last_timestamp: str
    :ivar type_of_stream_with_min_last_timestamp: Gets the type of stream with minimum last
     timestamp.
    :vartype type_of_stream_with_min_last_timestamp: str
    :ivar max_last_timestamp: Gets the maximum timestamp among all the tracks (audio or video).
    :vartype max_last_timestamp: str
    :ivar type_of_stream_with_max_last_timestamp: Gets the type of stream with maximum last
     timestamp.
    :vartype type_of_stream_with_max_last_timestamp: str
    :ivar timescale_of_min_last_timestamp: Gets the timescale in which "MinLastTimestamp" is
     represented.
    :vartype timescale_of_min_last_timestamp: str
    :ivar timescale_of_max_last_timestamp: Gets the timescale in which "MaxLastTimestamp" is
     represented.
    :vartype timescale_of_max_last_timestamp: str
    """

    # Every field is service-populated.
    _validation = {
        'min_last_timestamp': {'readonly': True},
        'type_of_stream_with_min_last_timestamp': {'readonly': True},
        'max_last_timestamp': {'readonly': True},
        'type_of_stream_with_max_last_timestamp': {'readonly': True},
        'timescale_of_min_last_timestamp': {'readonly': True},
        'timescale_of_max_last_timestamp': {'readonly': True},
    }

    _attribute_map = {
        'min_last_timestamp': {'key': 'minLastTimestamp', 'type': 'str'},
        'type_of_stream_with_min_last_timestamp': {'key': 'typeOfStreamWithMinLastTimestamp', 'type': 'str'},
        'max_last_timestamp': {'key': 'maxLastTimestamp', 'type': 'str'},
        'type_of_stream_with_max_last_timestamp': {'key': 'typeOfStreamWithMaxLastTimestamp', 'type': 'str'},
        'timescale_of_min_last_timestamp': {'key': 'timescaleOfMinLastTimestamp', 'type': 'str'},
        'timescale_of_max_last_timestamp': {'key': 'timescaleOfMaxLastTimestamp', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.min_last_timestamp = None
        self.type_of_stream_with_min_last_timestamp = None
        self.max_last_timestamp = None
        self.type_of_stream_with_max_last_timestamp = None
        self.timescale_of_min_last_timestamp = None
        self.timescale_of_max_last_timestamp = None
class MediaLiveEventIncomingVideoStreamsOutOfSyncEventData(msrest.serialization.Model):
    """Schema of the data payload of an EventGridEvent for a
    Microsoft.Media.LiveEventIncomingVideoStreamsOutOfSync event (incoming
    video quality levels have drifted out of sync).

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar first_timestamp: Gets the first timestamp received for one of the quality levels.
    :vartype first_timestamp: str
    :ivar first_duration: Gets the duration of the data chunk with first timestamp.
    :vartype first_duration: str
    :ivar second_timestamp: Gets the timestamp received for some other quality levels.
    :vartype second_timestamp: str
    :ivar second_duration: Gets the duration of the data chunk with second timestamp.
    :vartype second_duration: str
    :ivar timescale: Gets the timescale in which both the timestamps and durations are represented.
    :vartype timescale: str
    """

    # Every field is service-populated.
    _validation = {
        'first_timestamp': {'readonly': True},
        'first_duration': {'readonly': True},
        'second_timestamp': {'readonly': True},
        'second_duration': {'readonly': True},
        'timescale': {'readonly': True},
    }

    _attribute_map = {
        'first_timestamp': {'key': 'firstTimestamp', 'type': 'str'},
        'first_duration': {'key': 'firstDuration', 'type': 'str'},
        'second_timestamp': {'key': 'secondTimestamp', 'type': 'str'},
        'second_duration': {'key': 'secondDuration', 'type': 'str'},
        'timescale': {'key': 'timescale', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields start as None; the deserializer fills them in.
        self.first_timestamp = self.first_duration = None
        self.second_timestamp = self.second_duration = None
        self.timescale = None
class MediaLiveEventIngestHeartbeatEventData(msrest.serialization.Model):
    """Ingest fragment dropped event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventIngestHeartbeat event.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar track_type: Gets the type of the track (Audio / Video).
    :vartype track_type: str
    :ivar track_name: Gets the track name.
    :vartype track_name: str
    :ivar bitrate: Gets the bitrate of the track.
    :vartype bitrate: long
    :ivar incoming_bitrate: Gets the incoming bitrate.
    :vartype incoming_bitrate: long
    :ivar last_timestamp: Gets the last timestamp.
    :vartype last_timestamp: str
    :ivar timescale: Gets the timescale of the last timestamp.
    :vartype timescale: str
    :ivar overlap_count: Gets the fragment Overlap count.
    :vartype overlap_count: long
    :ivar discontinuity_count: Gets the fragment Discontinuity count.
    :vartype discontinuity_count: long
    :ivar nonincreasing_count: Gets Non increasing count.
    :vartype nonincreasing_count: long
    :ivar unexpected_bitrate: Gets a value indicating whether unexpected bitrate is present or not.
    :vartype unexpected_bitrate: bool
    :ivar state: Gets the state of the live event.
    :vartype state: str
    :ivar healthy: Gets a value indicating whether preview is healthy or not.
    :vartype healthy: bool
    """

    # Every attribute is server-populated, hence marked read-only.
    _validation = {
        'track_type': {'readonly': True},
        'track_name': {'readonly': True},
        'bitrate': {'readonly': True},
        'incoming_bitrate': {'readonly': True},
        'last_timestamp': {'readonly': True},
        'timescale': {'readonly': True},
        'overlap_count': {'readonly': True},
        'discontinuity_count': {'readonly': True},
        'nonincreasing_count': {'readonly': True},
        'unexpected_bitrate': {'readonly': True},
        'state': {'readonly': True},
        'healthy': {'readonly': True},
    }

    _attribute_map = {
        'track_type': {'key': 'trackType', 'type': 'str'},
        'track_name': {'key': 'trackName', 'type': 'str'},
        'bitrate': {'key': 'bitrate', 'type': 'long'},
        'incoming_bitrate': {'key': 'incomingBitrate', 'type': 'long'},
        'last_timestamp': {'key': 'lastTimestamp', 'type': 'str'},
        'timescale': {'key': 'timescale', 'type': 'str'},
        'overlap_count': {'key': 'overlapCount', 'type': 'long'},
        'discontinuity_count': {'key': 'discontinuityCount', 'type': 'long'},
        'nonincreasing_count': {'key': 'nonincreasingCount', 'type': 'long'},
        'unexpected_bitrate': {'key': 'unexpectedBitrate', 'type': 'bool'},
        'state': {'key': 'state', 'type': 'str'},
        'healthy': {'key': 'healthy', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(MediaLiveEventIngestHeartbeatEventData, self).__init__(**kwargs)
        # Read-only fields start out as None; the service fills them in.
        for attr in ('track_type', 'track_name', 'bitrate', 'incoming_bitrate',
                     'last_timestamp', 'timescale', 'overlap_count',
                     'discontinuity_count', 'nonincreasing_count',
                     'unexpected_bitrate', 'state', 'healthy'):
            setattr(self, attr, None)
class MediaLiveEventTrackDiscontinuityDetectedEventData(msrest.serialization.Model):
    """Ingest track discontinuity detected event data. Schema of the data property of an EventGridEvent for a Microsoft.Media.LiveEventTrackDiscontinuityDetected event.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar track_type: Gets the type of the track (Audio / Video).
    :vartype track_type: str
    :ivar track_name: Gets the track name.
    :vartype track_name: str
    :ivar bitrate: Gets the bitrate.
    :vartype bitrate: long
    :ivar previous_timestamp: Gets the timestamp of the previous fragment.
    :vartype previous_timestamp: str
    :ivar new_timestamp: Gets the timestamp of the current fragment.
    :vartype new_timestamp: str
    :ivar timescale: Gets the timescale in which both timestamps and discontinuity gap are
     represented.
    :vartype timescale: str
    :ivar discontinuity_gap: Gets the discontinuity gap between PreviousTimestamp and NewTimestamp.
    :vartype discontinuity_gap: str
    """

    # Every attribute is server-populated, hence marked read-only.
    _validation = {
        'track_type': {'readonly': True},
        'track_name': {'readonly': True},
        'bitrate': {'readonly': True},
        'previous_timestamp': {'readonly': True},
        'new_timestamp': {'readonly': True},
        'timescale': {'readonly': True},
        'discontinuity_gap': {'readonly': True},
    }

    _attribute_map = {
        'track_type': {'key': 'trackType', 'type': 'str'},
        'track_name': {'key': 'trackName', 'type': 'str'},
        'bitrate': {'key': 'bitrate', 'type': 'long'},
        'previous_timestamp': {'key': 'previousTimestamp', 'type': 'str'},
        'new_timestamp': {'key': 'newTimestamp', 'type': 'str'},
        'timescale': {'key': 'timescale', 'type': 'str'},
        'discontinuity_gap': {'key': 'discontinuityGap', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(MediaLiveEventTrackDiscontinuityDetectedEventData, self).__init__(**kwargs)
        # Read-only fields start out as None; the service fills them in.
        for attr in ('track_type', 'track_name', 'bitrate',
                     'previous_timestamp', 'new_timestamp', 'timescale',
                     'discontinuity_gap'):
            setattr(self, attr, None)
class MicrosoftTeamsUserIdentifierModel(msrest.serialization.Model):
    """A Microsoft Teams user.

    All required parameters must be populated in order to send to Azure.

    :param user_id: Required. The Id of the Microsoft Teams user. If not anonymous, this is the AAD
     object Id of the user.
    :type user_id: str
    :param is_anonymous: True if the Microsoft Teams user is anonymous. By default false if
     missing.
    :type is_anonymous: bool
    :param cloud: The cloud that the Microsoft Teams user belongs to. By default 'public' if
     missing. Possible values include: "public", "dod", "gcch".
    :type cloud: str or ~event_grid_publisher_client.models.CommunicationCloudEnvironmentModel
    """

    _validation = {
        'user_id': {'required': True},
    }

    _attribute_map = {
        'user_id': {'key': 'userId', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'cloud': {'key': 'cloud', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(MicrosoftTeamsUserIdentifierModel, self).__init__(**kwargs)
        # 'user_id' is required: a missing key raises KeyError by design.
        self.user_id = kwargs['user_id']
        self.is_anonymous = kwargs.get('is_anonymous')
        self.cloud = kwargs.get('cloud')
class PhoneNumberIdentifierModel(msrest.serialization.Model):
    """A phone number.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. The phone number in E.164 format.
    :type value: str
    """

    _validation = {
        'value': {'required': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PhoneNumberIdentifierModel, self).__init__(**kwargs)
        # 'value' is required: a missing key raises KeyError by design.
        self.value = kwargs['value']
class RedisExportRDBCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ExportRDBCompleted event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param name: The name of this event.
    :type name: str
    :param status: The status of this event. Failed or succeeded.
    :type status: str
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RedisExportRDBCompletedEventData, self).__init__(**kwargs)
        # All parameters are optional and default to None when absent.
        self.timestamp = kwargs.get('timestamp')
        self.name = kwargs.get('name')
        self.status = kwargs.get('status')
class RedisImportRDBCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ImportRDBCompleted event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param name: The name of this event.
    :type name: str
    :param status: The status of this event. Failed or succeeded.
    :type status: str
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RedisImportRDBCompletedEventData, self).__init__(**kwargs)
        # All parameters are optional and default to None when absent.
        self.timestamp = kwargs.get('timestamp')
        self.name = kwargs.get('name')
        self.status = kwargs.get('status')
class RedisPatchingCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Cache.PatchingCompleted event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param name: The name of this event.
    :type name: str
    :param status: The status of this event. Failed or succeeded.
    :type status: str
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RedisPatchingCompletedEventData, self).__init__(**kwargs)
        # All parameters are optional and default to None when absent.
        self.timestamp = kwargs.get('timestamp')
        self.name = kwargs.get('name')
        self.status = kwargs.get('status')
class RedisScalingCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Cache.ScalingCompleted event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param name: The name of this event.
    :type name: str
    :param status: The status of this event. Failed or succeeded.
    :type status: str
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RedisScalingCompletedEventData, self).__init__(**kwargs)
        # All parameters are optional and default to None when absent.
        self.timestamp = kwargs.get('timestamp')
        self.name = kwargs.get('name')
        self.status = kwargs.get('status')
class ResourceActionCancelData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionCancel event. This is raised when a resource action operation is canceled.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceActionCancelData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceActionFailureData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionFailure event. This is raised when a resource action operation fails.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceActionFailureData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceActionSuccessData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceActionSuccess event. This is raised when a resource action operation succeeds.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceActionSuccessData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceDeleteCancelData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteCancel event. This is raised when a resource delete operation is canceled.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceDeleteCancelData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceDeleteFailureData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteFailure event. This is raised when a resource delete operation fails.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceDeleteFailureData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceDeleteSuccessData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceDeleteSuccess event. This is raised when a resource delete operation succeeds.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceDeleteSuccessData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceWriteCancelData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteCancel event. This is raised when a resource create or update operation is canceled.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceWriteCancelData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceWriteFailureData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteFailure event. This is raised when a resource create or update operation fails.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceWriteFailureData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ResourceWriteSuccessData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a Microsoft.Resources.ResourceWriteSuccess event. This is raised when a resource create or update operation succeeds.

    :param tenant_id: The tenant ID of the resource.
    :type tenant_id: str
    :param subscription_id: The subscription ID of the resource.
    :type subscription_id: str
    :param resource_group: The resource group of the resource.
    :type resource_group: str
    :param resource_provider: The resource provider performing the operation.
    :type resource_provider: str
    :param resource_uri: The URI of the resource in the operation.
    :type resource_uri: str
    :param operation_name: The operation that was performed.
    :type operation_name: str
    :param status: The status of the operation.
    :type status: str
    :param authorization: The requested authorization for the operation.
    :type authorization: str
    :param claims: The properties of the claims.
    :type claims: str
    :param correlation_id: An operation ID used for troubleshooting.
    :type correlation_id: str
    :param http_request: The details of the operation.
    :type http_request: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'operation_name': {'key': 'operationName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'authorization': {'key': 'authorization', 'type': 'str'},
        'claims': {'key': 'claims', 'type': 'str'},
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'http_request': {'key': 'httpRequest', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceWriteSuccessData, self).__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for field in ('tenant_id', 'subscription_id', 'resource_group',
                      'resource_provider', 'resource_uri', 'operation_name',
                      'status', 'authorization', 'claims', 'correlation_id',
                      'http_request'):
            setattr(self, field, kwargs.get(field))
class ServiceBusActiveMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.ServiceBus.ActiveMessagesAvailablePeriodicNotifications event.

    :param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
    :type namespace_name: str
    :param request_uri: The endpoint of the Microsoft.ServiceBus resource.
    :type request_uri: str
    :param entity_type: The entity type of the Microsoft.ServiceBus resource; one of
     'queue' or 'subscriber'.
    :type entity_type: str
    :param queue_name: The name of the Microsoft.ServiceBus queue; null when the entity
     type is 'subscriber'.
    :type queue_name: str
    :param topic_name: The name of the Microsoft.ServiceBus topic; null when the entity
     type is 'queue'.
    :type topic_name: str
    :param subscription_name: The name of the Microsoft.ServiceBus topic's subscription;
     null when the entity type is 'queue'.
    :type subscription_name: str
    """

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.namespace_name = kwargs.get("namespace_name")
        self.request_uri = kwargs.get("request_uri")
        self.entity_type = kwargs.get("entity_type")
        self.queue_name = kwargs.get("queue_name")
        self.topic_name = kwargs.get("topic_name")
        self.subscription_name = kwargs.get("subscription_name")
class ServiceBusActiveMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.ServiceBus.ActiveMessagesAvailableWithNoListeners event.

    :param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
    :type namespace_name: str
    :param request_uri: The endpoint of the Microsoft.ServiceBus resource.
    :type request_uri: str
    :param entity_type: The entity type of the Microsoft.ServiceBus resource; one of
     'queue' or 'subscriber'.
    :type entity_type: str
    :param queue_name: The name of the Microsoft.ServiceBus queue; null when the entity
     type is 'subscriber'.
    :type queue_name: str
    :param topic_name: The name of the Microsoft.ServiceBus topic; null when the entity
     type is 'queue'.
    :type topic_name: str
    :param subscription_name: The name of the Microsoft.ServiceBus topic's subscription;
     null when the entity type is 'queue'.
    :type subscription_name: str
    """

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.namespace_name = kwargs.get("namespace_name")
        self.request_uri = kwargs.get("request_uri")
        self.entity_type = kwargs.get("entity_type")
        self.queue_name = kwargs.get("queue_name")
        self.topic_name = kwargs.get("topic_name")
        self.subscription_name = kwargs.get("subscription_name")
class ServiceBusDeadletterMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.ServiceBus.DeadletterMessagesAvailablePeriodicNotifications event.

    :param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
    :type namespace_name: str
    :param request_uri: The endpoint of the Microsoft.ServiceBus resource.
    :type request_uri: str
    :param entity_type: The entity type of the Microsoft.ServiceBus resource; one of
     'queue' or 'subscriber'.
    :type entity_type: str
    :param queue_name: The name of the Microsoft.ServiceBus queue; null when the entity
     type is 'subscriber'.
    :type queue_name: str
    :param topic_name: The name of the Microsoft.ServiceBus topic; null when the entity
     type is 'queue'.
    :type topic_name: str
    :param subscription_name: The name of the Microsoft.ServiceBus topic's subscription;
     null when the entity type is 'queue'.
    :type subscription_name: str
    """

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.namespace_name = kwargs.get("namespace_name")
        self.request_uri = kwargs.get("request_uri")
        self.entity_type = kwargs.get("entity_type")
        self.queue_name = kwargs.get("queue_name")
        self.topic_name = kwargs.get("topic_name")
        self.subscription_name = kwargs.get("subscription_name")
class ServiceBusDeadletterMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.ServiceBus.DeadletterMessagesAvailableWithNoListenersEvent event.

    :param namespace_name: The namespace name of the Microsoft.ServiceBus resource.
    :type namespace_name: str
    :param request_uri: The endpoint of the Microsoft.ServiceBus resource.
    :type request_uri: str
    :param entity_type: The entity type of the Microsoft.ServiceBus resource; one of
     'queue' or 'subscriber'.
    :type entity_type: str
    :param queue_name: The name of the Microsoft.ServiceBus queue; null when the entity
     type is 'subscriber'.
    :type queue_name: str
    :param topic_name: The name of the Microsoft.ServiceBus topic; null when the entity
     type is 'queue'.
    :type topic_name: str
    :param subscription_name: The name of the Microsoft.ServiceBus topic's subscription;
     null when the entity type is 'queue'.
    :type subscription_name: str
    """

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.namespace_name = kwargs.get("namespace_name")
        self.request_uri = kwargs.get("request_uri")
        self.entity_type = kwargs.get("entity_type")
        self.queue_name = kwargs.get("queue_name")
        self.topic_name = kwargs.get("topic_name")
        self.subscription_name = kwargs.get("subscription_name")
class SignalRServiceClientConnectionConnectedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.SignalRService.ClientConnectionConnected event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param hub_name: The hub of the connected client connection.
    :type hub_name: str
    :param connection_id: The connection Id of the connected client connection.
    :type connection_id: str
    :param user_id: The user Id of the connected client connection.
    :type user_id: str
    """

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "hub_name": {"key": "hubName", "type": "str"},
        "connection_id": {"key": "connectionId", "type": "str"},
        "user_id": {"key": "userId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.timestamp = kwargs.get("timestamp")
        self.hub_name = kwargs.get("hub_name")
        self.connection_id = kwargs.get("connection_id")
        self.user_id = kwargs.get("user_id")
class SignalRServiceClientConnectionDisconnectedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.SignalRService.ClientConnectionDisconnected event.

    :param timestamp: The time at which the event occurred.
    :type timestamp: ~datetime.datetime
    :param hub_name: The hub of the connected client connection.
    :type hub_name: str
    :param connection_id: The connection Id of the connected client connection.
    :type connection_id: str
    :param user_id: The user Id of the connected client connection.
    :type user_id: str
    :param error_message: The message of the error that caused the client connection to
     disconnect.
    :type error_message: str
    """

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "hub_name": {"key": "hubName", "type": "str"},
        "connection_id": {"key": "connectionId", "type": "str"},
        "user_id": {"key": "userId", "type": "str"},
        "error_message": {"key": "errorMessage", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.timestamp = kwargs.get("timestamp")
        self.hub_name = kwargs.get("hub_name")
        self.connection_id = kwargs.get("connection_id")
        self.user_id = kwargs.get("user_id")
        self.error_message = kwargs.get("error_message")
class StorageAsyncOperationInitiatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.AsyncOperationInitiated event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the Storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param content_type: The content type of the blob, as returned in the Content-Type
     header from the blob.
    :type content_type: str
    :param content_length: The size of the blob in bytes, as returned in the
     Content-Length header from the blob.
    :type content_length: long
    :param blob_type: The type of blob.
    :type blob_type: str
    :param url: The path to the blob.
    :type url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular blob name; standard string comparison gives the relative order of
     two events on the same blob name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "content_length": {"key": "contentLength", "type": "long"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.content_type = kwargs.get("content_type")
        self.content_length = kwargs.get("content_length")
        self.blob_type = kwargs.get("blob_type")
        self.url = kwargs.get("url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageBlobCreatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.BlobCreated event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the Storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param e_tag: The etag of the blob at the time this event was triggered.
    :type e_tag: str
    :param content_type: The content type of the blob, as returned in the Content-Type
     header from the blob.
    :type content_type: str
    :param content_length: The size of the blob in bytes, as returned in the
     Content-Length header from the blob.
    :type content_length: long
    :param content_offset: The offset of the blob in bytes.
    :type content_offset: long
    :param blob_type: The type of blob.
    :type blob_type: str
    :param url: The path to the blob.
    :type url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular blob name; standard string comparison gives the relative order of
     two events on the same blob name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "content_length": {"key": "contentLength", "type": "long"},
        "content_offset": {"key": "contentOffset", "type": "long"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.e_tag = kwargs.get("e_tag")
        self.content_type = kwargs.get("content_type")
        self.content_length = kwargs.get("content_length")
        self.content_offset = kwargs.get("content_offset")
        self.blob_type = kwargs.get("blob_type")
        self.url = kwargs.get("url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageBlobDeletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.BlobDeleted event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the Storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param content_type: The content type of the blob, as returned in the Content-Type
     header from the blob.
    :type content_type: str
    :param blob_type: The type of blob.
    :type blob_type: str
    :param url: The path to the blob.
    :type url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular blob name; standard string comparison gives the relative order of
     two events on the same blob name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.content_type = kwargs.get("content_type")
        self.blob_type = kwargs.get("blob_type")
        self.url = kwargs.get("url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageBlobRenamedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.BlobRenamed event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param source_url: The path to the blob that was renamed.
    :type source_url: str
    :param destination_url: The new path to the blob after the rename operation.
    :type destination_url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular blob name; standard string comparison gives the relative order of
     two events on the same blob name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "source_url": {"key": "sourceUrl", "type": "str"},
        "destination_url": {"key": "destinationUrl", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.source_url = kwargs.get("source_url")
        self.destination_url = kwargs.get("destination_url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageBlobTierChangedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.BlobTierChanged event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the Storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param content_type: The content type of the blob, as returned in the Content-Type
     header from the blob.
    :type content_type: str
    :param content_length: The size of the blob in bytes, as returned in the
     Content-Length header from the blob.
    :type content_length: long
    :param blob_type: The type of blob.
    :type blob_type: str
    :param url: The path to the blob.
    :type url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular blob name; standard string comparison gives the relative order of
     two events on the same blob name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "content_length": {"key": "contentLength", "type": "long"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.content_type = kwargs.get("content_type")
        self.content_length = kwargs.get("content_length")
        self.blob_type = kwargs.get("blob_type")
        self.url = kwargs.get("url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageDirectoryCreatedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.DirectoryCreated event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param e_tag: The etag of the directory at the time this event was triggered.
    :type e_tag: str
    :param url: The path to the directory.
    :type url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular directory name; standard string comparison gives the relative
     order of two events on the same directory name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.e_tag = kwargs.get("e_tag")
        self.url = kwargs.get("url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageDirectoryDeletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.DirectoryDeleted event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param url: The path to the deleted directory.
    :type url: str
    :param recursive: Is this event for a recursive delete operation.
    :type recursive: bool
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular directory name; standard string comparison gives the relative
     order of two events on the same directory name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "recursive": {"key": "recursive", "type": "bool"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.url = kwargs.get("url")
        self.recursive = kwargs.get("recursive")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageDirectoryRenamedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.DirectoryRenamed event.

    :param api: The name of the API/operation that triggered this event.
    :type api: str
    :param client_request_id: A request id provided by the client of the storage API
     operation that triggered this event.
    :type client_request_id: str
    :param request_id: The request id generated by the storage service for the storage
     API operation that triggered this event.
    :type request_id: str
    :param source_url: The path to the directory that was renamed.
    :type source_url: str
    :param destination_url: The new path to the directory after the rename operation.
    :type destination_url: str
    :param sequencer: An opaque string value representing the logical sequence of events
     for any particular directory name; standard string comparison gives the relative
     order of two events on the same directory name.
    :type sequencer: str
    :param identity: The identity of the requester that triggered this event.
    :type identity: str
    :param storage_diagnostics: For service use only. Diagnostic data occasionally
     included by the Azure Storage service; event consumers should ignore it.
    :type storage_diagnostics: object
    """

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "source_url": {"key": "sourceUrl", "type": "str"},
        "destination_url": {"key": "destinationUrl", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.api = kwargs.get("api")
        self.client_request_id = kwargs.get("client_request_id")
        self.request_id = kwargs.get("request_id")
        self.source_url = kwargs.get("source_url")
        self.destination_url = kwargs.get("destination_url")
        self.sequencer = kwargs.get("sequencer")
        self.identity = kwargs.get("identity")
        self.storage_diagnostics = kwargs.get("storage_diagnostics")
class StorageLifecyclePolicyActionSummaryDetail(msrest.serialization.Model):
    """Execution statistics of a specific policy action in a Blob Management cycle.

    :param total_objects_count: Total number of objects to be acted on by this action.
    :type total_objects_count: long
    :param success_count: Number of successful operations of this action.
    :type success_count: long
    :param error_list: Error messages of this action, if any.
    :type error_list: str
    """

    _attribute_map = {
        "total_objects_count": {"key": "totalObjectsCount", "type": "long"},
        "success_count": {"key": "successCount", "type": "long"},
        "error_list": {"key": "errorList", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.total_objects_count = kwargs.get("total_objects_count")
        self.success_count = kwargs.get("success_count")
        self.error_list = kwargs.get("error_list")
class StorageLifecyclePolicyCompletedEventData(msrest.serialization.Model):
    """Schema of the Data property of an EventGridEvent for a
    Microsoft.Storage.LifecyclePolicyCompleted event.

    :param schedule_time: The time the policy task was scheduled.
    :type schedule_time: str
    :param delete_summary: Execution statistics of the delete action in this Blob
     Management cycle.
    :type delete_summary:
     ~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
    :param tier_to_cool_summary: Execution statistics of the tier-to-cool action in this
     Blob Management cycle.
    :type tier_to_cool_summary:
     ~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
    :param tier_to_archive_summary: Execution statistics of the tier-to-archive action in
     this Blob Management cycle.
    :type tier_to_archive_summary:
     ~event_grid_publisher_client.models.StorageLifecyclePolicyActionSummaryDetail
    """

    _attribute_map = {
        "schedule_time": {"key": "scheduleTime", "type": "str"},
        "delete_summary": {"key": "deleteSummary", "type": "StorageLifecyclePolicyActionSummaryDetail"},
        "tier_to_cool_summary": {"key": "tierToCoolSummary", "type": "StorageLifecyclePolicyActionSummaryDetail"},
        "tier_to_archive_summary": {"key": "tierToArchiveSummary", "type": "StorageLifecyclePolicyActionSummaryDetail"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are optional and default to None when not provided.
        self.schedule_time = kwargs.get("schedule_time")
        self.delete_summary = kwargs.get("delete_summary")
        self.tier_to_cool_summary = kwargs.get("tier_to_cool_summary")
        self.tier_to_archive_summary = kwargs.get("tier_to_archive_summary")
class SubscriptionDeletedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a
    Microsoft.EventGrid.SubscriptionDeletedEvent event.

    Variables are populated by the server only and are ignored when sending
    a request.

    :ivar event_subscription_id: The Azure resource ID of the deleted event
     subscription.
    :vartype event_subscription_id: str
    """

    _validation = {
        'event_subscription_id': {'readonly': True},
    }

    _attribute_map = {
        'event_subscription_id': {'key': 'eventSubscriptionId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only, server-populated field: never taken from kwargs.
        self.event_subscription_id = None
class SubscriptionValidationEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a
    Microsoft.EventGrid.SubscriptionValidationEvent event.

    Variables are populated by the server only and are ignored when sending
    a request.

    :ivar validation_code: The validation code sent by Azure Event Grid to
     validate an event subscription. To complete the validation handshake, the
     subscriber must either respond with this validation code as part of the
     validation response, or perform a GET request on the validationUrl
     (available starting version 2018-05-01-preview).
    :vartype validation_code: str
    :ivar validation_url: The validation URL sent by Azure Event Grid
     (available starting version 2018-05-01-preview). To complete the
     validation handshake, the subscriber must either respond with the
     validationCode as part of the validation response, or perform a GET
     request on the validationUrl.
    :vartype validation_url: str
    """

    _validation = {
        'validation_code': {'readonly': True},
        'validation_url': {'readonly': True},
    }

    _attribute_map = {
        'validation_code': {'key': 'validationCode', 'type': 'str'},
        'validation_url': {'key': 'validationUrl', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Both fields are read-only and server-populated; never from kwargs.
        self.validation_code = None
        self.validation_url = None
class SubscriptionValidationResponse(msrest.serialization.Model):
    """Response body a subscriber can use to complete an event subscription
    validation handshake when using the validationCode received in a
    SubscriptionValidationEvent (the validationUrl is the alternative).

    Optional keyword argument:
      * validation_response (str): the validation response sent by the
        subscriber to Azure Event Grid to complete the validation of an
        event subscription.
    """

    _attribute_map = {
        'validation_response': {'key': 'validationResponse', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Single optional field; defaults to None when omitted.
        self.validation_response = kwargs.get('validation_response')
class WebAppServicePlanUpdatedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.AppServicePlanUpdated event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_service_plan_event_type_detail
        (~event_grid_publisher_client.models.AppServicePlanEventTypeDetail):
        detail of the action on the app service plan.
      * sku (~event_grid_publisher_client.models.WebAppServicePlanUpdatedEventDataSku):
        sku of the app service plan.
      * name (str): name of the app service plan that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the app service plan API operation
        that triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_service_plan_event_type_detail': {'key': 'appServicePlanEventTypeDetail', 'type': 'AppServicePlanEventTypeDetail'},
        'sku': {'key': 'sku', 'type': 'WebAppServicePlanUpdatedEventDataSku'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_service_plan_event_type_detail', 'sku', 'name',
                           'client_request_id', 'correlation_request_id',
                           'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebAppServicePlanUpdatedEventDataSku(msrest.serialization.Model):
    """Sku of an app service plan.

    Optional keyword arguments (default to ``None`` when omitted), all str:
    ``name``, ``tier``, ``size``, ``family``, ``capacity`` — the
    corresponding properties of the app service plan sku.
    """

    # NOTE: the wire keys for tier/size/family/capacity are capitalized in
    # the service payload, hence the non-camelCase 'key' values below.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'Tier', 'type': 'str'},
        'size': {'key': 'Size', 'type': 'str'},
        'family': {'key': 'Family', 'type': 'str'},
        'capacity': {'key': 'Capacity', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for field_name in ('name', 'tier', 'size', 'family', 'capacity'):
            setattr(self, field_name, kwargs.get(field_name))
class WebAppUpdatedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.AppUpdated event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebBackupOperationCompletedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.BackupOperationCompleted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebBackupOperationFailedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.BackupOperationFailed event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebBackupOperationStartedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.BackupOperationStarted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebRestoreOperationCompletedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.RestoreOperationCompleted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebRestoreOperationFailedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.RestoreOperationFailed event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebRestoreOperationStartedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.RestoreOperationStarted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebSlotSwapCompletedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.SlotSwapCompleted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebSlotSwapFailedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.SlotSwapFailed event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebSlotSwapStartedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.SlotSwapStarted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebSlotSwapWithPreviewCancelledEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.SlotSwapWithPreviewCancelled event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
class WebSlotSwapWithPreviewStartedEventData(msrest.serialization.Model):
    """Data payload of an EventGridEvent for a Microsoft.Web.SlotSwapWithPreviewStarted event.

    Optional keyword arguments (default to ``None`` when omitted):
      * app_event_type_detail (~event_grid_publisher_client.models.AppEventTypeDetail):
        detail of the action on the app.
      * name (str): name of the web site that had this event.
      * client_request_id / correlation_request_id / request_id (str): ids
        generated by the app service for the site API operation that
        triggered this event.
      * address (str): HTTP request URL of this operation.
      * verb (str): HTTP verb of this operation.
    """

    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields optional; missing kwargs default to None.
        for field_name in ('app_event_type_detail', 'name', 'client_request_id',
                           'correlation_request_id', 'request_id', 'address', 'verb'):
            setattr(self, field_name, kwargs.get(field_name))
| 43.177377 | 276 | 0.671298 |
import msrest.serialization
class AcsChatEventBaseProperties(msrest.serialization.Model):
    """Base schema shared by ACS chat events that carry a recipient.

    Optional keyword arguments (default to ``None`` when omitted):
      * recipient_communication_identifier (CommunicationIdentifierModel):
        communication identifier of the event recipient.
      * transaction_id (str): transaction id of the event.
      * thread_id (str): chat thread id.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for field_name in ('recipient_communication_identifier',
                           'transaction_id', 'thread_id'):
            setattr(self, field_name, kwargs.get(field_name))
class AcsChatEventInThreadBaseProperties(msrest.serialization.Model):
    """Base schema shared by ACS chat thread-level events (no recipient).

    Optional keyword arguments (default to ``None`` when omitted):
      * transaction_id (str): transaction id of the event.
      * thread_id (str): chat thread id.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.transaction_id = kwargs.get('transaction_id')
        self.thread_id = kwargs.get('thread_id')
class AcsChatMessageEventBaseProperties(AcsChatEventBaseProperties):
    """Base schema for ACS chat message events addressed to a recipient.

    Extends :class:`AcsChatEventBaseProperties` with optional keyword
    arguments (default to ``None`` when omitted):
      * message_id (str): id of the chat message.
      * sender_communication_identifier (CommunicationIdentifierModel):
        communication identifier of the message sender.
      * sender_display_name (str): display name of the sender.
      * compose_time (~datetime.datetime): time the message was composed.
      * type (str): type of the message.
      * version (long): version of the message.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        # Base __init__ populates the recipient/transaction/thread fields.
        super().__init__(**kwargs)
        for field_name in ('message_id', 'sender_communication_identifier',
                           'sender_display_name', 'compose_time', 'type', 'version'):
            setattr(self, field_name, kwargs.get(field_name))
class AcsChatMessageDeletedEventData(AcsChatMessageEventBaseProperties):
    """Data payload of an EventGridEvent for a
    Microsoft.Communication.ChatMessageDeleted event.

    Extends :class:`AcsChatMessageEventBaseProperties` with one optional
    keyword argument:
      * delete_time (~datetime.datetime): time the message was deleted.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        # Base classes populate all shared message/thread fields.
        super().__init__(**kwargs)
        self.delete_time = kwargs.get('delete_time')
class AcsChatMessageEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
    """Base properties shared by thread-scoped chat-message events.

    :ivar message_id: The chat message id.
    :ivar sender_communication_identifier: Identifier of the message sender.
    :ivar sender_display_name: Display name of the sender.
    :ivar compose_time: Time the message was composed (ISO-8601 on the wire).
    :ivar type: The chat message type.
    :ivar version: The message version.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_id = kwargs.get('message_id')
        self.sender_communication_identifier = kwargs.get('sender_communication_identifier')
        self.sender_display_name = kwargs.get('sender_display_name')
        self.compose_time = kwargs.get('compose_time')
        self.type = kwargs.get('type')
        self.version = kwargs.get('version')
class AcsChatMessageDeletedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
    """Event data for a deleted chat message, delivered per thread.

    :ivar delete_time: Time the message was deleted (ISO-8601 on the wire).
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.delete_time = kwargs.get('delete_time')
class AcsChatMessageEditedEventData(AcsChatMessageEventBaseProperties):
    """Event data for an edited chat message, delivered per recipient.

    :ivar message_body: The edited message body.
    :ivar edit_time: Time the message was edited (ISO-8601 on the wire).
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'message_body': {'key': 'messageBody', 'type': 'str'},
        'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_body = kwargs.get('message_body')
        self.edit_time = kwargs.get('edit_time')
class AcsChatMessageEditedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
    """Event data for an edited chat message, delivered per thread.

    :ivar message_body: The edited message body.
    :ivar edit_time: Time the message was edited (ISO-8601 on the wire).
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'message_body': {'key': 'messageBody', 'type': 'str'},
        'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_body = kwargs.get('message_body')
        self.edit_time = kwargs.get('edit_time')
class AcsChatMessageReceivedEventData(AcsChatMessageEventBaseProperties):
    """Event data for a received chat message, delivered per recipient.

    :ivar message_body: The message body.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'message_body': {'key': 'messageBody', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_body = kwargs.get('message_body')
class AcsChatMessageReceivedInThreadEventData(AcsChatMessageEventInThreadBaseProperties):
    """Event data for a received chat message, delivered per thread.

    :ivar message_body: The message body.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'message_id': {'key': 'messageId', 'type': 'str'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'compose_time': {'key': 'composeTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'version': {'key': 'version', 'type': 'long'},
        'message_body': {'key': 'messageBody', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_body = kwargs.get('message_body')
class AcsChatParticipantAddedToThreadEventData(AcsChatEventInThreadBaseProperties):
    """Event data for a participant added to a chat thread, delivered per thread.

    :ivar time: Time the participant was added (ISO-8601 on the wire).
    :ivar added_by_communication_identifier: Identifier of the user who added the participant.
    :ivar participant_added: The participant that was added.
    :ivar version: The thread version.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.time = kwargs.get('time')
        self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier')
        self.participant_added = kwargs.get('participant_added')
        self.version = kwargs.get('version')
class AcsChatThreadEventBaseProperties(AcsChatEventBaseProperties):
    """Base properties shared by per-recipient chat-thread events.

    :ivar create_time: Time the thread was created (ISO-8601 on the wire).
    :ivar version: The thread version.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.create_time = kwargs.get('create_time')
        self.version = kwargs.get('version')
class AcsChatParticipantAddedToThreadWithUserEventData(AcsChatThreadEventBaseProperties):
    """Event data for a participant added to a chat thread, delivered per recipient.

    :ivar time: Time the participant was added (ISO-8601 on the wire).
    :ivar added_by_communication_identifier: Identifier of the user who added the participant.
    :ivar participant_added: The participant that was added.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'added_by_communication_identifier': {'key': 'addedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'participant_added': {'key': 'participantAdded', 'type': 'AcsChatThreadParticipantProperties'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.time = kwargs.get('time')
        self.added_by_communication_identifier = kwargs.get('added_by_communication_identifier')
        self.participant_added = kwargs.get('participant_added')
class AcsChatParticipantRemovedFromThreadEventData(AcsChatEventInThreadBaseProperties):
    """Event data for a participant removed from a chat thread, delivered per thread.

    :ivar time: Time the participant was removed (ISO-8601 on the wire).
    :ivar removed_by_communication_identifier: Identifier of the user who removed the participant.
    :ivar participant_removed: The participant that was removed.
    :ivar version: The thread version.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.time = kwargs.get('time')
        self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier')
        self.participant_removed = kwargs.get('participant_removed')
        self.version = kwargs.get('version')
class AcsChatParticipantRemovedFromThreadWithUserEventData(AcsChatThreadEventBaseProperties):
    """Event data for a participant removed from a chat thread, delivered per recipient.

    :ivar time: Time the participant was removed (ISO-8601 on the wire).
    :ivar removed_by_communication_identifier: Identifier of the user who removed the participant.
    :ivar participant_removed: The participant that was removed.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'removed_by_communication_identifier': {'key': 'removedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'participant_removed': {'key': 'participantRemoved', 'type': 'AcsChatThreadParticipantProperties'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.time = kwargs.get('time')
        self.removed_by_communication_identifier = kwargs.get('removed_by_communication_identifier')
        self.participant_removed = kwargs.get('participant_removed')
class AcsChatThreadEventInThreadBaseProperties(AcsChatEventInThreadBaseProperties):
    """Base properties shared by thread-scoped chat-thread events.

    :ivar create_time: Time the thread was created (ISO-8601 on the wire).
    :ivar version: The thread version.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.create_time = kwargs.get('create_time')
        self.version = kwargs.get('version')
class AcsChatThreadCreatedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Event data for a created chat thread, delivered per thread.

    :ivar created_by_communication_identifier: Identifier of the thread creator.
    :ivar properties: The thread properties (arbitrary key/object map).
    :ivar participants: The list of thread participants.
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'created_by_communication_identifier': {'key': 'createdByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'participants': {'key': 'participants', 'type': '[AcsChatThreadParticipantProperties]'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.created_by_communication_identifier = kwargs.get('created_by_communication_identifier')
        self.properties = kwargs.get('properties')
        self.participants = kwargs.get('participants')
class AcsChatThreadCreatedWithUserEventData(AcsChatThreadEventBaseProperties):
    """Event data for a created chat thread, delivered per recipient.

    :ivar created_by_communication_identifier: Identifier of the thread creator.
    :ivar properties: The thread properties (arbitrary key/object map).
    :ivar participants: The list of thread participants.
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'created_by_communication_identifier': {'key': 'createdByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'properties': {'key': 'properties', 'type': '{object}'},
        'participants': {'key': 'participants', 'type': '[AcsChatThreadParticipantProperties]'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.created_by_communication_identifier = kwargs.get('created_by_communication_identifier')
        self.properties = kwargs.get('properties')
        self.participants = kwargs.get('participants')
class AcsChatThreadDeletedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Event data for a deleted chat thread, delivered per thread.

    :ivar deleted_by_communication_identifier: Identifier of the user who deleted the thread.
    :ivar delete_time: Time the thread was deleted (ISO-8601 on the wire).
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'deleted_by_communication_identifier': {'key': 'deletedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.deleted_by_communication_identifier = kwargs.get('deleted_by_communication_identifier')
        self.delete_time = kwargs.get('delete_time')
class AcsChatThreadParticipantProperties(msrest.serialization.Model):
    """Properties of a chat thread participant.

    :ivar display_name: The participant's display name.
    :ivar participant_communication_identifier: Identifier of the participant.
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'participant_communication_identifier': {'key': 'participantCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.display_name = kwargs.get('display_name')
        self.participant_communication_identifier = kwargs.get('participant_communication_identifier')
class AcsChatThreadPropertiesUpdatedEventData(AcsChatThreadEventInThreadBaseProperties):
    """Event data for updated chat-thread properties, delivered per thread.

    :ivar edited_by_communication_identifier: Identifier of the user who edited the properties.
    :ivar edit_time: Time the properties were edited (ISO-8601 on the wire).
    :ivar properties: The updated thread properties (arbitrary key/object map).
    """

    _attribute_map = {
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'edited_by_communication_identifier': {'key': 'editedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.edited_by_communication_identifier = kwargs.get('edited_by_communication_identifier')
        self.edit_time = kwargs.get('edit_time')
        self.properties = kwargs.get('properties')
class AcsChatThreadPropertiesUpdatedPerUserEventData(AcsChatThreadEventBaseProperties):
    """Event data for updated chat-thread properties, delivered per recipient.

    :ivar edited_by_communication_identifier: Identifier of the user who edited the properties.
    :ivar edit_time: Time the properties were edited (ISO-8601 on the wire).
    :ivar properties: The updated thread properties (arbitrary key/object map).
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'edited_by_communication_identifier': {'key': 'editedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'edit_time': {'key': 'editTime', 'type': 'iso-8601'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.edited_by_communication_identifier = kwargs.get('edited_by_communication_identifier')
        self.edit_time = kwargs.get('edit_time')
        self.properties = kwargs.get('properties')
class AcsChatThreadWithUserDeletedEventData(AcsChatThreadEventBaseProperties):
    """Event data for a deleted chat thread, delivered per recipient.

    :ivar deleted_by_communication_identifier: Identifier of the user who deleted the thread.
    :ivar delete_time: Time the thread was deleted (ISO-8601 on the wire).
    """

    _attribute_map = {
        'recipient_communication_identifier': {'key': 'recipientCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'transaction_id': {'key': 'transactionId', 'type': 'str'},
        'thread_id': {'key': 'threadId', 'type': 'str'},
        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
        'version': {'key': 'version', 'type': 'long'},
        'deleted_by_communication_identifier': {'key': 'deletedByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'delete_time': {'key': 'deleteTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.deleted_by_communication_identifier = kwargs.get('deleted_by_communication_identifier')
        self.delete_time = kwargs.get('delete_time')
class AcsRecordingChunkInfoProperties(msrest.serialization.Model):
    """Properties of a single recording chunk.

    :ivar document_id: The chunk's document id.
    :ivar index: The chunk's index within the recording.
    :ivar end_reason: Why the chunk ended.
    """

    _attribute_map = {
        'document_id': {'key': 'documentId', 'type': 'str'},
        'index': {'key': 'index', 'type': 'long'},
        'end_reason': {'key': 'endReason', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.document_id = kwargs.get('document_id')
        self.index = kwargs.get('index')
        self.end_reason = kwargs.get('end_reason')
class AcsRecordingFileStatusUpdatedEventData(msrest.serialization.Model):
    """Event data for a recording file status update.

    :ivar recording_storage_info: Storage details of the recording's chunks.
    :ivar recording_start_time: When the recording started (ISO-8601 on the wire).
    :ivar recording_duration_ms: Recording duration in milliseconds.
    :ivar session_end_reason: Why the recording session ended.
    """

    _attribute_map = {
        'recording_storage_info': {'key': 'recordingStorageInfo', 'type': 'AcsRecordingStorageInfoProperties'},
        'recording_start_time': {'key': 'recordingStartTime', 'type': 'iso-8601'},
        'recording_duration_ms': {'key': 'recordingDurationMs', 'type': 'long'},
        'session_end_reason': {'key': 'sessionEndReason', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.recording_storage_info = kwargs.get('recording_storage_info')
        self.recording_start_time = kwargs.get('recording_start_time')
        self.recording_duration_ms = kwargs.get('recording_duration_ms')
        self.session_end_reason = kwargs.get('session_end_reason')
class AcsRecordingStorageInfoProperties(msrest.serialization.Model):
    """Storage information for a recording.

    :ivar recording_chunks: The list of recording chunks.
    """

    _attribute_map = {
        'recording_chunks': {'key': 'recordingChunks', 'type': '[AcsRecordingChunkInfoProperties]'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.recording_chunks = kwargs.get('recording_chunks')
class AcsSmsDeliveryAttemptProperties(msrest.serialization.Model):
    """Properties of a single SMS delivery attempt.

    :ivar timestamp: When the attempt was made (ISO-8601 on the wire).
    :ivar segments_succeeded: Number of message segments delivered successfully.
    :ivar segments_failed: Number of message segments that failed delivery.
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'segments_succeeded': {'key': 'segmentsSucceeded', 'type': 'int'},
        'segments_failed': {'key': 'segmentsFailed', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.timestamp = kwargs.get('timestamp')
        self.segments_succeeded = kwargs.get('segments_succeeded')
        self.segments_failed = kwargs.get('segments_failed')
class AcsSmsEventBaseProperties(msrest.serialization.Model):
    """Base properties shared by SMS events.

    :ivar message_id: The SMS message id.
    :ivar from_property: The sender number ('from' on the wire; renamed because
        'from' is a Python keyword).
    :ivar to: The recipient number.
    """

    _attribute_map = {
        'message_id': {'key': 'messageId', 'type': 'str'},
        'from_property': {'key': 'from', 'type': 'str'},
        'to': {'key': 'to', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message_id = kwargs.get('message_id')
        self.from_property = kwargs.get('from_property')
        self.to = kwargs.get('to')
class AcsSmsDeliveryReportReceivedEventData(AcsSmsEventBaseProperties):
    """Event data for a received SMS delivery report.

    :ivar delivery_status: Delivery status of the message.
    :ivar delivery_status_details: Additional delivery status details.
    :ivar delivery_attempts: List of delivery attempts made.
    :ivar received_timestamp: When the report was received (ISO-8601 on the wire).
    :ivar tag: Customer-supplied tag associated with the message.
    """

    _attribute_map = {
        'message_id': {'key': 'messageId', 'type': 'str'},
        'from_property': {'key': 'from', 'type': 'str'},
        'to': {'key': 'to', 'type': 'str'},
        'delivery_status': {'key': 'deliveryStatus', 'type': 'str'},
        'delivery_status_details': {'key': 'deliveryStatusDetails', 'type': 'str'},
        'delivery_attempts': {'key': 'deliveryAttempts', 'type': '[AcsSmsDeliveryAttemptProperties]'},
        'received_timestamp': {'key': 'receivedTimestamp', 'type': 'iso-8601'},
        'tag': {'key': 'tag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.delivery_status = kwargs.get('delivery_status')
        self.delivery_status_details = kwargs.get('delivery_status_details')
        self.delivery_attempts = kwargs.get('delivery_attempts')
        self.received_timestamp = kwargs.get('received_timestamp')
        self.tag = kwargs.get('tag')
class AcsSmsReceivedEventData(AcsSmsEventBaseProperties):
    """Event data for a received SMS.

    :ivar message: The SMS content.
    :ivar received_timestamp: When the SMS was received (ISO-8601 on the wire).
    """

    _attribute_map = {
        'message_id': {'key': 'messageId', 'type': 'str'},
        'from_property': {'key': 'from', 'type': 'str'},
        'to': {'key': 'to', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'received_timestamp': {'key': 'receivedTimestamp', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.message = kwargs.get('message')
        self.received_timestamp = kwargs.get('received_timestamp')
class AppConfigurationKeyValueDeletedEventData(msrest.serialization.Model):
    """Event data for a deleted App Configuration key-value.

    :ivar key: The configuration key.
    :ivar label: The configuration label, if any.
    :ivar etag: The etag of the key-value.
    :ivar sync_token: Sync token for the App Configuration store.
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'sync_token': {'key': 'syncToken', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.key = kwargs.get('key')
        self.label = kwargs.get('label')
        self.etag = kwargs.get('etag')
        self.sync_token = kwargs.get('sync_token')
class AppConfigurationKeyValueModifiedEventData(msrest.serialization.Model):
    """Event data for a modified App Configuration key-value.

    :ivar key: The configuration key.
    :ivar label: The configuration label, if any.
    :ivar etag: The etag of the key-value.
    :ivar sync_token: Sync token for the App Configuration store.
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'sync_token': {'key': 'syncToken', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.key = kwargs.get('key')
        self.label = kwargs.get('label')
        self.etag = kwargs.get('etag')
        self.sync_token = kwargs.get('sync_token')
class AppEventTypeDetail(msrest.serialization.Model):
    """Detail of an app event type.

    :ivar action: The action performed.
    """

    _attribute_map = {
        'action': {'key': 'action', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.action = kwargs.get('action')
class AppServicePlanEventTypeDetail(msrest.serialization.Model):
    """Detail of an App Service plan event type.

    :ivar stamp_kind: Kind of environment the plan runs on.
    :ivar action: The action performed on the plan.
    :ivar status: Status of the action.
    """

    _attribute_map = {
        'stamp_kind': {'key': 'stampKind', 'type': 'str'},
        'action': {'key': 'action', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.stamp_kind = kwargs.get('stamp_kind')
        self.action = kwargs.get('action')
        self.status = kwargs.get('status')
class CloudEvent(msrest.serialization.Model):
    """CloudEvents envelope (id, source, type, specversion required).

    Required fields are read with ``kwargs[...]`` so a missing value raises
    ``KeyError`` at construction time, matching the ``_validation`` table.

    :ivar additional_properties: Extension attributes not covered by named fields.
    :ivar id: Event identifier. Required.
    :ivar source: Event source. Required.
    :ivar data: Event payload as a deserialized object.
    :ivar data_base64: Event payload as raw bytes (base64 on the wire).
    :ivar type: Event type. Required.
    :ivar time: Event timestamp (ISO-8601 on the wire).
    :ivar specversion: CloudEvents spec version. Required.
    :ivar dataschema: URI of the schema the data adheres to.
    :ivar datacontenttype: Content type of the data field.
    :ivar subject: Subject of the event within the source context.
    """

    _validation = {
        'id': {'required': True},
        'source': {'required': True},
        'type': {'required': True},
        'specversion': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'id': {'key': 'id', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'data': {'key': 'data', 'type': 'object'},
        'data_base64': {'key': 'data_base64', 'type': 'bytearray'},
        'type': {'key': 'type', 'type': 'str'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'specversion': {'key': 'specversion', 'type': 'str'},
        'dataschema': {'key': 'dataschema', 'type': 'str'},
        'datacontenttype': {'key': 'datacontenttype', 'type': 'str'},
        'subject': {'key': 'subject', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Required: id, source, type, specversion; all other fields default to None."""
        super().__init__(**kwargs)
        self.additional_properties = kwargs.get('additional_properties')
        self.id = kwargs['id']
        self.source = kwargs['source']
        self.data = kwargs.get('data')
        self.data_base64 = kwargs.get('data_base64')
        self.type = kwargs['type']
        self.time = kwargs.get('time')
        self.specversion = kwargs['specversion']
        self.dataschema = kwargs.get('dataschema')
        self.datacontenttype = kwargs.get('datacontenttype')
        self.subject = kwargs.get('subject')
class CommunicationIdentifierModel(msrest.serialization.Model):
    """Identifier of a communication participant; at most one concrete kind is set.

    :ivar raw_id: Raw string identifier.
    :ivar communication_user: Set when the identifier is a communication user.
    :ivar phone_number: Set when the identifier is a phone number.
    :ivar microsoft_teams_user: Set when the identifier is a Teams user.
    """

    _attribute_map = {
        'raw_id': {'key': 'rawId', 'type': 'str'},
        'communication_user': {'key': 'communicationUser', 'type': 'CommunicationUserIdentifierModel'},
        'phone_number': {'key': 'phoneNumber', 'type': 'PhoneNumberIdentifierModel'},
        'microsoft_teams_user': {'key': 'microsoftTeamsUser', 'type': 'MicrosoftTeamsUserIdentifierModel'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.raw_id = kwargs.get('raw_id')
        self.communication_user = kwargs.get('communication_user')
        self.phone_number = kwargs.get('phone_number')
        self.microsoft_teams_user = kwargs.get('microsoft_teams_user')
class CommunicationUserIdentifierModel(msrest.serialization.Model):
    """A communication user identifier.

    The required ``id`` is read with ``kwargs['id']`` so a missing value
    raises ``KeyError``, matching the ``_validation`` table.

    :ivar id: The user id. Required.
    """

    _validation = {
        'id': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Required keyword argument: id."""
        super().__init__(**kwargs)
        self.id = kwargs['id']
class ContainerRegistryArtifactEventData(msrest.serialization.Model):
    """Base event data for container-registry artifact events.

    :ivar id: The event id.
    :ivar timestamp: When the event occurred (ISO-8601 on the wire).
    :ivar action: The action that triggered the event.
    :ivar target: The artifact the event refers to.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get('id')
        self.timestamp = kwargs.get('timestamp')
        self.action = kwargs.get('action')
        self.target = kwargs.get('target')
class ContainerRegistryArtifactEventTarget(msrest.serialization.Model):
    """The artifact targeted by a container-registry artifact event.

    :ivar media_type: MIME type of the artifact.
    :ivar size: Size of the artifact in bytes.
    :ivar digest: Content digest of the artifact.
    :ivar repository: Repository name.
    :ivar tag: Artifact tag.
    :ivar name: Artifact name.
    :ivar version: Artifact version.
    """

    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'size': {'key': 'size', 'type': 'long'},
        'digest': {'key': 'digest', 'type': 'str'},
        'repository': {'key': 'repository', 'type': 'str'},
        'tag': {'key': 'tag', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.media_type = kwargs.get('media_type')
        self.size = kwargs.get('size')
        self.digest = kwargs.get('digest')
        self.repository = kwargs.get('repository')
        self.tag = kwargs.get('tag')
        self.name = kwargs.get('name')
        self.version = kwargs.get('version')
class ContainerRegistryChartDeletedEventData(ContainerRegistryArtifactEventData):
    """Event data for a deleted container-registry chart.

    Adds no fields; shares the artifact-event shape of its base class.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
class ContainerRegistryChartPushedEventData(ContainerRegistryArtifactEventData):
    """Event data for a pushed container-registry chart.

    Adds no fields; shares the artifact-event shape of its base class.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryArtifactEventTarget'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
class ContainerRegistryEventActor(msrest.serialization.Model):
    """Actor that initiated a container-registry event.

    :ivar name: The actor's name.
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.name = kwargs.get('name')
class ContainerRegistryEventData(msrest.serialization.Model):
    """Base event data for container-registry image events.

    :ivar id: The event id.
    :ivar timestamp: When the event occurred (ISO-8601 on the wire).
    :ivar action: The action that triggered the event.
    :ivar target: The image the event refers to.
    :ivar request: The request that generated the event.
    :ivar actor: The actor that initiated the event.
    :ivar source: The registry node that generated the event.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get('id')
        self.timestamp = kwargs.get('timestamp')
        self.action = kwargs.get('action')
        self.target = kwargs.get('target')
        self.request = kwargs.get('request')
        self.actor = kwargs.get('actor')
        self.source = kwargs.get('source')
class ContainerRegistryEventRequest(msrest.serialization.Model):
    """Request information for a container-registry event.

    :ivar id: Request id.
    :ivar addr: Client address.
    :ivar host: Registry host addressed by the request.
    :ivar method: HTTP method of the request.
    :ivar useragent: User agent of the request.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'addr': {'key': 'addr', 'type': 'str'},
        'host': {'key': 'host', 'type': 'str'},
        'method': {'key': 'method', 'type': 'str'},
        'useragent': {'key': 'useragent', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get('id')
        self.addr = kwargs.get('addr')
        self.host = kwargs.get('host')
        self.method = kwargs.get('method')
        self.useragent = kwargs.get('useragent')
class ContainerRegistryEventSource(msrest.serialization.Model):
    """Registry node that generated a container-registry event.

    :ivar addr: Node address.
    :ivar instance_id: Registry instance id ('instanceID' on the wire).
    """

    _attribute_map = {
        'addr': {'key': 'addr', 'type': 'str'},
        'instance_id': {'key': 'instanceID', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.addr = kwargs.get('addr')
        self.instance_id = kwargs.get('instance_id')
class ContainerRegistryEventTarget(msrest.serialization.Model):
    """The image targeted by a container-registry event.

    :ivar media_type: MIME type of the referenced object.
    :ivar size: Size in bytes.
    :ivar digest: Content digest.
    :ivar length: Length in bytes (same as size on the wire schema).
    :ivar repository: Repository name.
    :ivar url: Direct URL of the content.
    :ivar tag: Image tag.
    """

    _attribute_map = {
        'media_type': {'key': 'mediaType', 'type': 'str'},
        'size': {'key': 'size', 'type': 'long'},
        'digest': {'key': 'digest', 'type': 'str'},
        'length': {'key': 'length', 'type': 'long'},
        'repository': {'key': 'repository', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'tag': {'key': 'tag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.media_type = kwargs.get('media_type')
        self.size = kwargs.get('size')
        self.digest = kwargs.get('digest')
        self.length = kwargs.get('length')
        self.repository = kwargs.get('repository')
        self.url = kwargs.get('url')
        self.tag = kwargs.get('tag')
class ContainerRegistryImageDeletedEventData(ContainerRegistryEventData):
    """Event data for a deleted container-registry image.

    Adds no fields; shares the image-event shape of its base class.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
class ContainerRegistryImagePushedEventData(ContainerRegistryEventData):
    """Event data for a pushed container-registry image.

    Adds no fields; shares the image-event shape of its base class.
    """

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'action': {'key': 'action', 'type': 'str'},
        'target': {'key': 'target', 'type': 'ContainerRegistryEventTarget'},
        'request': {'key': 'request', 'type': 'ContainerRegistryEventRequest'},
        'actor': {'key': 'actor', 'type': 'ContainerRegistryEventActor'},
        'source': {'key': 'source', 'type': 'ContainerRegistryEventSource'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
class DeviceConnectionStateEventInfo(msrest.serialization.Model):
    """Connection-state information for a device event.

    :ivar sequence_number: Sequence number of the connection-state event.
    """

    _attribute_map = {
        'sequence_number': {'key': 'sequenceNumber', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.sequence_number = kwargs.get('sequence_number')
class DeviceConnectionStateEventProperties(msrest.serialization.Model):
    """Properties of a device connection-state event.

    :ivar device_id: The device id.
    :ivar module_id: The module id, if the event is module-scoped.
    :ivar hub_name: Name of the hub the device belongs to.
    :ivar device_connection_state_event_info: Connection-state details.
    """

    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'module_id': {'key': 'moduleId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'device_connection_state_event_info': {'key': 'deviceConnectionStateEventInfo', 'type': 'DeviceConnectionStateEventInfo'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.device_id = kwargs.get('device_id')
        self.module_id = kwargs.get('module_id')
        self.hub_name = kwargs.get('hub_name')
        self.device_connection_state_event_info = kwargs.get('device_connection_state_event_info')
class DeviceLifeCycleEventProperties(msrest.serialization.Model):
    """Properties of a device life-cycle event.

    :ivar device_id: The device id.
    :ivar hub_name: Name of the hub the device belongs to.
    :ivar twin: The device twin snapshot.
    """

    _attribute_map = {
        'device_id': {'key': 'deviceId', 'type': 'str'},
        'hub_name': {'key': 'hubName', 'type': 'str'},
        'twin': {'key': 'twin', 'type': 'DeviceTwinInfo'},
    }

    def __init__(self, **kwargs):
        """All model fields are optional keyword arguments; unset fields default to None."""
        super().__init__(**kwargs)
        self.device_id = kwargs.get('device_id')
        self.hub_name = kwargs.get('hub_name')
        self.twin = kwargs.get('twin')
class DeviceTelemetryEventProperties(msrest.serialization.Model):
    """Shared properties of IoT Hub device telemetry events.

    Fields: free-form ``body`` plus string-valued ``properties`` and
    ``system_properties`` maps.
    """

    _attribute_map = {
        "body": {"key": "body", "type": "object"},
        "properties": {"key": "properties", "type": "{str}"},
        "system_properties": {"key": "systemProperties", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.body = kwargs.get("body")
        self.properties = kwargs.get("properties")
        self.system_properties = kwargs.get("system_properties")
class DeviceTwinInfo(msrest.serialization.Model):
    """Information about a device twin, as carried in IoT Hub events."""

    _attribute_map = {
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "cloud_to_device_message_count": {"key": "cloudToDeviceMessageCount", "type": "float"},
        "connection_state": {"key": "connectionState", "type": "str"},
        "device_id": {"key": "deviceId", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
        "last_activity_time": {"key": "lastActivityTime", "type": "str"},
        "properties": {"key": "properties", "type": "DeviceTwinInfoProperties"},
        "status": {"key": "status", "type": "str"},
        "status_update_time": {"key": "statusUpdateTime", "type": "str"},
        "version": {"key": "version", "type": "float"},
        "x509_thumbprint": {"key": "x509Thumbprint", "type": "DeviceTwinInfoX509Thumbprint"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.authentication_type = kwargs.get("authentication_type")
        self.cloud_to_device_message_count = kwargs.get("cloud_to_device_message_count")
        self.connection_state = kwargs.get("connection_state")
        self.device_id = kwargs.get("device_id")
        self.etag = kwargs.get("etag")
        self.last_activity_time = kwargs.get("last_activity_time")
        self.properties = kwargs.get("properties")
        self.status = kwargs.get("status")
        self.status_update_time = kwargs.get("status_update_time")
        self.version = kwargs.get("version")
        self.x509_thumbprint = kwargs.get("x509_thumbprint")
class DeviceTwinInfoProperties(msrest.serialization.Model):
    """Desired and reported property sets of a device twin."""

    _attribute_map = {
        "desired": {"key": "desired", "type": "DeviceTwinProperties"},
        "reported": {"key": "reported", "type": "DeviceTwinProperties"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.desired = kwargs.get("desired")
        self.reported = kwargs.get("reported")
class DeviceTwinInfoX509Thumbprint(msrest.serialization.Model):
    """Primary and secondary X.509 thumbprint strings of a device twin."""

    _attribute_map = {
        "primary_thumbprint": {"key": "primaryThumbprint", "type": "str"},
        "secondary_thumbprint": {"key": "secondaryThumbprint", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.primary_thumbprint = kwargs.get("primary_thumbprint")
        self.secondary_thumbprint = kwargs.get("secondary_thumbprint")
class DeviceTwinMetadata(msrest.serialization.Model):
    """Device twin metadata: a single ``last_updated`` timestamp string."""

    _attribute_map = {
        "last_updated": {"key": "lastUpdated", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.last_updated = kwargs.get("last_updated")
class DeviceTwinProperties(msrest.serialization.Model):
    """One property set of a device twin: metadata plus a numeric version."""

    _attribute_map = {
        "metadata": {"key": "metadata", "type": "DeviceTwinMetadata"},
        "version": {"key": "version", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.metadata = kwargs.get("metadata")
        self.version = kwargs.get("version")
class EventGridEvent(msrest.serialization.Model):
    """Envelope of a single Event Grid event.

    Per ``_validation``: ``id``, ``subject``, ``data``, ``event_type``,
    ``event_time`` and ``data_version`` are required (missing ones raise
    ``KeyError`` here); ``metadata_version`` is read-only and always
    initialized to None.
    """

    _validation = {
        "id": {"required": True},
        "subject": {"required": True},
        "data": {"required": True},
        "event_type": {"required": True},
        "event_time": {"required": True},
        "metadata_version": {"readonly": True},
        "data_version": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "topic": {"key": "topic", "type": "str"},
        "subject": {"key": "subject", "type": "str"},
        "data": {"key": "data", "type": "object"},
        "event_type": {"key": "eventType", "type": "str"},
        "event_time": {"key": "eventTime", "type": "iso-8601"},
        "metadata_version": {"key": "metadataVersion", "type": "str"},
        "data_version": {"key": "dataVersion", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Build the event; required fields are looked up directly in kwargs."""
        super().__init__(**kwargs)
        self.id = kwargs["id"]
        self.topic = kwargs.get("topic")
        self.subject = kwargs["subject"]
        self.data = kwargs["data"]
        self.event_type = kwargs["event_type"]
        self.event_time = kwargs["event_time"]
        self.metadata_version = None  # read-only per _validation; never settable here
        self.data_version = kwargs["data_version"]
class EventHubCaptureFileCreatedEventData(msrest.serialization.Model):
    """Event Hubs *CaptureFileCreated* event payload."""

    _attribute_map = {
        "fileurl": {"key": "fileurl", "type": "str"},
        "file_type": {"key": "fileType", "type": "str"},
        "partition_id": {"key": "partitionId", "type": "str"},
        "size_in_bytes": {"key": "sizeInBytes", "type": "int"},
        "event_count": {"key": "eventCount", "type": "int"},
        "first_sequence_number": {"key": "firstSequenceNumber", "type": "int"},
        "last_sequence_number": {"key": "lastSequenceNumber", "type": "int"},
        "first_enqueue_time": {"key": "firstEnqueueTime", "type": "iso-8601"},
        "last_enqueue_time": {"key": "lastEnqueueTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.fileurl = kwargs.get("fileurl")
        self.file_type = kwargs.get("file_type")
        self.partition_id = kwargs.get("partition_id")
        self.size_in_bytes = kwargs.get("size_in_bytes")
        self.event_count = kwargs.get("event_count")
        self.first_sequence_number = kwargs.get("first_sequence_number")
        self.last_sequence_number = kwargs.get("last_sequence_number")
        self.first_enqueue_time = kwargs.get("first_enqueue_time")
        self.last_enqueue_time = kwargs.get("last_enqueue_time")
class IotHubDeviceConnectedEventData(DeviceConnectionStateEventProperties):
    """IoT Hub *DeviceConnected* event payload; no fields beyond its base."""

    _attribute_map = {
        "device_id": {"key": "deviceId", "type": "str"},
        "module_id": {"key": "moduleId", "type": "str"},
        "hub_name": {"key": "hubName", "type": "str"},
        "device_connection_state_event_info": {"key": "deviceConnectionStateEventInfo", "type": "DeviceConnectionStateEventInfo"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class IotHubDeviceCreatedEventData(DeviceLifeCycleEventProperties):
    """IoT Hub *DeviceCreated* event payload; no fields beyond its base."""

    _attribute_map = {
        "device_id": {"key": "deviceId", "type": "str"},
        "hub_name": {"key": "hubName", "type": "str"},
        "twin": {"key": "twin", "type": "DeviceTwinInfo"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class IotHubDeviceDeletedEventData(DeviceLifeCycleEventProperties):
    """IoT Hub *DeviceDeleted* event payload; no fields beyond its base."""

    _attribute_map = {
        "device_id": {"key": "deviceId", "type": "str"},
        "hub_name": {"key": "hubName", "type": "str"},
        "twin": {"key": "twin", "type": "DeviceTwinInfo"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class IotHubDeviceDisconnectedEventData(DeviceConnectionStateEventProperties):
    """IoT Hub *DeviceDisconnected* event payload; no fields beyond its base."""

    _attribute_map = {
        "device_id": {"key": "deviceId", "type": "str"},
        "module_id": {"key": "moduleId", "type": "str"},
        "hub_name": {"key": "hubName", "type": "str"},
        "device_connection_state_event_info": {"key": "deviceConnectionStateEventInfo", "type": "DeviceConnectionStateEventInfo"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class IotHubDeviceTelemetryEventData(DeviceTelemetryEventProperties):
    """IoT Hub *DeviceTelemetry* event payload; no fields beyond its base."""

    _attribute_map = {
        "body": {"key": "body", "type": "object"},
        "properties": {"key": "properties", "type": "{str}"},
        "system_properties": {"key": "systemProperties", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class KeyVaultAccessPolicyChangedEventData(msrest.serialization.Model):
    """Key Vault *VaultAccessPolicyChanged* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultCertificateExpiredEventData(msrest.serialization.Model):
    """Key Vault *CertificateExpired* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultCertificateNearExpiryEventData(msrest.serialization.Model):
    """Key Vault *CertificateNearExpiry* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultCertificateNewVersionCreatedEventData(msrest.serialization.Model):
    """Key Vault *CertificateNewVersionCreated* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultKeyExpiredEventData(msrest.serialization.Model):
    """Key Vault *KeyExpired* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultKeyNearExpiryEventData(msrest.serialization.Model):
    """Key Vault *KeyNearExpiry* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultKeyNewVersionCreatedEventData(msrest.serialization.Model):
    """Key Vault *KeyNewVersionCreated* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultSecretExpiredEventData(msrest.serialization.Model):
    """Key Vault *SecretExpired* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultSecretNearExpiryEventData(msrest.serialization.Model):
    """Key Vault *SecretNearExpiry* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class KeyVaultSecretNewVersionCreatedEventData(msrest.serialization.Model):
    """Key Vault *SecretNewVersionCreated* event payload."""

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "vault_name": {"key": "vaultName", "type": "str"},
        "object_type": {"key": "objectType", "type": "str"},
        "object_name": {"key": "objectName", "type": "str"},
        "version": {"key": "version", "type": "str"},
        "nbf": {"key": "nbf", "type": "float"},
        "exp": {"key": "exp", "type": "float"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        self.vault_name = kwargs.get("vault_name")
        self.object_type = kwargs.get("object_type")
        self.object_name = kwargs.get("object_name")
        self.version = kwargs.get("version")
        self.nbf = kwargs.get("nbf")
        self.exp = kwargs.get("exp")
class MachineLearningServicesDatasetDriftDetectedEventData(msrest.serialization.Model):
    """Machine Learning Services *DatasetDriftDetected* event payload."""

    _attribute_map = {
        "data_drift_id": {"key": "dataDriftId", "type": "str"},
        "data_drift_name": {"key": "dataDriftName", "type": "str"},
        "run_id": {"key": "runId", "type": "str"},
        "base_dataset_id": {"key": "baseDatasetId", "type": "str"},
        "target_dataset_id": {"key": "targetDatasetId", "type": "str"},
        "drift_coefficient": {"key": "driftCoefficient", "type": "float"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.data_drift_id = kwargs.get("data_drift_id")
        self.data_drift_name = kwargs.get("data_drift_name")
        self.run_id = kwargs.get("run_id")
        self.base_dataset_id = kwargs.get("base_dataset_id")
        self.target_dataset_id = kwargs.get("target_dataset_id")
        self.drift_coefficient = kwargs.get("drift_coefficient")
        self.start_time = kwargs.get("start_time")
        self.end_time = kwargs.get("end_time")
class MachineLearningServicesModelDeployedEventData(msrest.serialization.Model):
    """Machine Learning Services *ModelDeployed* event payload."""

    _attribute_map = {
        "service_name": {"key": "serviceName", "type": "str"},
        "service_compute_type": {"key": "serviceComputeType", "type": "str"},
        "model_ids": {"key": "modelIds", "type": "str"},
        "service_tags": {"key": "serviceTags", "type": "object"},
        "service_properties": {"key": "serviceProperties", "type": "object"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.service_name = kwargs.get("service_name")
        self.service_compute_type = kwargs.get("service_compute_type")
        self.model_ids = kwargs.get("model_ids")
        self.service_tags = kwargs.get("service_tags")
        self.service_properties = kwargs.get("service_properties")
class MachineLearningServicesModelRegisteredEventData(msrest.serialization.Model):
    """Machine Learning Services *ModelRegistered* event payload."""

    _attribute_map = {
        "model_name": {"key": "modelName", "type": "str"},
        "model_version": {"key": "modelVersion", "type": "str"},
        "model_tags": {"key": "modelTags", "type": "object"},
        "model_properties": {"key": "modelProperties", "type": "object"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.model_name = kwargs.get("model_name")
        self.model_version = kwargs.get("model_version")
        self.model_tags = kwargs.get("model_tags")
        self.model_properties = kwargs.get("model_properties")
class MachineLearningServicesRunCompletedEventData(msrest.serialization.Model):
    """Machine Learning Services *RunCompleted* event payload."""

    _attribute_map = {
        "experiment_id": {"key": "experimentId", "type": "str"},
        "experiment_name": {"key": "experimentName", "type": "str"},
        "run_id": {"key": "runId", "type": "str"},
        "run_type": {"key": "runType", "type": "str"},
        "run_tags": {"key": "runTags", "type": "object"},
        "run_properties": {"key": "runProperties", "type": "object"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.experiment_id = kwargs.get("experiment_id")
        self.experiment_name = kwargs.get("experiment_name")
        self.run_id = kwargs.get("run_id")
        self.run_type = kwargs.get("run_type")
        self.run_tags = kwargs.get("run_tags")
        self.run_properties = kwargs.get("run_properties")
class MachineLearningServicesRunStatusChangedEventData(msrest.serialization.Model):
    """Machine Learning Services *RunStatusChanged* event payload."""

    _attribute_map = {
        "experiment_id": {"key": "experimentId", "type": "str"},
        "experiment_name": {"key": "experimentName", "type": "str"},
        "run_id": {"key": "runId", "type": "str"},
        "run_type": {"key": "runType", "type": "str"},
        "run_tags": {"key": "runTags", "type": "object"},
        "run_properties": {"key": "runProperties", "type": "object"},
        "run_status": {"key": "runStatus", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.experiment_id = kwargs.get("experiment_id")
        self.experiment_name = kwargs.get("experiment_name")
        self.run_id = kwargs.get("run_id")
        self.run_type = kwargs.get("run_type")
        self.run_tags = kwargs.get("run_tags")
        self.run_properties = kwargs.get("run_properties")
        self.run_status = kwargs.get("run_status")
class MapsGeofenceEventProperties(msrest.serialization.Model):
    """Shared properties of Azure Maps geofence events."""

    _attribute_map = {
        "expired_geofence_geometry_id": {"key": "expiredGeofenceGeometryId", "type": "[str]"},
        "geometries": {"key": "geometries", "type": "[MapsGeofenceGeometry]"},
        "invalid_period_geofence_geometry_id": {"key": "invalidPeriodGeofenceGeometryId", "type": "[str]"},
        "is_event_published": {"key": "isEventPublished", "type": "bool"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.expired_geofence_geometry_id = kwargs.get("expired_geofence_geometry_id")
        self.geometries = kwargs.get("geometries")
        self.invalid_period_geofence_geometry_id = kwargs.get("invalid_period_geofence_geometry_id")
        self.is_event_published = kwargs.get("is_event_published")
class MapsGeofenceEnteredEventData(MapsGeofenceEventProperties):
    """Azure Maps *GeofenceEntered* event payload; no fields beyond its base."""

    _attribute_map = {
        "expired_geofence_geometry_id": {"key": "expiredGeofenceGeometryId", "type": "[str]"},
        "geometries": {"key": "geometries", "type": "[MapsGeofenceGeometry]"},
        "invalid_period_geofence_geometry_id": {"key": "invalidPeriodGeofenceGeometryId", "type": "[str]"},
        "is_event_published": {"key": "isEventPublished", "type": "bool"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MapsGeofenceExitedEventData(MapsGeofenceEventProperties):
    """Azure Maps *GeofenceExited* event payload; no fields beyond its base."""

    _attribute_map = {
        "expired_geofence_geometry_id": {"key": "expiredGeofenceGeometryId", "type": "[str]"},
        "geometries": {"key": "geometries", "type": "[MapsGeofenceGeometry]"},
        "invalid_period_geofence_geometry_id": {"key": "invalidPeriodGeofenceGeometryId", "type": "[str]"},
        "is_event_published": {"key": "isEventPublished", "type": "bool"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MapsGeofenceGeometry(msrest.serialization.Model):
    """A single geofence geometry referenced by Azure Maps geofence events."""

    _attribute_map = {
        "device_id": {"key": "deviceId", "type": "str"},
        "distance": {"key": "distance", "type": "float"},
        "geometry_id": {"key": "geometryId", "type": "str"},
        "nearest_lat": {"key": "nearestLat", "type": "float"},
        "nearest_lon": {"key": "nearestLon", "type": "float"},
        "ud_id": {"key": "udId", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.device_id = kwargs.get("device_id")
        self.distance = kwargs.get("distance")
        self.geometry_id = kwargs.get("geometry_id")
        self.nearest_lat = kwargs.get("nearest_lat")
        self.nearest_lon = kwargs.get("nearest_lon")
        self.ud_id = kwargs.get("ud_id")
class MapsGeofenceResultEventData(MapsGeofenceEventProperties):
    """Azure Maps *GeofenceResult* event payload; no fields beyond its base."""

    _attribute_map = {
        "expired_geofence_geometry_id": {"key": "expiredGeofenceGeometryId", "type": "[str]"},
        "geometries": {"key": "geometries", "type": "[MapsGeofenceGeometry]"},
        "invalid_period_geofence_geometry_id": {"key": "invalidPeriodGeofenceGeometryId", "type": "[str]"},
        "is_event_published": {"key": "isEventPublished", "type": "bool"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobStateChangeEventData(msrest.serialization.Model):
    """Media Services job state-change event payload.

    ``previous_state`` and ``state`` are read-only per ``_validation`` and
    are always initialized to None; only ``correlation_data`` is settable.
    """

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Accept ``correlation_data``; read-only fields start as None."""
        super().__init__(**kwargs)
        self.previous_state = None
        self.state = None
        self.correlation_data = kwargs.get("correlation_data")
class MediaJobCanceledEventData(MediaJobStateChangeEventData):
    """Media Services *JobCanceled* event payload; adds an ``outputs`` list."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
        "outputs": {"key": "outputs", "type": "[MediaJobOutput]"},
    }

    def __init__(self, **kwargs):
        """Accept the extra ``outputs`` list in addition to base fields."""
        super().__init__(**kwargs)
        self.outputs = kwargs.get("outputs")
class MediaJobCancelingEventData(MediaJobStateChangeEventData):
    """Media Services *JobCanceling* event payload; no fields beyond its base."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobError(msrest.serialization.Model):
    """Error details of a Media Services job.

    Every field is read-only per ``_validation``; the constructor sets them
    all to None (they are populated only via deserialization).
    """

    _validation = {
        "code": {"readonly": True},
        "message": {"readonly": True},
        "category": {"readonly": True},
        "retry": {"readonly": True},
        "details": {"readonly": True},
    }

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "message": {"key": "message", "type": "str"},
        "category": {"key": "category", "type": "str"},
        "retry": {"key": "retry", "type": "str"},
        "details": {"key": "details", "type": "[MediaJobErrorDetail]"},
    }

    def __init__(self, **kwargs):
        """Initialize all read-only fields to None."""
        super().__init__(**kwargs)
        self.code = None
        self.message = None
        self.category = None
        self.retry = None
        self.details = None
class MediaJobErrorDetail(msrest.serialization.Model):
    """One detail entry of a Media Services job error; both fields read-only."""

    _validation = {
        "code": {"readonly": True},
        "message": {"readonly": True},
    }

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Initialize all read-only fields to None."""
        super().__init__(**kwargs)
        self.code = None
        self.message = None
class MediaJobErroredEventData(MediaJobStateChangeEventData):
    """Media Services *JobErrored* event payload; adds an ``outputs`` list."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
        "outputs": {"key": "outputs", "type": "[MediaJobOutput]"},
    }

    def __init__(self, **kwargs):
        """Accept the extra ``outputs`` list in addition to base fields."""
        super().__init__(**kwargs)
        self.outputs = kwargs.get("outputs")
class MediaJobFinishedEventData(MediaJobStateChangeEventData):
    """Media Services *JobFinished* event payload; adds an ``outputs`` list."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
        "outputs": {"key": "outputs", "type": "[MediaJobOutput]"},
    }

    def __init__(self, **kwargs):
        """Accept the extra ``outputs`` list in addition to base fields."""
        super().__init__(**kwargs)
        self.outputs = kwargs.get("outputs")
class MediaJobOutput(msrest.serialization.Model):
    """Polymorphic base model for Media Services job outputs.

    Discriminated on ``odata_type`` via ``_subtype_map``; ``progress`` and
    ``state`` are required per ``_validation`` (missing ones raise KeyError).
    """

    _validation = {
        "progress": {"required": True},
        "state": {"required": True},
    }

    _attribute_map = {
        "odata_type": {"key": "@odata\\.type", "type": "str"},
        "error": {"key": "error", "type": "MediaJobError"},
        "label": {"key": "label", "type": "str"},
        "progress": {"key": "progress", "type": "long"},
        "state": {"key": "state", "type": "str"},
    }

    _subtype_map = {
        "odata_type": {"#Microsoft.Media.JobOutputAsset": "MediaJobOutputAsset"}
    }

    def __init__(self, **kwargs):
        """Build the output; the discriminator is set by concrete subtypes."""
        super().__init__(**kwargs)
        self.odata_type = None  # overwritten by subclasses with their discriminator value
        self.error = kwargs.get("error")
        self.label = kwargs.get("label")
        self.progress = kwargs["progress"]
        self.state = kwargs["state"]
class MediaJobOutputAsset(MediaJobOutput):
    """Asset-type job output; fixes the discriminator and adds ``asset_name``."""

    _validation = {
        "progress": {"required": True},
        "state": {"required": True},
    }

    _attribute_map = {
        "odata_type": {"key": "@odata\\.type", "type": "str"},
        "error": {"key": "error", "type": "MediaJobError"},
        "label": {"key": "label", "type": "str"},
        "progress": {"key": "progress", "type": "long"},
        "state": {"key": "state", "type": "str"},
        "asset_name": {"key": "assetName", "type": "str"},
    }

    def __init__(self, **kwargs):
        """Set the polymorphic discriminator and accept ``asset_name``."""
        super().__init__(**kwargs)
        self.odata_type = '#Microsoft.Media.JobOutputAsset'
        self.asset_name = kwargs.get("asset_name")
class MediaJobOutputStateChangeEventData(msrest.serialization.Model):
    """Media Services job-output state-change event payload.

    ``previous_state`` is read-only per ``_validation`` and always starts None.
    """

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Accept ``output`` and ``job_correlation_data`` keyword arguments."""
        super().__init__(**kwargs)
        self.previous_state = None
        self.output = kwargs.get("output")
        self.job_correlation_data = kwargs.get("job_correlation_data")
class MediaJobOutputCanceledEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputCanceled* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobOutputCancelingEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputCanceling* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobOutputErroredEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputErrored* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobOutputFinishedEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputFinished* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobOutputProcessingEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputProcessing* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobOutputProgressEventData(msrest.serialization.Model):
    """Media Services job-output progress event payload."""

    _attribute_map = {
        "label": {"key": "label", "type": "str"},
        "progress": {"key": "progress", "type": "long"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Accept fields as keyword arguments; absent fields default to None."""
        super().__init__(**kwargs)
        self.label = kwargs.get("label")
        self.progress = kwargs.get("progress")
        self.job_correlation_data = kwargs.get("job_correlation_data")
class MediaJobOutputScheduledEventData(MediaJobOutputStateChangeEventData):
    """Media Services *JobOutputScheduled* event payload; no extra fields."""

    _validation = {
        "previous_state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "output": {"key": "output", "type": "MediaJobOutput"},
        "job_correlation_data": {"key": "jobCorrelationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobProcessingEventData(MediaJobStateChangeEventData):
    """Media Services *JobProcessing* event payload; no fields beyond its base."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        """Forward all keyword arguments to the base model."""
        super().__init__(**kwargs)
class MediaJobScheduledEventData(MediaJobStateChangeEventData):
    """Job state-change payload for a job entering the scheduled state.
    Purely a type marker over the base state-change event."""

    _validation = {
        "previous_state": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "previous_state": {"key": "previousState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "correlation_data": {"key": "correlationData", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        # All state lives in the base class.
        super().__init__(**kwargs)
class MediaLiveEventConnectionRejectedEventData(msrest.serialization.Model):
    """Payload for a live-event connection-rejected event.

    All fields are server-populated (read-only per ``_validation``);
    they start as ``None`` and are filled during deserialization.
    """

    _validation = {
        "ingest_url": {"readonly": True},
        "stream_id": {"readonly": True},
        "encoder_ip": {"readonly": True},
        "encoder_port": {"readonly": True},
        "result_code": {"readonly": True},
    }

    _attribute_map = {
        "ingest_url": {"key": "ingestUrl", "type": "str"},
        "stream_id": {"key": "streamId", "type": "str"},
        "encoder_ip": {"key": "encoderIp", "type": "str"},
        "encoder_port": {"key": "encoderPort", "type": "str"},
        "result_code": {"key": "resultCode", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("ingest_url", "stream_id", "encoder_ip",
                       "encoder_port", "result_code"):
            setattr(self, _field, None)
class MediaLiveEventEncoderConnectedEventData(msrest.serialization.Model):
    """Payload for a live-event encoder-connected event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "ingest_url": {"readonly": True},
        "stream_id": {"readonly": True},
        "encoder_ip": {"readonly": True},
        "encoder_port": {"readonly": True},
    }

    _attribute_map = {
        "ingest_url": {"key": "ingestUrl", "type": "str"},
        "stream_id": {"key": "streamId", "type": "str"},
        "encoder_ip": {"key": "encoderIp", "type": "str"},
        "encoder_port": {"key": "encoderPort", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("ingest_url", "stream_id", "encoder_ip", "encoder_port"):
            setattr(self, _field, None)
class MediaLiveEventEncoderDisconnectedEventData(msrest.serialization.Model):
    """Payload for a live-event encoder-disconnected event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "ingest_url": {"readonly": True},
        "stream_id": {"readonly": True},
        "encoder_ip": {"readonly": True},
        "encoder_port": {"readonly": True},
        "result_code": {"readonly": True},
    }

    _attribute_map = {
        "ingest_url": {"key": "ingestUrl", "type": "str"},
        "stream_id": {"key": "streamId", "type": "str"},
        "encoder_ip": {"key": "encoderIp", "type": "str"},
        "encoder_port": {"key": "encoderPort", "type": "str"},
        "result_code": {"key": "resultCode", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("ingest_url", "stream_id", "encoder_ip",
                       "encoder_port", "result_code"):
            setattr(self, _field, None)
class MediaLiveEventIncomingDataChunkDroppedEventData(msrest.serialization.Model):
    """Payload for a live-event incoming-data-chunk-dropped event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "timestamp": {"readonly": True},
        "track_type": {"readonly": True},
        "bitrate": {"readonly": True},
        "timescale": {"readonly": True},
        "result_code": {"readonly": True},
        "track_name": {"readonly": True},
    }

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "str"},
        "track_type": {"key": "trackType", "type": "str"},
        "bitrate": {"key": "bitrate", "type": "long"},
        "timescale": {"key": "timescale", "type": "str"},
        "result_code": {"key": "resultCode", "type": "str"},
        "track_name": {"key": "trackName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("timestamp", "track_type", "bitrate",
                       "timescale", "result_code", "track_name"):
            setattr(self, _field, None)
class MediaLiveEventIncomingStreamReceivedEventData(msrest.serialization.Model):
    """Payload for a live-event incoming-stream-received event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "ingest_url": {"readonly": True},
        "track_type": {"readonly": True},
        "track_name": {"readonly": True},
        "bitrate": {"readonly": True},
        "encoder_ip": {"readonly": True},
        "encoder_port": {"readonly": True},
        "timestamp": {"readonly": True},
        "duration": {"readonly": True},
        "timescale": {"readonly": True},
    }

    _attribute_map = {
        "ingest_url": {"key": "ingestUrl", "type": "str"},
        "track_type": {"key": "trackType", "type": "str"},
        "track_name": {"key": "trackName", "type": "str"},
        "bitrate": {"key": "bitrate", "type": "long"},
        "encoder_ip": {"key": "encoderIp", "type": "str"},
        "encoder_port": {"key": "encoderPort", "type": "str"},
        "timestamp": {"key": "timestamp", "type": "str"},
        "duration": {"key": "duration", "type": "str"},
        "timescale": {"key": "timescale", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("ingest_url", "track_type", "track_name", "bitrate",
                       "encoder_ip", "encoder_port", "timestamp",
                       "duration", "timescale"):
            setattr(self, _field, None)
class MediaLiveEventIncomingStreamsOutOfSyncEventData(msrest.serialization.Model):
    """Payload for a live-event incoming-streams-out-of-sync event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "min_last_timestamp": {"readonly": True},
        "type_of_stream_with_min_last_timestamp": {"readonly": True},
        "max_last_timestamp": {"readonly": True},
        "type_of_stream_with_max_last_timestamp": {"readonly": True},
        "timescale_of_min_last_timestamp": {"readonly": True},
        "timescale_of_max_last_timestamp": {"readonly": True},
    }

    _attribute_map = {
        "min_last_timestamp": {"key": "minLastTimestamp", "type": "str"},
        "type_of_stream_with_min_last_timestamp": {"key": "typeOfStreamWithMinLastTimestamp", "type": "str"},
        "max_last_timestamp": {"key": "maxLastTimestamp", "type": "str"},
        "type_of_stream_with_max_last_timestamp": {"key": "typeOfStreamWithMaxLastTimestamp", "type": "str"},
        "timescale_of_min_last_timestamp": {"key": "timescaleOfMinLastTimestamp", "type": "str"},
        "timescale_of_max_last_timestamp": {"key": "timescaleOfMaxLastTimestamp", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("min_last_timestamp",
                       "type_of_stream_with_min_last_timestamp",
                       "max_last_timestamp",
                       "type_of_stream_with_max_last_timestamp",
                       "timescale_of_min_last_timestamp",
                       "timescale_of_max_last_timestamp"):
            setattr(self, _field, None)
class MediaLiveEventIncomingVideoStreamsOutOfSyncEventData(msrest.serialization.Model):
    """Payload for a live-event incoming-video-streams-out-of-sync event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "first_timestamp": {"readonly": True},
        "first_duration": {"readonly": True},
        "second_timestamp": {"readonly": True},
        "second_duration": {"readonly": True},
        "timescale": {"readonly": True},
    }

    _attribute_map = {
        "first_timestamp": {"key": "firstTimestamp", "type": "str"},
        "first_duration": {"key": "firstDuration", "type": "str"},
        "second_timestamp": {"key": "secondTimestamp", "type": "str"},
        "second_duration": {"key": "secondDuration", "type": "str"},
        "timescale": {"key": "timescale", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("first_timestamp", "first_duration",
                       "second_timestamp", "second_duration", "timescale"):
            setattr(self, _field, None)
class MediaLiveEventIngestHeartbeatEventData(msrest.serialization.Model):
    """Payload for a live-event ingest heartbeat event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "track_type": {"readonly": True},
        "track_name": {"readonly": True},
        "bitrate": {"readonly": True},
        "incoming_bitrate": {"readonly": True},
        "last_timestamp": {"readonly": True},
        "timescale": {"readonly": True},
        "overlap_count": {"readonly": True},
        "discontinuity_count": {"readonly": True},
        "nonincreasing_count": {"readonly": True},
        "unexpected_bitrate": {"readonly": True},
        "state": {"readonly": True},
        "healthy": {"readonly": True},
    }

    _attribute_map = {
        "track_type": {"key": "trackType", "type": "str"},
        "track_name": {"key": "trackName", "type": "str"},
        "bitrate": {"key": "bitrate", "type": "long"},
        "incoming_bitrate": {"key": "incomingBitrate", "type": "long"},
        "last_timestamp": {"key": "lastTimestamp", "type": "str"},
        "timescale": {"key": "timescale", "type": "str"},
        "overlap_count": {"key": "overlapCount", "type": "long"},
        "discontinuity_count": {"key": "discontinuityCount", "type": "long"},
        "nonincreasing_count": {"key": "nonincreasingCount", "type": "long"},
        "unexpected_bitrate": {"key": "unexpectedBitrate", "type": "bool"},
        "state": {"key": "state", "type": "str"},
        "healthy": {"key": "healthy", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("track_type", "track_name", "bitrate",
                       "incoming_bitrate", "last_timestamp", "timescale",
                       "overlap_count", "discontinuity_count",
                       "nonincreasing_count", "unexpected_bitrate",
                       "state", "healthy"):
            setattr(self, _field, None)
class MediaLiveEventTrackDiscontinuityDetectedEventData(msrest.serialization.Model):
    """Payload for a live-event track-discontinuity-detected event.

    Every field is server-populated (read-only) and begins as ``None``.
    """

    _validation = {
        "track_type": {"readonly": True},
        "track_name": {"readonly": True},
        "bitrate": {"readonly": True},
        "previous_timestamp": {"readonly": True},
        "new_timestamp": {"readonly": True},
        "timescale": {"readonly": True},
        "discontinuity_gap": {"readonly": True},
    }

    _attribute_map = {
        "track_type": {"key": "trackType", "type": "str"},
        "track_name": {"key": "trackName", "type": "str"},
        "bitrate": {"key": "bitrate", "type": "long"},
        "previous_timestamp": {"key": "previousTimestamp", "type": "str"},
        "new_timestamp": {"key": "newTimestamp", "type": "str"},
        "timescale": {"key": "timescale", "type": "str"},
        "discontinuity_gap": {"key": "discontinuityGap", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only fields: initialize uniformly to None, in map order.
        for _field in ("track_type", "track_name", "bitrate",
                       "previous_timestamp", "new_timestamp",
                       "timescale", "discontinuity_gap"):
            setattr(self, _field, None)
class MicrosoftTeamsUserIdentifierModel(msrest.serialization.Model):
    """Identifier model for a Microsoft Teams user.

    ``user_id`` is required; constructing without it raises ``KeyError``.
    The remaining fields are optional and default to ``None``.
    """

    _validation = {
        "user_id": {"required": True},
    }

    _attribute_map = {
        "user_id": {"key": "userId", "type": "str"},
        "is_anonymous": {"key": "isAnonymous", "type": "bool"},
        "cloud": {"key": "cloud", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Indexing (not .get) keeps the KeyError on a missing required key.
        self.user_id = kwargs["user_id"]
        self.is_anonymous = kwargs.get("is_anonymous")
        self.cloud = kwargs.get("cloud")
class PhoneNumberIdentifierModel(msrest.serialization.Model):
    """Identifier model for a phone number.

    ``value`` is required; constructing without it raises ``KeyError``.
    """

    _validation = {
        "value": {"required": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Indexing (not .get) keeps the KeyError on a missing required key.
        self.value = kwargs["value"]
class RedisExportRDBCompletedEventData(msrest.serialization.Model):
    """Payload for a Redis export-RDB-completed event: timestamp
    (ISO-8601), operation name, and status, all optional."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "status": {"key": "status", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.name = kwargs.get("name")
        self.status = kwargs.get("status")
class RedisImportRDBCompletedEventData(msrest.serialization.Model):
    """Payload for a Redis import-RDB-completed event: timestamp
    (ISO-8601), operation name, and status, all optional."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "status": {"key": "status", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.name = kwargs.get("name")
        self.status = kwargs.get("status")
class RedisPatchingCompletedEventData(msrest.serialization.Model):
    """Payload for a Redis patching-completed event: timestamp
    (ISO-8601), operation name, and status, all optional."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "status": {"key": "status", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.name = kwargs.get("name")
        self.status = kwargs.get("status")
class RedisScalingCompletedEventData(msrest.serialization.Model):
    """Payload for a Redis scaling-completed event: timestamp
    (ISO-8601), operation name, and status, all optional."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "status": {"key": "status", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.name = kwargs.get("name")
        self.status = kwargs.get("status")
class ResourceActionCancelData(msrest.serialization.Model):
    """Payload for a resource action-cancel event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceActionFailureData(msrest.serialization.Model):
    """Payload for a resource action-failure event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceActionSuccessData(msrest.serialization.Model):
    """Payload for a resource action-success event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceDeleteCancelData(msrest.serialization.Model):
    """Payload for a resource delete-cancel event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceDeleteFailureData(msrest.serialization.Model):
    """Payload for a resource delete-failure event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceDeleteSuccessData(msrest.serialization.Model):
    """Payload for a resource delete-success event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceWriteCancelData(msrest.serialization.Model):
    """Payload for a resource write-cancel event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceWriteFailureData(msrest.serialization.Model):
    """Payload for a resource write-failure event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ResourceWriteSuccessData(msrest.serialization.Model):
    """Payload for a resource write-success event: tenant, subscription,
    resource identifiers plus operation metadata, all optional."""

    _attribute_map = {
        "tenant_id": {"key": "tenantId", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "resource_provider": {"key": "resourceProvider", "type": "str"},
        "resource_uri": {"key": "resourceUri", "type": "str"},
        "operation_name": {"key": "operationName", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "authorization": {"key": "authorization", "type": "str"},
        "claims": {"key": "claims", "type": "str"},
        "correlation_id": {"key": "correlationId", "type": "str"},
        "http_request": {"key": "httpRequest", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("tenant_id", "subscription_id", "resource_group",
                      "resource_provider", "resource_uri", "operation_name",
                      "status", "authorization", "claims", "correlation_id",
                      "http_request"):
            setattr(self, _name, kwargs.get(_name))
class ServiceBusActiveMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
    """Payload for the Service Bus active-messages-available periodic
    notification: namespace/entity identifiers, all optional."""

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("namespace_name", "request_uri", "entity_type",
                      "queue_name", "topic_name", "subscription_name"):
            setattr(self, _name, kwargs.get(_name))
class ServiceBusActiveMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
    """Payload for the Service Bus active-messages-with-no-listeners
    event: namespace/entity identifiers, all optional."""

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("namespace_name", "request_uri", "entity_type",
                      "queue_name", "topic_name", "subscription_name"):
            setattr(self, _name, kwargs.get(_name))
class ServiceBusDeadletterMessagesAvailablePeriodicNotificationsEventData(msrest.serialization.Model):
    """Payload for the Service Bus dead-letter-messages-available periodic
    notification: namespace/entity identifiers, all optional."""

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("namespace_name", "request_uri", "entity_type",
                      "queue_name", "topic_name", "subscription_name"):
            setattr(self, _name, kwargs.get(_name))
class ServiceBusDeadletterMessagesAvailableWithNoListenersEventData(msrest.serialization.Model):
    """Payload for the Service Bus dead-letter-messages-with-no-listeners
    event: namespace/entity identifiers, all optional."""

    _attribute_map = {
        "namespace_name": {"key": "namespaceName", "type": "str"},
        "request_uri": {"key": "requestUri", "type": "str"},
        "entity_type": {"key": "entityType", "type": "str"},
        "queue_name": {"key": "queueName", "type": "str"},
        "topic_name": {"key": "topicName", "type": "str"},
        "subscription_name": {"key": "subscriptionName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("namespace_name", "request_uri", "entity_type",
                      "queue_name", "topic_name", "subscription_name"):
            setattr(self, _name, kwargs.get(_name))
class SignalRServiceClientConnectionConnectedEventData(msrest.serialization.Model):
    """Payload for a SignalR client-connection-connected event:
    timestamp (ISO-8601), hub, connection id and user id, all optional."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "hub_name": {"key": "hubName", "type": "str"},
        "connection_id": {"key": "connectionId", "type": "str"},
        "user_id": {"key": "userId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.hub_name = kwargs.get("hub_name")
        self.connection_id = kwargs.get("connection_id")
        self.user_id = kwargs.get("user_id")
class SignalRServiceClientConnectionDisconnectedEventData(msrest.serialization.Model):
    """Payload for a SignalR client-connection-disconnected event:
    connection identifiers plus an optional error message."""

    _attribute_map = {
        "timestamp": {"key": "timestamp", "type": "iso-8601"},
        "hub_name": {"key": "hubName", "type": "str"},
        "connection_id": {"key": "connectionId", "type": "str"},
        "user_id": {"key": "userId", "type": "str"},
        "error_message": {"key": "errorMessage", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Each field defaults to None when not supplied.
        self.timestamp = kwargs.get("timestamp")
        self.hub_name = kwargs.get("hub_name")
        self.connection_id = kwargs.get("connection_id")
        self.user_id = kwargs.get("user_id")
        self.error_message = kwargs.get("error_message")
class StorageAsyncOperationInitiatedEventData(msrest.serialization.Model):
    """Payload for a Storage async-operation-initiated event: request,
    blob and diagnostics metadata, all optional keyword args."""

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "content_length": {"key": "contentLength", "type": "long"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("api", "client_request_id", "request_id",
                      "content_type", "content_length", "blob_type", "url",
                      "sequencer", "identity", "storage_diagnostics"):
            setattr(self, _name, kwargs.get(_name))
class StorageBlobCreatedEventData(msrest.serialization.Model):
    """Payload for a Storage blob-created event: request, blob content
    and diagnostics metadata, all optional keyword args."""

    _attribute_map = {
        "api": {"key": "api", "type": "str"},
        "client_request_id": {"key": "clientRequestId", "type": "str"},
        "request_id": {"key": "requestId", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "content_type": {"key": "contentType", "type": "str"},
        "content_length": {"key": "contentLength", "type": "long"},
        "content_offset": {"key": "contentOffset", "type": "long"},
        "blob_type": {"key": "blobType", "type": "str"},
        "url": {"key": "url", "type": "str"},
        "sequencer": {"key": "sequencer", "type": "str"},
        "identity": {"key": "identity", "type": "str"},
        "storage_diagnostics": {"key": "storageDiagnostics", "type": "object"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Copy each optional field from kwargs (None when absent).
        for _name in ("api", "client_request_id", "request_id", "e_tag",
                      "content_type", "content_length", "content_offset",
                      "blob_type", "url", "sequencer", "identity",
                      "storage_diagnostics"):
            setattr(self, _name, kwargs.get(_name))
class StorageBlobDeletedEventData(msrest.serialization.Model):
    """Data payload of a storage "blob deleted" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'content_type': {'key': 'contentType', 'type': 'str'},
        'blob_type': {'key': 'blobType', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageBlobDeletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'content_type',
                      'blob_type', 'url', 'sequencer', 'identity',
                      'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageBlobRenamedEventData(msrest.serialization.Model):
    """Data payload of a storage "blob renamed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'source_url': {'key': 'sourceUrl', 'type': 'str'},
        'destination_url': {'key': 'destinationUrl', 'type': 'str'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageBlobRenamedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'source_url',
                      'destination_url', 'sequencer', 'identity',
                      'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageBlobTierChangedEventData(msrest.serialization.Model):
    """Data payload of a storage "blob tier changed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'content_type': {'key': 'contentType', 'type': 'str'},
        'content_length': {'key': 'contentLength', 'type': 'long'},
        'blob_type': {'key': 'blobType', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageBlobTierChangedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'content_type',
                      'content_length', 'blob_type', 'url', 'sequencer',
                      'identity', 'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageDirectoryCreatedEventData(msrest.serialization.Model):
    """Data payload of a storage "directory created" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageDirectoryCreatedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'e_tag', 'url',
                      'sequencer', 'identity', 'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageDirectoryDeletedEventData(msrest.serialization.Model):
    """Data payload of a storage "directory deleted" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'bool'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageDirectoryDeletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'url',
                      'recursive', 'sequencer', 'identity',
                      'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageDirectoryRenamedEventData(msrest.serialization.Model):
    """Data payload of a storage "directory renamed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'api': {'key': 'api', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'source_url': {'key': 'sourceUrl', 'type': 'str'},
        'destination_url': {'key': 'destinationUrl', 'type': 'str'},
        'sequencer': {'key': 'sequencer', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'storage_diagnostics': {'key': 'storageDiagnostics', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(StorageDirectoryRenamedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('api', 'client_request_id', 'request_id', 'source_url',
                      'destination_url', 'sequencer', 'identity',
                      'storage_diagnostics'):
            setattr(self, field, kwargs.get(field, None))
class StorageLifecyclePolicyActionSummaryDetail(msrest.serialization.Model):
    """Per-action summary for a storage lifecycle-policy run.

    Carries total/successful object counts and an error list; all fields
    are optional keyword arguments defaulting to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'total_objects_count': {'key': 'totalObjectsCount', 'type': 'long'},
        'success_count': {'key': 'successCount', 'type': 'long'},
        'error_list': {'key': 'errorList', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(StorageLifecyclePolicyActionSummaryDetail, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('total_objects_count', 'success_count', 'error_list'):
            setattr(self, field, kwargs.get(field, None))
class StorageLifecyclePolicyCompletedEventData(msrest.serialization.Model):
    """Data payload of a storage "lifecycle policy completed" event.

    Aggregates per-action summaries (delete / tier-to-cool /
    tier-to-archive); all fields are optional keyword arguments
    defaulting to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'schedule_time': {'key': 'scheduleTime', 'type': 'str'},
        'delete_summary': {'key': 'deleteSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
        'tier_to_cool_summary': {'key': 'tierToCoolSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
        'tier_to_archive_summary': {'key': 'tierToArchiveSummary', 'type': 'StorageLifecyclePolicyActionSummaryDetail'},
    }

    def __init__(self, **kwargs):
        super(StorageLifecyclePolicyCompletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('schedule_time', 'delete_summary',
                      'tier_to_cool_summary', 'tier_to_archive_summary'):
            setattr(self, field, kwargs.get(field, None))
class SubscriptionDeletedEventData(msrest.serialization.Model):
    """Data payload of an event-subscription "deleted" event.

    ``event_subscription_id`` is marked read-only in ``_validation`` and
    is always initialized to None here (never taken from kwargs).
    """

    _validation = {
        'event_subscription_id': {'readonly': True},
    }

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'event_subscription_id': {'key': 'eventSubscriptionId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SubscriptionDeletedEventData, self).__init__(**kwargs)
        self.event_subscription_id = None  # read-only field
class SubscriptionValidationEventData(msrest.serialization.Model):
    """Data payload of an event-subscription validation event.

    Both fields are marked read-only in ``_validation`` and are always
    initialized to None here (never taken from kwargs).
    """

    _validation = {
        'validation_code': {'readonly': True},
        'validation_url': {'readonly': True},
    }

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'validation_code': {'key': 'validationCode', 'type': 'str'},
        'validation_url': {'key': 'validationUrl', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SubscriptionValidationEventData, self).__init__(**kwargs)
        self.validation_code = None  # read-only field
        self.validation_url = None  # read-only field
class SubscriptionValidationResponse(msrest.serialization.Model):
    """Wrapper model carrying a subscription-validation response string.

    ``validation_response`` is an optional keyword argument defaulting
    to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'validation_response': {'key': 'validationResponse', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(SubscriptionValidationResponse, self).__init__(**kwargs)
        self.validation_response = kwargs.get('validation_response', None)
class WebAppServicePlanUpdatedEventData(msrest.serialization.Model):
    """Data payload of an App Service plan "updated" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_service_plan_event_type_detail': {'key': 'appServicePlanEventTypeDetail', 'type': 'AppServicePlanEventTypeDetail'},
        'sku': {'key': 'sku', 'type': 'WebAppServicePlanUpdatedEventDataSku'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebAppServicePlanUpdatedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_service_plan_event_type_detail', 'sku', 'name',
                      'client_request_id', 'correlation_request_id',
                      'request_id', 'address', 'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebAppServicePlanUpdatedEventDataSku(msrest.serialization.Model):
    """SKU description (name/tier/size/family/capacity) attached to an
    App Service plan "updated" event.

    All fields are optional keyword arguments defaulting to None. Note
    that the wire keys other than ``name`` are capitalized.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'Tier', 'type': 'str'},
        'size': {'key': 'Size', 'type': 'str'},
        'family': {'key': 'Family', 'type': 'str'},
        'capacity': {'key': 'Capacity', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebAppServicePlanUpdatedEventDataSku, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('name', 'tier', 'size', 'family', 'capacity'):
            setattr(self, field, kwargs.get(field, None))
class WebAppUpdatedEventData(msrest.serialization.Model):
    """Data payload of a web-app "updated" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebAppUpdatedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebBackupOperationCompletedEventData(msrest.serialization.Model):
    """Data payload of a web-app "backup operation completed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebBackupOperationCompletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebBackupOperationFailedEventData(msrest.serialization.Model):
    """Data payload of a web-app "backup operation failed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebBackupOperationFailedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebBackupOperationStartedEventData(msrest.serialization.Model):
    """Data payload of a web-app "backup operation started" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebBackupOperationStartedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebRestoreOperationCompletedEventData(msrest.serialization.Model):
    """Data payload of a web-app "restore operation completed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebRestoreOperationCompletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebRestoreOperationFailedEventData(msrest.serialization.Model):
    """Data payload of a web-app "restore operation failed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebRestoreOperationFailedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebRestoreOperationStartedEventData(msrest.serialization.Model):
    """Data payload of a web-app "restore operation started" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebRestoreOperationStartedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebSlotSwapCompletedEventData(msrest.serialization.Model):
    """Data payload of a web-app "slot swap completed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebSlotSwapCompletedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebSlotSwapFailedEventData(msrest.serialization.Model):
    """Data payload of a web-app "slot swap failed" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebSlotSwapFailedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebSlotSwapStartedEventData(msrest.serialization.Model):
    """Data payload of a web-app "slot swap started" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebSlotSwapStartedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebSlotSwapWithPreviewCancelledEventData(msrest.serialization.Model):
    """Data payload of a web-app "slot swap with preview cancelled" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebSlotSwapWithPreviewCancelledEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
class WebSlotSwapWithPreviewStartedEventData(msrest.serialization.Model):
    """Data payload of a web-app "slot swap with preview started" event.

    All fields are optional keyword arguments; any field not supplied is
    initialized to None.
    """

    # Python attribute name -> JSON wire key and msrest type.
    _attribute_map = {
        'app_event_type_detail': {'key': 'appEventTypeDetail', 'type': 'AppEventTypeDetail'},
        'name': {'key': 'name', 'type': 'str'},
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'correlation_request_id': {'key': 'correlationRequestId', 'type': 'str'},
        'request_id': {'key': 'requestId', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'verb': {'key': 'verb', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(WebSlotSwapWithPreviewStartedEventData, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance (None if absent).
        for field in ('app_event_type_detail', 'name', 'client_request_id',
                      'correlation_request_id', 'request_id', 'address',
                      'verb'):
            setattr(self, field, kwargs.get(field, None))
| true | true |
f71b922170a32d261f523c660af62772c3182168 | 5,687 | py | Python | mjmpc/control/olgaussian_mpc.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | 2 | 2021-08-15T22:23:50.000Z | 2021-12-03T13:09:13.000Z | mjmpc/control/olgaussian_mpc.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | null | null | null | mjmpc/control/olgaussian_mpc.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | 1 | 2022-02-18T10:22:49.000Z | 2022-02-18T10:22:49.000Z | """
MPC with open-loop Gaussian policies
"""
from .controller import Controller
from mjmpc.utils.control_utils import generate_noise, scale_ctrl
import copy
import numpy as np
import scipy.special
class OLGaussianMPC(Controller):
    """Open-loop MPC with a Gaussian distribution over action sequences.

    Maintains a mean action sequence ``mean_action`` of shape
    (horizon, d_action) and a covariance ``cov_action``; particles are
    sampled as (filtered) noise around the mean and rolled out through
    the simulation via ``_rollout_fn``.
    """

    def __init__(self,
                 d_state,
                 d_obs,
                 d_action,
                 action_lows,
                 action_highs,
                 horizon,
                 init_cov,
                 init_mean,
                 base_action,
                 num_particles,
                 gamma,
                 n_iters,
                 step_size,
                 filter_coeffs,
                 set_sim_state_fn=None,
                 rollout_fn=None,
                 cov_type='diagonal',
                 sample_mode='mean',
                 batch_size=1,
                 seed=0,
                 use_zero_control_seq=False):
        """
        Parameters
        __________
        base_action : str
            Action to append at the end when shifting solution to next timestep
            'random' : appends random action
            'null' : appends zero action
            'repeat' : repeats second to last action
        num_particles : int
            Number of particles sampled at every iteration
        """
        super(OLGaussianMPC, self).__init__(d_state,
                                            d_obs,
                                            d_action,
                                            action_lows,
                                            action_highs,
                                            horizon,
                                            gamma,
                                            n_iters,
                                            set_sim_state_fn,
                                            rollout_fn,
                                            sample_mode,
                                            batch_size,
                                            seed)
        # init_cov is broadcast to one variance per action dimension.
        self.init_cov = np.array([init_cov] * self.d_action)
        self.init_mean = init_mean.copy()
        # NOTE(review): mean_action aliases the caller's init_mean array
        # (no copy); _shift mutates it in place — confirm that is intended.
        self.mean_action = init_mean
        self.base_action = base_action
        self.num_particles = num_particles
        self.cov_type = cov_type  # stored but not otherwise used in this class
        self.cov_action = np.diag(self.init_cov)
        self.step_size = step_size
        self.filter_coeffs = filter_coeffs
        self.use_zero_control_seq = use_zero_control_seq

    def _get_next_action(self, state, mode='mean'):
        """Return the first action of the current plan.

        mode='mean' returns a copy of the mean action; mode='sample' adds
        filtered Gaussian noise seeded deterministically from the current
        step count. Raises ValueError for any other mode.
        """
        if mode == 'mean':
            next_action = self.mean_action[0].copy()
        elif mode == 'sample':
            delta = generate_noise(self.cov_action, self.filter_coeffs,
                                   shape=(1, 1), base_seed=self.seed_val + 123*self.num_steps)
            next_action = self.mean_action[0].copy() + delta.reshape(self.d_action).copy()
        else:
            raise ValueError('Unidentified sampling mode in get_next_action')
        return next_action

    # def sample_actions(self):
    #     delta = generate_noise(self.cov_action, self.filter_coeffs,
    #                            shape=(self.num_particles, self.horizon),
    #                            base_seed = self.seed_val + self.num_steps)

    #     act_seq = self.mean_action[None, :, :] + delta
    #     # act_seq = scale_ctrl(act_seq, self.action_lows, self.action_highs)
    #     return np.array(act_seq)

    def sample_noise(self):
        """Sample filtered noise for all particles over the horizon.

        Delegates to generate_noise with shape=(num_particles, horizon);
        presumably the result has a trailing d_action axis (it is added to
        mean_action[None, :, :] elsewhere) — TODO confirm against
        generate_noise's contract.
        """
        delta = generate_noise(self.cov_action, self.filter_coeffs,
                               shape=(self.num_particles, self.horizon),
                               base_seed = self.seed_val + self.num_steps)
        # act_seq = scale_ctrl(act_seq, self.action_lows, self.action_highs)
        return delta

    def generate_rollouts(self, state):
        """
        Samples a batch of actions, rolls out trajectories for each particle
        and returns the resulting observations, costs,
        actions

        Parameters
        ----------
        state : dict or np.ndarray
            Initial state to set the simulation env to
        """
        self._set_sim_state_fn(copy.deepcopy(state)) #set state of simulation
        # input('....')
        delta = self.sample_noise() #sample noise from covariance of current control distribution
        if self.use_zero_control_seq:
            # Overwrite the last particle's perturbation so that its
            # controls cancel the mean, i.e. one all-zero control rollout.
            delta[-1,:] = -1.0 * self.mean_action.copy()
        trajectories = self._rollout_fn(self.num_particles, self.horizon,
                                        self.mean_action, delta, mode="open_loop")
        return trajectories

    def _shift(self):
        """
        Predict good parameters for the next time step by
        shifting the mean forward one step
        """
        self.mean_action[:-1] = self.mean_action[1:]
        if self.base_action == 'random':
            self.mean_action[-1] = np.random.normal(0, self.init_cov, self.d_action)
        elif self.base_action == 'null':
            self.mean_action[-1] = np.zeros((self.d_action, ))
        elif self.base_action == 'repeat':
            self.mean_action[-1] = self.mean_action[-2]
        else:
            raise NotImplementedError("invalid option for base action during shift")

    def reset(self):
        """Reset the controller to its initial distribution and step count."""
        self.num_steps = 0
        self.mean_action = np.zeros(shape=(self.horizon, self.d_action))
        self.cov_action = np.diag(self.init_cov)
        # Discount factors [1, gamma, gamma^2, ...] over the horizon.
        self.gamma_seq = np.cumprod([1.0] + [self.gamma] * (self.horizon - 1)).reshape(1, self.horizon)

    def _calc_val(self, cost_seq, act_seq):
        """Estimate of the value of the current distribution (subclass hook)."""
        raise NotImplementedError("_calc_val not implemented")
| 40.621429 | 103 | 0.523475 | from .controller import Controller
from mjmpc.utils.control_utils import generate_noise, scale_ctrl
import copy
import numpy as np
import scipy.special
class OLGaussianMPC(Controller):
def __init__(self,
d_state,
d_obs,
d_action,
action_lows,
action_highs,
horizon,
init_cov,
init_mean,
base_action,
num_particles,
gamma,
n_iters,
step_size,
filter_coeffs,
set_sim_state_fn=None,
rollout_fn=None,
cov_type='diagonal',
sample_mode='mean',
batch_size=1,
seed=0,
use_zero_control_seq=False):
super(OLGaussianMPC, self).__init__(d_state,
d_obs,
d_action,
action_lows,
action_highs,
horizon,
gamma,
n_iters,
set_sim_state_fn,
rollout_fn,
sample_mode,
batch_size,
seed)
self.init_cov = np.array([init_cov] * self.d_action)
self.init_mean = init_mean.copy()
self.mean_action = init_mean
self.base_action = base_action
self.num_particles = num_particles
self.cov_type = cov_type
self.cov_action = np.diag(self.init_cov)
self.step_size = step_size
self.filter_coeffs = filter_coeffs
self.use_zero_control_seq = use_zero_control_seq
def _get_next_action(self, state, mode='mean'):
if mode == 'mean':
next_action = self.mean_action[0].copy()
elif mode == 'sample':
delta = generate_noise(self.cov_action, self.filter_coeffs,
shape=(1, 1), base_seed=self.seed_val + 123*self.num_steps)
next_action = self.mean_action[0].copy() + delta.reshape(self.d_action).copy()
else:
raise ValueError('Unidentified sampling mode in get_next_action')
return next_action
lf.cov_action, self.filter_coeffs,
shape=(self.num_particles, self.horizon),
base_seed = self.seed_val + self.num_steps)
return delta
def generate_rollouts(self, state):
self._set_sim_state_fn(copy.deepcopy(state))
delta = self.sample_noise()
if self.use_zero_control_seq:
delta[-1,:] = -1.0 * self.mean_action.copy()
trajectories = self._rollout_fn(self.num_particles, self.horizon,
self.mean_action, delta, mode="open_loop")
return trajectories
def _shift(self):
self.mean_action[:-1] = self.mean_action[1:]
if self.base_action == 'random':
self.mean_action[-1] = np.random.normal(0, self.init_cov, self.d_action)
elif self.base_action == 'null':
self.mean_action[-1] = np.zeros((self.d_action, ))
elif self.base_action == 'repeat':
self.mean_action[-1] = self.mean_action[-2]
else:
raise NotImplementedError("invalid option for base action during shift")
def reset(self):
self.num_steps = 0
self.mean_action = np.zeros(shape=(self.horizon, self.d_action))
self.cov_action = np.diag(self.init_cov)
self.gamma_seq = np.cumprod([1.0] + [self.gamma] * (self.horizon - 1)).reshape(1, self.horizon)
def _calc_val(self, cost_seq, act_seq):
raise NotImplementedError("_calc_val not implemented")
| true | true |
f71b92878fc1fad9e2f4829b6b8365831bd39735 | 612 | py | Python | combine/indicator/combination_indicator.py | cwwang15/fudan-monte-carlo-pwd | 807a4d9f45112ed6520a08d14ea65ca79efe33ea | [
"Apache-2.0"
] | 1 | 2021-08-04T09:51:55.000Z | 2021-08-04T09:51:55.000Z | combine/indicator/combination_indicator.py | cwwang15/pwd-monte-carlo | 807a4d9f45112ed6520a08d14ea65ca79efe33ea | [
"Apache-2.0"
] | null | null | null | combine/indicator/combination_indicator.py | cwwang15/pwd-monte-carlo | 807a4d9f45112ed6520a08d14ea65ca79efe33ea | [
"Apache-2.0"
] | null | null | null | import abc
class CombinationIndicator(metaclass=abc.ABCMeta):
    """Abstract base for deciding whether two sets may be combined.

    A subclass supplies ``similarity``; two sets can be combined when
    their similarity score reaches ``threshold``.
    """

    def __init__(self, threshold: float):
        self.__threshold: float = threshold

    @property
    def threshold(self) -> float:
        """Minimum similarity required for combination."""
        return self.__threshold

    @threshold.setter
    def threshold(self, new_threshold: float):
        self.__threshold = new_threshold
        pass

    def can_combine(self, master_set: set, servant_set: set) -> bool:
        """Return True iff similarity(master, servant) >= threshold."""
        return self.similarity(master_set, servant_set) >= self.threshold
        pass

    @abc.abstractmethod
    def similarity(self, master_set: set, servant_set: set) -> float:
        """Return a similarity score for the two sets (subclass-defined)."""
        pass
| 25.5 | 73 | 0.676471 | import abc
class CombinationIndicator(metaclass=abc.ABCMeta):
def __init__(self, threshold: float):
self.__threshold: float = threshold
@property
def threshold(self):
return self.__threshold
@threshold.setter
def threshold(self, new_threshold: float):
self.__threshold = new_threshold
pass
def can_combine(self, master_set: set, servant_set: set) -> bool:
return self.similarity(master_set, servant_set) >= self.threshold
pass
@abc.abstractmethod
def similarity(self, master_set: set, servant_set: set) -> float:
pass
| true | true |
f71b95dcc7666004de7d0b909f2b67e00806bb40 | 1,523 | py | Python | simulation/src/simulation_evaluation/src/state_machine/state_machines/priority.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 13 | 2020-06-30T17:18:28.000Z | 2021-07-20T16:55:35.000Z | simulation/src/simulation_evaluation/src/state_machine/state_machines/priority.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 1 | 2020-11-10T20:15:42.000Z | 2020-12-25T18:27:56.000Z | simulation/src/simulation_evaluation/src/state_machine/state_machines/priority.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 3 | 2020-07-20T09:09:08.000Z | 2021-07-20T17:00:37.000Z | """PriorityStateMachine keeps track of stoping or halting in front of stop or halt lines.
See :mod:`simulation.src.simulation_evaluation.src.state_machine.states.priority` for
implementation details of the states used in this StateMachine.
"""
from typing import Callable
from simulation.src.simulation_evaluation.src.state_machine.states.priority import (
FailureInStopZone,
InHaltZone,
InStopZone,
Off,
SuccessfullyStopped,
)
from .state_machine import StateMachine
__copyright__ = "KITcar"
class PriorityStateMachine(StateMachine):
    """Track whether the car stops/halts correctly at stop and halt lines."""

    # Default (inactive) state.
    off: "State" = Off()  # noqa: F821
    # Car is currently inside a stop zone.
    in_stop_zone: "State" = InStopZone()  # noqa: F821
    # Car is currently inside a halt zone.
    in_halt_zone: "State" = InHaltZone()  # noqa: F821
    # Car came to a proper standstill inside the stop zone.
    successfully_stopped: "State" = SuccessfullyStopped()  # noqa: F821
    # Terminal failure: the car left the stop zone without stopping.
    failure_in_stop_zone: "State" = FailureInStopZone()  # noqa: F821

    def __init__(self, callback: Callable[[], None]):
        """Create the machine in the ``off`` state.

        Arguments:
            callback: Invoked whenever the active state changes.
        """
        initial = PriorityStateMachine.off
        super().__init__(
            state_machine=self.__class__,
            initial_state=initial,
            callback=callback,
        )
| 32.404255 | 89 | 0.688116 |
from typing import Callable
from simulation.src.simulation_evaluation.src.state_machine.states.priority import (
FailureInStopZone,
InHaltZone,
InStopZone,
Off,
SuccessfullyStopped,
)
from .state_machine import StateMachine
__copyright__ = "KITcar"
class PriorityStateMachine(StateMachine):
off: "State" = Off()
in_stop_zone: "State" = InStopZone()
in_halt_zone: "State" = InHaltZone()
successfully_stopped: "State" = SuccessfullyStopped()
failure_in_stop_zone: "State" = FailureInStopZone()
def __init__(self, callback: Callable[[], None]):
super().__init__(
state_machine=self.__class__,
initial_state=PriorityStateMachine.off,
callback=callback,
)
| true | true |
f71b9795d1f9e2621b52a1fb8f3fffa662517f05 | 6,626 | py | Python | wizmann-pic/18-11-19/encrypt.py | Wizmann/assets | 1a34a18e65bc4c57676f9a04d6eb5c2a3806fcfc | [
"MIT"
] | null | null | null | wizmann-pic/18-11-19/encrypt.py | Wizmann/assets | 1a34a18e65bc4c57676f9a04d6eb5c2a3806fcfc | [
"MIT"
] | null | null | null | wizmann-pic/18-11-19/encrypt.py | Wizmann/assets | 1a34a18e65bc4c57676f9a04d6eb5c2a3806fcfc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Copyright 2012-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
import random
import string
import binascii
from shadowsocks import common
from shadowsocks.crypto import rc4_md5, openssl, sodium, table
# Inclusive bounds for the random padding length inside each nonce.
NONCE_RANGE = (32, 512)
# Marker bytes framing the random padding: b'\xde\xad\xbe\xef'.
NONCE_CONSTANT = binascii.unhexlify('deadbeef')


def make_nonce():
    """Build an obfuscation nonce: marker + random ASCII letters + marker.

    Returns a byte string of ``2 * len(NONCE_CONSTANT) + n`` bytes where
    ``n`` is drawn uniformly from ``NONCE_RANGE``.  Uses ``range`` and an
    explicit ASCII encode so the same code runs on Python 2 and 3 — the
    original ``xrange`` plus implicit ``bytes + str`` concatenation broke
    on Python 3.
    """
    nonce_length = random.randint(*NONCE_RANGE)
    padding = ''.join(
        random.choice(string.ascii_letters) for _ in range(nonce_length))
    return NONCE_CONSTANT + padding.encode('ascii') + NONCE_CONSTANT
method_supported = {}
method_supported.update(rc4_md5.ciphers)
method_supported.update(openssl.ciphers)
method_supported.update(sodium.ciphers)
method_supported.update(table.ciphers)
def random_string(length):
    """Return ``length`` cryptographically suitable random bytes (used for IVs)."""
    return os.urandom(length)
# Module-level memo for EVP_BytesToKey results, keyed by
# '<password>-<key_len>-<iv_len>'.
cached_keys = {}


def try_cipher(key, method=None):
    """Probe that ``key``/``method`` form a usable cipher.

    Constructing an Encryptor logs and exits when the method is not
    supported, so callers use this as an early validation step; the
    instance itself is discarded.
    """
    Encryptor(key, method)
def EVP_BytesToKey(password, key_len, iv_len):
    """Derive ``(key, iv)`` from a password, OpenSSL EVP_BytesToKey-style.

    Repeatedly MD5-hashes ``previous_digest + password`` until enough key
    material exists, then slices ``key_len`` bytes of key and ``iv_len``
    bytes of IV.  Results are memoized in the module-level ``cached_keys``.

    NOTE(review): ``password`` must already be bytes on Python 3 —
    ``md5.update`` rejects str; this module targets Python 2 where str is
    bytes.
    """
    # equivalent to OpenSSL's EVP_BytesToKey() with count 1
    # so that we make the same key and iv as nodejs version
    cached_key = '%s-%d-%d' % (password, key_len, iv_len)
    r = cached_keys.get(cached_key, None)
    if r:
        return r
    m = []
    i = 0
    while len(b''.join(m)) < (key_len + iv_len):
        md5 = hashlib.md5()
        data = password
        if i > 0:
            # Chain: digest_i = MD5(digest_{i-1} + password).
            data = m[i - 1] + password
        md5.update(data)
        m.append(md5.digest())
        i += 1
    ms = b''.join(m)
    key = ms[:key_len]
    iv = ms[key_len:key_len + iv_len]
    cached_keys[cached_key] = (key, iv)
    return key, iv
class Encryptor(object):
    """Stream encryptor/decryptor with nonce-based traffic obfuscation.

    The first outgoing chunk is prefixed with the cipher IV and a framed
    random nonce (see ``make_nonce``); incoming data is buffered until
    both framing markers have been seen, after which the nonce is
    stripped and plain decryption continues.

    NOTE(review): buffers and ciphertext are handled as ``str`` — this
    module targets Python 2.
    """

    def __init__(self, key, method):
        self.key = key
        self.method = method
        self.iv = None
        self.iv_sent = False      # IV + nonce are sent with the first chunk only
        self.cipher_iv = b''
        self.decipher = None      # created lazily from the first received IV
        method = method.lower()
        self._method_info = self.get_method_info(method)
        # De-obfuscation state: accumulate decrypted data until the framed
        # nonce has been fully seen (flag 1) or given up on (flag -1).
        self.obf_buffer = ''
        self.obf_max_length = random.randint(NONCE_RANGE[1], 4096)
        self.obf_flag = 0
        if self._method_info:
            self.cipher = self.get_cipher(key, method, 1,
                                          random_string(self._method_info[1]))
        else:
            logging.error('method %s not supported' % method)
            sys.exit(1)

    def get_method_info(self, method):
        """Return ``(key_len, iv_len, factory)`` for ``method`` or None."""
        method = method.lower()
        m = method_supported.get(method)
        return m

    def iv_len(self):
        """Length of the IV generated for the outgoing cipher."""
        return len(self.cipher_iv)

    def get_cipher(self, password, method, op, iv):
        """Create a cipher object; ``op=1`` encrypt, ``op=0`` decrypt."""
        password = common.to_bytes(password)
        m = self._method_info
        if m[0] > 0:
            key, iv_ = EVP_BytesToKey(password, m[0], m[1])
        else:
            # key_length == 0 indicates we should use the key directly
            key, iv = password, b''
        iv = iv[:m[1]]
        if op == 1:
            # this iv is for cipher not decipher
            self.cipher_iv = iv[:m[1]]
        return m[2](method, key, iv, op)

    def encrypt(self, buf):
        """Encrypt ``buf``; the first call prepends IV and nonce."""
        if len(buf) == 0:
            return buf
        if self.iv_sent:
            return self.cipher.update(buf)
        self.iv_sent = True
        nonce = make_nonce()
        return self.cipher_iv + self.cipher.update(nonce + buf)

    def decrypt(self, buf):
        """Decrypt ``buf``, stripping the peer's IV and obfuscation nonce."""
        if len(buf) == 0:
            return buf
        if self.obf_flag == -1:
            # Framing never appeared within the limit; the stream is
            # considered invalid and silently discarded.
            return ''
        if self.decipher is None:
            # The first chunk carries the peer's IV.
            decipher_iv_len = self._method_info[1]
            decipher_iv = buf[:decipher_iv_len]
            self.decipher = self.get_cipher(self.key, self.method, 0,
                                            iv=decipher_iv)
            buf = buf[decipher_iv_len:]
            if len(buf) == 0:
                return buf
        res = self.decipher.update(buf)
        self.obf_buffer += res
        if self.obf_flag:
            # Nonce already stripped; pass decrypted data straight through.
            return res
        # BUGFIX: use find() instead of index() — index() raised ValueError
        # whenever the closing marker had not arrived yet (common when the
        # nonce is split across TCP segments).
        end = -1
        if self.obf_buffer.startswith(NONCE_CONSTANT):
            end = self.obf_buffer.find(NONCE_CONSTANT, 1)
        if end > 0:
            self.obf_flag = 1
            return self.obf_buffer[end + len(NONCE_CONSTANT):]
        elif len(self.obf_buffer) > self.obf_max_length:
            # Give up: mark the stream bad and drop everything.
            self.obf_flag = -1
            return ''
        # BUGFIX: nonce still incomplete — explicitly return an empty
        # string instead of falling through and returning None.
        return ''
def encrypt_all(password, method, op, data):
    """One-shot encrypt (``op=1``) or decrypt (``op=0``) of a whole datagram.

    Encryption output is ``iv + cipher(nonce + data)``; decryption strips
    the IV, decrypts, and removes the framed nonce.  Returns '' when the
    decrypted payload does not carry valid nonce framing.
    """
    result = []
    method = method.lower()
    (key_len, iv_len, m) = method_supported[method]
    if key_len > 0:
        key, _ = EVP_BytesToKey(password, key_len, iv_len)
    else:
        key = password
    if op:
        iv = random_string(iv_len)
        result.append(iv)
        nonce = make_nonce()
        data = nonce + data
        cipher = m(method, key, iv, op)
        result.append(cipher.update(data))
        return b''.join(result)
    else:
        iv = data[:iv_len]
        data = data[iv_len:]
        cipher = m(method, key, iv, op)
        data = cipher.update(data)
        # BUGFIX: find() instead of index() so malformed payloads yield ''
        # rather than raising an uncaught ValueError.
        pos = -1
        if data.startswith(NONCE_CONSTANT):
            pos = data.find(NONCE_CONSTANT, 1)
        if pos > 0:
            data = data[pos + len(NONCE_CONSTANT):]
        else:
            data = ''
        return data
# Cipher methods exercised by the round-trip self-tests below.
CIPHERS_TO_TEST = [
    'aes-128-cfb',
    'aes-256-cfb',
    'rc4-md5',
    'salsa20',
    'chacha20',
    'table',
]
def test_encryptor():
    """Round-trip 10 KB through paired Encryptors for every cipher."""
    from os import urandom
    plain = urandom(10240)
    for method in CIPHERS_TO_TEST:
        logging.warn(method)
        encryptor = Encryptor(b'key', method)
        decryptor = Encryptor(b'key', method)
        # range (not the Python-2-only xrange) keeps this runnable on
        # both major versions.
        for i in range(100):
            cipher = encryptor.encrypt(plain)
            plain2 = decryptor.decrypt(cipher)
            assert plain == plain2
def test_encrypt_all():
    """Round-trip a random 10 KB buffer through encrypt_all for each cipher."""
    from os import urandom
    payload = urandom(10240)
    for method in CIPHERS_TO_TEST:
        logging.warn(method)
        decrypted = encrypt_all(b'key', method, 0,
                                encrypt_all(b'key', method, 1, payload))
        assert payload == decrypted
if __name__ == '__main__':
    # Run the module self-tests when executed directly.
    test_encrypt_all()
    test_encryptor()
| 27.957806 | 81 | 0.601419 |
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
import random
import string
import binascii
from shadowsocks import common
from shadowsocks.crypto import rc4_md5, openssl, sodium, table
NONCE_RANGE = (32, 512)
NONCE_CONSTANT = binascii.unhexlify('deadbeef')
def make_nonce():
nonce_length = random.randint(*NONCE_RANGE)
nonce = ''.join(
[random.choice(string.ascii_letters) for i in xrange(nonce_length)])
return NONCE_CONSTANT + nonce + NONCE_CONSTANT
method_supported = {}
method_supported.update(rc4_md5.ciphers)
method_supported.update(openssl.ciphers)
method_supported.update(sodium.ciphers)
method_supported.update(table.ciphers)
def random_string(length):
return os.urandom(length)
cached_keys = {}
def try_cipher(key, method=None):
Encryptor(key, method)
def EVP_BytesToKey(password, key_len, iv_len):
# so that we make the same key and iv as nodejs version
cached_key = '%s-%d-%d' % (password, key_len, iv_len)
r = cached_keys.get(cached_key, None)
if r:
return r
m = []
i = 0
while len(b''.join(m)) < (key_len + iv_len):
md5 = hashlib.md5()
data = password
if i > 0:
data = m[i - 1] + password
md5.update(data)
m.append(md5.digest())
i += 1
ms = b''.join(m)
key = ms[:key_len]
iv = ms[key_len:key_len + iv_len]
cached_keys[cached_key] = (key, iv)
return key, iv
class Encryptor(object):
def __init__(self, key, method):
self.key = key
self.method = method
self.iv = None
self.iv_sent = False
self.cipher_iv = b''
self.decipher = None
method = method.lower()
self._method_info = self.get_method_info(method)
self.obf_buffer = ''
self.obf_max_length = random.randint(NONCE_RANGE[1], 4096)
self.obf_flag = 0
if self._method_info:
self.cipher = self.get_cipher(key, method, 1,
random_string(self._method_info[1]))
else:
logging.error('method %s not supported' % method)
sys.exit(1)
def get_method_info(self, method):
method = method.lower()
m = method_supported.get(method)
return m
def iv_len(self):
return len(self.cipher_iv)
def get_cipher(self, password, method, op, iv):
password = common.to_bytes(password)
m = self._method_info
if m[0] > 0:
key, iv_ = EVP_BytesToKey(password, m[0], m[1])
else:
# key_length == 0 indicates we should use the key directly
key, iv = password, b''
iv = iv[:m[1]]
if op == 1:
# this iv is for cipher not decipher
self.cipher_iv = iv[:m[1]]
return m[2](method, key, iv, op)
def encrypt(self, buf):
if len(buf) == 0:
return buf
if self.iv_sent:
return self.cipher.update(buf)
else:
self.iv_sent = True
nonce = make_nonce()
return self.cipher_iv + self.cipher.update(nonce + buf)
def decrypt(self, buf):
if len(buf) == 0:
return buf
if self.obf_flag == -1:
return ''
if self.decipher is None:
decipher_iv_len = self._method_info[1]
decipher_iv = buf[:decipher_iv_len]
self.decipher = self.get_cipher(self.key, self.method, 0,
iv=decipher_iv)
buf = buf[decipher_iv_len:]
if len(buf) == 0:
return buf
res = self.decipher.update(buf)
self.obf_buffer += res
if self.obf_flag:
return res
if self.obf_buffer.startswith(NONCE_CONSTANT) \
and self.obf_buffer.index(NONCE_CONSTANT, 1) > 0:
self.obf_flag = 1
pos = self.obf_buffer.index(NONCE_CONSTANT, 1)
return self.obf_buffer[pos + len(NONCE_CONSTANT):]
elif len(self.obf_buffer) > self.obf_max_length:
self.obf_flag = -1
return ''
def encrypt_all(password, method, op, data):
result = []
method = method.lower()
(key_len, iv_len, m) = method_supported[method]
if key_len > 0:
key, _ = EVP_BytesToKey(password, key_len, iv_len)
else:
key = password
if op:
iv = random_string(iv_len)
result.append(iv)
nonce = make_nonce()
data = nonce + data
cipher = m(method, key, iv, op)
result.append(cipher.update(data))
return b''.join(result)
else:
iv = data[:iv_len]
data = data[iv_len:]
cipher = m(method, key, iv, op)
data = cipher.update(data)
if data.startswith(NONCE_CONSTANT) and data.index(NONCE_CONSTANT, 1) > 0:
pos = data.index(NONCE_CONSTANT, 1)
data = data[pos + len(NONCE_CONSTANT):]
else:
data = ''
return data
CIPHERS_TO_TEST = [
'aes-128-cfb',
'aes-256-cfb',
'rc4-md5',
'salsa20',
'chacha20',
'table',
]
def test_encryptor():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
logging.warn(method)
encryptor = Encryptor(b'key', method)
decryptor = Encryptor(b'key', method)
for i in xrange(100):
cipher = encryptor.encrypt(plain)
plain2 = decryptor.decrypt(cipher)
assert plain == plain2
def test_encrypt_all():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
logging.warn(method)
cipher = encrypt_all(b'key', method, 1, plain)
plain2 = encrypt_all(b'key', method, 0, cipher)
assert plain == plain2
if __name__ == '__main__':
test_encrypt_all()
test_encryptor()
| true | true |
f71b97d646bf147a35345ec60a2da27de1631ea8 | 968 | py | Python | mysite/mysite/urls.py | FullGhettoAlchemist/cepheusProduction | 951c244d454fafa817b34dd37aaea28a10afa655 | [
"MIT"
] | null | null | null | mysite/mysite/urls.py | FullGhettoAlchemist/cepheusProduction | 951c244d454fafa817b34dd37aaea28a10afa655 | [
"MIT"
] | null | null | null | mysite/mysite/urls.py | FullGhettoAlchemist/cepheusProduction | 951c244d454fafa817b34dd37aaea28a10afa655 | [
"MIT"
] | null | null | null | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app import views
# URL routing table (Django 1.11-style url() regex patterns).
urlpatterns = [
    url(r'^admin/', admin.site.urls),  # built-in admin site
    url(r'^$', views.index, name='index'),  # site root
    url(r'^position/$', views.position, name='position'),
    url(r'^details/$', views.details, name='details'),
]
| 35.851852 | 80 | 0.670455 | from django.conf.urls import url
from django.contrib import admin
from app import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', views.index, name='index'),
url(r'^position/$', views.position, name='position'),
url(r'^details/$', views.details, name='details'),
]
| true | true |
f71b995b9c4ad66cf9f2feb286b113ed39ca86d1 | 8,848 | py | Python | STS_v2/compute_high_low_limit_v3.py | kite8/quant_learning | d823974cd2b5a6b8e2a20fe42d7334051fa46ea0 | [
"MIT"
] | 1 | 2019-02-22T08:12:41.000Z | 2019-02-22T08:12:41.000Z | STS_v2/compute_high_low_limit_v3.py | kite8/quant_learning | d823974cd2b5a6b8e2a20fe42d7334051fa46ea0 | [
"MIT"
] | null | null | null | STS_v2/compute_high_low_limit_v3.py | kite8/quant_learning | d823974cd2b5a6b8e2a20fe42d7334051fa46ea0 | [
"MIT"
] | 5 | 2019-02-22T08:14:09.000Z | 2020-06-28T05:54:39.000Z | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 2 15:19:45 2018
@author: kite
"""
import datetime, time
from pymongo import UpdateOne, ASCENDING, UpdateMany
from database import DB_CONN
from stock_util import get_trading_dates, get_all_codes
import tushare as ts
import numpy as np
import pandas as pd
import requests
import json
import datetime
"""
计算涨跌停价格
只要获取到前一天的价格
获取name和上市日期
最新ipo规则
如果是上市当天,则涨停价是上市发行价格的1.44倍
所以需要获取到发行价格
要不是
"""
# 获取发行价格并保存到数据库中
def fill_issueprice_and_timeToMarket():
    """Load IPO info from data/ipo_info.xlsx into the ``basic`` collection.

    Spreadsheet columns (exported from Eastmoney Choice):
        code         -- stock code (used as the index)
        name         -- current stock name
        issueprice   -- IPO issue price
        timeToMarket -- listing date
    """
    ipo_df = pd.read_excel('data/ipo_info.xlsx', header=0, dtype={'code': str})
    ipo_df = ipo_df.set_index('code')
    requests = []
    for stock_code in ipo_df.index.tolist():
        try:
            fields = {
                'issueprice': ipo_df.issueprice[stock_code],
                'timeToMarket': ipo_df.timeToMarket[stock_code],
            }
            requests.append(
                UpdateOne({'code': stock_code}, {'$set': fields}, upsert=True))
        except:
            print('code: %s, has problem' % stock_code)
    if len(requests) > 0:
        update_result = DB_CONN['basic'].bulk_write(requests, ordered=False)
        print('填充字段, 字段名: issueprice,数据集:%s,插入:%4d条,更新:%4d条' %
              ('basic', update_result.upserted_count, update_result.modified_count), flush=True)
def fixing_is_st(start, end):
    """Backfill the ``is_st`` flag in the ``daily`` collection.

    Reads data/st_info.xlsx, which has one sheet per period (2015, 2016,
    2017, 2018-Q3); each sheet is indexed by (code, state) with one
    column per trading date.  ``start``/``end`` are currently unused —
    the periods below are fixed to match the workbook sheets.
    """
    basic_df = pd.read_excel('data/stock_basic.xlsx', header=0, dtype={'code': str})
    basic_df = basic_df.set_index('code')
    codes = basic_df[basic_df['是否ST过'] == 1].index.tolist()
    total = len(codes)
    daily = DB_CONN['daily']
    excel_name = 'data/st_info.xlsx'
    # One (start, end) pair per workbook sheet.  BUGFIX: the original
    # selected these with a broken if/elif chain that restarted at i == 2,
    # which only worked by accident.
    periods = [
        ('2015-01-01', '2015-12-31'),
        ('2016-01-01', '2016-12-31'),
        ('2017-01-01', '2017-12-31'),
        ('2018-01-01', '2018-09-30'),
    ]
    for i, (period_start, period_end) in enumerate(periods):
        all_dates = get_trading_dates(period_start, period_end)
        print('数据读取中')
        df = pd.read_excel(excel_name, i, header=0, dtype={'code': str})
        df = df.set_index(['code', 'state'])
        # Normalize the date columns to 'YYYY-MM-DD' strings.
        df.columns = df.columns.astype(np.datetime64)
        df.columns = df.columns.to_period('D')
        df.columns = df.columns.astype('str')
        print('数据读取完毕')
        for j, code in enumerate(codes):
            update_requests = []
            for date in all_dates:
                try:
                    st_state = df.xs([code])[date]['是否ST']
                    sst_state = df.xs([code])[date]['是否*ST']
                    # ST if either the ST or the *ST column says so.
                    is_st_flag = not (st_state == '否' and sst_state == '否')
                    update_requests.append(
                        UpdateOne(
                            {'code': code, 'date': date, 'index': False},
                            {'$set': {'is_st': is_st_flag}}
                        )
                    )
                except:
                    print('something is wrong, code : %s, date : %s' % (code, date))
            if len(update_requests) > 0:
                update_result = daily.bulk_write(update_requests, ordered=False)
                print('第%s年填充进度: %s/%s, 字段名: is_st,数据集:%s,插入:%4d条,更新:%4d条' %
                      (i + 1, j + 1, total, 'daily', update_result.upserted_count, update_result.modified_count), flush=True)
def fill_high_and_low_price_between(start, end):
    """Compute daily limit-up/limit-down prices and store them in ``daily``.

    Rules implemented:
      * listing day: +/-44% band derived from the issue price
        (1.2 * 1.2 and 0.8 * 0.8, each step rounded to 2 decimals);
      * ST-flagged stocks: +/-5% of the previous close;
      * everything else: +/-10% of the previous close.

    ``start`` is currently unused; documents are selected between the
    stock's listing date and ``end``.
    """
    codes = ts.get_stock_basics().index.tolist()
    _df = pd.read_excel('data/stock_basic.xlsx', header=0, dtype={'code': str})
    _df = _df.set_index('code')
    st_codes = _df[_df['是否ST过'] == 1].index.tolist()
    total = len(codes)
    error_code = []
    for i, code in enumerate(codes):
        try:
            timeToMarket = DB_CONN['basic'].find_one(
                {'code': code},
                projection={'code': True, 'timeToMarket': True, '_id': False})['timeToMarket']
        except:
            # No basic document (or no listing date) for this stock.
            error_code.append(code)
            continue
        daily_cursor = DB_CONN['daily'].find(
            {'code': code, 'date': {'$lte': end, '$gte': timeToMarket}, 'index': False},
            projection={'code': True, 'date': True, 'pre_close': True, '_id': False})
        update_requests = []
        for j, daily in enumerate(daily_cursor):
            date = daily['date']
            try:
                pre_close = daily['pre_close']
            except:
                if (j == 0) & (timeToMarket != date):
                    # First document simply lacks pre_close; nothing to
                    # compute for this day.  BUGFIX: was ``pass``, which
                    # fell through to the pre_close-based computation
                    # below and raised NameError.
                    continue
                elif timeToMarket == date:
                    # Listing day: limits derive from the issue price.
                    issueprice = DB_CONN['basic'].find_one(
                        {'code': code},
                        projection={'issueprice': True, '_id': False})['issueprice']
                    high_limit = np.round(np.round(issueprice * 1.2, 2) * 1.2, 2)
                    low_limit = np.round(np.round(issueprice * 0.8, 2) * 0.8, 2)
                    update_requests.append(
                        UpdateOne({'code': code, 'date': date, 'index': False},
                                  {'$set': {'high_limit': high_limit, 'low_limit': low_limit}},
                                  upsert=True))
                    # BUGFIX: skip the pre_close-based branch below, which
                    # would otherwise read the undefined ``pre_close``.
                    continue
                else:
                    print('code: %s, time: %s, ipo_date: %s, 请速查原因' % (code, date, timeToMarket))
                    error_code.append(code)
                    continue
            # NOTE(review): indentation was lost in the source dump; the
            # intended semantics — ST-flagged days get +/-5%, everything
            # else +/-10% — are implemented explicitly here.
            if code in st_codes:
                st_flag = DB_CONN['daily'].find_one(
                    {'code': code, 'date': date, 'index': False})['is_st']
            else:
                st_flag = False
            if st_flag:
                high_limit = np.round(pre_close * 1.05, 2)
                low_limit = np.round(pre_close * 0.95, 2)
            else:
                high_limit = np.round(pre_close * 1.1, 2)
                low_limit = np.round(pre_close * 0.9, 2)
            update_requests.append(
                UpdateOne({'code': code, 'date': date, 'index': False},
                          {'$set': {'high_limit': high_limit, 'low_limit': low_limit}},
                          upsert=True))
        if len(update_requests) > 0:
            update_result = DB_CONN['daily'].bulk_write(update_requests, ordered=False)
            print('涨跌停计算, 进度: (%s/%s), code:%s, 数据集:%s, 插入:%4d条, 更新:%4d条' %
                  (i + 1, total, code, 'daily', update_result.upserted_count, update_result.modified_count), flush=True)
if __name__ == '__main__':
daily_col = DB_CONN['daily']
if 'code_1_index_1' not in daily_col.index_information().keys():
daily_col.create_index(
[('code', ASCENDING), ('index', ASCENDING)]
)
start = '2015-01-01'
end = '2018-09-30'
tic = time.process_time()
fixing_is_st(start, end)
# fill_issueprice_and_timeToMarket()
fill_high_and_low_price_between(start, end)
toc = time.process_time()
delta = toc - tic
print(delta) | 36.561983 | 125 | 0.495592 |
import datetime, time
from pymongo import UpdateOne, ASCENDING, UpdateMany
from database import DB_CONN
from stock_util import get_trading_dates, get_all_codes
import tushare as ts
import numpy as np
import pandas as pd
import requests
import json
import datetime
def fill_issueprice_and_timeToMarket():
df = pd.read_excel('data/ipo_info.xlsx', header=0, dtype={'code':str})
df = df.set_index('code')
codes = df.index.tolist()
update_requests = []
for i,code in enumerate(codes):
try:
update_requests.append(
UpdateOne(
{'code':code},
{'$set':{'issueprice':df.issueprice[code],
'timeToMarket':df.timeToMarket[code]}},
upsert=True))
except:
print('code: %s, has problem' % code)
if len(update_requests)>0:
update_result = DB_CONN['basic'].bulk_write(update_requests, ordered=False)
print('填充字段, 字段名: issueprice,数据集:%s,插入:%4d条,更新:%4d条' %
('basic', update_result.upserted_count, update_result.modified_count), flush=True)
def fixing_is_st(start, end):
df = pd.read_excel('data/stock_basic.xlsx', header=0, dtype={'code':str})
df = df.set_index('code')
codes = df[df['是否ST过'] == 1].index.tolist()
total = len(codes)
daily = DB_CONN['daily']
excel_name = 'data/st_info.xlsx'
for i in range(4):
if i == 0:
all_dates = get_trading_dates('2015-01-01', '2015-12-31')
elif i == 1:
all_dates = get_trading_dates('2016-01-01', '2016-12-31')
if i == 2:
all_dates = get_trading_dates('2017-01-01', '2017-12-31')
elif i == 3:
all_dates = get_trading_dates('2018-01-01', '2018-09-30')
print('数据读取中')
df = pd.read_excel(excel_name, i, header=0, dtype={'code':str})
df = df.set_index(['code','state'])
df.columns = df.columns.astype(np.datetime64)
df.columns = df.columns.to_period('D')
df.columns = df.columns.astype('str')
print('数据读取完毕')
for j, code in enumerate(codes):
update_requests = []
for date in all_dates:
try:
st_state = df.xs([code])[date]['是否ST']
sst_state = df.xs([code])[date]['是否*ST']
if (st_state == '否') and (sst_state == '否'):
is_st_flag = False
else:
is_st_flag = True
update_requests.append(
UpdateOne(
{'code':code, 'date':date, 'index':False},
{'$set':{'is_st':is_st_flag}}
)
)
except:
print('something is wrong, code : %s, date : %s' % (code, date))
if len(update_requests)>0:
update_result = daily.bulk_write(update_requests, ordered=False)
print('第%s年填充进度: %s/%s, 字段名: is_st,数据集:%s,插入:%4d条,更新:%4d条' %
(i+1, j+1, total, 'daily', update_result.upserted_count, update_result.modified_count), flush=True)
def fill_high_and_low_price_between(start, end):
codes = ts.get_stock_basics().index.tolist()
_df = pd.read_excel('data/stock_basic.xlsx', header=0, dtype={'code':str})
_df = _df.set_index('code')
st_codes = _df[_df['是否ST过'] == 1].index.tolist()
total = len(codes)
error_code = []
for i,code in enumerate(codes):
try:
timeToMarket = DB_CONN['basic'].find_one({'code':code},
projection={'code':True, 'timeToMarket':True, '_id':False})['timeToMarket']
except:
error_code.append(code)
continue
daily_cursor = DB_CONN['daily'].find(
{'code':code, 'date':{'$lte': end, '$gte': timeToMarket}, 'index':False},
projection={'code':True, 'date':True, 'pre_close':True, '_id':False})
update_requests = []
for j,daily in enumerate(daily_cursor):
date = daily['date']
try:
pre_close = daily['pre_close']
except:
if (j == 0) & (timeToMarket != date):
pass
elif timeToMarket == date:
issueprice = DB_CONN['basic'].find_one({'code':code},
projection={'issueprice':True, '_id':False})['issueprice']
high_limit = np.round(np.round(issueprice * 1.2, 2) * 1.2, 2)
low_limit = np.round(np.round(issueprice * 0.8, 2) * 0.8, 2)
update_requests.append(
UpdateOne({'code':code, 'date':date, 'index':False},
{'$set':{'high_limit':high_limit, 'low_limit':low_limit}},
upsert=True))
else:
print('code: %s, time: %s, ipo_date: %s, 请速查原因' % (code, date, timeToMarket))
error_code.append(code)
continue
st_flag = DB_CONN['daily'].find_one({'code':code, 'date':date, 'index':False})['is_st']
if st_flag:
high_limit = np.round(pre_close * 1.05, 2)
low_limit = np.round(pre_close * 0.95, 2)
else:
high_limit = np.round(pre_close * 1.1, 2)
low_limit = np.round(pre_close * 0.9, 2)
update_requests.append(
UpdateOne({'code':code, 'date':date, 'index':False},
{'$set':{'high_limit':high_limit, 'low_limit':low_limit}},
upsert=True))
if len(update_requests)>0:
update_result = DB_CONN['daily'].bulk_write(update_requests, ordered=False)
print('涨跌停计算, 进度: (%s/%s), code:%s, 数据集:%s, 插入:%4d条, 更新:%4d条' %
(i+1, total, code, 'daily', update_result.upserted_count, update_result.modified_count), flush=True)
if __name__ == '__main__':
daily_col = DB_CONN['daily']
if 'code_1_index_1' not in daily_col.index_information().keys():
daily_col.create_index(
[('code', ASCENDING), ('index', ASCENDING)]
)
start = '2015-01-01'
end = '2018-09-30'
tic = time.process_time()
fixing_is_st(start, end)
fill_high_and_low_price_between(start, end)
toc = time.process_time()
delta = toc - tic
print(delta) | true | true |
f71b9a749d420870b13e967659e88b311fd71f8e | 5,142 | py | Python | dsw_mailer/connection/smtp.py | ds-wizard/mailer | f919cf42a413a9fa530607358900255b55fc233a | [
"Apache-2.0"
] | null | null | null | dsw_mailer/connection/smtp.py | ds-wizard/mailer | f919cf42a413a9fa530607358900255b55fc233a | [
"Apache-2.0"
] | null | null | null | dsw_mailer/connection/smtp.py | ds-wizard/mailer | f919cf42a413a9fa530607358900255b55fc233a | [
"Apache-2.0"
] | null | null | null | import logging
import pathvalidate
import smtplib
import ssl
import tenacity
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr
from ..config import MailConfig
from ..context import Context
from ..model import MailMessage, MailAttachment
RETRY_SMTP_MULTIPLIER = 0.5
RETRY_SMTP_TRIES = 3
EMAIL_ENCODING = 'utf-8'
class SMTPSender:
    """Delivers MailMessage objects over SMTP (plain, STARTTLS or SSL)."""

    def __init__(self, cfg: MailConfig):
        self.cfg = cfg

    @tenacity.retry(
        reraise=True,
        wait=tenacity.wait_exponential(multiplier=RETRY_SMTP_MULTIPLIER),
        stop=tenacity.stop_after_attempt(RETRY_SMTP_TRIES),
        before=tenacity.before_log(Context.logger, logging.DEBUG),
        after=tenacity.after_log(Context.logger, logging.DEBUG),
    )
    def send(self, message: MailMessage):
        """Send ``message``, retrying with exponential backoff on failure."""
        self._send(message)

    def _send(self, mail: MailMessage):
        # Choose the transport from configuration: implicit SSL vs
        # plain/STARTTLS.
        if self.cfg.is_ssl:
            return self._send_smtp_ssl(mail=mail)
        return self._send_smtp(mail=mail)

    def _send_smtp_ssl(self, mail: MailMessage):
        """Deliver via implicit-TLS SMTP (SMTPS)."""
        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(
            host=self.cfg.host,
            port=self.cfg.port,
            context=context,
            timeout=self.cfg.timeout,
        ) as server:
            if self.cfg.auth:
                server.login(
                    user=self.cfg.login_user,
                    password=self.cfg.login_password,
                )
            return server.send_message(
                msg=self._convert_email(mail),
                from_addr=formataddr((mail.from_name, mail.from_mail)),
                to_addrs=mail.recipients,
            )

    def _send_smtp(self, mail: MailMessage):
        """Deliver via plain SMTP, upgrading with STARTTLS when configured."""
        context = ssl.create_default_context()
        with smtplib.SMTP(
            host=self.cfg.host,
            port=self.cfg.port,
            timeout=self.cfg.timeout,
        ) as server:
            if self.cfg.is_tls:
                server.starttls(context=context)
            if self.cfg.auth:
                server.login(
                    user=self.cfg.login_user,
                    password=self.cfg.login_password,
                )
            return server.send_message(
                msg=self._convert_email(mail),
                from_addr=formataddr((mail.from_name, mail.from_mail)),
                to_addrs=mail.recipients,
            )

    def _convert_inline_image(self, image: MailAttachment) -> MIMEBase:
        """Wrap an inline (HTML-referenced) image as a MIME part."""
        mtype, msubtype = image.content_type.split('/', maxsplit=1)
        part = MIMEBase(mtype, msubtype)
        part.set_payload(image.data)
        encoders.encode_base64(part)
        # BUGFIX: the sanitized filename was computed but never used —
        # the headers carried a literal placeholder instead.
        filename = pathvalidate.sanitize_filename(image.name)
        part.add_header('Content-ID', f'<{filename}>')
        part.add_header('Content-Disposition', f'inline; filename={filename}')
        return part

    def _convert_html_part(self, mail: MailMessage) -> MIMEBase:
        """Build the HTML alternative, bundling inline images if present."""
        if mail.html_body is None:
            raise RuntimeError('Requested HTML body but there is none')
        txt_part = MIMEText(mail.html_body, 'html', EMAIL_ENCODING)
        txt_part.set_charset(EMAIL_ENCODING)
        if len(mail.html_images) > 0:
            # multipart/related ties the HTML to its referenced images.
            part = MIMEMultipart('related')
            part.attach(txt_part)
            for image in mail.html_images:
                part.attach(self._convert_inline_image(image))
            return part
        return txt_part

    def _convert_plain_part(self, mail: MailMessage) -> MIMEText:
        """Build the plain-text alternative."""
        if mail.plain_body is None:
            raise RuntimeError('Requested plain body but there is none')
        return MIMEText(mail.plain_body, 'plain', EMAIL_ENCODING)

    def _convert_txt_parts(self, mail: MailMessage) -> MIMEBase:
        """Combine plain and HTML bodies into a multipart/alternative part."""
        if mail.plain_body is None:
            return self._convert_html_part(mail)
        if mail.html_body is None:
            return self._convert_plain_part(mail)
        part = MIMEMultipart('alternative')
        part.set_charset(EMAIL_ENCODING)
        part.attach(self._convert_plain_part(mail))
        part.attach(self._convert_html_part(mail))
        return part

    def _convert_attachment(self, attachment: MailAttachment) -> MIMEBase:
        """Wrap a file attachment as a base64-encoded MIME part."""
        mtype, msubtype = attachment.content_type.split('/', maxsplit=1)
        part = MIMEBase(mtype, msubtype)
        part.set_payload(attachment.data)
        encoders.encode_base64(part)
        # BUGFIX: use the sanitized filename in the header (it was
        # previously computed but unused).
        filename = pathvalidate.sanitize_filename(attachment.name)
        part.add_header('Content-Disposition', f'attachment; filename={filename}')
        return part

    def _convert_email(self, mail: MailMessage) -> MIMEBase:
        """Assemble the full MIME message: bodies, attachments, headers."""
        msg = self._convert_txt_parts(mail)
        if len(mail.attachments) > 0:
            txt = msg
            msg = MIMEMultipart('mixed')
            msg.attach(txt)
            for attachment in mail.attachments:
                msg.attach(self._convert_attachment(attachment))
        msg['From'] = formataddr((mail.from_name, mail.from_mail))
        msg['To'] = ', '.join(mail.recipients)
        msg['Subject'] = mail.subject
        return msg
| 36.211268 | 82 | 0.632439 | import logging
import pathvalidate
import smtplib
import ssl
import tenacity
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr
from ..config import MailConfig
from ..context import Context
from ..model import MailMessage, MailAttachment
RETRY_SMTP_MULTIPLIER = 0.5
RETRY_SMTP_TRIES = 3
EMAIL_ENCODING = 'utf-8'
class SMTPSender:
    """Delivers MailMessage objects over SMTP.

    The transport is chosen from the configuration: implicit SSL
    (SMTP_SSL) or plain SMTP, optionally upgraded with STARTTLS.
    """

    def __init__(self, cfg: MailConfig):
        self.cfg = cfg

    @tenacity.retry(
        reraise=True,
        wait=tenacity.wait_exponential(multiplier=RETRY_SMTP_MULTIPLIER),
        stop=tenacity.stop_after_attempt(RETRY_SMTP_TRIES),
        before=tenacity.before_log(Context.logger, logging.DEBUG),
        after=tenacity.after_log(Context.logger, logging.DEBUG),
    )
    def send(self, message: MailMessage):
        """Send *message*, retrying with exponential backoff on errors."""
        self._send(message)

    def _send(self, mail: MailMessage):
        """Dispatch to the SSL or plain/STARTTLS transport."""
        if self.cfg.is_ssl:
            return self._send_smtp_ssl(mail=mail)
        return self._send_smtp(mail=mail)

    def _send_smtp_ssl(self, mail: MailMessage):
        """Send *mail* over an implicit-SSL SMTP connection."""
        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(
                host=self.cfg.host,
                port=self.cfg.port,
                context=context,
                timeout=self.cfg.timeout,
        ) as server:
            if self.cfg.auth:
                server.login(
                    user=self.cfg.login_user,
                    password=self.cfg.login_password,
                )
            return server.send_message(
                msg=self._convert_email(mail),
                from_addr=formataddr((mail.from_name, mail.from_mail)),
                to_addrs=mail.recipients,
            )

    def _send_smtp(self, mail: MailMessage):
        """Send *mail* over plain SMTP, upgrading to TLS when configured."""
        context = ssl.create_default_context()
        with smtplib.SMTP(
                host=self.cfg.host,
                port=self.cfg.port,
                timeout=self.cfg.timeout,
        ) as server:
            if self.cfg.is_tls:
                server.starttls(context=context)
            if self.cfg.auth:
                server.login(
                    user=self.cfg.login_user,
                    password=self.cfg.login_password,
                )
            return server.send_message(
                msg=self._convert_email(mail),
                from_addr=formataddr((mail.from_name, mail.from_mail)),
                to_addrs=mail.recipients,
            )

    def _convert_inline_image(self, image: MailAttachment) -> MIMEBase:
        """Build a base64-encoded MIME part for an image embedded in HTML."""
        mtype, msubtype = image.content_type.split('/', maxsplit=1)
        part = MIMEBase(mtype, msubtype)
        part.set_payload(image.data)
        encoders.encode_base64(part)
        filename = pathvalidate.sanitize_filename(image.name)
        # Fix: the sanitized filename was computed but never interpolated,
        # leaving literal placeholder text in the headers.
        part.add_header('Content-ID', f'<{filename}>')
        part.add_header('Content-Disposition', f'inline; filename={filename}')
        return part

    def _convert_html_part(self, mail: MailMessage) -> MIMEBase:
        """Build the HTML alternative, bundling inline images if present."""
        if mail.html_body is None:
            raise RuntimeError('Requested HTML body but there is none')
        txt_part = MIMEText(mail.html_body, 'html', EMAIL_ENCODING)
        txt_part.set_charset(EMAIL_ENCODING)
        if len(mail.html_images) > 0:
            # multipart/related ties the inline images to the HTML text.
            part = MIMEMultipart('related')
            part.attach(txt_part)
            for image in mail.html_images:
                part.attach(self._convert_inline_image(image))
            return part
        return txt_part

    def _convert_plain_part(self, mail: MailMessage) -> MIMEText:
        """Build the text/plain alternative."""
        if mail.plain_body is None:
            raise RuntimeError('Requested plain body but there is none')
        return MIMEText(mail.plain_body, 'plain', EMAIL_ENCODING)

    def _convert_txt_parts(self, mail: MailMessage) -> MIMEBase:
        """Combine the available bodies into a single MIME part.

        With both bodies present a multipart/alternative is produced,
        plain text first so clients prefer the HTML rendition.
        """
        if mail.plain_body is None:
            return self._convert_html_part(mail)
        if mail.html_body is None:
            return self._convert_plain_part(mail)
        part = MIMEMultipart('alternative')
        part.set_charset(EMAIL_ENCODING)
        part.attach(self._convert_plain_part(mail))
        part.attach(self._convert_html_part(mail))
        return part

    def _convert_attachment(self, attachment: MailAttachment) -> MIMEBase:
        """Build a base64-encoded MIME part for a regular attachment."""
        mtype, msubtype = attachment.content_type.split('/', maxsplit=1)
        part = MIMEBase(mtype, msubtype)
        part.set_payload(attachment.data)
        encoders.encode_base64(part)
        filename = pathvalidate.sanitize_filename(attachment.name)
        # Fix: interpolate the sanitized filename (was a literal placeholder).
        part.add_header('Content-Disposition',
                        f'attachment; filename={filename}')
        return part

    def _convert_email(self, mail: MailMessage) -> MIMEBase:
        """Assemble the outgoing message with headers and attachments."""
        msg = self._convert_txt_parts(mail)
        if len(mail.attachments) > 0:
            # Wrap the text parts and the attachments in multipart/mixed.
            txt = msg
            msg = MIMEMultipart('mixed')
            msg.attach(txt)
            for attachment in mail.attachments:
                msg.attach(self._convert_attachment(attachment))
        msg['From'] = formataddr((mail.from_name, mail.from_mail))
        msg['To'] = ', '.join(mail.recipients)
        msg['Subject'] = mail.subject
        return msg
| true | true |
f71b9ac9f4d7813d05814baf5ec329e7feb1f6b6 | 1,576 | py | Python | setup.py | JakubBlaha/python-jsonstore | 9f79f17e7947fe89aea1e67483d1f8d7313ea4ab | [
"MIT"
] | 2 | 2020-04-30T12:22:15.000Z | 2020-05-15T22:40:39.000Z | setup.py | JakubBlaha/python-jsonstore | 9f79f17e7947fe89aea1e67483d1f8d7313ea4ab | [
"MIT"
] | 6 | 2018-09-05T17:46:21.000Z | 2020-06-01T11:34:26.000Z | setup.py | JakubBlaha/python-jsonstore | 9f79f17e7947fe89aea1e67483d1f8d7313ea4ab | [
"MIT"
] | 5 | 2017-11-25T20:31:28.000Z | 2020-09-04T00:57:07.000Z | import codecs
from os import path
from textwrap import dedent
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, "README.rst"), encoding='utf-8') as f:
long_description = f.read()
# Package metadata; the version is taken from the git tag via setuptools_scm.
setup(
    name='python-jsonstore',
    use_scm_version=True,
    description="",
    long_description=long_description,
    long_description_content_type='text/x-rst',
    author="Oliver Bristow",
    author_email='github+pypi@oliverbristow.co.uk',
    license='MIT',
    # Plain list literals produce exactly what the previous
    # dedent(...).strip().split('\n') pipeline produced.
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Database',
        'Topic :: Software Development',
    ],
    keywords='json key value store',
    url='https://github.com/Code0x58/python-jsonstore/',
    py_modules=['jsonstore'],
    setup_requires=["setuptools_scm", "wheel"],
)
| 33.531915 | 71 | 0.645939 | import codecs
from os import path
from textwrap import dedent
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, "README.rst"), encoding='utf-8') as f:
long_description = f.read()
# Distribution metadata for python-jsonstore (versioned via setuptools_scm).
setup(
    name='python-jsonstore',
    use_scm_version=True,
    description="",
    long_description=long_description,
    long_description_content_type='text/x-rst',
    author="Oliver Bristow",
    author_email='github+pypi@oliverbristow.co.uk',
    license='MIT',
    # Equivalent to the original dedent/strip/split construction: dedent
    # removes the common indentation, so the resulting strings are these.
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Database',
        'Topic :: Software Development',
    ],
    keywords='json key value store',
    url='https://github.com/Code0x58/python-jsonstore/',
    py_modules=['jsonstore'],
    setup_requires=["setuptools_scm", "wheel"],
)
| true | true |
f71b9b08d59205e762bc081291995a3dce88426a | 778 | py | Python | ws2122-lspm/Lib/site-packages/pm4py/objects/log/exporter/xes/util/__init__.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-19T04:02:46.000Z | 2022-01-19T04:02:46.000Z | ws2122-lspm/Lib/site-packages/pm4py/objects/log/exporter/xes/util/__init__.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2021-11-19T07:21:48.000Z | 2021-11-19T07:21:48.000Z | ws2122-lspm/Lib/site-packages/pm4py/objects/log/exporter/xes/util/__init__.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-14T17:15:38.000Z | 2022-01-14T17:15:38.000Z | '''
This file is part of PM4Py (More Info: https://pm4py.fit.fraunhofer.de).
PM4Py is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PM4Py is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PM4Py. If not, see <https://www.gnu.org/licenses/>.
'''
from pm4py.objects.log.exporter.xes.util import compression
| 43.222222 | 76 | 0.737789 | from pm4py.objects.log.exporter.xes.util import compression
| true | true |
f71b9c6dbdb4556fd1c62de37e6dbb97379d445f | 4,113 | py | Python | homeassistant/components/sensor/rtorrent.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | 1 | 2019-07-24T09:26:57.000Z | 2019-07-24T09:26:57.000Z | homeassistant/components/sensor/rtorrent.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | 5 | 2021-02-08T20:32:11.000Z | 2022-01-13T01:19:23.000Z | homeassistant/components/sensor/rtorrent.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | null | null | null | """Support for monitoring the rtorrent BitTorrent client API."""
import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_URL, CONF_NAME,
CONF_MONITORED_VARIABLES, STATE_IDLE)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.exceptions import PlatformNotReady
_LOGGER = logging.getLogger(__name__)
# Sensor type keys used in configuration and as SENSOR_TYPES dict keys.
SENSOR_TYPE_CURRENT_STATUS = 'current_status'
SENSOR_TYPE_DOWNLOAD_SPEED = 'download_speed'
SENSOR_TYPE_UPLOAD_SPEED = 'upload_speed'
DEFAULT_NAME = 'rtorrent'
# Maps sensor type -> [display name, unit of measurement (None = unitless)].
SENSOR_TYPES = {
    SENSOR_TYPE_CURRENT_STATUS: ['Status', None],
    SENSOR_TYPE_DOWNLOAD_SPEED: ['Down Speed', 'kB/s'],
    SENSOR_TYPE_UPLOAD_SPEED: ['Up Speed', 'kB/s'],
}
# Platform configuration schema: XML-RPC URL is required; monitored
# variables default to an empty list and must be known sensor types.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_URL): cv.url,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All(
        cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the rtorrent sensors."""
    url = config[CONF_URL]
    client_name = config[CONF_NAME]
    try:
        rtorrent = xmlrpc.client.ServerProxy(url)
    except (xmlrpc.client.ProtocolError, ConnectionRefusedError):
        _LOGGER.error("Connection to rtorrent daemon failed")
        raise PlatformNotReady
    # One sensor entity per configured monitored variable.
    sensors = [
        RTorrentSensor(sensor_type, rtorrent, client_name)
        for sensor_type in config[CONF_MONITORED_VARIABLES]
    ]
    add_entities(sensors)
def format_speed(speed):
    """Return a bytes/s measurement as a human readable string."""
    kilobytes = float(speed) / 1024
    # Keep two decimals for tiny rates so they do not round away to 0.
    precision = 2 if kilobytes < 0.1 else 1
    return round(kilobytes, precision)
class RTorrentSensor(Entity):
    """Representation of an rtorrent sensor."""

    def __init__(self, sensor_type, rtorrent_client, client_name):
        """Initialize the sensor."""
        self._name, self._unit_of_measurement = SENSOR_TYPES[sensor_type]
        self.client = rtorrent_client
        self.type = sensor_type
        self.client_name = client_name
        self._state = None
        self.data = None
        self._available = False

    @property
    def name(self):
        """Return the name of the sensor."""
        return f'{self.client_name} {self._name}'

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def available(self):
        """Return true if device is available."""
        return self._available

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    def update(self):
        """Get the latest data from rtorrent and updates the state."""
        # Batch both rate queries into a single XML-RPC round trip.
        calls = xmlrpc.client.MultiCall(self.client)
        calls.throttle.global_up.rate()
        calls.throttle.global_down.rate()
        try:
            self.data = calls()
        except (xmlrpc.client.ProtocolError, ConnectionRefusedError):
            _LOGGER.error("Connection to rtorrent lost")
            self._available = False
            return
        self._available = True

        upload = self.data[0]
        download = self.data[1]

        if self.type == SENSOR_TYPE_CURRENT_STATUS:
            if not self.data:
                self._state = None
            elif upload > 0 and download > 0:
                self._state = 'Up/Down'
            elif upload > 0 and download == 0:
                self._state = 'Seeding'
            elif upload == 0 and download > 0:
                self._state = 'Downloading'
            else:
                self._state = STATE_IDLE

        if self.data:
            if self.type == SENSOR_TYPE_DOWNLOAD_SPEED:
                self._state = format_speed(download)
            elif self.type == SENSOR_TYPE_UPLOAD_SPEED:
                self._state = format_speed(upload)
| 32.132813 | 70 | 0.650863 | import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_URL, CONF_NAME,
CONF_MONITORED_VARIABLES, STATE_IDLE)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
from homeassistant.exceptions import PlatformNotReady
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPE_CURRENT_STATUS = 'current_status'
SENSOR_TYPE_DOWNLOAD_SPEED = 'download_speed'
SENSOR_TYPE_UPLOAD_SPEED = 'upload_speed'
DEFAULT_NAME = 'rtorrent'
SENSOR_TYPES = {
SENSOR_TYPE_CURRENT_STATUS: ['Status', None],
SENSOR_TYPE_DOWNLOAD_SPEED: ['Down Speed', 'kB/s'],
SENSOR_TYPE_UPLOAD_SPEED: ['Up Speed', 'kB/s'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_URL): cv.url,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the rtorrent sensors from the validated platform config."""
    url = config[CONF_URL]
    name = config[CONF_NAME]
    try:
        rtorrent = xmlrpc.client.ServerProxy(url)
    except (xmlrpc.client.ProtocolError, ConnectionRefusedError):
        _LOGGER.error("Connection to rtorrent daemon failed")
        # Ask Home Assistant to retry platform setup later.
        raise PlatformNotReady
    dev = []
    # One sensor entity per configured monitored variable.
    for variable in config[CONF_MONITORED_VARIABLES]:
        dev.append(RTorrentSensor(variable, rtorrent, name))
    add_entities(dev)
def format_speed(speed):
    """Return a bytes/s measurement as a kB/s value rounded for display."""
    kb_spd = float(speed) / 1024
    # Two decimals below 0.1 kB/s so tiny rates do not round away to 0.
    return round(kb_spd, 2 if kb_spd < 0.1 else 1)
class RTorrentSensor(Entity):
    """Representation of an rtorrent sensor."""

    def __init__(self, sensor_type, rtorrent_client, client_name):
        """Initialize the sensor from the SENSOR_TYPES metadata."""
        self._name = SENSOR_TYPES[sensor_type][0]
        self.client = rtorrent_client
        self.type = sensor_type
        self.client_name = client_name
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        self.data = None
        self._available = False

    @property
    def name(self):
        """Return the name of the sensor."""
        return '{} {}'.format(self.client_name, self._name)

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def available(self):
        """Return true if device is available."""
        return self._available

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    def update(self):
        """Get the latest data from rtorrent and update the state."""
        # Batch both rate queries into one XML-RPC round trip.
        multicall = xmlrpc.client.MultiCall(self.client)
        multicall.throttle.global_up.rate()
        multicall.throttle.global_down.rate()
        try:
            self.data = multicall()
            self._available = True
        except (xmlrpc.client.ProtocolError, ConnectionRefusedError):
            _LOGGER.error("Connection to rtorrent lost")
            self._available = False
            return
        upload = self.data[0]
        download = self.data[1]
        if self.type == SENSOR_TYPE_CURRENT_STATUS:
            if self.data:
                # Map the up/down rate combination to a status string.
                if upload > 0 and download > 0:
                    self._state = 'Up/Down'
                elif upload > 0 and download == 0:
                    self._state = 'Seeding'
                elif upload == 0 and download > 0:
                    self._state = 'Downloading'
                else:
                    self._state = STATE_IDLE
            else:
                self._state = None
        if self.data:
            # Speed sensors report the rate formatted in kB/s.
            if self.type == SENSOR_TYPE_DOWNLOAD_SPEED:
                self._state = format_speed(download)
            elif self.type == SENSOR_TYPE_UPLOAD_SPEED:
                self._state = format_speed(upload)
| true | true |
f71b9da7eaef7e2b15246349f4b4f1045f95882f | 799 | py | Python | backend/scrape_amazon/update_product_db_amazon.py | jayleenli/the-legend-of-compurator | 7fc747ebf6b011acec8733a394861f7fed368d73 | [
"MIT"
] | null | null | null | backend/scrape_amazon/update_product_db_amazon.py | jayleenli/the-legend-of-compurator | 7fc747ebf6b011acec8733a394861f7fed368d73 | [
"MIT"
] | null | null | null | backend/scrape_amazon/update_product_db_amazon.py | jayleenli/the-legend-of-compurator | 7fc747ebf6b011acec8733a394861f7fed368d73 | [
"MIT"
] | null | null | null | from .scrape_objects_MVP import get_attributes, get_id
from pymongo import MongoClient
import os
# Connection string comes from the environment; a missing DB_URL raises
# KeyError at import time.
DB_URL = os.environ['DB_URL']
# Module-level MongoDB handles shared by the helpers in this module.
CLIENT = MongoClient(DB_URL)
DB = CLIENT.compurator
PRODUCTS_COLLECTION = DB["products"]
def check_product_exists(url):
    '''
    :param url: url of amazon product
    :return: the product's p_id if it is already stored, otherwise False
    '''
    p_id = get_id(url)
    return p_id if PRODUCTS_COLLECTION.count({'p_id': p_id}) > 0 else False
def add_product_amazon(url):
    '''
    :param url: url of amazon product
    :return: p_id of the scraped product after it is stored in the collection
    '''
    document = get_attributes(url)
    PRODUCTS_COLLECTION.insert_one(document)
    return document['p_id']
| 24.212121 | 90 | 0.720901 | from .scrape_objects_MVP import get_attributes, get_id
from pymongo import MongoClient
import os
DB_URL = os.environ['DB_URL']
CLIENT = MongoClient(DB_URL)
DB = CLIENT.compurator
PRODUCTS_COLLECTION = DB["products"]
def check_product_exists(url):
    '''
    :param url: url of amazon product
    :return: False if the product is not in the collection, its p_id if it is
    '''
    p_id = get_id(url)
    if PRODUCTS_COLLECTION.count({'p_id': p_id}) > 0:
        return p_id
    return False
def add_product_amazon(url):
    '''
    :param url: url of amazon product
    :return: p_id of the scraped product after inserting it into the collection
    '''
    prod_document = get_attributes(url)
    PRODUCTS_COLLECTION.insert_one(prod_document)
    return prod_document['p_id']
| true | true |
f71b9e37908dd5da30752301903bfc85504aa496 | 728 | py | Python | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 14 | 2018-07-15T17:01:52.000Z | 2018-11-29T06:15:33.000Z | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 1 | 2018-09-28T12:59:34.000Z | 2019-10-08T08:42:59.000Z | Examples/AcceptAllRevisions.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 2 | 2020-12-21T07:59:17.000Z | 2022-02-16T21:41:25.000Z | import os
import asposewordscloud
import asposewordscloud.models.requests
from asposewordscloud.rest import ApiException
from shutil import copyfile

# Fix: WordsApi was referenced unqualified but never imported (only the
# package is imported), which raises NameError; the class is exposed on
# the asposewordscloud package itself.
words_api = asposewordscloud.WordsApi(client_id='####-####-####-####-####', client_secret='##################')

file_name = 'test_doc.docx'

# Upload original document to cloud storage; the context manager closes
# the file handle once the upload call returns (it was leaked before).
with open(file_name, 'rb') as document_stream:
    upload_file_request = asposewordscloud.models.requests.UploadFileRequest(
        file_content=document_stream, path=file_name)
    words_api.upload_file(upload_file_request)

# Build the AcceptAllRevisions request for the uploaded document; the
# call itself is issued on the following line of the script.
request = asposewordscloud.models.requests.AcceptAllRevisionsRequest(name=file_name)
words_api.accept_all_revisions(request) | 38.315789 | 108 | 0.787088 | import os
import asposewordscloud
import asposewordscloud.models.requests
from asposewordscloud.rest import ApiException
from shutil import copyfile
# NOTE(review): WordsApi is used unqualified but only the package is
# imported, so this line raises NameError as written — presumably it
# should be asposewordscloud.WordsApi; confirm against the SDK.
words_api = WordsApi(client_id = '####-####-####-####-####', client_secret = '##################')
# Local document to upload and process.
file_name = 'test_doc.docx'
# Upload the original document to cloud storage.
# NOTE(review): the file handle opened here is never closed.
my_var1 = open(file_name, 'rb')
my_var2 = file_name
upload_file_request = asposewordscloud.models.requests.UploadFileRequest(file_content=my_var1, path=my_var2)
words_api.upload_file(upload_file_request)
# Build the AcceptAllRevisions request for the uploaded document.
my_var3 = file_name
request = asposewordscloud.models.requests.AcceptAllRevisionsRequest(name=my_var3)
words_api.accept_all_revisions(request) | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.