hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c3d3d0cb411845f0bc1f2fc13c35d8cbe8a9463 | 4,393 | py | Python | src/demo_music_critic/plotter.py | fcr/Haifa_Tech_Meetup_Talk | d33a74dac894d2c5cd6aa49e1a60a7df284c0f8f | [
"MIT"
] | 4 | 2020-04-08T03:02:09.000Z | 2021-11-05T12:24:34.000Z | src/demo_music_critic/plotter.py | fcr/python_meetup_htm_slides | ef261cd2ec0bb81ecdd829420ae273baff3892cf | [
"MIT"
] | null | null | null | src/demo_music_critic/plotter.py | fcr/python_meetup_htm_slides | ef261cd2ec0bb81ecdd829420ae273baff3892cf | [
"MIT"
] | 1 | 2020-12-04T03:23:11.000Z | 2020-12-04T03:23:11.000Z | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import sys
import os
import csv
import time
import subprocess
from optparse import OptionParser
from plot_output import NuPICPlotOutput
WINDOW = 200
HIGHLIGHT_ALPHA = 0.3
ANOMALY_HIGHLIGHT_COLOR = 'red'
DEFAULT_ANOMALY_THRESHOLD = 0.9
DEFAULT_ANOMALY_TRIGGER_COUNT = 1
parser = OptionParser(
usage="%prog <path/to/nupic/output/directory> [options]\n\nPlot nupic "
"output, optionally syncing the output to the playing of the original WAV file."
)
parser.add_option(
"-w",
"--wav",
dest="wav",
default=None,
help="Path to a WAV file to play synced to the plot.")
parser.add_option(
"-m",
"--maximize",
action="store_true",
default=False,
dest="maximize",
help="Maximize plot window."
)
parser.add_option(
"-t",
"--anomaly_threshold",
dest="anomaly_threshold",
default=DEFAULT_ANOMALY_THRESHOLD,
help="Value the anomaly likelihood(s) must breach before being marked as "
"anomalous in the chart."
)
parser.add_option(
"-g",
"--anomaly_trigger",
dest="anomaly_trigger",
default=DEFAULT_ANOMALY_TRIGGER_COUNT,
help="How many bins must be above the anomaly threshold to display an "
"anomaly on the chart."
)
parser.add_option(
"-a",
"--use_anomaly_score",
action="store_true",
default=False,
dest="use_anomaly_score",
help="Use the anomalyScore from NuPIC instead of the anomalyLikelihood."
)
def run(input_dir, audio_file, maximize,
        anomaly_threshold, anomaly_trigger_count, use_anomaly_score):
  """Stream per-frequency-bin NuPIC output CSVs into a live plot.

  One CSV file per frequency bin is expected in ``input_dir``; rows across
  files are consumed in lockstep.  When ``audio_file`` is given, playback is
  started (macOS ``open``) and plotting is throttled to wall-clock time so
  the chart stays in sync with the audio.

  NOTE(review): this uses the Python 2 iterator protocol (``reader.next()``);
  under Python 3 it would need ``next(reader)``.
  """
  file_names = os.listdir(input_dir)
  # Bin name == CSV file name without its extension.
  bins = [os.path.splitext(n)[0] for n in file_names]
  input_files = [open(os.path.join(input_dir, f)) for f in file_names]
  readers = [csv.reader(f) for f in input_files]
  # First row of every file is its header; kept for column lookups by name.
  headers = [reader.next() for reader in readers]
  for reader in readers:
    # Skip the two NuPIC metadata rows that follow the header.
    reader.next()
    reader.next()
  output = NuPICPlotOutput(input_dir, bins, maximize, anomaly_threshold, anomaly_trigger_count)
  if audio_file:
    # 'open' is macOS-specific; give playback a head start before plotting.
    subprocess.call("open %s" % audio_file, shell=True)
    time.sleep(1.0)
  start = time.time()
  while True:
    try:
      # Advance all bin readers together; StopIteration on any ends the run.
      next_lines = [reader.next() for reader in readers]
    except StopIteration:
      break
    seconds = float(next_lines[0][headers[0].index("seconds")])
    # Wall-clock moment this row corresponds to in the audio timeline.
    data_time = start + seconds
    bin_values = []
    anomaly_likelihoods = []
    # Only plot rows that are not already behind real time (frame dropping).
    if time.time() <= data_time:
      for i, line in enumerate(next_lines):
        freq_bin = bins[i]
        header = headers[i]
        bin_value = float(line[header.index(freq_bin)])
        if use_anomaly_score:
          anomaly_key = "anomalyScore"
        else:
          anomaly_key = "anomalyLikelihood"
        anomaly_likelihood = float(line[header.index(anomaly_key)])
        bin_values.append(bin_value)
        anomaly_likelihoods.append(anomaly_likelihood)
      output.write(seconds, bin_values, anomaly_likelihoods)
    # If syncing to an audio file, wait for it to catch up.
    if audio_file:
      while time.time() < data_time:
        time.sleep(0.1)
  output.close()
  for f in input_files:
    f.close()
if __name__ == "__main__":
  (options, args) = parser.parse_args(sys.argv[1:])
  try:
    # The one required positional argument: the NuPIC output directory.
    input_dir = args.pop(0)
  except IndexError:
    parser.print_help(sys.stderr)
    # Bug fix: without exiting here, execution fell through and the
    # undefined `input_dir` raised a NameError below.
    sys.exit(1)
  audio_file = options.wav
  run(
    input_dir,
    audio_file,
    options.maximize,
    float(options.anomaly_threshold),
    int(options.anomaly_trigger),
    options.use_anomaly_score
  )
| 27.628931 | 95 | 0.681539 |
import sys
import os
import csv
import time
import subprocess
from optparse import OptionParser
from plot_output import NuPICPlotOutput
WINDOW = 200
HIGHLIGHT_ALPHA = 0.3
ANOMALY_HIGHLIGHT_COLOR = 'red'
DEFAULT_ANOMALY_THRESHOLD = 0.9
DEFAULT_ANOMALY_TRIGGER_COUNT = 1
parser = OptionParser(
usage="%prog <path/to/nupic/output/directory> [options]\n\nPlot nupic "
"output, optionally syncing the output to the playing of the original WAV file."
)
parser.add_option(
"-w",
"--wav",
dest="wav",
default=None,
help="Path to a WAV file to play synced to the plot.")
parser.add_option(
"-m",
"--maximize",
action="store_true",
default=False,
dest="maximize",
help="Maximize plot window."
)
parser.add_option(
"-t",
"--anomaly_threshold",
dest="anomaly_threshold",
default=DEFAULT_ANOMALY_THRESHOLD,
help="Value the anomaly likelihood(s) must breach before being marked as "
"anomalous in the chart."
)
parser.add_option(
"-g",
"--anomaly_trigger",
dest="anomaly_trigger",
default=DEFAULT_ANOMALY_TRIGGER_COUNT,
help="How many bins must be above the anomaly threshold to display an "
"anomaly on the chart."
)
parser.add_option(
"-a",
"--use_anomaly_score",
action="store_true",
default=False,
dest="use_anomaly_score",
help="Use the anomalyScore from NuPIC instead of the anomalyLikelihood."
)
def run(input_dir, audio_file, maximize,
anomaly_threshold, anomaly_trigger_count, use_anomaly_score):
file_names = os.listdir(input_dir)
bins = [os.path.splitext(n)[0] for n in file_names]
input_files = [open(os.path.join(input_dir, f)) for f in file_names]
readers = [csv.reader(f) for f in input_files]
headers = [reader.next() for reader in readers]
for reader in readers:
reader.next()
reader.next()
output = NuPICPlotOutput(input_dir, bins, maximize, anomaly_threshold, anomaly_trigger_count)
if audio_file:
subprocess.call("open %s" % audio_file, shell=True)
time.sleep(1.0)
start = time.time()
while True:
try:
next_lines = [reader.next() for reader in readers]
except StopIteration:
break
seconds = float(next_lines[0][headers[0].index("seconds")])
data_time = start + seconds
bin_values = []
anomaly_likelihoods = []
if time.time() <= data_time:
for i, line in enumerate(next_lines):
freq_bin = bins[i]
header = headers[i]
bin_value = float(line[header.index(freq_bin)])
if use_anomaly_score:
anomaly_key = "anomalyScore"
else:
anomaly_key = "anomalyLikelihood"
anomaly_likelihood = float(line[header.index(anomaly_key)])
bin_values.append(bin_value)
anomaly_likelihoods.append(anomaly_likelihood)
output.write(seconds, bin_values, anomaly_likelihoods)
if audio_file:
while time.time() < data_time:
time.sleep(0.1)
output.close()
for f in input_files:
f.close()
if __name__ == "__main__":
(options, args) = parser.parse_args(sys.argv[1:])
try:
input_dir = args.pop(0)
except IndexError:
parser.print_help(sys.stderr)
audio_file = options.wav
run(
input_dir,
audio_file,
options.maximize,
float(options.anomaly_threshold),
int(options.anomaly_trigger),
options.use_anomaly_score
)
| true | true |
1c3d3e01c8198b61de3f085f02ae88cae1c55329 | 4,054 | py | Python | cogs/owner.py | KoocieBoi/Sunny | 3c30cc57a891c3baab8c51d75ad1bce4bb8a4472 | [
"MIT"
] | null | null | null | cogs/owner.py | KoocieBoi/Sunny | 3c30cc57a891c3baab8c51d75ad1bce4bb8a4472 | [
"MIT"
] | null | null | null | cogs/owner.py | KoocieBoi/Sunny | 3c30cc57a891c3baab8c51d75ad1bce4bb8a4472 | [
"MIT"
] | null | null | null | from discord.ext import commands
import io, os
from shutil import copy2
import textwrap
import traceback
from contextlib import redirect_stdout
from git import Repo
class OwnerCog:
    """Owner-only administrative commands: self-updating from GitHub,
    loading/unloading/reloading cogs, and evaluating Python snippets."""
    def __init__(self, bot):
        self.bot = bot
        # Result of the most recent eval; exposed to later evals as `_`.
        self._last_result = None
    def cleanup_code(self, content):
        """Automatically removes code blocks from the code."""
        if content.startswith('```') and content.endswith('```'):
            return '\n'.join(content.split('\n')[1:-1])
        return content.strip('` \n')
    @commands.command(name='update', hidden=True)
    @commands.is_owner()
    async def git_update(self, ctx):
        """Downloads the latest version
        of the bot from GitHub then runs it"""
        await ctx.send("Updating, please wait...")
        cwp = os.getcwd()
        cwd = cwp.split(os.sep)[-1]
        # NOTE(review): GitPython refuses to clone into a non-empty
        # directory, so the "github" branch below may fail on a second
        # update -- confirm this path has been exercised.
        if cwd == "github":
            Repo.clone_from("git://github.com/NiceAesth/Sunny.git", cwp)
            os.system('python run.py')
        else:
            Repo.clone_from("git://github.com/NiceAesth/Sunny.git", "github")
            # Carry the local configuration over to the fresh checkout.
            copy2('config.py', f'{cwp}/github')
            copy2('application.yml', f'{cwp}/github')
            os.system('python github/run.py')
        await self.bot.logout()
    @commands.command(name='load', hidden=True)
    @commands.is_owner()
    async def cog_load(self, ctx, *, cog: str):
        """Command which Loads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        try:
            self.bot.load_extension(cog)
        except Exception as e:
            await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
        else:
            await ctx.send('**`SUCCESS`**')
    @commands.command(name='unload', hidden=True)
    @commands.is_owner()
    async def cog_unload(self, ctx, *, cog: str):
        """Command which Unloads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        try:
            self.bot.unload_extension(cog)
        except Exception as e:
            await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
        else:
            await ctx.send('**`SUCCESS`**')
    @commands.command(name='reload', hidden=True)
    @commands.is_owner()
    async def cog_reload(self, ctx, *, cog: str):
        """Command which Reloads a Module.
        Remember to use dot path. e.g: cogs.owner"""
        try:
            self.bot.unload_extension(cog)
            self.bot.load_extension(cog)
        except Exception as e:
            await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
        else:
            await ctx.send('**`SUCCESS`**')
    @commands.command(hidden=True, name='eval')
    @commands.is_owner()
    async def _eval(self, ctx, *, body: str):
        """Evaluates a code"""
        # Execution environment made available to the evaluated snippet.
        env = {
            'bot': self.bot,
            'ctx': ctx,
            'channel': ctx.channel,
            'author': ctx.author,
            'guild': ctx.guild,
            'message': ctx.message,
            '_': self._last_result
        }
        env.update(globals())
        body = self.cleanup_code(body)
        stdout = io.StringIO()
        # Wrap the snippet in an async function so `await` works inside it.
        to_compile = f'async def func():\n{textwrap.indent(body, "  ")}'
        try:
            exec(to_compile, env)
        except Exception as e:
            return await ctx.send(f'```py\n{e.__class__.__name__}: {e}\n```')
        func = env['func']
        try:
            with redirect_stdout(stdout):
                ret = await func()
        except Exception as e:
            value = stdout.getvalue()
            await ctx.send(f'```py\n{value}{traceback.format_exc()}\n```')
        else:
            value = stdout.getvalue()
            try:
                # Best-effort success reaction; ignore missing permissions.
                await ctx.message.add_reaction('\u2705')
            except Exception:
                # Bug fix: a bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt; narrow it to Exception.
                pass
            if ret is None:
                if value:
                    await ctx.send(f'```py\n{value}\n```')
            else:
                self._last_result = ret
                await ctx.send(f'```py\n{value}{ret}\n```')
def setup(bot):
    """Extension entry point: register the OwnerCog with the bot."""
    bot.add_cog(OwnerCog(bot))
import io, os
from shutil import copy2
import textwrap
import traceback
from contextlib import redirect_stdout
from git import Repo
class OwnerCog:
def __init__(self, bot):
self.bot = bot
self._last_result = None
def cleanup_code(self, content):
if content.startswith('```') and content.endswith('```'):
return '\n'.join(content.split('\n')[1:-1])
return content.strip('` \n')
@commands.command(name='update', hidden=True)
@commands.is_owner()
async def git_update(self, ctx):
await ctx.send("Updating, please wait...")
cwp = os.getcwd()
cwd = cwp.split(os.sep)[-1]
if cwd == "github":
Repo.clone_from("git://github.com/NiceAesth/Sunny.git", cwp)
os.system('python run.py')
else:
Repo.clone_from("git://github.com/NiceAesth/Sunny.git", "github")
copy2('config.py', f'{cwp}/github')
copy2('application.yml', f'{cwp}/github')
os.system('python github/run.py')
await self.bot.logout()
@commands.command(name='load', hidden=True)
@commands.is_owner()
async def cog_load(self, ctx, *, cog: str):
try:
self.bot.load_extension(cog)
except Exception as e:
await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
else:
await ctx.send('**`SUCCESS`**')
@commands.command(name='unload', hidden=True)
@commands.is_owner()
async def cog_unload(self, ctx, *, cog: str):
try:
self.bot.unload_extension(cog)
except Exception as e:
await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
else:
await ctx.send('**`SUCCESS`**')
@commands.command(name='reload', hidden=True)
@commands.is_owner()
async def cog_reload(self, ctx, *, cog: str):
try:
self.bot.unload_extension(cog)
self.bot.load_extension(cog)
except Exception as e:
await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')
else:
await ctx.send('**`SUCCESS`**')
@commands.command(hidden=True, name='eval')
@commands.is_owner()
async def _eval(self, ctx, *, body: str):
env = {
'bot': self.bot,
'ctx': ctx,
'channel': ctx.channel,
'author': ctx.author,
'guild': ctx.guild,
'message': ctx.message,
'_': self._last_result
}
env.update(globals())
body = self.cleanup_code(body)
stdout = io.StringIO()
to_compile = f'async def func():\n{textwrap.indent(body, " ")}'
try:
exec(to_compile, env)
except Exception as e:
return await ctx.send(f'```py\n{e.__class__.__name__}: {e}\n```')
func = env['func']
try:
with redirect_stdout(stdout):
ret = await func()
except Exception as e:
value = stdout.getvalue()
await ctx.send(f'```py\n{value}{traceback.format_exc()}\n```')
else:
value = stdout.getvalue()
try:
await ctx.message.add_reaction('\u2705')
except:
pass
if ret is None:
if value:
await ctx.send(f'```py\n{value}\n```')
else:
self._last_result = ret
await ctx.send(f'```py\n{value}{ret}\n```')
def setup(bot):
bot.add_cog(OwnerCog(bot)) | true | true |
1c3d3e1491d34f0020424e1ed87b433c58a91b54 | 548 | py | Python | website-retina/publishconf.py | gelana/website-content | 11db61103d7d944ac321d5843580df2551fc2e2e | [
"MIT"
] | 5 | 2019-04-09T07:47:33.000Z | 2021-11-23T09:25:35.000Z | website-retina/publishconf.py | gelana/website-content | 11db61103d7d944ac321d5843580df2551fc2e2e | [
"MIT"
] | 286 | 2019-03-07T09:22:34.000Z | 2022-03-30T14:57:01.000Z | website-retina/publishconf.py | gelana/website-content | 11db61103d7d944ac321d5843580df2551fc2e2e | [
"MIT"
] | 74 | 2019-04-11T08:25:55.000Z | 2022-03-31T09:44:55.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.a-eyeresearch.nl/'
IMGURL = 'https://assets.diagnijmegen.nl'
# Production build: emit absolute URLs instead of relative ones.
RELATIVE_URLS = False
FEED_DOMAIN = SITEURL
# All feed generation is disabled for the published site.
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Wipe the output directory on every publish build.
DELETE_OUTPUT_DIRECTORY = True
| 21.92 | 53 | 0.766423 |
from __future__ import unicode_literals
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.a-eyeresearch.nl/'
IMGURL = 'https://assets.diagnijmegen.nl'
RELATIVE_URLS = False
FEED_DOMAIN = SITEURL
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
DELETE_OUTPUT_DIRECTORY = True
| true | true |
1c3d3e9553c20ffc5112fa2d1e07d041135aefb1 | 1,520 | py | Python | courses/urls.py | samshultz2/e-learning-platform | 4233276c4debe3e36bcc7e0ff8c082ad4ce48082 | [
"BSD-2-Clause"
] | 1 | 2019-01-12T21:31:19.000Z | 2019-01-12T21:31:19.000Z | courses/urls.py | samshultz/e-learning-platform | 4233276c4debe3e36bcc7e0ff8c082ad4ce48082 | [
"BSD-2-Clause"
] | null | null | null | courses/urls.py | samshultz/e-learning-platform | 4233276c4debe3e36bcc7e0ff8c082ad4ce48082 | [
"BSD-2-Clause"
] | null | null | null | from django.conf.urls import url
from . import views
urlpatterns = [
    # Course management (instructor-facing CRUD).
    url(r'^mine/$', views.ManageCourseListView.as_view(),
        name='manage_course_list'),
    url(r'^create/$', views.CourseCreateView.as_view(), name='course_create'),
    url(r'^(?P<pk>\d+)/edit/$', views.CourseUpdateView.as_view(),
        name='course_edit'),
    url(r'^(?P<pk>\d+)/delete/$', views.CourseDeleteView.as_view(),
        name='course_delete'),
    url(r'^(?P<pk>\d+)/module/$', views.CourseModuleUpdateView.as_view(),
        name='course_module_update'),
    # Module content CRUD; `model_name` selects the content model class.
    url(r'^module/(?P<module_id>\d+)/content/(?P<model_name>\w+)/create/$',
        views.ContentCreateUpdateView.as_view(),
        name="module_content_create"),
    url(r'^module/(?P<module_id>\d+)/content/(?P<model_name>\w+)/(?P<id>\d+)/$',
        views.ContentCreateUpdateView.as_view(), name="module_content_update"),
    url(r'^content/(?P<id>\d+)/delete/$',
        views.ContentDeleteView.as_view(),
        name='module_content_delete'),
    url(r'^module/(?P<module_id>\d+)/$',
        views.ModuleContentListView.as_view(),
        name='module_content_list'),
    # Reordering endpoints (POST target for drag-and-drop ordering).
    url(r'^module/order/$',
        views.ModuleOrderView.as_view(), name="module_order"),
    url(r'^content/order/$',
        views.ContentOrderView.as_view(), name="content_order"),
    # Public catalog views; the catch-all slug pattern must stay last or it
    # would shadow the more specific routes above.
    url(r'^subject/(?P<subject>[\w-]+)/$',
        views.CourseListView.as_view(), name='course_list_subject'),
    url(r'^(?P<slug>[\w-]+)/$',
        views.CourseDetailView.as_view(), name='course_detail'),
]
from . import views
urlpatterns = [
url(r'^mine/$', views.ManageCourseListView.as_view(),
name='manage_course_list'),
url(r'^create/$', views.CourseCreateView.as_view(), name='course_create'),
url(r'^(?P<pk>\d+)/edit/$', views.CourseUpdateView.as_view(),
name='course_edit'),
url(r'^(?P<pk>\d+)/delete/$', views.CourseDeleteView.as_view(),
name='course_delete'),
url(r'^(?P<pk>\d+)/module/$', views.CourseModuleUpdateView.as_view(),
name='course_module_update'),
url(r'^module/(?P<module_id>\d+)/content/(?P<model_name>\w+)/create/$',
views.ContentCreateUpdateView.as_view(),
name="module_content_create"),
url(r'^module/(?P<module_id>\d+)/content/(?P<model_name>\w+)/(?P<id>\d+)/$',
views.ContentCreateUpdateView.as_view(), name="module_content_update"),
url(r'^content/(?P<id>\d+)/delete/$',
views.ContentDeleteView.as_view(),
name='module_content_delete'),
url(r'^module/(?P<module_id>\d+)/$',
views.ModuleContentListView.as_view(),
name='module_content_list'),
url(r'^module/order/$',
views.ModuleOrderView.as_view(), name="module_order"),
url(r'^content/order/$',
views.ContentOrderView.as_view(), name="content_order"),
url(r'^subject/(?P<subject>[\w-]+)/$',
views.CourseListView.as_view(), name='course_list_subject'),
url(r'^(?P<slug>[\w-]+)/$',
views.CourseDetailView.as_view(), name='course_detail'),
] | true | true |
1c3d3ed19a5cff1ba1fa08391d67e115dfb1dfa9 | 18,019 | py | Python | aws_topology/stackstate_checks/aws_topology/aws_topology.py | Mathieu-Dirkx/stackstate-agent-integrations | 76bb486f2560e1cdcd5aabb65ef8a19ff36297c1 | [
"BSD-3-Clause"
] | null | null | null | aws_topology/stackstate_checks/aws_topology/aws_topology.py | Mathieu-Dirkx/stackstate-agent-integrations | 76bb486f2560e1cdcd5aabb65ef8a19ff36297c1 | [
"BSD-3-Clause"
] | null | null | null | aws_topology/stackstate_checks/aws_topology/aws_topology.py | Mathieu-Dirkx/stackstate-agent-integrations | 76bb486f2560e1cdcd5aabb65ef8a19ff36297c1 | [
"BSD-3-Clause"
] | null | null | null | # (C) StackState 2021
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .cloudtrail import CloudtrailCollector
from .flowlogs import FlowLogCollector
import logging
import boto3
import time
import traceback
from botocore.exceptions import ClientError
from schematics import Model
from schematics.types import StringType, ListType, DictType, IntType
from botocore.config import Config
from stackstate_checks.base import AgentCheck, TopologyInstance
from .resources import ResourceRegistry, type_arn, RegisteredResourceCollector
from .utils import location_info, correct_tags, capitalize_keys, seconds_ago
from datetime import datetime, timedelta
import pytz
import concurrent.futures
import threading
DEFAULT_BOTO3_RETRIES_COUNT = 50
# Shared boto3 client config: retry (throttled) AWS API calls aggressively.
DEFAULT_BOTO3_CONFIG = Config(
    retries=dict(
        max_attempts=DEFAULT_BOTO3_RETRIES_COUNT,
    )
)
# NOTE(review): appears unused in this module -- candidate for removal.
DEFAULT_COLLECTION_INTERVAL = 60
class InitConfig(Model):
    """Schema for the check's init_config: AWS credentials, the external id
    required when assuming roles, and the minimum number of seconds between
    full topology scans."""
    aws_access_key_id = StringType(required=True)
    aws_secret_access_key = StringType(required=True)
    external_id = StringType(required=True)
    full_run_interval = IntType(default=3600)
class InstanceInfo(Model):
    """Schema for a single check instance: which role to assume, which
    regions/APIs to scan, and the S3 buckets holding CloudTrail/FlowLog data."""
    role_arn = StringType(required=True)
    regions = ListType(StringType)
    tags = ListType(StringType, default=[])
    arns = DictType(StringType, default={})
    # Optional "api" or "api|filter" entries restricting which collectors run.
    apis_to_run = ListType(StringType)
    log_bucket_name = StringType()
    flowlog_bucket_name = StringType()
class AwsTopologyCheck(AgentCheck):
    """Collects AWS Topology and sends them to STS."""
    INSTANCE_TYPE = "aws-v2"  # TODO should we add _topology?
    SERVICE_CHECK_CONNECT_NAME = "aws_topology.can_connect"
    SERVICE_CHECK_EXECUTE_NAME = "aws_topology.can_execute"
    SERVICE_CHECK_UPDATE_NAME = "aws_topology.can_update"
    INSTANCE_SCHEMA = InstanceInfo
    @staticmethod
    def get_account_id(instance_info):
        # Account id is the 5th colon-separated field of the role ARN
        # (arn:aws:iam::<account_id>:role/<name>).
        return instance_info.role_arn.split(":")[4]
    def get_instance_key(self, instance_info):
        """One topology instance per AWS account id."""
        return TopologyInstance(self.INSTANCE_TYPE, str(self.get_account_id(instance_info)))
    def must_run_full(self, interval):
        """Return True when a full topology scan is due: on the very first
        run, or when more than `interval` seconds passed since the last one."""
        self.log.info('Checking if full run is necessary')
        if not hasattr(self, 'last_full_topology'):
            # Create empty state
            self.log.info(' Result => YES (first run)')
            return True
        secs = seconds_ago(self.last_full_topology)
        self.log.info(' Result => {} (Last run was {} seconds ago, interval is set to {})'.format(
            "YES" if secs > interval else "NO",
            int(secs),
            interval)
        )
        return secs > interval
    def check(self, instance_info):
        """Main entry point: connect, optionally do a full scan, then apply
        CloudTrail/FlowLog based incremental updates."""
        try:
            init_config = InitConfig(self.init_config)
            init_config.validate()
            aws_client = AwsClient(init_config)
            self.service_check(self.SERVICE_CHECK_CONNECT_NAME, AgentCheck.OK, tags=instance_info.tags)
        except Exception as e:
            msg = "AWS connection failed: {}".format(e)
            self.log.error(msg)
            self.service_check(
                self.SERVICE_CHECK_CONNECT_NAME, AgentCheck.CRITICAL, message=msg, tags=instance_info.tags
            )
            # Without credentials nothing else can run.
            return
        self.delete_ids = []
        self.components_seen = set()
        if self.must_run_full(init_config.full_run_interval):
            try:
                self.log.info('Starting FULL topology scan')
                # Timestamp is taken *before* the scan so the next update
                # window overlaps rather than misses events.
                self.last_full_topology = datetime.utcnow().replace(tzinfo=pytz.utc)
                self.get_topology(instance_info, aws_client)
                self.log.info('Finished FULL topology scan (no exceptions)')
                self.service_check(self.SERVICE_CHECK_EXECUTE_NAME, AgentCheck.OK, tags=instance_info.tags)
            except Exception as e:
                msg = "AWS topology collection failed: {}".format(e)
                self.log.error(msg)
                self.service_check(
                    self.SERVICE_CHECK_EXECUTE_NAME, AgentCheck.WARNING, message=msg, tags=instance_info.tags
                )
        try:
            self.get_topology_update(instance_info, aws_client)
            self.get_flowlog_update(instance_info, aws_client)
            self.service_check(self.SERVICE_CHECK_UPDATE_NAME, AgentCheck.OK, tags=instance_info.tags)
        except Exception as e:
            msg = "AWS topology update failed: {}".format(e)
            self.log.error(msg)
            self.service_check(
                self.SERVICE_CHECK_UPDATE_NAME, AgentCheck.WARNING, message=msg, tags=instance_info.tags
            )
    def get_topology(self, instance_info, aws_client):
        """Gets AWS Topology returns them in Agent format."""
        self.start_snapshot()
        errors = []
        agent_proxy = AgentProxy(agent=self, role_name=instance_info.role_arn, log=self.log)
        futures = {}
        # Each (region, api) collector runs as its own task on the pool.
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            for region in instance_info.regions:
                session = aws_client.get_session(instance_info.role_arn, region)
                registry = ResourceRegistry.get_registry()["regional" if region != "global" else "global"]
                keys = (
                    [key for key in registry.keys()]
                    if instance_info.apis_to_run is None
                    else [api.split("|")[0] for api in instance_info.apis_to_run]
                )
                for api in keys:
                    if not registry.get(api):
                        continue
                    client = None
                    location = location_info(self.get_account_id(instance_info), session.region_name)
                    # Optional per-API filter, encoded as "api|filter".
                    filter = None
                    if instance_info.apis_to_run is not None:
                        for to_run in instance_info.apis_to_run:
                            if (api + "|") in to_run:
                                filter = to_run.split("|")[1]
                    if client is None:
                        client = session.client(api) if api != "noclient" else None
                    processor = registry[api](location.clone(), client, agent_proxy)
                    self.log.debug("Starting account {} API {} for region {}".format(
                        self.get_account_id(instance_info),
                        api,
                        session.region_name
                    ))
                    futures[executor.submit(processor.process_all, filter)] = {
                        "location": location.clone(),
                        "api": api,
                        "processor": processor,
                    }
                    # processor.process_all(filter=filter)
                    # self.delete_ids += processor.get_delete_ids()
            # Collect results; one failing API does not stop the others.
            for future in concurrent.futures.as_completed(futures):
                spec = futures[future]
                try:
                    future.result()
                    self.log.debug("Finished account {} API {} for region {}".format(
                        self.get_account_id(instance_info),
                        spec["api"],
                        spec["location"].Location.AwsRegion
                    ))
                except Exception as e:
                    event = {
                        "timestamp": int(time.time()),
                        "event_type": "aws_agent_check_error",
                        "msg_title": e.__class__.__name__ + " in api " + spec["api"],
                        "msg_text": str(e),
                        "tags": [
                            "aws_region:" + spec["location"].Location.AwsRegion,
                            "account_id:" + spec["location"].Location.AwsAccount,
                            "process:" + spec["api"],
                        ],
                    }
                    self.event(event)
                    errors.append("API %s ended with exception: %s %s" % (spec["api"], str(e), traceback.format_exc()))
        # TODO this should be for tests, in production these relations should not be sent out
        self.log.info('Finalize FULL scan (#components = {})'.format(len(agent_proxy.components_seen)))
        agent_proxy.finalize_account_topology()
        self.components_seen = agent_proxy.components_seen
        self.delete_ids += agent_proxy.delete_ids
        if len(errors) > 0:
            # An incomplete snapshot must not be closed, otherwise missing
            # components would be interpreted as deletions downstream.
            self.log.warning("Not sending 'stop_snapshot' because one or more APIs returned with exceptions")
            raise Exception("get_topology gave following exceptions: %s" % ", ".join(errors))
        self.stop_snapshot()
    def get_topology_update(self, instance_info, aws_client):
        """Apply incremental topology changes based on CloudTrail events
        recorded since the last full scan."""
        not_before = self.last_full_topology
        agent_proxy = AgentProxy(self, instance_info.role_arn, self.log)
        listen_for = ResourceRegistry.CLOUDTRAIL
        for region in instance_info.regions:
            session = aws_client.get_session(instance_info.role_arn, region)
            events_per_api = {}
            collector = CloudtrailCollector(
                bucket_name=instance_info.log_bucket_name,
                account_id=self.get_account_id(instance_info),
                session=session,
                agent=agent_proxy,
                log=self.log
            )
            # collect the events (ordering is most recent event first)
            for event in collector.get_messages(not_before):
                msgs = listen_for.get(event["eventSource"])
                if not msgs and event.get("apiVersion"):
                    # Some services register handlers per API version.
                    msgs = listen_for.get(event["apiVersion"] + "-" + event["eventSource"])
                if isinstance(msgs, dict):
                    event_name = event.get("eventName")
                    event_class = msgs.get(event_name)
                    if event_class:
                        if isinstance(event_class, bool):
                            agent_proxy.warning("should interpret: " + event["eventName"] + "-" + event["eventSource"])
                        elif issubclass(event_class, RegisteredResourceCollector):
                            # the new way of event handling
                            events = events_per_api.get(event_class.API)
                            if events:
                                events.append(event)
                            else:
                                events_per_api[event_class.API] = [event]
            # TODO if full snapshot ran just before we can use components_seen here too
            # operation type C=create D=delete U=update E=event
            # component seen: YES
            # C -> skip
            # U -> timing, do is safe
            # D -> timing!, skip will leave component in for hour
            # E -> do
            # component seen: NO
            # C -> try
            # U -> try
            # D -> skip
            # E -> timing, skip (!could have create before)
            location = location_info(self.get_account_id(instance_info), session.region_name)
            registry = ResourceRegistry.get_registry()["regional" if region != "global" else "global"]
            for api in events_per_api:
                client = session.client(api) if api != "noclient" else None
                resources_seen = set()
                processor = registry[api](location.clone(), client, agent_proxy)
                for event in events_per_api[api]:
                    processor.process_cloudtrail_event(event, resources_seen)
        self.delete_ids += agent_proxy.delete_ids
    def get_flowlog_update(self, instance_info, aws_client):
        """Read VPC FlowLog data from S3 and emit the derived topology;
        the window starts one hour before the last full scan."""
        not_before = self.last_full_topology - timedelta(seconds=60*60)
        agent_proxy = AgentProxy(self, instance_info.role_arn, self.log)
        for region in instance_info.regions:
            session = aws_client.get_session(instance_info.role_arn, region)
            location = location_info(self.get_account_id(instance_info), session.region_name)
            collector = FlowLogCollector(
                bucket_name=instance_info.flowlog_bucket_name,
                account_id=self.get_account_id(instance_info),
                session=session,
                location_info=location,
                agent=agent_proxy,
                log=self.log
            )
            collector.read_flow_log(not_before)
class AgentProxy(object):
    """Thread-safe facade between the resource collectors (which run on a
    thread pool) and the agent.

    Components are forwarded immediately.  A relation whose endpoints have
    not both been seen yet is "parked" and flushed as soon as the missing
    endpoint arrives, or at finalize time.
    """
    def __init__(self, agent, role_name, log):
        self.agent = agent
        # External ids of resources that should be deleted after the run.
        self.delete_ids = []
        # External ids of components sent so far (only ever grows).
        self.components_seen = set()
        # Relations waiting for one or both of their endpoints.
        self.parked_relations = []
        self.role_name = role_name
        # Deduplicated warnings: message -> occurrence count.
        self.warnings = {}
        # Guards parked_relations bookkeeping across collector threads.
        self.lock = threading.Lock()
        self.log = log
    def component(self, location, id, type, data, streams=None, checks=None):
        """Send a component and flush any parked relations it unblocks."""
        self.components_seen.add(id)
        data.update(location.to_primitive())
        self.agent.component(id, type, correct_tags(capitalize_keys(data)), streams, checks)
        relations_to_send = []
        with self.lock:
            # Walk backwards so in-place removal does not skip entries.
            for i in range(len(self.parked_relations) - 1, -1, -1):
                relation = self.parked_relations[i]
                if relation["source_id"] == id and relation["target_id"] in self.components_seen:
                    relations_to_send.append(relation)
                    self.parked_relations.remove(relation)
                if relation["target_id"] == id and relation["source_id"] in self.components_seen:
                    self.parked_relations.remove(relation)
                    relations_to_send.append(relation)
        # Send outside the lock to keep the critical section small.
        for relation in relations_to_send:
            self.agent.relation(
                relation["source_id"], relation["target_id"], relation["type"],
                relation["data"], relation['streams'], relation['checks']
            )
    def relation(self, source_id, target_id, type, data, streams=None, checks=None):
        """Send a relation now if both endpoints are known, else park it."""
        with self.lock:
            # Bug fix: decide and park under the lock.  Previously the
            # membership check and the append raced with the locked sweep in
            # component(), so a relation could stay parked until finalize.
            ready = source_id in self.components_seen and target_id in self.components_seen
            if not ready:
                self.parked_relations.append(
                    {"type": type, "source_id": source_id, "target_id": target_id,
                     "data": data, 'streams': streams, 'checks': checks}
                )
        if ready:
            self.agent.relation(source_id, target_id, type, data, streams, checks)
    def finalize_account_topology(self):
        """Flush all still-parked relations and report collected warnings."""
        for relation in self.parked_relations:
            # Bug fix: forward the stored streams/checks instead of silently
            # dropping them (they are kept in the parked dict and forwarded
            # by the component() flush path).
            self.agent.relation(
                relation["source_id"], relation["target_id"], relation["type"],
                relation["data"], relation['streams'], relation['checks']
            )
        for warning in self.warnings:
            self.agent.warning(warning + " was encountered {} time(s).".format(self.warnings[warning]))
    def event(self, event):
        """Pass an agent event straight through."""
        self.agent.event(event)
    def gauge(self, name, value, tags=None, hostname=None, device_name=None):
        """Pass a gauge metric through, with a debug trace."""
        self.agent.log.debug('gauge %s: %s %s', name, value, tags)
        self.agent.gauge(name, value, tags, hostname, device_name)
    def delete(self, id):
        """Mark an external id for deletion at the end of the check run."""
        self.delete_ids.append(id)
    def warning(self, error, **kwargs):
        # TODO make a list of max 5 of the resources impacted
        # Count occurrences instead of spamming the agent with duplicates.
        warning = self.warnings.get(error, 0) + 1
        self.warnings[error] = warning
    @staticmethod
    def create_arn(type, location, resource_id=""):
        """Build an ARN for a registered resource type; fall back to an
        explicit UNSUPPORTED marker for unknown types."""
        func = type_arn.get(type)
        if func:
            return func(
                region=location.Location.AwsRegion, account_id=location.Location.AwsAccount, resource_id=resource_id
            )
        return "UNSUPPORTED_ARN-" + type + "-" + resource_id
    def create_security_group_relations(self, resource_id, resource_data, security_group_field="SecurityGroups"):
        """Relate a resource to each security group listed on it."""
        if resource_data.get(security_group_field):
            for security_group_id in resource_data[security_group_field]:
                self.relation(resource_id, security_group_id, "uses-service", {})
class AwsClient:
    """Builds STS-authenticated boto3 sessions for the topology check.

    Credentials come either from the explicit key pair in ``config`` or,
    when absent, from boto3's default credential provider chain.
    """

    def __init__(self, config):
        self.log = logging.getLogger(__name__)
        self.log.setLevel(logging.INFO)
        self.external_id = config.external_id
        self.aws_access_key_id = config.aws_access_key_id
        self.aws_secret_access_key = config.aws_secret_access_key
        if self.aws_secret_access_key and self.aws_access_key_id:
            self.sts_client = boto3.client(
                "sts",
                config=DEFAULT_BOTO3_CONFIG,
                aws_access_key_id=self.aws_access_key_id,
                aws_secret_access_key=self.aws_secret_access_key,
            )
        else:
            # Rely on credential provider chain to find credentials
            try:
                self.sts_client = boto3.client("sts", config=DEFAULT_BOTO3_CONFIG)
            except Exception as e:
                raise Exception("No credentials found, the following exception was given: %s" % e)

    def get_session(self, role_arn, region):
        """Assume ``role_arn`` and return a boto3 Session bound to ``region``.

        Security gate: the role is first probed WITHOUT an external ID.  If
        that succeeds the role is considered misconfigured and an exception is
        raised (unless the undocumented unsafe override is configured).  Only
        an AccessDenied on the probe leads to the real assume-role call with
        the configured external ID.
        """
        try:
            # This should fail as it means it was able to successfully use the role without an external ID
            role = self.sts_client.assume_role(RoleArn=role_arn, RoleSessionName="sts-agent-id-test")
            # This override should not be (publicly) documented
            if self.external_id != "disable_external_id_this_is_unsafe":
                raise Exception(
                    "No external ID has been set for this role." + "For security reasons, please set the external ID."
                )
        except ClientError as error:
            if error.response["Error"]["Code"] == "AccessDenied":
                try:
                    role = self.sts_client.assume_role(
                        RoleArn=role_arn, RoleSessionName="sts-agent-check-%s" % region, ExternalId=self.external_id
                    )
                except Exception as error:
                    raise Exception("Unable to assume role %s. Error: %s" % (role_arn, error))
            else:
                raise error
        # "global" (region-less) services are queried through us-east-1.
        return boto3.Session(
            region_name=region if region != "global" else "us-east-1",
            aws_access_key_id=role["Credentials"]["AccessKeyId"],
            aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
            aws_session_token=role["Credentials"]["SessionToken"],
        )
| 45.387909 | 119 | 0.599145 |
from .cloudtrail import CloudtrailCollector
from .flowlogs import FlowLogCollector
import logging
import boto3
import time
import traceback
from botocore.exceptions import ClientError
from schematics import Model
from schematics.types import StringType, ListType, DictType, IntType
from botocore.config import Config
from stackstate_checks.base import AgentCheck, TopologyInstance
from .resources import ResourceRegistry, type_arn, RegisteredResourceCollector
from .utils import location_info, correct_tags, capitalize_keys, seconds_ago
from datetime import datetime, timedelta
import pytz
import concurrent.futures
import threading
# Retry configuration shared by every boto3 client this check creates.
DEFAULT_BOTO3_RETRIES_COUNT = 50
DEFAULT_BOTO3_CONFIG = Config(
    retries=dict(
        max_attempts=DEFAULT_BOTO3_RETRIES_COUNT,
    )
)
# Default check collection interval, in seconds.
DEFAULT_COLLECTION_INTERVAL = 60
class InitConfig(Model):
    """Schema for the check's shared ``init_config`` section."""

    # Access-key pair used to create the STS client (see AwsClient.__init__).
    aws_access_key_id = StringType(required=True)
    aws_secret_access_key = StringType(required=True)
    # External ID that assumed roles are expected to require (see AwsClient.get_session).
    external_id = StringType(required=True)
    # Seconds between FULL topology scans; incremental updates run in between.
    full_run_interval = IntType(default=3600)
class InstanceInfo(Model):
    """Schema for one configured check instance (one AWS account / role)."""

    # Role assumed in the target account; the account id is parsed from it.
    role_arn = StringType(required=True)
    # Regions to scan; the literal "global" selects region-less collectors.
    regions = ListType(StringType)
    tags = ListType(StringType, default=[])
    # arns: mapping of string -> string; exact purpose not evident from this
    # file — TODO confirm against the resource collectors.
    arns = DictType(StringType, default={})
    # Optional whitelist of APIs, entries formatted "api" or "api|filter"
    # (split on "|" in AwsTopologyCheck.get_topology).
    apis_to_run = ListType(StringType)
    # S3 bucket holding CloudTrail logs (consumed by CloudtrailCollector).
    log_bucket_name = StringType()
    # S3 bucket holding VPC flow logs (consumed by FlowLogCollector).
    flowlog_bucket_name = StringType()
class AwsTopologyCheck(AgentCheck):
    """Collects the AWS resource topology for one AWS account.

    A FULL snapshot scan runs every ``full_run_interval`` seconds; between
    full scans, incremental updates are derived from CloudTrail events and
    VPC flow logs.
    """

    INSTANCE_TYPE = "aws-v2"
    SERVICE_CHECK_CONNECT_NAME = "aws_topology.can_connect"
    SERVICE_CHECK_EXECUTE_NAME = "aws_topology.can_execute"
    SERVICE_CHECK_UPDATE_NAME = "aws_topology.can_update"
    INSTANCE_SCHEMA = InstanceInfo

    @staticmethod
    def get_account_id(instance_info):
        # The account id is the 5th colon-separated field of the role ARN.
        return instance_info.role_arn.split(":")[4]

    def get_instance_key(self, instance_info):
        return TopologyInstance(self.INSTANCE_TYPE, str(self.get_account_id(instance_info)))

    def must_run_full(self, interval):
        """Return True when a full scan is due (first run, or interval elapsed)."""
        self.log.info('Checking if full run is necessary')
        if not hasattr(self, 'last_full_topology'):
            self.log.info(' Result => YES (first run)')
            return True
        secs = seconds_ago(self.last_full_topology)
        self.log.info(' Result => {} (Last run was {} seconds ago, interval is set to {})'.format(
            "YES" if secs > interval else "NO",
            int(secs),
            interval)
        )
        return secs > interval

    def check(self, instance_info):
        """One check run: validate config, full scan if due, then incremental updates.

        A failing full scan is reported as a WARNING service check but does not
        stop the incremental update phase from running.
        """
        try:
            init_config = InitConfig(self.init_config)
            init_config.validate()
            aws_client = AwsClient(init_config)
            self.service_check(self.SERVICE_CHECK_CONNECT_NAME, AgentCheck.OK, tags=instance_info.tags)
        except Exception as e:
            msg = "AWS connection failed: {}".format(e)
            self.log.error(msg)
            self.service_check(
                self.SERVICE_CHECK_CONNECT_NAME, AgentCheck.CRITICAL, message=msg, tags=instance_info.tags
            )
            # Without a working client nothing below can run.
            return
        self.delete_ids = []
        self.components_seen = set()
        if self.must_run_full(init_config.full_run_interval):
            try:
                self.log.info('Starting FULL topology scan')
                # Timestamp is taken BEFORE the scan so the next incremental
                # update covers anything that happened during the scan.
                self.last_full_topology = datetime.utcnow().replace(tzinfo=pytz.utc)
                self.get_topology(instance_info, aws_client)
                self.log.info('Finished FULL topology scan (no exceptions)')
                self.service_check(self.SERVICE_CHECK_EXECUTE_NAME, AgentCheck.OK, tags=instance_info.tags)
            except Exception as e:
                msg = "AWS topology collection failed: {}".format(e)
                self.log.error(msg)
                self.service_check(
                    self.SERVICE_CHECK_EXECUTE_NAME, AgentCheck.WARNING, message=msg, tags=instance_info.tags
                )
        try:
            self.get_topology_update(instance_info, aws_client)
            self.get_flowlog_update(instance_info, aws_client)
            self.service_check(self.SERVICE_CHECK_UPDATE_NAME, AgentCheck.OK, tags=instance_info.tags)
        except Exception as e:
            msg = "AWS topology update failed: {}".format(e)
            self.log.error(msg)
            self.service_check(
                self.SERVICE_CHECK_UPDATE_NAME, AgentCheck.WARNING, message=msg, tags=instance_info.tags
            )

    def get_topology(self, instance_info, aws_client):
        """Run a FULL topology scan over all configured regions and APIs.

        Collectors are fanned out on a thread pool; any collector exception is
        recorded (and emitted as an agent event) and the snapshot is NOT
        stopped, so a partial scan is never committed as complete.
        """
        self.start_snapshot()
        errors = []
        agent_proxy = AgentProxy(agent=self, role_name=instance_info.role_arn, log=self.log)
        futures = {}
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            for region in instance_info.regions:
                session = aws_client.get_session(instance_info.role_arn, region)
                registry = ResourceRegistry.get_registry()["regional" if region != "global" else "global"]
                # apis_to_run entries are "api" or "api|filter".
                keys = (
                    [key for key in registry.keys()]
                    if instance_info.apis_to_run is None
                    else [api.split("|")[0] for api in instance_info.apis_to_run]
                )
                for api in keys:
                    if not registry.get(api):
                        continue
                    client = None
                    location = location_info(self.get_account_id(instance_info), session.region_name)
                    # NOTE: `filter` shadows the builtin; kept unchanged here.
                    filter = None
                    if instance_info.apis_to_run is not None:
                        for to_run in instance_info.apis_to_run:
                            if (api + "|") in to_run:
                                filter = to_run.split("|")[1]
                    if client is None:
                        client = session.client(api) if api != "noclient" else None
                    processor = registry[api](location.clone(), client, agent_proxy)
                    self.log.debug("Starting account {} API {} for region {}".format(
                        self.get_account_id(instance_info),
                        api,
                        session.region_name
                    ))
                    futures[executor.submit(processor.process_all, filter)] = {
                        "location": location.clone(),
                        "api": api,
                        "processor": processor,
                    }
            for future in concurrent.futures.as_completed(futures):
                spec = futures[future]
                try:
                    # Re-raises any exception the collector thread raised.
                    future.result()
                    self.log.debug("Finished account {} API {} for region {}".format(
                        self.get_account_id(instance_info),
                        spec["api"],
                        spec["location"].Location.AwsRegion
                    ))
                except Exception as e:
                    event = {
                        "timestamp": int(time.time()),
                        "event_type": "aws_agent_check_error",
                        "msg_title": e.__class__.__name__ + " in api " + spec["api"],
                        "msg_text": str(e),
                        "tags": [
                            "aws_region:" + spec["location"].Location.AwsRegion,
                            "account_id:" + spec["location"].Location.AwsAccount,
                            "process:" + spec["api"],
                        ],
                    }
                    self.event(event)
                    errors.append("API %s ended with exception: %s %s" % (spec["api"], str(e), traceback.format_exc()))
        self.log.info('Finalize FULL scan (#components = {})'.format(len(agent_proxy.components_seen)))
        agent_proxy.finalize_account_topology()
        self.components_seen = agent_proxy.components_seen
        self.delete_ids += agent_proxy.delete_ids
        if len(errors) > 0:
            self.log.warning("Not sending 'stop_snapshot' because one or more APIs returned with exceptions")
            raise Exception("get_topology gave following exceptions: %s" % ", ".join(errors))
        self.stop_snapshot()

    def get_topology_update(self, instance_info, aws_client):
        """Apply incremental changes derived from CloudTrail events since the last full scan."""
        not_before = self.last_full_topology
        agent_proxy = AgentProxy(self, instance_info.role_arn, self.log)
        # Mapping of event source (optionally prefixed "apiVersion-") to
        # {eventName: handler-class-or-bool}.
        listen_for = ResourceRegistry.CLOUDTRAIL
        for region in instance_info.regions:
            session = aws_client.get_session(instance_info.role_arn, region)
            events_per_api = {}
            collector = CloudtrailCollector(
                bucket_name=instance_info.log_bucket_name,
                account_id=self.get_account_id(instance_info),
                session=session,
                agent=agent_proxy,
                log=self.log
            )
            for event in collector.get_messages(not_before):
                msgs = listen_for.get(event["eventSource"])
                # Some sources are registered keyed as "apiVersion-eventSource".
                if not msgs and event.get("apiVersion"):
                    msgs = listen_for.get(event["apiVersion"] + "-" + event["eventSource"])
                if isinstance(msgs, dict):
                    event_name = event.get("eventName")
                    event_class = msgs.get(event_name)
                    if event_class:
                        if isinstance(event_class, bool):
                            # Known event that has no handler yet: surfaced as a warning.
                            agent_proxy.warning("should interpret: " + event["eventName"] + "-" + event["eventSource"])
                        elif issubclass(event_class, RegisteredResourceCollector):
                            # Group events by collector API so each client is built once.
                            events = events_per_api.get(event_class.API)
                            if events:
                                events.append(event)
                            else:
                                events_per_api[event_class.API] = [event]
            location = location_info(self.get_account_id(instance_info), session.region_name)
            registry = ResourceRegistry.get_registry()["regional" if region != "global" else "global"]
            for api in events_per_api:
                client = session.client(api) if api != "noclient" else None
                resources_seen = set()
                processor = registry[api](location.clone(), client, agent_proxy)
                for event in events_per_api[api]:
                    processor.process_cloudtrail_event(event, resources_seen)
        self.delete_ids += agent_proxy.delete_ids

    def get_flowlog_update(self, instance_info, aws_client):
        """Feed VPC flow logs to the agent, with one hour of overlap before the last full scan."""
        not_before = self.last_full_topology - timedelta(seconds=60*60)
        agent_proxy = AgentProxy(self, instance_info.role_arn, self.log)
        for region in instance_info.regions:
            session = aws_client.get_session(instance_info.role_arn, region)
            location = location_info(self.get_account_id(instance_info), session.region_name)
            collector = FlowLogCollector(
                bucket_name=instance_info.flowlog_bucket_name,
                account_id=self.get_account_id(instance_info),
                session=session,
                location_info=location,
                agent=agent_proxy,
                log=self.log
            )
            collector.read_flow_log(not_before)
class AgentProxy(object):
    """Buffers topology data between resource collectors and the agent.

    Collectors run concurrently (see ``AwsTopologyCheck.get_topology``), so all
    access to the shared bookkeeping (``components_seen`` / ``parked_relations``)
    is serialized through ``self.lock``.  A relation whose endpoints have not
    both been reported yet is "parked" and flushed as soon as the missing
    component arrives (or unconditionally in ``finalize_account_topology``).
    """

    def __init__(self, agent, role_name, log):
        self.agent = agent
        self.delete_ids = []           # ids scheduled for deletion by collectors
        self.components_seen = set()   # component ids already sent to the agent
        self.parked_relations = []     # relations waiting for both endpoints
        self.role_name = role_name
        self.warnings = {}             # warning message -> occurrence count
        self.lock = threading.Lock()
        self.log = log

    def component(self, location, id, type, data, streams=None, checks=None):
        """Send a component and flush any parked relations this id completes."""
        relations_to_send = []
        with self.lock:
            self.components_seen.add(id)
            # Walk backwards so deleting by index is safe while iterating.
            for i in range(len(self.parked_relations) - 1, -1, -1):
                relation = self.parked_relations[i]
                if (relation["source_id"] == id and relation["target_id"] in self.components_seen) or (
                    relation["target_id"] == id and relation["source_id"] in self.components_seen
                ):
                    # A single `del` (instead of two separate remove() branches)
                    # also fixes double-removal of self-relations where
                    # source_id == target_id == id.
                    relations_to_send.append(relation)
                    del self.parked_relations[i]
        data.update(location.to_primitive())
        self.agent.component(id, type, correct_tags(capitalize_keys(data)), streams, checks)
        for relation in relations_to_send:
            self.agent.relation(
                relation["source_id"], relation["target_id"], relation["type"],
                relation["data"], relation['streams'], relation['checks']
            )

    def relation(self, source_id, target_id, type, data, streams=None, checks=None):
        """Send a relation now, or park it until both endpoints were reported."""
        with self.lock:
            # The membership test and the append must happen under the lock:
            # the original appended without it, racing with component()'s
            # locked iteration over parked_relations.
            ready = source_id in self.components_seen and target_id in self.components_seen
            if not ready:
                self.parked_relations.append(
                    {"type": type, "source_id": source_id, "target_id": target_id,
                     "data": data, 'streams': streams, 'checks': checks}
                )
        if ready:
            self.agent.relation(source_id, target_id, type, data, streams, checks)

    def finalize_account_topology(self):
        """Flush everything still parked and report aggregated warnings."""
        for relation in self.parked_relations:
            # Forward streams/checks too; the original silently dropped them
            # here although component() forwards them when flushing.
            self.agent.relation(
                relation["source_id"], relation["target_id"], relation["type"],
                relation["data"], relation['streams'], relation['checks']
            )
        for warning in self.warnings:
            self.agent.warning(warning + " was encountered {} time(s).".format(self.warnings[warning]))

    def event(self, event):
        self.agent.event(event)

    def gauge(self, name, value, tags=None, hostname=None, device_name=None):
        self.agent.log.debug('gauge %s: %s %s', name, value, tags)
        self.agent.gauge(name, value, tags, hostname, device_name)

    def delete(self, id):
        self.delete_ids.append(id)

    def warning(self, error, **kwargs):
        # Deduplicate: count occurrences, emit one aggregated warning at finalize.
        self.warnings[error] = self.warnings.get(error, 0) + 1

    @staticmethod
    def create_arn(type, location, resource_id=""):
        """Build an ARN for `type`; fall back to a marker string for unknown types."""
        func = type_arn.get(type)
        if func:
            return func(
                region=location.Location.AwsRegion, account_id=location.Location.AwsAccount, resource_id=resource_id
            )
        return "UNSUPPORTED_ARN-" + type + "-" + resource_id

    def create_security_group_relations(self, resource_id, resource_data, security_group_field="SecurityGroups"):
        """Relate `resource_id` to every security group listed on the resource."""
        if resource_data.get(security_group_field):
            for security_group_id in resource_data[security_group_field]:
                self.relation(resource_id, security_group_id, "uses-service", {})
class AwsClient:
    """Builds STS-authenticated boto3 sessions for the topology check.

    Credentials come either from the explicit key pair in ``config`` or,
    when absent, from boto3's default credential provider chain.
    """

    def __init__(self, config):
        self.log = logging.getLogger(__name__)
        self.log.setLevel(logging.INFO)
        self.external_id = config.external_id
        self.aws_access_key_id = config.aws_access_key_id
        self.aws_secret_access_key = config.aws_secret_access_key
        if self.aws_secret_access_key and self.aws_access_key_id:
            self.sts_client = boto3.client(
                "sts",
                config=DEFAULT_BOTO3_CONFIG,
                aws_access_key_id=self.aws_access_key_id,
                aws_secret_access_key=self.aws_secret_access_key,
            )
        else:
            # No explicit keys: rely on boto3's credential provider chain.
            try:
                self.sts_client = boto3.client("sts", config=DEFAULT_BOTO3_CONFIG)
            except Exception as e:
                raise Exception("No credentials found, the following exception was given: %s" % e)

    def get_session(self, role_arn, region):
        """Assume ``role_arn`` and return a boto3 Session bound to ``region``.

        Security gate: the role is first probed WITHOUT an external ID.  If
        that succeeds the role is considered misconfigured and an exception is
        raised (unless the undocumented unsafe override is configured).  Only
        an AccessDenied on the probe leads to the real assume-role call with
        the configured external ID.
        """
        try:
            # Probe: this call is EXPECTED to fail for a well-configured role.
            role = self.sts_client.assume_role(RoleArn=role_arn, RoleSessionName="sts-agent-id-test")
            # Undocumented escape hatch for roles without an external ID.
            if self.external_id != "disable_external_id_this_is_unsafe":
                raise Exception(
                    "No external ID has been set for this role." + "For security reasons, please set the external ID."
                )
        except ClientError as error:
            if error.response["Error"]["Code"] == "AccessDenied":
                try:
                    role = self.sts_client.assume_role(
                        RoleArn=role_arn, RoleSessionName="sts-agent-check-%s" % region, ExternalId=self.external_id
                    )
                except Exception as error:
                    raise Exception("Unable to assume role %s. Error: %s" % (role_arn, error))
            else:
                raise error
        # "global" (region-less) services are queried through us-east-1.
        return boto3.Session(
            region_name=region if region != "global" else "us-east-1",
            aws_access_key_id=role["Credentials"]["AccessKeyId"],
            aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
            aws_session_token=role["Credentials"]["SessionToken"],
        )
| true | true |
1c3d3f7454dcabf5bdf008e318e30edb1446ad41 | 3,044 | py | Python | docs/source/conf.py | jonathanchukinas/fuzzytable | 3d574047c3a8b0c28ab6a00436526c92ca1ea6d2 | [
"MIT"
] | 1 | 2019-11-22T21:16:34.000Z | 2019-11-22T21:16:34.000Z | docs/source/conf.py | jonathanchukinas/fuzzytable | 3d574047c3a8b0c28ab6a00436526c92ca1ea6d2 | [
"MIT"
] | 3 | 2019-11-22T13:16:44.000Z | 2019-11-26T19:49:39.000Z | docs/source/conf.py | jonathanchukinas/fuzzytable | 3d574047c3a8b0c28ab6a00436526c92ca1ea6d2 | [
"MIT"
] | null | null | null | # Configuration file for the Sphinx documentation builder.
import os
import sys
# import sphinx_bootstrap_theme
# import sphinx_rtd_theme
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------
# Prepend the repository root so autodoc can import the fuzzytable package.
# sys.path.insert(0, os.path.abspath('.'))
project_path = os.path.abspath('../..')
print('project path:', project_path)
sys.path.insert(0, project_path)

# -- Project information -----------------------------------------------------
project = 'fuzzytable'
copyright = '2019, Jonathan Chukinas'
author = 'Jonathan Chukinas'
release = '0.19'  # The full version, including alpha/beta/rc tags

# -- General configuration ---------------------------------------------------
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
    'm2r',
    # 'sphinx_rtd_theme',
    'sphinx.ext.todo',
    'autodocsumm',  # pip install autodocsumm
]
autodoc_default_options = {
    'autosummary': True,
}
master_doc = 'index'
# https://github.com/readthedocs/readthedocs.org/issues/2569
# b/c RTD throws an error and the about page says this is the solution to it.

# Add any paths that contain templates here, relative to this directory.
templates_path = []  # ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------
# html_theme = 'sphinx_rtd_theme'
html_theme = 'alabaster'
html_theme_options = {
    'logo': 'logo.png',
    # 'logo': 'https://raw.githubusercontent.com/jonathanchukinas/fuzzytable/master/docs/source/images/logo.png',
    'github_user': 'jonathanchukinas',
    'github_repo': 'fuzzytable',
    'description': 'Read tables from messy spreadsheets'
}
# html_theme = 'bootstrap'
# html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# https://github.com/readthedocs/readthedocs.org/issues/2569
# b/c RTD throws an error and the about page says this is the solution to it.

# -- Extension configuration -------------------------------------------------
# Napoleon settings (Google-style docstrings)
napoleon_google_docstring = True
# napoleon_numpy_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True

# TODOs: render todo directives in the built docs.
todo_include_todos = True
# Sphinx configuration for the fuzzytable documentation build.
import os
import sys

# -- Path setup ---------------------------------------------------------------
# Prepend the repository root so autodoc can import the fuzzytable package.
project_path = os.path.abspath('../..')
print('project path:', project_path)
sys.path.insert(0, project_path)

# -- Project information ------------------------------------------------------
project = 'fuzzytable'
copyright = '2019, Jonathan Chukinas'
author = 'Jonathan Chukinas'
release = '0.19'  # The full version, including alpha/beta/rc tags

# -- General configuration ----------------------------------------------------
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
    'm2r',
    'sphinx.ext.todo',
    'autodocsumm',  # pip install autodocsumm
]
autodoc_default_options = {
    'autosummary': True,
}
master_doc = 'index'
templates_path = []
exclude_patterns = []

# -- Options for HTML output --------------------------------------------------
html_theme = 'alabaster'
html_theme_options = {
    'logo': 'logo.png',
    'github_user': 'jonathanchukinas',
    'github_repo': 'fuzzytable',
    'description': 'Read tables from messy spreadsheets'
}
# Custom static files (style sheets, images); copied after builtin statics.
html_static_path = ['_static']

# -- Napoleon settings (Google-style docstrings) ------------------------------
napoleon_google_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True

# Render todo directives in the built docs.
todo_include_todos = True
| true | true |
1c3d402900221c0be1ec6066b7fdbe58f2110ddd | 1,453 | py | Python | setup.py | KCGallagher/birdspy | 234911777db4b0a6402750516e8efd9f62748054 | [
"MIT"
] | null | null | null | setup.py | KCGallagher/birdspy | 234911777db4b0a6402750516e8efd9f62748054 | [
"MIT"
] | null | null | null | setup.py | KCGallagher/birdspy | 234911777db4b0a6402750516e8efd9f62748054 | [
"MIT"
] | null | null | null | #
# birdspy setuptools script
#
from setuptools import setup, find_packages
def get_version():
    """
    Get version number from the birdspy module.

    The easiest way would be to just `import birdspy`, but that may fail if
    the dependencies have not been installed yet. Instead, the version number
    lives in a simple version_info module that is imported here by temporarily
    adding the birdspy directory to the python path.
    """
    import os
    import sys

    sys.path.append(os.path.abspath('birdspy'))
    try:
        from version_info import VERSION as version
    finally:
        # Always restore sys.path, even when the import fails — the original
        # leaked the extra path entry on any import error.
        sys.path.pop()
    return version
def get_readme():
    """
    Load README.md text for use as description.

    Decoded explicitly as UTF-8: relying on the platform default encoding
    breaks on systems (e.g. Windows/cp1252) when the README contains
    non-ASCII characters.
    """
    with open('README.md', encoding='utf-8') as f:
        return f.read()
# Package metadata; executed when this setup.py is run (e.g. `pip install .`).
setup(
    # Package name
    name='birdspy',
    # Version (read from birdspy/version_info.py, see get_version above)
    version=get_version(),
    description='This is a image analysis tool for wildlife images',  # noqa
    # NOTE(review): long_description is Markdown; PyPI rendering would need
    # long_description_content_type='text/markdown' — confirm before publishing.
    long_description=get_readme(),
    license='BSD 3-Clause "New" or "Revised" License',
    # author='',
    # author_email='',
    maintainer='',
    maintainer_email='',
    url='https://github.com/KCGallagher/birdspy',
    # Packages to include
    packages=find_packages(include=('birdspy', 'birdspy.*')),
    include_package_data=True,
    # List of dependencies
    install_requires=[
        # Dependencies go here!
        'matplotlib',
        'numpy>=1.8',
        'pandas',
    ],
)
| 21.686567 | 79 | 0.651067 |
from setuptools import setup, find_packages
def get_version():
    """Return the birdspy version string from birdspy/version_info.py.

    version_info is imported (rather than `import birdspy`) so this works
    before the package's dependencies are installed; the directory is added
    to sys.path only for the duration of the import.
    """
    import os
    import sys

    sys.path.append(os.path.abspath('birdspy'))
    try:
        from version_info import VERSION as version
    finally:
        # Always restore sys.path, even when the import fails — the original
        # leaked the extra path entry on any import error.
        sys.path.pop()
    return version
def get_readme():
    """Return the contents of README.md for use as the long description.

    Decoded explicitly as UTF-8: relying on the platform default encoding
    breaks on systems (e.g. Windows/cp1252) when the README contains
    non-ASCII characters.
    """
    with open('README.md', encoding='utf-8') as f:
        return f.read()
# Package metadata; executed when this setup.py is run (e.g. `pip install .`).
setup(
    name='birdspy',
    # Version is read from birdspy/version_info.py (see get_version).
    version=get_version(),
    description='This is a image analysis tool for wildlife images',
    # NOTE(review): long_description is Markdown; PyPI rendering would need
    # long_description_content_type='text/markdown' — confirm before publishing.
    long_description=get_readme(),
    license='BSD 3-Clause "New" or "Revised" License',
    maintainer='',
    maintainer_email='',
    url='https://github.com/KCGallagher/birdspy',
    packages=find_packages(include=('birdspy', 'birdspy.*')),
    include_package_data=True,
    # Runtime dependencies.
    install_requires=[
        'matplotlib',
        'numpy>=1.8',
        'pandas',
    ],
)
| true | true |
1c3d409611ef05bfdf081d574a95c0800af6c5bc | 7,116 | py | Python | ucscentralsdk/mometa/mgmt/MgmtCfgExportPolicy.py | ragupta-git/ucscentralsdk | 2678008b5fb6b0fafafec388d0874147e95a1086 | [
"Apache-2.0"
] | null | null | null | ucscentralsdk/mometa/mgmt/MgmtCfgExportPolicy.py | ragupta-git/ucscentralsdk | 2678008b5fb6b0fafafec388d0874147e95a1086 | [
"Apache-2.0"
] | null | null | null | ucscentralsdk/mometa/mgmt/MgmtCfgExportPolicy.py | ragupta-git/ucscentralsdk | 2678008b5fb6b0fafafec388d0874147e95a1086 | [
"Apache-2.0"
] | null | null | null | """This module contains the general information for MgmtCfgExportPolicy ManagedObject."""
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class MgmtCfgExportPolicyConsts():
    """Valid string values for MgmtCfgExportPolicy properties (auto-generated)."""
    # admin_state
    ADMIN_STATE_DISABLE = "disable"
    ADMIN_STATE_ENABLE = "enable"
    # int_id
    INT_ID_NONE = "none"
    # policy_owner
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
    POLICY_OWNER_UNSPECIFIED = "unspecified"
    # proto (transfer protocol for the exported configuration)
    PROTO_FTP = "ftp"
    PROTO_HTTP = "http"
    PROTO_NFS_COPY = "nfs-copy"
    PROTO_NONE = "none"
    PROTO_SCP = "scp"
    PROTO_SFTP = "sftp"
    PROTO_TFTP = "tftp"
    # schedule
    SCHEDULE_1DAY = "1day"
    SCHEDULE_1WEEK = "1week"
    SCHEDULE_2WEEK = "2week"
    # schedule_status
    SCHEDULE_STATUS_NOT_FOUND = "not-found"
    SCHEDULE_STATUS_UNKNOWN = "unknown"
class MgmtCfgExportPolicy(ManagedObject):
    """Managed object for a UCS Central configuration-export (backup) policy.

    NOTE: auto-generated metadata class — do not hand-edit the meta tables.
    """
    consts = MgmtCfgExportPolicyConsts()
    naming_props = set([u'name'])

    # Object-level metadata: class id, rn pattern, access mask, privileges,
    # allowed parents/children and supported verbs.
    mo_meta = MoMeta("MgmtCfgExportPolicy", "mgmtCfgExportPolicy", "cfg-exp-policy-[name]", VersionMeta.Version101a, "InputOutput", 0x7fff, [], ["admin", "operations"], [u'orgDomainGroup', u'orgOrg', u'policyDeviceProfile'], [u'faultInst', u'mgmtBackupTrigger'], ["Add", "Get", "Remove", "Set"])

    # Per-property metadata: XML attribute name, type, min version, access,
    # mask bit, length bounds, validation regex, allowed values, ranges.
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["disable", "enable"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x4, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "host": MoPropertyMeta("host", "host", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""^$|^[a-zA-Z0-9][a-zA-Z0-9_.-]{0,63}$|^([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}$|^([0-9a-fA-F]{1,4}:){1,7}:$|^([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}$|^([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}$|^([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}$|^([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}$|^([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}$|^[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})$|^:((:[0-9a-fA-F]{1,4}){1,7}|:)$""", [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "last_backup": MoPropertyMeta("last_backup", "lastBackup", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "last_enabled_ts": MoPropertyMeta("last_enabled_ts", "lastEnabledTs", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "max_files": MoPropertyMeta("max_files", "maxFiles", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], ["1-1023"]),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101a, MoPropertyMeta.NAMING, 0x40, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["local", "pending-policy", "policy", "unspecified"], []),
        "proto": MoPropertyMeta("proto", "proto", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["ftp", "http", "nfs-copy", "none", "scp", "sftp", "tftp"], []),
        "pwd": MoPropertyMeta("pwd", "pwd", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x100, 0, 64, None, [], []),
        "remote_file": MoPropertyMeta("remote_file", "remoteFile", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x200, 1, 128, None, [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x400, 0, 256, None, [], []),
        "sched_name": MoPropertyMeta("sched_name", "schedName", "string", VersionMeta.Version131a, MoPropertyMeta.READ_WRITE, 0x800, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "schedule": MoPropertyMeta("schedule", "schedule", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x1000, None, None, None, ["1day", "1week", "2week"], []),
        "schedule_status": MoPropertyMeta("schedule_status", "scheduleStatus", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-found", "unknown"], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2000, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "user": MoPropertyMeta("user", "user", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x4000, 0, 510, None, [], []),
    }

    # XML attribute name -> python attribute name.
    prop_map = {
        "adminState": "admin_state",
        "childAction": "child_action",
        "descr": "descr",
        "dn": "dn",
        "host": "host",
        "intId": "int_id",
        "lastBackup": "last_backup",
        "lastEnabledTs": "last_enabled_ts",
        "maxFiles": "max_files",
        "name": "name",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "proto": "proto",
        "pwd": "pwd",
        "remoteFile": "remote_file",
        "rn": "rn",
        "schedName": "sched_name",
        "schedule": "schedule",
        "scheduleStatus": "schedule_status",
        "status": "status",
        "user": "user",
    }

    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """Create the policy under `parent_mo_or_dn` with naming property `name`."""
        self._dirty_mask = 0
        self.name = name
        self.admin_state = None
        self.child_action = None
        self.descr = None
        self.host = None
        self.int_id = None
        self.last_backup = None
        self.last_enabled_ts = None
        self.max_files = None
        self.policy_level = None
        self.policy_owner = None
        self.proto = None
        self.pwd = None
        self.remote_file = None
        self.sched_name = None
        self.schedule = None
        self.schedule_status = None
        self.status = None
        self.user = None

        ManagedObject.__init__(self, "MgmtCfgExportPolicy", parent_mo_or_dn, **kwargs)
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class MgmtCfgExportPolicyConsts():
    """Valid string values for MgmtCfgExportPolicy properties (auto-generated)."""
    # admin_state
    ADMIN_STATE_DISABLE = "disable"
    ADMIN_STATE_ENABLE = "enable"
    # int_id
    INT_ID_NONE = "none"
    # policy_owner
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
    POLICY_OWNER_UNSPECIFIED = "unspecified"
    # proto (transfer protocol for the exported configuration)
    PROTO_FTP = "ftp"
    PROTO_HTTP = "http"
    PROTO_NFS_COPY = "nfs-copy"
    PROTO_NONE = "none"
    PROTO_SCP = "scp"
    PROTO_SFTP = "sftp"
    PROTO_TFTP = "tftp"
    # schedule
    SCHEDULE_1DAY = "1day"
    SCHEDULE_1WEEK = "1week"
    SCHEDULE_2WEEK = "2week"
    # schedule_status
    SCHEDULE_STATUS_NOT_FOUND = "not-found"
    SCHEDULE_STATUS_UNKNOWN = "unknown"
class MgmtCfgExportPolicy(ManagedObject):
    """Managed object for the UCS Central class ``mgmtCfgExportPolicy``
    (a configuration-export/backup policy).

    SDK-generated: ``mo_meta`` carries the class-level metadata
    (rn pattern ``cfg-exp-policy-[name]``, allowed parents/children,
    supported verbs), ``prop_meta`` describes each property, and
    ``prop_map`` links wire names to Python attribute names.
    """
    consts = MgmtCfgExportPolicyConsts()
    naming_props = set([u'name'])

    mo_meta = MoMeta("MgmtCfgExportPolicy", "mgmtCfgExportPolicy", "cfg-exp-policy-[name]", VersionMeta.Version101a, "InputOutput", 0x7fff, [], ["admin", "operations"], [u'orgDomainGroup', u'orgOrg', u'policyDeviceProfile'], [u'faultInst', u'mgmtBackupTrigger'], ["Add", "Get", "Remove", "Set"])

    # Per-property metadata: (attr name, xml name, type, min version,
    # access, dirty-mask bit, min/max length, value pattern, enum
    # values, numeric ranges).
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["disable", "enable"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x4, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        # host: accepts empty, a hostname, or any textual IPv6 form.
        "host": MoPropertyMeta("host", "host", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""^$|^[a-zA-Z0-9][a-zA-Z0-9_.-]{0,63}$|^([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}$|^([0-9a-fA-F]{1,4}:){1,7}:$|^([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}$|^([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}$|^([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}$|^([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}$|^([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}$|^[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})$|^:((:[0-9a-fA-F]{1,4}){1,7}|:)$""", [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version101a, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "last_backup": MoPropertyMeta("last_backup", "lastBackup", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "last_enabled_ts": MoPropertyMeta("last_enabled_ts", "lastEnabledTs", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "max_files": MoPropertyMeta("max_files", "maxFiles", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], ["1-1023"]),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101a, MoPropertyMeta.NAMING, 0x40, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["local", "pending-policy", "policy", "unspecified"], []),
        "proto": MoPropertyMeta("proto", "proto", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["ftp", "http", "nfs-copy", "none", "scp", "sftp", "tftp"], []),
        "pwd": MoPropertyMeta("pwd", "pwd", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x100, 0, 64, None, [], []),
        "remote_file": MoPropertyMeta("remote_file", "remoteFile", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x200, 1, 128, None, [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101a, MoPropertyMeta.READ_ONLY, 0x400, 0, 256, None, [], []),
        "sched_name": MoPropertyMeta("sched_name", "schedName", "string", VersionMeta.Version131a, MoPropertyMeta.READ_WRITE, 0x800, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "schedule": MoPropertyMeta("schedule", "schedule", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x1000, None, None, None, ["1day", "1week", "2week"], []),
        "schedule_status": MoPropertyMeta("schedule_status", "scheduleStatus", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-found", "unknown"], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x2000, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "user": MoPropertyMeta("user", "user", "string", VersionMeta.Version101a, MoPropertyMeta.READ_WRITE, 0x4000, 0, 510, None, [], []),
    }

    # wire/XML property name -> Python attribute name
    prop_map = {
        "adminState": "admin_state",
        "childAction": "child_action",
        "descr": "descr",
        "dn": "dn",
        "host": "host",
        "intId": "int_id",
        "lastBackup": "last_backup",
        "lastEnabledTs": "last_enabled_ts",
        "maxFiles": "max_files",
        "name": "name",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "proto": "proto",
        "pwd": "pwd",
        "remoteFile": "remote_file",
        "rn": "rn",
        "schedName": "sched_name",
        "schedule": "schedule",
        "scheduleStatus": "schedule_status",
        "status": "status",
        "user": "user",
    }

    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """Create a MgmtCfgExportPolicy under *parent_mo_or_dn* with the
        naming property *name*; remaining property values may be passed
        as keyword arguments and are applied by ManagedObject.
        """
        self._dirty_mask = 0
        self.name = name
        # All non-naming properties start as None until set via kwargs
        # or populated from the server.
        self.admin_state = None
        self.child_action = None
        self.descr = None
        self.host = None
        self.int_id = None
        self.last_backup = None
        self.last_enabled_ts = None
        self.max_files = None
        self.policy_level = None
        self.policy_owner = None
        self.proto = None
        self.pwd = None
        self.remote_file = None
        self.sched_name = None
        self.schedule = None
        self.schedule_status = None
        self.status = None
        self.user = None
        ManagedObject.__init__(self, "MgmtCfgExportPolicy", parent_mo_or_dn, **kwargs)
| true | true |
1c3d40f630669f1651221ddfbbbc6c5daeb7b7e2 | 21,057 | py | Python | pysnmp/NETI-TRUNK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/NETI-TRUNK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/NETI-TRUNK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module NETI-TRUNK-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NETI-TRUNK-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:10:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
netiGeneric, = mibBuilder.importSymbols("NETI-COMMON-MIB", "netiGeneric")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, ModuleIdentity, ObjectIdentity, Counter64, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, NotificationType, MibIdentifier, Integer32, Unsigned32, Gauge32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "ModuleIdentity", "ObjectIdentity", "Counter64", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "NotificationType", "MibIdentifier", "Integer32", "Unsigned32", "Gauge32", "Counter32")
TimeStamp, DisplayString, RowPointer, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "DisplayString", "RowPointer", "TruthValue", "TextualConvention")
# Module identity and top-level OID registrations for NETI-TRUNK-MIB
# (enterprise arc 2928 = Net Insight AB).
netiTrunkMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2928, 2, 3))
netiTrunkMIB.setRevisions(('2014-03-14 08:00', '2013-08-29 16:00', '2013-01-24 15:00', '2009-08-26 15:00',))
if mibBuilder.loadTexts: netiTrunkMIB.setLastUpdated('201403140800Z')
if mibBuilder.loadTexts: netiTrunkMIB.setOrganization('Net Insight AB')
trunkObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1))
dppipGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1))
class FecMode(TextualConvention, Integer32):
    """Textual convention for a FEC mode: 1 = none, 2 = 1-D, 3 = 2-D."""
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
    namedValues = NamedValues(("fecNone", 1), ("fec1D", 2), ("fec2D", 3))
class DppipSupport(TextualConvention, OctetString):
    """Fixed two-octet value displayed as two decimal fields ('1d.1d')."""
    status = 'current'
    displayHint = '1d.1d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(2, 2)
    fixedLength = 2
# --- dppip group scalars ------------------------------------------------
dppipNumber = MibScalar((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipNumber.setStatus('current')
dppipLastChange = MibScalar((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 2), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipLastChange.setStatus('current')
# --- dppipTable: one row per dppip interface, indexed by dppipIndex.
# Columns mix read-write configuration (addressing, slots, FEC,
# thresholds) with read-only status and Counter64 statistics. ----------
dppipTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3), )
if mibBuilder.loadTexts: dppipTable.setStatus('current')
dppipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipEntry.setStatus('current')
dppipIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 1), Unsigned32())
if mibBuilder.loadTexts: dppipIndex.setStatus('current')
dppipName = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 2), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipName.setStatus('current')
# IP addressing of the interface and its peer (read-write).
dppipAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipAddress.setStatus('current')
dppipNetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipNetMask.setStatus('current')
dppipDefaultGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDefaultGateway.setStatus('current')
dppipPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipPeerAddress.setStatus('current')
# Slot/capacity configuration and gauges.
dppipTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 7), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxSlots.setStatus('current')
dppipTxUsedCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTxUsedCapacity.setStatus('current')
dppipRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxSlots.setStatus('current')
dppipRxUsedCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxUsedCapacity.setStatus('current')
dppipDelayVariation = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVariation.setStatus('current')
# Operational status (ifOperStatus-style enumeration) and failure text.
dppipOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3), ("unknown", 4), ("dormant", 5), ("notPresent", 6), ("lowerLayerDown", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipOperStatus.setStatus('current')
dppipFailure = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipFailure.setStatus('current')
# Frame statistics (Counter64, read-only).
dppipReceivedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipReceivedFrames.setStatus('current')
dppipMissingFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMissingFrames.setStatus('current')
dppipDeliveredFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDeliveredFrames.setStatus('current')
dppipDroppedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDroppedFrames.setStatus('current')
dppipDuplicateFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDuplicateFrames.setStatus('current')
dppipReorderedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipReorderedFrames.setStatus('current')
dppipLostFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipLostFrames.setStatus('current')
dppipRecoveredFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRecoveredFrames.setStatus('current')
dppipSentFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipSentFrames.setStatus('current')
# Delay-variation measurements (units not specified here).
dppipDelayVarPtp = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 23), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVarPtp.setStatus('current')
dppipDelayVar999 = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 24), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVar999.setStatus('current')
dppipAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipAdminStatus.setStatus('current')
# VLAN id; -1 presumably means "no VLAN tagging" — not confirmed here.
dppipVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipVlan.setStatus('current')
dppipDelayVar01 = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 27), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVar01.setStatus('current')
dppipPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 28), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipPrio.setStatus('current')
dppipPhysIf = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 29), RowPointer()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPhysIf.setStatus('current')
# Packet-level settings: MTU, TTL and DSCP code point.
dppipMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 30), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(64, 1500))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipMtu.setStatus('current')
dppipTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 31), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTtl.setStatus('current')
dppipDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 32), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDscp.setStatus('current')
dppipRxSlotsPerFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 33), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxSlotsPerFrame.setStatus('current')
dppipAvailTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 34), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipAvailTxSlots.setStatus('current')
dppipAvailRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 35), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipAvailRxSlots.setStatus('current')
dppipMinUsageRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 36), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipMinUsageRatio.setStatus('current')
dppipTxTranspSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 37), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTxTranspSlots.setStatus('current')
dppipRxTranspSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 38), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxTranspSlots.setStatus('current')
dppipNomDTDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 39), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipNomDTDelay.setStatus('current')
# FEC configuration (tx side is writable, rx side is reported).
dppipTxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 40), FecMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecMode.setStatus('current')
dppipTxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 41), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecRows.setStatus('current')
dppipTxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 42), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecCols.setStatus('current')
dppipRxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 43), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecMode.setStatus('current')
dppipRxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 44), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecRows.setStatus('current')
dppipRxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 45), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecCols.setStatus('current')
# Writing reset(1) clears the counters above.
dppipCntControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipCntControl.setStatus('current')
dppipSuppressAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 47), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipSuppressAlarms.setStatus('current')
dppipSigFailFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 48), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipSigFailFilter.setStatus('current')
dppipDegThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 49), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 8000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDegThreshold.setStatus('current')
dppipDegPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 50), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(2, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDegPeriod.setStatus('current')
dppipTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 51), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTolJitter.setStatus('current')
# --- dppipLimitsTable: read-only local capability limits per dppip
# interface (FEC bounds, slot bounds, jitter bounds, feature support).
# Shares the dppipIndex of dppipTable (AUGMENTS-style indexing). -------
dppipLimitsTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4), )
if mibBuilder.loadTexts: dppipLimitsTable.setStatus('current')
dppipLimitsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipLimitsEntry.setStatus('current')
dppipMaxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 1), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecMode.setStatus('current')
dppipMaxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecRows.setStatus('current')
dppipMinFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinFecRows.setStatus('current')
dppipMaxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecCols.setStatus('current')
dppipMinFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinFecCols.setStatus('current')
dppipMaxFecElems = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecElems.setStatus('current')
dppipMaxTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxTxSlots.setStatus('current')
dppipMinTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinTxSlots.setStatus('current')
dppipMaxTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxTolJitter.setStatus('current')
dppipMinTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 10), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinTolJitter.setStatus('current')
dppipDTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 11), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDTSupport.setStatus('current')
dppipTTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 12), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTTSupport.setStatus('current')
# --- dppipPeerLimitsTable: same capability limits as dppipLimitsTable,
# but as reported by/for the remote peer; read-only, same index. -------
dppipPeerLimitsTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5), )
if mibBuilder.loadTexts: dppipPeerLimitsTable.setStatus('current')
dppipPeerLimitsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipPeerLimitsEntry.setStatus('current')
dppipPeerMaxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 1), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecMode.setStatus('current')
dppipPeerMaxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecRows.setStatus('current')
dppipPeerMinFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinFecRows.setStatus('current')
dppipPeerMaxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecCols.setStatus('current')
dppipPeerMinFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinFecCols.setStatus('current')
dppipPeerMaxFecElems = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecElems.setStatus('current')
dppipPeerMaxRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxRxSlots.setStatus('current')
dppipPeerMinRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinRxSlots.setStatus('current')
dppipPeerDTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 9), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerDTSupport.setStatus('current')
dppipPeerTTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 10), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerTTSupport.setStatus('current')
# Register every symbol defined above with the MIB builder so other
# modules can import them; PYSNMP_MODULE_ID marks the module identity.
mibBuilder.exportSymbols("NETI-TRUNK-MIB", dppipDelayVar01=dppipDelayVar01, dppipDegPeriod=dppipDegPeriod, dppipTxUsedCapacity=dppipTxUsedCapacity, dppipTxFecRows=dppipTxFecRows, dppipLimitsEntry=dppipLimitsEntry, netiTrunkMIB=netiTrunkMIB, dppipPeerDTSupport=dppipPeerDTSupport, dppipPeerMaxFecRows=dppipPeerMaxFecRows, dppipDelayVarPtp=dppipDelayVarPtp, dppipPeerMaxFecElems=dppipPeerMaxFecElems, dppipPrio=dppipPrio, dppipLostFrames=dppipLostFrames, dppipMaxFecRows=dppipMaxFecRows, dppipPeerMinFecRows=dppipPeerMinFecRows, dppipDuplicateFrames=dppipDuplicateFrames, dppipEntry=dppipEntry, dppipPeerMinFecCols=dppipPeerMinFecCols, dppipName=dppipName, dppipMaxTxSlots=dppipMaxTxSlots, dppipDegThreshold=dppipDegThreshold, dppipDTSupport=dppipDTSupport, dppipPeerTTSupport=dppipPeerTTSupport, dppipVlan=dppipVlan, dppipCntControl=dppipCntControl, dppipMaxFecMode=dppipMaxFecMode, dppipPeerAddress=dppipPeerAddress, dppipFailure=dppipFailure, DppipSupport=DppipSupport, dppipMinFecCols=dppipMinFecCols, dppipDscp=dppipDscp, dppipRxFecCols=dppipRxFecCols, dppipPeerLimitsTable=dppipPeerLimitsTable, dppipRxTranspSlots=dppipRxTranspSlots, FecMode=FecMode, dppipNumber=dppipNumber, dppipPeerLimitsEntry=dppipPeerLimitsEntry, dppipIndex=dppipIndex, dppipDefaultGateway=dppipDefaultGateway, dppipRxSlotsPerFrame=dppipRxSlotsPerFrame, dppipRxUsedCapacity=dppipRxUsedCapacity, dppipAvailRxSlots=dppipAvailRxSlots, dppipReceivedFrames=dppipReceivedFrames, dppipPeerMaxFecMode=dppipPeerMaxFecMode, dppipRxFecMode=dppipRxFecMode, dppipMissingFrames=dppipMissingFrames, dppipDelayVariation=dppipDelayVariation, trunkObjects=trunkObjects, dppipMaxTolJitter=dppipMaxTolJitter, dppipRxSlots=dppipRxSlots, dppipPhysIf=dppipPhysIf, dppipMinTxSlots=dppipMinTxSlots, dppipTxFecCols=dppipTxFecCols, dppipTtl=dppipTtl, dppipMaxFecCols=dppipMaxFecCols, dppipNetMask=dppipNetMask, dppipTxTranspSlots=dppipTxTranspSlots, dppipMinTolJitter=dppipMinTolJitter, dppipSigFailFilter=dppipSigFailFilter, 
dppipPeerMaxRxSlots=dppipPeerMaxRxSlots, dppipPeerMinRxSlots=dppipPeerMinRxSlots, dppipTolJitter=dppipTolJitter, dppipLastChange=dppipLastChange, dppipTxSlots=dppipTxSlots, dppipAdminStatus=dppipAdminStatus, dppipAddress=dppipAddress, PYSNMP_MODULE_ID=netiTrunkMIB, dppipDeliveredFrames=dppipDeliveredFrames, dppipTTSupport=dppipTTSupport, dppipMtu=dppipMtu, dppipDroppedFrames=dppipDroppedFrames, dppipLimitsTable=dppipLimitsTable, dppipReorderedFrames=dppipReorderedFrames, dppipMinFecRows=dppipMinFecRows, dppipOperStatus=dppipOperStatus, dppipTable=dppipTable, dppipSentFrames=dppipSentFrames, dppipTxFecMode=dppipTxFecMode, dppipRecoveredFrames=dppipRecoveredFrames, dppipSuppressAlarms=dppipSuppressAlarms, dppipMinUsageRatio=dppipMinUsageRatio, dppipRxFecRows=dppipRxFecRows, dppipAvailTxSlots=dppipAvailTxSlots, dppipMaxFecElems=dppipMaxFecElems, dppipNomDTDelay=dppipNomDTDelay, dppipDelayVar999=dppipDelayVar999, dppipGroup=dppipGroup, dppipPeerMaxFecCols=dppipPeerMaxFecCols)
| 107.433673 | 2,961 | 0.745548 |
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
netiGeneric, = mibBuilder.importSymbols("NETI-COMMON-MIB", "netiGeneric")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, ModuleIdentity, ObjectIdentity, Counter64, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, NotificationType, MibIdentifier, Integer32, Unsigned32, Gauge32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "ModuleIdentity", "ObjectIdentity", "Counter64", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "NotificationType", "MibIdentifier", "Integer32", "Unsigned32", "Gauge32", "Counter32")
TimeStamp, DisplayString, RowPointer, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TimeStamp", "DisplayString", "RowPointer", "TruthValue", "TextualConvention")
netiTrunkMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2928, 2, 3))
netiTrunkMIB.setRevisions(('2014-03-14 08:00', '2013-08-29 16:00', '2013-01-24 15:00', '2009-08-26 15:00',))
if mibBuilder.loadTexts: netiTrunkMIB.setLastUpdated('201403140800Z')
if mibBuilder.loadTexts: netiTrunkMIB.setOrganization('Net Insight AB')
trunkObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1))
dppipGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1))
class FecMode(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("fecNone", 1), ("fec1D", 2), ("fec2D", 3))
class DppipSupport(TextualConvention, OctetString):
    """Textual convention: fixed two-octet support value, displayed as 'd.d'."""
    status = 'current'
    displayHint = '1d.1d'
    # Exactly two octets.
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(2, 2)
    fixedLength = 2
dppipNumber = MibScalar((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipNumber.setStatus('current')
dppipLastChange = MibScalar((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 2), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipLastChange.setStatus('current')
dppipTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3), )
if mibBuilder.loadTexts: dppipTable.setStatus('current')
dppipEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipEntry.setStatus('current')
dppipIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 1), Unsigned32())
if mibBuilder.loadTexts: dppipIndex.setStatus('current')
dppipName = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 2), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipName.setStatus('current')
dppipAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipAddress.setStatus('current')
dppipNetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipNetMask.setStatus('current')
dppipDefaultGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDefaultGateway.setStatus('current')
dppipPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipPeerAddress.setStatus('current')
dppipTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 7), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxSlots.setStatus('current')
dppipTxUsedCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTxUsedCapacity.setStatus('current')
dppipRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxSlots.setStatus('current')
dppipRxUsedCapacity = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxUsedCapacity.setStatus('current')
dppipDelayVariation = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVariation.setStatus('current')
dppipOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3), ("unknown", 4), ("dormant", 5), ("notPresent", 6), ("lowerLayerDown", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipOperStatus.setStatus('current')
dppipFailure = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 13), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipFailure.setStatus('current')
dppipReceivedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipReceivedFrames.setStatus('current')
dppipMissingFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMissingFrames.setStatus('current')
dppipDeliveredFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDeliveredFrames.setStatus('current')
dppipDroppedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDroppedFrames.setStatus('current')
dppipDuplicateFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDuplicateFrames.setStatus('current')
dppipReorderedFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipReorderedFrames.setStatus('current')
dppipLostFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipLostFrames.setStatus('current')
dppipRecoveredFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 21), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRecoveredFrames.setStatus('current')
dppipSentFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 22), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipSentFrames.setStatus('current')
dppipDelayVarPtp = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 23), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVarPtp.setStatus('current')
dppipDelayVar999 = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 24), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVar999.setStatus('current')
dppipAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipAdminStatus.setStatus('current')
dppipVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipVlan.setStatus('current')
dppipDelayVar01 = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 27), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDelayVar01.setStatus('current')
dppipPrio = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 28), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipPrio.setStatus('current')
dppipPhysIf = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 29), RowPointer()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPhysIf.setStatus('current')
dppipMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 30), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(64, 1500))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipMtu.setStatus('current')
dppipTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 31), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTtl.setStatus('current')
dppipDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 32), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDscp.setStatus('current')
dppipRxSlotsPerFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 33), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxSlotsPerFrame.setStatus('current')
dppipAvailTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 34), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipAvailTxSlots.setStatus('current')
dppipAvailRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 35), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipAvailRxSlots.setStatus('current')
dppipMinUsageRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 36), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipMinUsageRatio.setStatus('current')
dppipTxTranspSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 37), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTxTranspSlots.setStatus('current')
dppipRxTranspSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 38), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxTranspSlots.setStatus('current')
dppipNomDTDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 39), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipNomDTDelay.setStatus('current')
dppipTxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 40), FecMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecMode.setStatus('current')
dppipTxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 41), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecRows.setStatus('current')
dppipTxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 42), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTxFecCols.setStatus('current')
dppipRxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 43), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecMode.setStatus('current')
dppipRxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 44), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecRows.setStatus('current')
dppipRxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 45), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipRxFecCols.setStatus('current')
dppipCntControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 46), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipCntControl.setStatus('current')
dppipSuppressAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 47), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipSuppressAlarms.setStatus('current')
dppipSigFailFilter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 48), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipSigFailFilter.setStatus('current')
dppipDegThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 49), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 8000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDegThreshold.setStatus('current')
dppipDegPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 50), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(2, 10))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipDegPeriod.setStatus('current')
dppipTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 3, 1, 51), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dppipTolJitter.setStatus('current')
dppipLimitsTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4), )
if mibBuilder.loadTexts: dppipLimitsTable.setStatus('current')
dppipLimitsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipLimitsEntry.setStatus('current')
dppipMaxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 1), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecMode.setStatus('current')
dppipMaxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecRows.setStatus('current')
dppipMinFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinFecRows.setStatus('current')
dppipMaxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecCols.setStatus('current')
dppipMinFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinFecCols.setStatus('current')
dppipMaxFecElems = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxFecElems.setStatus('current')
dppipMaxTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxTxSlots.setStatus('current')
dppipMinTxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinTxSlots.setStatus('current')
dppipMaxTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMaxTolJitter.setStatus('current')
dppipMinTolJitter = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 10), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipMinTolJitter.setStatus('current')
dppipDTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 11), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipDTSupport.setStatus('current')
dppipTTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 4, 1, 12), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipTTSupport.setStatus('current')
dppipPeerLimitsTable = MibTable((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5), )
if mibBuilder.loadTexts: dppipPeerLimitsTable.setStatus('current')
dppipPeerLimitsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1), ).setIndexNames((0, "NETI-TRUNK-MIB", "dppipIndex"))
if mibBuilder.loadTexts: dppipPeerLimitsEntry.setStatus('current')
dppipPeerMaxFecMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 1), FecMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecMode.setStatus('current')
dppipPeerMaxFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecRows.setStatus('current')
dppipPeerMinFecRows = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinFecRows.setStatus('current')
dppipPeerMaxFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecCols.setStatus('current')
dppipPeerMinFecCols = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinFecCols.setStatus('current')
dppipPeerMaxFecElems = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxFecElems.setStatus('current')
dppipPeerMaxRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMaxRxSlots.setStatus('current')
dppipPeerMinRxSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerMinRxSlots.setStatus('current')
dppipPeerDTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 9), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerDTSupport.setStatus('current')
dppipPeerTTSupport = MibTableColumn((1, 3, 6, 1, 4, 1, 2928, 2, 3, 1, 1, 5, 1, 10), DppipSupport()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dppipPeerTTSupport.setStatus('current')
mibBuilder.exportSymbols("NETI-TRUNK-MIB", dppipDelayVar01=dppipDelayVar01, dppipDegPeriod=dppipDegPeriod, dppipTxUsedCapacity=dppipTxUsedCapacity, dppipTxFecRows=dppipTxFecRows, dppipLimitsEntry=dppipLimitsEntry, netiTrunkMIB=netiTrunkMIB, dppipPeerDTSupport=dppipPeerDTSupport, dppipPeerMaxFecRows=dppipPeerMaxFecRows, dppipDelayVarPtp=dppipDelayVarPtp, dppipPeerMaxFecElems=dppipPeerMaxFecElems, dppipPrio=dppipPrio, dppipLostFrames=dppipLostFrames, dppipMaxFecRows=dppipMaxFecRows, dppipPeerMinFecRows=dppipPeerMinFecRows, dppipDuplicateFrames=dppipDuplicateFrames, dppipEntry=dppipEntry, dppipPeerMinFecCols=dppipPeerMinFecCols, dppipName=dppipName, dppipMaxTxSlots=dppipMaxTxSlots, dppipDegThreshold=dppipDegThreshold, dppipDTSupport=dppipDTSupport, dppipPeerTTSupport=dppipPeerTTSupport, dppipVlan=dppipVlan, dppipCntControl=dppipCntControl, dppipMaxFecMode=dppipMaxFecMode, dppipPeerAddress=dppipPeerAddress, dppipFailure=dppipFailure, DppipSupport=DppipSupport, dppipMinFecCols=dppipMinFecCols, dppipDscp=dppipDscp, dppipRxFecCols=dppipRxFecCols, dppipPeerLimitsTable=dppipPeerLimitsTable, dppipRxTranspSlots=dppipRxTranspSlots, FecMode=FecMode, dppipNumber=dppipNumber, dppipPeerLimitsEntry=dppipPeerLimitsEntry, dppipIndex=dppipIndex, dppipDefaultGateway=dppipDefaultGateway, dppipRxSlotsPerFrame=dppipRxSlotsPerFrame, dppipRxUsedCapacity=dppipRxUsedCapacity, dppipAvailRxSlots=dppipAvailRxSlots, dppipReceivedFrames=dppipReceivedFrames, dppipPeerMaxFecMode=dppipPeerMaxFecMode, dppipRxFecMode=dppipRxFecMode, dppipMissingFrames=dppipMissingFrames, dppipDelayVariation=dppipDelayVariation, trunkObjects=trunkObjects, dppipMaxTolJitter=dppipMaxTolJitter, dppipRxSlots=dppipRxSlots, dppipPhysIf=dppipPhysIf, dppipMinTxSlots=dppipMinTxSlots, dppipTxFecCols=dppipTxFecCols, dppipTtl=dppipTtl, dppipMaxFecCols=dppipMaxFecCols, dppipNetMask=dppipNetMask, dppipTxTranspSlots=dppipTxTranspSlots, dppipMinTolJitter=dppipMinTolJitter, dppipSigFailFilter=dppipSigFailFilter, 
dppipPeerMaxRxSlots=dppipPeerMaxRxSlots, dppipPeerMinRxSlots=dppipPeerMinRxSlots, dppipTolJitter=dppipTolJitter, dppipLastChange=dppipLastChange, dppipTxSlots=dppipTxSlots, dppipAdminStatus=dppipAdminStatus, dppipAddress=dppipAddress, PYSNMP_MODULE_ID=netiTrunkMIB, dppipDeliveredFrames=dppipDeliveredFrames, dppipTTSupport=dppipTTSupport, dppipMtu=dppipMtu, dppipDroppedFrames=dppipDroppedFrames, dppipLimitsTable=dppipLimitsTable, dppipReorderedFrames=dppipReorderedFrames, dppipMinFecRows=dppipMinFecRows, dppipOperStatus=dppipOperStatus, dppipTable=dppipTable, dppipSentFrames=dppipSentFrames, dppipTxFecMode=dppipTxFecMode, dppipRecoveredFrames=dppipRecoveredFrames, dppipSuppressAlarms=dppipSuppressAlarms, dppipMinUsageRatio=dppipMinUsageRatio, dppipRxFecRows=dppipRxFecRows, dppipAvailTxSlots=dppipAvailTxSlots, dppipMaxFecElems=dppipMaxFecElems, dppipNomDTDelay=dppipNomDTDelay, dppipDelayVar999=dppipDelayVar999, dppipGroup=dppipGroup, dppipPeerMaxFecCols=dppipPeerMaxFecCols)
| true | true |
1c3d41252c8b5c17dcc82e53aed8ad40a8c54ef2 | 496 | py | Python | pydiscourse/exceptions.py | cwyark/pydiscourse | aeb763c42c94c660003e0f1d1b40302605de5cc6 | [
"MIT"
] | 72 | 2016-09-18T19:45:59.000Z | 2022-01-22T21:55:06.000Z | pydiscourse/exceptions.py | cwyark/pydiscourse | aeb763c42c94c660003e0f1d1b40302605de5cc6 | [
"MIT"
] | 37 | 2016-04-17T15:32:43.000Z | 2021-12-29T21:37:17.000Z | pydiscourse/exceptions.py | cwyark/pydiscourse | aeb763c42c94c660003e0f1d1b40302605de5cc6 | [
"MIT"
] | 44 | 2016-04-17T14:48:38.000Z | 2022-03-12T13:35:37.000Z | from requests.exceptions import HTTPError
class DiscourseError(HTTPError):
    """A generic error while attempting to communicate with Discourse."""
class DiscourseServerError(DiscourseError):
    """The Discourse server encountered an error while processing the request."""
class DiscourseClientError(DiscourseError):
    """An invalid request has been made."""
class DiscourseRateLimitedError(DiscourseError):
    """Request required more than the permissible number of retries."""
| 27.555556 | 82 | 0.768145 | from requests.exceptions import HTTPError
# Exception hierarchy for the pydiscourse client, rooted at DiscourseError.
# Defect fixed: these class statements had no bodies at all (a SyntaxError --
# the docstrings were stripped); docstring bodies are restored.
class DiscourseError(HTTPError):
    """A generic error while attempting to communicate with Discourse."""


class DiscourseServerError(DiscourseError):
    """The Discourse server encountered an error while processing the request."""


class DiscourseClientError(DiscourseError):
    """An invalid request has been made."""


class DiscourseRateLimitedError(DiscourseError):
    """Request required more than the permissible number of retries."""
| true | true |
1c3d425f712c8bf11e9204e4f22236f068c69ca7 | 3,024 | py | Python | libtoml/parser.py | prestontimmons/libtoml | be42d33fbc3dfd53304da06dc4efef4279f4847c | [
"MIT"
] | 1 | 2020-01-13T19:21:36.000Z | 2020-01-13T19:21:36.000Z | libtoml/parser.py | prestontimmons/libtoml | be42d33fbc3dfd53304da06dc4efef4279f4847c | [
"MIT"
] | null | null | null | libtoml/parser.py | prestontimmons/libtoml | be42d33fbc3dfd53304da06dc4efef4279f4847c | [
"MIT"
] | null | null | null | from datetime import datetime
from rply import LexerGenerator, ParserGenerator
ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
lg = LexerGenerator()
lg.ignore(r"\s+")
lg.ignore(r"\# .*")
lg.add("COLON", r":")
lg.add("LCURLY", r"\{")
lg.add("RCURLY", r"\}")
lg.add("LBRACKET", r"\[")
lg.add("RBRACKET", r"\]")
lg.add("COMMA", r",")
lg.add("EQUALS", r"=")
lg.add("BOOLEAN", r"true|false")
lg.add("DATETIME", r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z")
lg.add("FLOAT", r"-?\d+\.\d+")
lg.add("INTEGER", r"-?\d+")
lg.add("STRING", r'"(\\"|[^"])*"')
lg.add("KEY", r"[a-zA-Z_][a-zA-Z0-9_#\?\.]*")
lexer = lg.build()
pg = ParserGenerator([rule.name for rule in lg.rules], cache_id="libtoml")
# Top of the grammar: a document is a flat list of statements; a statement
# is either an expression (table / key-group header) or a key assignment.
@pg.production("main : statements")
def main(p):
    return p[0]
@pg.production("statements : statements statement")
def statements(p):
    return p[0] + [p[1]]
@pg.production("statements : statement")
def statements_single(p):
    return [p[0]]
@pg.production("statement : expr")
def statement_expr(p):
    return p[0]
@pg.production("statement : assign")
def statement_assign(p):
    return p[0]
# "key = value" becomes a (key, value) tuple.
@pg.production("assign : KEY EQUALS arg")
def assignment(p):
    return (p[0].getstr(), p[2])
# Comma-separated argument lists (trailing comma allowed) build Python lists.
@pg.production("args : arg")
@pg.production("args : arg COMMA")
def args_single(p):
    return [p[0]]
@pg.production("args : arg COMMA args")
def args(p):
    return [p[0]] + p[2]
# "key : value" pairs (trailing comma allowed) build one-entry dicts that
# dictkeys_many merges together.
@pg.production("dictkey : arg COLON arg")
@pg.production("dictkey : arg COLON arg COMMA")
def dictkey_single(p):
    return {p[0]: p[2]}
@pg.production("dictkeys : dictkey")
def dictkeys(p):
    return p[0]
@pg.production("dictkeys : dictkey dictkeys")
def dictkeys_many(p):
    d = p[0]
    d.update(p[1])
    return d
# {...} yields a dict; [...] yields a list.
@pg.production("arg : LCURLY dictkeys RCURLY")
def dict_arg(p):
    return p[1]
@pg.production("arg : LBRACKET args RBRACKET")
def list_arg(p):
    return p[1]
# Scalar values: strip the surrounding quotes from strings (escapes are NOT
# decoded here), parse datetimes against ISO8601_FORMAT, and coerce numbers.
@pg.production("arg : STRING")
def string_arg(p):
    return p[0].getstr()[1:-1]
@pg.production("arg : DATETIME")
def date_arg(p):
    return datetime.strptime(p[0].getstr(), ISO8601_FORMAT)
@pg.production("arg : FLOAT")
def float_arg(p):
    return float(p[0].getstr())
@pg.production("arg : INTEGER")
def integer(p):
    return int(p[0].getstr())
@pg.production("arg : BOOLEAN")
def boolean_arg(p):
    """Map a BOOLEAN token onto the corresponding Python bool.

    The lexer only emits "true"/"false" for this token, so the .get()
    fallthrough to None is unreachable in practice (mirrors the original
    implicit-None behavior).
    """
    return {"true": True, "false": False}.get(p[0].getstr())
# Section headers: [[key]] marks an array-of-tables entry, [key] a key group.
@pg.production("expr : LBRACKET LBRACKET KEY RBRACKET RBRACKET")
def table_expr(p):
    return ("table", p[2].getstr())
@pg.production("expr : LBRACKET KEY RBRACKET")
def key_expr(p):
    return ("keygroup", p[1].getstr())
@pg.error
def error_handler(token):
    """Translate rply parse errors into EmptyError / ValueError.

    The synthetic "$end" token means the token stream ran out, i.e. the
    document was empty; parse_toml catches that case and returns {}.
    """
    if token.value == "$end":
        raise EmptyError()
    raise ValueError(
        "Error on line %s. Ran into a %s where it wasn't expected."
        % (token.source_pos.lineno, token.gettokentype())
    )
class EmptyError(ValueError):
    """Raised by the error handler when the token stream is empty."""
    pass
# Build the LR parser table once at import time.
parser = pg.build()
def parse_toml(value):
    """Tokenize and parse a TOML document string.

    Returns the parser's statement list; an empty/blank document yields {}.
    """
    try:
        return parser.parse(lexer.lex(value))
    except EmptyError:
        return {}
| 22.4 | 75 | 0.632937 | from datetime import datetime
from rply import LexerGenerator, ParserGenerator
ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
lg = LexerGenerator()
lg.ignore(r"\s+")
lg.ignore(r"\# .*")
lg.add("COLON", r":")
lg.add("LCURLY", r"\{")
lg.add("RCURLY", r"\}")
lg.add("LBRACKET", r"\[")
lg.add("RBRACKET", r"\]")
lg.add("COMMA", r",")
lg.add("EQUALS", r"=")
lg.add("BOOLEAN", r"true|false")
lg.add("DATETIME", r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z")
lg.add("FLOAT", r"-?\d+\.\d+")
lg.add("INTEGER", r"-?\d+")
lg.add("STRING", r'"(\\"|[^"])*"')
lg.add("KEY", r"[a-zA-Z_][a-zA-Z0-9_#\?\.]*")
lexer = lg.build()
pg = ParserGenerator([rule.name for rule in lg.rules], cache_id="libtoml")
@pg.production("main : statements")
def main(p):
return p[0]
@pg.production("statements : statements statement")
def statements(p):
return p[0] + [p[1]]
@pg.production("statements : statement")
def statements_single(p):
return [p[0]]
@pg.production("statement : expr")
def statement_expr(p):
return p[0]
@pg.production("statement : assign")
def statement_assign(p):
return p[0]
@pg.production("assign : KEY EQUALS arg")
def assignment(p):
return (p[0].getstr(), p[2])
@pg.production("args : arg")
@pg.production("args : arg COMMA")
def args_single(p):
return [p[0]]
@pg.production("args : arg COMMA args")
def args(p):
return [p[0]] + p[2]
@pg.production("dictkey : arg COLON arg")
@pg.production("dictkey : arg COLON arg COMMA")
def dictkey_single(p):
return {p[0]: p[2]}
@pg.production("dictkeys : dictkey")
def dictkeys(p):
return p[0]
@pg.production("dictkeys : dictkey dictkeys")
def dictkeys_many(p):
d = p[0]
d.update(p[1])
return d
@pg.production("arg : LCURLY dictkeys RCURLY")
def dict_arg(p):
return p[1]
@pg.production("arg : LBRACKET args RBRACKET")
def list_arg(p):
return p[1]
@pg.production("arg : STRING")
def string_arg(p):
return p[0].getstr()[1:-1]
@pg.production("arg : DATETIME")
def date_arg(p):
return datetime.strptime(p[0].getstr(), ISO8601_FORMAT)
@pg.production("arg : FLOAT")
def float_arg(p):
return float(p[0].getstr())
@pg.production("arg : INTEGER")
def integer(p):
return int(p[0].getstr())
@pg.production("arg : BOOLEAN")
def boolean_arg(p):
val = p[0].getstr()
if val == "true":
return True
if val == "false":
return False
@pg.production("expr : LBRACKET LBRACKET KEY RBRACKET RBRACKET")
def table_expr(p):
return ("table", p[2].getstr())
@pg.production("expr : LBRACKET KEY RBRACKET")
def key_expr(p):
return ("keygroup", p[1].getstr())
@pg.error
def error_handler(token):
if token.value == "$end":
raise EmptyError()
msg = "Error on line %s. Ran into a %s where it wasn't expected."
raise ValueError(msg % (token.source_pos.lineno, token.gettokentype()))
class EmptyError(ValueError):
pass
parser = pg.build()
def parse_toml(value):
tokens = lexer.lex(value)
try:
return parser.parse(tokens)
except EmptyError:
return {}
| true | true |
1c3d428d3420761702555a2ebfed2b889fb2ffe0 | 464 | py | Python | ParametricRuns/meshConvertT.py | Tjcross31/OPTIMISATION-OF-BIPLANECONFIGURATIONS-FOR-SUPERSONICFLIGHT | 4deb31d8e4fb16bfed49a39c8ad39dbfd349b3ab | [
"MIT"
] | null | null | null | ParametricRuns/meshConvertT.py | Tjcross31/OPTIMISATION-OF-BIPLANECONFIGURATIONS-FOR-SUPERSONICFLIGHT | 4deb31d8e4fb16bfed49a39c8ad39dbfd349b3ab | [
"MIT"
] | null | null | null | ParametricRuns/meshConvertT.py | Tjcross31/OPTIMISATION-OF-BIPLANECONFIGURATIONS-FOR-SUPERSONICFLIGHT | 4deb31d8e4fb16bfed49a39c8ad39dbfd349b3ab | [
"MIT"
] | null | null | null | import os
from threading import Thread
def f(name):
if os.path.isdir(name) and name != "blank_foamcase":
oldDir = os.getcwd()
os.system('cd ~/OpenFOAM/tjc2017-7/run')
os.chdir(oldDir)
os.chdir(name)
os.system('gmshToFoam busemann.msh')
os.chdir(oldDir)
if __name__ == "__main__":
    # Launch one worker thread per entry in the current directory; f()
    # itself filters out non-directories and the blank template case.
    for entry in os.listdir():
        Thread(target=f, args=(entry,)).start()
from threading import Thread
def f(name):
if os.path.isdir(name) and name != "blank_foamcase":
oldDir = os.getcwd()
os.system('cd ~/OpenFOAM/tjc2017-7/run')
os.chdir(oldDir)
os.chdir(name)
os.system('gmshToFoam busemann.msh')
os.chdir(oldDir)
if __name__ == "__main__":
dirs = os.listdir()
for name in dirs:
t = Thread(target=f, args=(name,))
t.start() | true | true |
1c3d46c29767bbbf9d272c6f7b8a0ce9e0097bcd | 7,653 | py | Python | code/functions/ZGL.py | kaltenburger/homophily_monophily_NHB | ef7c297694885e1bf1e1e4cc75a6ba9bd3d83164 | [
"MIT"
] | 6 | 2018-03-20T21:00:55.000Z | 2021-03-28T02:27:24.000Z | code/functions/ZGL.py | kaltenburger/homophily_monophily_NHB | ef7c297694885e1bf1e1e4cc75a6ba9bd3d83164 | [
"MIT"
] | null | null | null | code/functions/ZGL.py | kaltenburger/homophily_monophily_NHB | ef7c297694885e1bf1e1e4cc75a6ba9bd3d83164 | [
"MIT"
] | 2 | 2019-06-10T06:43:47.000Z | 2022-02-28T01:56:49.000Z | from sklearn.metrics import confusion_matrix
## 4/24/2017
## about: ZGL function
#"Semi-Supervised Learning Using Gaussian Fields and Harmonic Functions". Xiaojin Zhu, Zoubin Ghahramani, John Lafferty. The Twentieth International Conference on Machine Learning (ICML-2003).
## note: helpful Matlab code that was adapted: http://pages.cs.wisc.edu/~jerryzhu/pub/harmonic_function.m
### assume: binary class set-up -- imbalanced classes -- allows k>2 classes; adjacency matrix -- assumed to be symmetric
def ZGL_finalized(feature_x, membership_y, num_unlabeled, num_iter, cv_setup=None, python_library='numpy'):
    """Semi-supervised label propagation (Zhu/Ghahramani/Lafferty, ICML 2003).

    For each unlabeled fraction in num_unlabeled, repeatedly splits nodes
    into labeled/unlabeled sets, propagates labels with the harmonic
    function of Eq. (5) in the ZGL paper, and records summary statistics.

    Parameters
    ----------
    feature_x : adjacency matrix (assumed symmetric); dense for
        python_library='numpy', sparse-convertible for 'scipy'.
    membership_y : per-node class labels; k >= 2 classes supported.
    num_unlabeled : sequence of test-set (unlabeled) fractions.
    num_iter : number of random splits per fraction.
    cv_setup : 'stratified' for class-stratified splits, else plain shuffle.
    python_library : 'numpy' (dense) or 'scipy' (sparse) linear algebra.

    Returns
    -------
    Six lists, each parallel to num_unlabeled:
    (mean_accuracy, se_accuracy, mean_micro_auc, se_micro_auc,
     mean_wt_auc, se_wt_auc).

    NOTE(review): relies on module-level imports not visible in this chunk
    (np, scipy, cross_validation, label_binarize, metrics, f1_score);
    written against the old sklearn ``cross_validation`` API -- confirm
    the environment pins a compatible sklearn version.
    """
    percent_initially_labelled = np.subtract(1, num_unlabeled)
    ### ZGL CODE
    # Per-fraction summary statistics.  NOTE(review): the "se_*" values are
    # np.std (standard deviation), not standard error, despite the names.
    mean_accuracy = []
    se_accuracy = []
    mean_micro_auc = []
    se_micro_auc = []
    mean_wt_auc = []
    se_wt_auc = []
    if python_library == 'numpy':
        W_unordered = np.array(feature_x)
    if python_library == 'scipy':
        W_unordered = np.copy(feature_x)
    n = len(membership_y)
    classes = np.sort(np.unique(membership_y))
    class_labels = np.array(range(len(classes)))
    # relabel membership class labels - for coding convenience
    # preserve ordering of original class labels -- but force to be in sequential order now
    membership_y_update = np.copy(membership_y)
    for j in range(len(classes)):
        membership_y_update[membership_y_update == classes[j]] = class_labels[j]
    for i in range(len(percent_initially_labelled)):
        print(num_unlabeled[i])
        # Fixed random_state=0 makes the splits reproducible across calls.
        if cv_setup=='stratified':
            k_fold = cross_validation.StratifiedShuffleSplit((membership_y_update), n_iter=num_iter,
                                                             test_size=num_unlabeled[i],
                                                             random_state=0)
        else:
            k_fold = cross_validation.ShuffleSplit(len(membership_y_update), n_iter=num_iter,
                                                   test_size=num_unlabeled[i],
                                                   random_state=0)
        accuracy = []
        micro_auc = []
        wt_auc = []
        for k, (train, test) in enumerate(k_fold):
            #if k == 0:
            #print train
            idx = np.concatenate((train, test)) # concatenate train + test = L + U
            ## create W (4) in ZGL paper
            # Reorder the adjacency matrix so labeled rows/cols come first.
            if python_library == 'numpy':
                W = np.reshape([W_unordered[row,col] for row in np.array(idx) for col in np.array(idx)],(n,n))
            if python_library == 'scipy':
                W_unordered = scipy.sparse.csc_matrix(feature_x)
                W = W_unordered[idx,:][:,idx]
            #fl: L*c label matrix from ZGL paper
            train_labels = np.array([np.array(membership_y_update)[id] for id in train]) # resort labels to be in same order as training data
            classes_train = np.sort(np.unique(train_labels))
            # Fraction of training nodes in the largest-index class; used
            # below as the decision threshold in the binary branch.
            accuracy_score_benchmark = np.mean(np.array(train_labels) == np.max(class_labels))
            # One-hot (binarized) label matrix for the labeled block.
            fl =np.array(np.matrix(label_binarize(train_labels,
                                                  list(classes_train) + [np.max(classes_train)+1]))[:,0:(np.max(classes_train)+1)])
            # record testing gender labels for comparing predictions -- ie ground-truth
            true_test_labels = np.array([np.array(membership_y_update)[id] for id in test])
            classes_true_test = np.sort(np.unique(true_test_labels))
            ground_truth =np.array(np.matrix(label_binarize(true_test_labels,
                                                            list(classes_true_test) + [np.max(classes_true_test)+1]))[:,0:(np.max(classes_true_test)+1)])
            l = len(train) # number of labeled points
            u = len(test) # number of unlabeled points
            ## compute Equation (5) in ZGL paper
            W_ll = W[0:l,0:l]
            W_lu = W[0:l,l:(l+u)]
            W_ul = W[l:(l+u),0:l]
            W_uu = W[l:(l+u),l:(l+u)]
            if python_library == 'numpy':
                # D is the diagonal degree matrix; harmonic solution is
                # f_u = (D_uu - W_uu)^{-1} W_ul f_l.
                D = np.diag(np.sum(W, axis=1))
                D_ll = D[0:l,0:l]
                D_lu = D[0:l,l:(l+u)]
                D_ul = D[l:(l+u),0:l]
                D_uu = D[l:(l+u),l:(l+u)]
                harmonic_fxn = np.dot(np.dot(np.linalg.inv(np.matrix(np.subtract(D_uu, W_uu))),np.matrix(W_ul)), np.matrix(fl))
            if python_library == 'scipy':
                # Same computation in sparse form; D is built as a diagonal
                # sparse matrix from the row sums of W.
                D_tmp = scipy.sparse.csc_matrix.sum(W,1)
                udiag = np.zeros(n)
                ldiag = np.zeros(n)
                diag = np.array(D_tmp.T)[0]
                D = scipy.sparse.csc_matrix(scipy.sparse.dia_matrix(([diag, udiag, ldiag], [0, 2, -2]), shape=(n, n)))
                D_ll = D[0:l,0:l]
                D_lu = D[0:l,l:(l+u)]
                D_ul = D[l:(l+u),0:l]
                D_uu = D[l:(l+u),l:(l+u)]
                harmonic_fxn = scipy.sparse.csc_matrix.dot(scipy.sparse.csc_matrix.dot(scipy.sparse.linalg.inv(scipy.sparse.csc_matrix(scipy.subtract(D_uu,
                                                                                                                                                      W_uu))), W_ul),
                                                           fl)
            harmonic_fxn_final = np.copy(harmonic_fxn)
            # Multiclass (k > 2): score the full one-hot prediction matrix.
            if len(np.unique(membership_y))>2:
                micro_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
                                                       harmonic_fxn_final,average='micro'))
                wt_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
                                                    harmonic_fxn_final,average='weighted'))
                accuracy.append(metrics.accuracy_score(label_binarize(membership_y[test],np.unique(membership_y)),
                                                       harmonic_fxn_final))
            else:
                # Binary: AUC on the score margin (class-1 minus class-0).
                micro_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
                                                       harmonic_fxn_final[:,1]-harmonic_fxn_final[:,0],average='micro'))
                wt_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
                                                    harmonic_fxn_final[:,1]-harmonic_fxn_final[:,0],average='weighted'))
                y_true = label_binarize(membership_y[test],np.unique(membership_y))
                # Threshold at the training majority-class rate, not 0.5.
                y_pred = ((harmonic_fxn_final[:,1]) >accuracy_score_benchmark)+0
                tn, fp, fn, tp = confusion_matrix(label_binarize(membership_y[test],np.unique(membership_y)),
                                                  ((harmonic_fxn_final[:,1]) >accuracy_score_benchmark)+0).ravel()
                #accuracy.append((tn/(fp+tn)*0.5 + tp/(tp+fn))*0.5)
                # NOTE(review): despite its name, "accuracy" stores macro-F1
                # in the binary case (tn/fp/fn/tp above are unused).
                accuracy.append(f1_score(y_true, y_pred, average='macro'))#, pos_label=1) )
            # accuracy.append(metrics.accuracy_score(label_binarize(membership_y[test],np.unique(membership_y)),
            #                                        (harmonic_fxn_final[:,1] > accuracy_score_benchmark)+0))
        mean_accuracy.append(np.mean(accuracy)) #placeholder
        se_accuracy.append(np.std(accuracy)) #placeholder
        mean_micro_auc.append(np.mean(micro_auc))
        se_micro_auc.append(np.std(micro_auc))
        mean_wt_auc.append(np.mean(wt_auc))
        se_wt_auc.append(np.std(wt_auc))
    return(mean_accuracy, se_accuracy, mean_micro_auc, se_micro_auc, mean_wt_auc,se_wt_auc)
| 51.709459 | 193 | 0.556775 | from sklearn.metrics import confusion_matrix
)
if python_library == 'scipy':
W_unordered = np.copy(feature_x)
n = len(membership_y)
classes = np.sort(np.unique(membership_y))
class_labels = np.array(range(len(classes)))
membership_y_update = np.copy(membership_y)
for j in range(len(classes)):
membership_y_update[membership_y_update == classes[j]] = class_labels[j]
for i in range(len(percent_initially_labelled)):
print(num_unlabeled[i])
if cv_setup=='stratified':
k_fold = cross_validation.StratifiedShuffleSplit((membership_y_update), n_iter=num_iter,
test_size=num_unlabeled[i],
random_state=0)
else:
k_fold = cross_validation.ShuffleSplit(len(membership_y_update), n_iter=num_iter,
test_size=num_unlabeled[i],
random_state=0)
accuracy = []
micro_auc = []
wt_auc = []
for k, (train, test) in enumerate(k_fold):
idx = np.concatenate((train, test))
ary == 'numpy':
W = np.reshape([W_unordered[row,col] for row in np.array(idx) for col in np.array(idx)],(n,n))
if python_library == 'scipy':
W_unordered = scipy.sparse.csc_matrix(feature_x)
W = W_unordered[idx,:][:,idx]
train_labels = np.array([np.array(membership_y_update)[id] for id in train])
classes_train = np.sort(np.unique(train_labels))
accuracy_score_benchmark = np.mean(np.array(train_labels) == np.max(class_labels))
fl =np.array(np.matrix(label_binarize(train_labels,
list(classes_train) + [np.max(classes_train)+1]))[:,0:(np.max(classes_train)+1)])
true_test_labels = np.array([np.array(membership_y_update)[id] for id in test])
classes_true_test = np.sort(np.unique(true_test_labels))
ground_truth =np.array(np.matrix(label_binarize(true_test_labels,
list(classes_true_test) + [np.max(classes_true_test)+1]))[:,0:(np.max(classes_true_test)+1)])
l = len(train)
u = len(test)
W_lu = W[0:l,l:(l+u)]
W_ul = W[l:(l+u),0:l]
W_uu = W[l:(l+u),l:(l+u)]
if python_library == 'numpy':
D = np.diag(np.sum(W, axis=1))
D_ll = D[0:l,0:l]
D_lu = D[0:l,l:(l+u)]
D_ul = D[l:(l+u),0:l]
D_uu = D[l:(l+u),l:(l+u)]
harmonic_fxn = np.dot(np.dot(np.linalg.inv(np.matrix(np.subtract(D_uu, W_uu))),np.matrix(W_ul)), np.matrix(fl))
if python_library == 'scipy':
D_tmp = scipy.sparse.csc_matrix.sum(W,1)
udiag = np.zeros(n)
ldiag = np.zeros(n)
diag = np.array(D_tmp.T)[0]
D = scipy.sparse.csc_matrix(scipy.sparse.dia_matrix(([diag, udiag, ldiag], [0, 2, -2]), shape=(n, n)))
D_ll = D[0:l,0:l]
D_lu = D[0:l,l:(l+u)]
D_ul = D[l:(l+u),0:l]
D_uu = D[l:(l+u),l:(l+u)]
harmonic_fxn = scipy.sparse.csc_matrix.dot(scipy.sparse.csc_matrix.dot(scipy.sparse.linalg.inv(scipy.sparse.csc_matrix(scipy.subtract(D_uu,
W_uu))), W_ul),
fl)
harmonic_fxn_final = np.copy(harmonic_fxn)
if len(np.unique(membership_y))>2:
micro_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
harmonic_fxn_final,average='micro'))
wt_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
harmonic_fxn_final,average='weighted'))
accuracy.append(metrics.accuracy_score(label_binarize(membership_y[test],np.unique(membership_y)),
harmonic_fxn_final))
else:
micro_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
harmonic_fxn_final[:,1]-harmonic_fxn_final[:,0],average='micro'))
wt_auc.append(metrics.roc_auc_score(label_binarize(membership_y[test],np.unique(membership_y)),
harmonic_fxn_final[:,1]-harmonic_fxn_final[:,0],average='weighted'))
y_true = label_binarize(membership_y[test],np.unique(membership_y))
y_pred = ((harmonic_fxn_final[:,1]) >accuracy_score_benchmark)+0
tn, fp, fn, tp = confusion_matrix(label_binarize(membership_y[test],np.unique(membership_y)),
((harmonic_fxn_final[:,1]) >accuracy_score_benchmark)+0).ravel()
accuracy.append(f1_score(y_true, y_pred, average='macro'))
mean_accuracy.append(np.mean(accuracy))
se_accuracy.append(np.std(accuracy))
mean_micro_auc.append(np.mean(micro_auc))
se_micro_auc.append(np.std(micro_auc))
mean_wt_auc.append(np.mean(wt_auc))
se_wt_auc.append(np.std(wt_auc))
return(mean_accuracy, se_accuracy, mean_micro_auc, se_micro_auc, mean_wt_auc,se_wt_auc)
| true | true |
1c3d47593aeb17fbc895e22ffbf5ac9d48ac35e6 | 56,311 | py | Python | virtual/lib/python3.6/site-packages/google/protobuf/internal/text_format_test.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
] | null | null | null | virtual/lib/python3.6/site-packages/google/protobuf/internal/text_format_test.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
] | 11 | 2020-06-05T20:57:31.000Z | 2021-09-22T18:35:03.000Z | virtual/lib/python3.6/site-packages/google/protobuf/internal/text_format_test.py | marknesh/instagram-app | 514ec6e59ad127857234245b05130431fa3262cc | [
"MIT"
] | null | null | null | #! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.text_format."""
__author__ = 'kenton@google.com (Kenton Varda)'
import re
import six
import string
try:
import unittest2 as unittest # PY26, pylint: disable=g-import-not-at-top
except ImportError:
import unittest # pylint: disable=g-import-not-at-top
from google.protobuf.internal import _parameterized
from google.protobuf import any_test_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import test_util
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf import descriptor_pool
from google.protobuf import text_format
# Low-level nuts-n-bolts tests.
class SimpleTextFormatTests(unittest.TestCase):
  """Sanity checks on low-level text_format internals."""

  def testQuoteMarksAreSingleChars(self):
    # Members of _QUOTES are substituted into a regexp template that
    # expects single characters, so a multi-character "quote mark" would
    # be an error (besides being non-sensical in the first place).
    for mark in text_format._QUOTES:
      self.assertEqual(1, len(mark))
# Base class with some common functionality.
class TextFormatBase(unittest.TestCase):
  """Shared helpers for comparing serialized messages with golden files."""

  def ReadGolden(self, golden_filename):
    """Returns the named golden file's contents as a list of text lines."""
    with test_util.GoldenFile(golden_filename) as f:
      if str is bytes:  # PY2: file already yields text lines.
        return f.readlines()
      return [line.decode('utf-8') for line in f]

  def CompareToGoldenFile(self, text, golden_filename):
    """Asserts that text matches the contents of the golden file."""
    self.assertMultiLineEqual(text, ''.join(self.ReadGolden(golden_filename)))

  def CompareToGoldenText(self, text, golden_text):
    """Asserts that text matches the given golden string."""
    self.assertEqual(text, golden_text)

  def RemoveRedundantZeros(self, text):
    """Normalizes platform-dependent float formatting in text.

    Some platforms print 1e+5 as 1e+005; strip the padding zeros so the
    output matches the golden files.  Two passes remove up to two
    redundant zeros per exponent.
    """
    for _ in range(2):
      text = text.replace('e+0', 'e+').replace('e-0', 'e-')
    # Floating point fields are printed with a .0 suffix even when the
    # value is integral; drop it to match the golden data.
    return re.sub(r'\.0$', '', text, flags=re.MULTILINE)
@_parameterized.Parameters((unittest_pb2), (unittest_proto3_arena_pb2))
class TextFormatTest(TextFormatBase):
  """Tests run against both the proto2 and proto3 test schemas.

  Each test method receives message_module (unittest_pb2 or
  unittest_proto3_arena_pb2) from the _parameterized decorator.
  """

  def testPrintExotic(self, message_module):
    """Extreme ints, doubles and escaped strings print as expected."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'repeated_int64: -9223372036854775808\n'
        'repeated_uint64: 18446744073709551615\n'
        'repeated_double: 123.456\n'
        'repeated_double: 1.23e+22\n'
        'repeated_double: 1.23e-18\n'
        'repeated_string:'
        ' "\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
        'repeated_string: "\\303\\274\\352\\234\\237"\n')

  def testPrintExoticUnicodeSubclass(self, message_module):
    """A unicode subclass value is escaped exactly like plain text."""

    class UnicodeSub(six.text_type):
      pass

    message = message_module.TestAllTypes()
    message.repeated_string.append(UnicodeSub('\u00fc\ua71f'))
    self.CompareToGoldenText(
        text_format.MessageToString(message),
        'repeated_string: "\\303\\274\\352\\234\\237"\n')

  def testPrintNestedMessageAsOneLine(self, message_module):
    """as_one_line=True collapses a nested message onto one line."""
    message = message_module.TestAllTypes()
    msg = message.repeated_nested_message.add()
    msg.bb = 42
    self.CompareToGoldenText(
        text_format.MessageToString(message, as_one_line=True),
        'repeated_nested_message { bb: 42 }')

  def testPrintRepeatedFieldsAsOneLine(self, message_module):
    """Repeated scalar fields print space-separated on one line."""
    message = message_module.TestAllTypes()
    message.repeated_int32.append(1)
    message.repeated_int32.append(1)
    message.repeated_int32.append(3)
    message.repeated_string.append('Google')
    message.repeated_string.append('Zurich')
    self.CompareToGoldenText(
        text_format.MessageToString(message, as_one_line=True),
        'repeated_int32: 1 repeated_int32: 1 repeated_int32: 3 '
        'repeated_string: "Google" repeated_string: "Zurich"')

  def testPrintNestedNewLineInStringAsOneLine(self, message_module):
    """Embedded newlines are escaped so one-line output stays one line."""
    message = message_module.TestAllTypes()
    message.optional_string = 'a\nnew\nline'
    self.CompareToGoldenText(
        text_format.MessageToString(message, as_one_line=True),
        'optional_string: "a\\nnew\\nline"')

  def testPrintExoticAsOneLine(self, message_module):
    """Exotic values render correctly in as_one_line mode."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, as_one_line=True)),
        'repeated_int64: -9223372036854775808'
        ' repeated_uint64: 18446744073709551615'
        ' repeated_double: 123.456'
        ' repeated_double: 1.23e+22'
        ' repeated_double: 1.23e-18'
        ' repeated_string: '
        '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""'
        ' repeated_string: "\\303\\274\\352\\234\\237"')

  def testRoundTripExoticAsOneLine(self, message_module):
    """Exotic values survive a MessageToString/Parse round trip."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')

    # Test as_utf8 = False.
    wire_text = text_format.MessageToString(message,
                                            as_one_line=True,
                                            as_utf8=False)
    parsed_message = message_module.TestAllTypes()
    r = text_format.Parse(wire_text, parsed_message)
    self.assertIs(r, parsed_message)
    self.assertEqual(message, parsed_message)

    # Test as_utf8 = True.
    wire_text = text_format.MessageToString(message,
                                            as_one_line=True,
                                            as_utf8=True)
    parsed_message = message_module.TestAllTypes()
    r = text_format.Parse(wire_text, parsed_message)
    self.assertIs(r, parsed_message)
    self.assertEqual(message, parsed_message,
                     '\n%s != %s' % (message, parsed_message))

  def testPrintRawUtf8String(self, message_module):
    """as_utf8=True emits raw UTF-8 bytes that Parse accepts back."""
    message = message_module.TestAllTypes()
    message.repeated_string.append('\u00fc\ua71f')
    text = text_format.MessageToString(message, as_utf8=True)
    self.CompareToGoldenText(text, 'repeated_string: "\303\274\352\234\237"\n')
    parsed_message = message_module.TestAllTypes()
    text_format.Parse(text, parsed_message)
    self.assertEqual(message, parsed_message,
                     '\n%s != %s' % (message, parsed_message))

  def testPrintFloatFormat(self, message_module):
    """float_format is honored, including for sub-message fields."""
    # Check that float_format argument is passed to sub-message formatting.
    message = message_module.NestedTestAllTypes()
    # We use 1.25 as it is a round number in binary. The proto 32-bit float
    # will not gain additional imprecise digits as a 64-bit Python float and
    # show up in its str. 32-bit 1.2 is noisy when extended to 64-bit:
    #   >>> struct.unpack('f', struct.pack('f', 1.2))[0]
    #   1.2000000476837158
    #   >>> struct.unpack('f', struct.pack('f', 1.25))[0]
    #   1.25
    message.payload.optional_float = 1.25
    # Check rounding at 15 significant digits
    message.payload.optional_double = -.000003456789012345678
    # Check no decimal point.
    message.payload.repeated_float.append(-5642)
    # Check no trailing zeros.
    message.payload.repeated_double.append(.000078900)
    formatted_fields = ['optional_float: 1.25',
                        'optional_double: -3.45678901234568e-6',
                        'repeated_float: -5642', 'repeated_double: 7.89e-5']
    text_message = text_format.MessageToString(message, float_format='.15g')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_message),
        'payload {{\n  {0}\n  {1}\n  {2}\n  {3}\n}}\n'.format(
            *formatted_fields))
    # as_one_line=True is a separate code branch where float_format is passed.
    text_message = text_format.MessageToString(message,
                                               as_one_line=True,
                                               float_format='.15g')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_message),
        'payload {{ {0} {1} {2} {3} }}'.format(*formatted_fields))

  def testMessageToString(self, message_module):
    """str(message) produces the text-format rendering."""
    message = message_module.ForeignMessage()
    message.c = 123
    self.assertEqual('c: 123\n', str(message))

  def testPrintField(self, message_module):
    """PrintField writes 'name: value\\n' via both module and Printer APIs."""
    message = message_module.TestAllTypes()
    field = message.DESCRIPTOR.fields_by_name['optional_float']
    value = message.optional_float
    out = text_format.TextWriter(False)
    text_format.PrintField(field, value, out)
    self.assertEqual('optional_float: 0.0\n', out.getvalue())
    out.close()
    # Test Printer
    out = text_format.TextWriter(False)
    printer = text_format._Printer(out)
    printer.PrintField(field, value)
    self.assertEqual('optional_float: 0.0\n', out.getvalue())
    out.close()

  def testPrintFieldValue(self, message_module):
    """PrintFieldValue writes only the value, no field name."""
    message = message_module.TestAllTypes()
    field = message.DESCRIPTOR.fields_by_name['optional_float']
    value = message.optional_float
    out = text_format.TextWriter(False)
    text_format.PrintFieldValue(field, value, out)
    self.assertEqual('0.0', out.getvalue())
    out.close()
    # Test Printer
    out = text_format.TextWriter(False)
    printer = text_format._Printer(out)
    printer.PrintFieldValue(field, value)
    self.assertEqual('0.0', out.getvalue())
    out.close()

  def testParseAllFields(self, message_module):
    """Parsing MessageToString output reproduces the original message."""
    message = message_module.TestAllTypes()
    test_util.SetAllFields(message)
    ascii_text = text_format.MessageToString(message)
    parsed_message = message_module.TestAllTypes()
    text_format.Parse(ascii_text, parsed_message)
    self.assertEqual(message, parsed_message)
    if message_module is unittest_pb2:
      test_util.ExpectAllFieldsSet(self, message)

  def testParseExotic(self, message_module):
    """Parse handles extreme numbers, escapes and adjacent string pieces."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: -9223372036854775808\n'
            'repeated_uint64: 18446744073709551615\n'
            'repeated_double: 123.456\n'
            'repeated_double: 1.23e+22\n'
            'repeated_double: 1.23e-18\n'
            'repeated_string: \n'
            '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
            'repeated_string: "foo" \'corge\' "grault"\n'
            'repeated_string: "\\303\\274\\352\\234\\237"\n'
            'repeated_string: "\\xc3\\xbc"\n'
            'repeated_string: "\xc3\xbc"\n')
    text_format.Parse(text, message)
    self.assertEqual(-9223372036854775808, message.repeated_int64[0])
    self.assertEqual(18446744073709551615, message.repeated_uint64[0])
    self.assertEqual(123.456, message.repeated_double[0])
    self.assertEqual(1.23e22, message.repeated_double[1])
    self.assertEqual(1.23e-18, message.repeated_double[2])
    self.assertEqual('\000\001\a\b\f\n\r\t\v\\\'"', message.repeated_string[0])
    # Adjacent quoted pieces are concatenated into one string value.
    self.assertEqual('foocorgegrault', message.repeated_string[1])
    self.assertEqual('\u00fc\ua71f', message.repeated_string[2])
    self.assertEqual('\u00fc', message.repeated_string[3])

  def testParseTrailingCommas(self, message_module):
    """Both ';' and ',' are accepted as field terminators."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: 100;\n'
            'repeated_int64: 200;\n'
            'repeated_int64: 300,\n'
            'repeated_string: "one",\n'
            'repeated_string: "two";\n')
    text_format.Parse(text, message)
    self.assertEqual(100, message.repeated_int64[0])
    self.assertEqual(200, message.repeated_int64[1])
    self.assertEqual(300, message.repeated_int64[2])
    self.assertEqual('one', message.repeated_string[0])
    self.assertEqual('two', message.repeated_string[1])

  def testParseRepeatedScalarShortFormat(self, message_module):
    """Repeated scalars may be written as a bracketed list."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: [100, 200];\n'
            'repeated_int64: 300,\n'
            'repeated_string: ["one", "two"];\n')
    text_format.Parse(text, message)
    self.assertEqual(100, message.repeated_int64[0])
    self.assertEqual(200, message.repeated_int64[1])
    self.assertEqual(300, message.repeated_int64[2])
    self.assertEqual('one', message.repeated_string[0])
    self.assertEqual('two', message.repeated_string[1])

  def testParseRepeatedMessageShortFormat(self, message_module):
    """Repeated messages may be written as a bracketed list of {...}."""
    message = message_module.TestAllTypes()
    text = ('repeated_nested_message: [{bb: 100}, {bb: 200}],\n'
            'repeated_nested_message: {bb: 300}\n'
            'repeated_nested_message [{bb: 400}];\n')
    text_format.Parse(text, message)
    self.assertEqual(100, message.repeated_nested_message[0].bb)
    self.assertEqual(200, message.repeated_nested_message[1].bb)
    self.assertEqual(300, message.repeated_nested_message[2].bb)
    self.assertEqual(400, message.repeated_nested_message[3].bb)

  def testParseEmptyText(self, message_module):
    """Parsing the empty string leaves the message in default state."""
    message = message_module.TestAllTypes()
    text = ''
    text_format.Parse(text, message)
    self.assertEqual(message_module.TestAllTypes(), message)

  def testParseInvalidUtf8(self, message_module):
    """Byte escapes that are not valid UTF-8 raise ParseError."""
    message = message_module.TestAllTypes()
    text = 'repeated_string: "\\xc3\\xc3"'
    self.assertRaises(text_format.ParseError, text_format.Parse, text, message)

  def testParseSingleWord(self, message_module):
    """A bare unknown identifier raises ParseError with a location."""
    message = message_module.TestAllTypes()
    text = 'foo'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"foo".'), text_format.Parse, text, message)

  def testParseUnknownField(self, message_module):
    """An unknown field name raises ParseError."""
    message = message_module.TestAllTypes()
    text = 'unknown_field: 8\n'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"unknown_field".'), text_format.Parse, text, message)

  def testParseBadEnumValue(self, message_module):
    """Unknown enum names and out-of-range numbers raise ParseError."""
    message = message_module.TestAllTypes()
    text = 'optional_nested_enum: BARR'
    six.assertRaisesRegex(self, text_format.ParseError,
                          (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                           r'has no value named BARR.'), text_format.Parse,
                          text, message)
    message = message_module.TestAllTypes()
    text = 'optional_nested_enum: 100'
    six.assertRaisesRegex(self, text_format.ParseError,
                          (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                           r'has no value with number 100.'), text_format.Parse,
                          text, message)

  def testParseBadIntValue(self, message_module):
    """A non-numeric token in an int field raises ParseError."""
    message = message_module.TestAllTypes()
    text = 'optional_int32: bork'
    six.assertRaisesRegex(self, text_format.ParseError,
                          ('1:17 : Couldn\'t parse integer: bork'),
                          text_format.Parse, text, message)

  def testParseStringFieldUnescape(self, message_module):
    """Escape sequences in quoted string values are decoded correctly."""
    message = message_module.TestAllTypes()
    text = r'''repeated_string: "\xf\x62"
               repeated_string: "\\xf\\x62"
               repeated_string: "\\\xf\\\x62"
               repeated_string: "\\\\xf\\\\x62"
               repeated_string: "\\\\\xf\\\\\x62"
               repeated_string: "\x5cx20"'''
    text_format.Parse(text, message)
    SLASH = '\\'
    self.assertEqual('\x0fb', message.repeated_string[0])
    self.assertEqual(SLASH + 'xf' + SLASH + 'x62', message.repeated_string[1])
    self.assertEqual(SLASH + '\x0f' + SLASH + 'b', message.repeated_string[2])
    self.assertEqual(SLASH + SLASH + 'xf' + SLASH + SLASH + 'x62',
                     message.repeated_string[3])
    self.assertEqual(SLASH + SLASH + '\x0f' + SLASH + SLASH + 'b',
                     message.repeated_string[4])
    self.assertEqual(SLASH + 'x20', message.repeated_string[5])

  def testMergeDuplicateScalars(self, message_module):
    """Merge lets a later scalar value overwrite an earlier one."""
    message = message_module.TestAllTypes()
    text = ('optional_int32: 42 ' 'optional_int32: 67')
    r = text_format.Merge(text, message)
    self.assertIs(r, message)
    self.assertEqual(67, message.optional_int32)

  def testMergeDuplicateNestedMessageScalars(self, message_module):
    """Merge overwrites scalars inside duplicate nested messages too."""
    message = message_module.TestAllTypes()
    text = ('optional_nested_message { bb: 1 } '
            'optional_nested_message { bb: 2 }')
    r = text_format.Merge(text, message)
    self.assertTrue(r is message)
    self.assertEqual(2, message.optional_nested_message.bb)

  def testParseOneof(self, message_module):
    """A oneof member survives a print/parse round trip."""
    m = message_module.TestAllTypes()
    m.oneof_uint32 = 11
    m2 = message_module.TestAllTypes()
    text_format.Parse(text_format.MessageToString(m), m2)
    self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field'))

  def testParseMultipleOneof(self, message_module):
    """Two members of one oneof: proto2 rejects, proto3 keeps the last."""
    m_string = '\n'.join(['oneof_uint32: 11', 'oneof_string: "foo"'])
    m2 = message_module.TestAllTypes()
    if message_module is unittest_pb2:
      with self.assertRaisesRegex(text_format.ParseError,
                                  ' is specified along with field '):
        text_format.Parse(m_string, m2)
    else:
      text_format.Parse(m_string, m2)
      self.assertEqual('oneof_string', m2.WhichOneof('oneof_field'))
# These are tests that aren't fundamentally specific to proto2, but are at
# the moment because of differences between the proto2 and proto3 test schemas.
# Ideally the schemas would be made more similar so these tests could pass.
class OnlyWorksWithProto2RightNowTests(TextFormatBase):
  """Tests that currently pass only against the proto2 schema.

  These are not fundamentally proto2-specific; they exist here because
  of differences between the proto2 and proto3 test schemas.
  """

  def testPrintAllFieldsPointy(self):
    """pointy_brackets=True output matches the pointy golden file."""
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, pointy_brackets=True)),
        'text_format_unittest_data_pointy_oneof.txt')

  def testParseGolden(self):
    """Parsing the golden file yields a fully populated message."""
    golden_text = '\n'.join(self.ReadGolden(
        'text_format_unittest_data_oneof_implemented.txt'))
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.Parse(golden_text, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)

  def testPrintAllFields(self):
    """Default printing matches the golden file."""
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'text_format_unittest_data_oneof_implemented.txt')

  def testPrintInIndexOrder(self):
    """use_index_order=True orders fields by index, not field number."""
    message = unittest_pb2.TestFieldOrderings()
    message.my_string = '115'
    message.my_int = 101
    message.my_float = 111
    message.optional_nested_message.oo = 0
    message.optional_nested_message.bb = 1
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, use_index_order=True)),
        'my_string: \"115\"\nmy_int: 101\nmy_float: 111\n'
        'optional_nested_message {\n  oo: 0\n  bb: 1\n}\n')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'my_int: 101\nmy_string: \"115\"\nmy_float: 111\n'
        'optional_nested_message {\n  bb: 1\n  oo: 0\n}\n')

  def testMergeLinesGolden(self):
    """MergeLines on the golden file reproduces the full message."""
    opened = self.ReadGolden('text_format_unittest_data_oneof_implemented.txt')
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.MergeLines(opened, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)

  def testParseLinesGolden(self):
    """ParseLines on the golden file reproduces the full message."""
    opened = self.ReadGolden('text_format_unittest_data_oneof_implemented.txt')
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.ParseLines(opened, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)

  def testPrintMap(self):
    """Map fields print via their underlying repeated key/value entries."""
    message = map_unittest_pb2.TestMap()
    message.map_int32_int32[-123] = -456
    message.map_int64_int64[-2**33] = -2**34
    message.map_uint32_uint32[123] = 456
    message.map_uint64_uint64[2**33] = 2**34
    message.map_string_string['abc'] = '123'
    message.map_int32_foreign_message[111].c = 5
    # Maps are serialized to text format using their underlying repeated
    # representation.
    self.CompareToGoldenText(
        text_format.MessageToString(message), 'map_int32_int32 {\n'
        '  key: -123\n'
        '  value: -456\n'
        '}\n'
        'map_int64_int64 {\n'
        '  key: -8589934592\n'
        '  value: -17179869184\n'
        '}\n'
        'map_uint32_uint32 {\n'
        '  key: 123\n'
        '  value: 456\n'
        '}\n'
        'map_uint64_uint64 {\n'
        '  key: 8589934592\n'
        '  value: 17179869184\n'
        '}\n'
        'map_string_string {\n'
        '  key: "abc"\n'
        '  value: "123"\n'
        '}\n'
        'map_int32_foreign_message {\n'
        '  key: 111\n'
        '  value {\n'
        '    c: 5\n'
        '  }\n'
        '}\n')

  def testMapOrderEnforcement(self):
    """Map entries print in sorted key order regardless of insert order."""
    message = map_unittest_pb2.TestMap()
    for letter in string.ascii_uppercase[13:26]:
      message.map_string_string[letter] = 'dummy'
    for letter in reversed(string.ascii_uppercase[0:13]):
      message.map_string_string[letter] = 'dummy'
    golden = ''.join(('map_string_string {\n  key: "%c"\n  value: "dummy"\n}\n'
                      % (letter,) for letter in string.ascii_uppercase))
    self.CompareToGoldenText(text_format.MessageToString(message), golden)

  def testMapOrderSemantics(self):
    """Round-tripping the shared map golden file is stable."""
    golden_lines = self.ReadGolden('map_test_data.txt')
    # The C++ implementation emits defaulted-value fields, while the Python
    # implementation does not.  Adjusting for this is awkward, but it is
    # valuable to test against a common golden file.
    line_blacklist = ('  key: 0\n', '  value: 0\n', '  key: false\n',
                      '  value: false\n')
    golden_lines = [line for line in golden_lines if line not in line_blacklist]
    message = map_unittest_pb2.TestMap()
    text_format.ParseLines(golden_lines, message)
    candidate = text_format.MessageToString(message)
    # The Python implementation emits "1.0" for the double value that the C++
    # implementation emits as "1".
    candidate = candidate.replace('1.0', '1', 2)
    self.assertMultiLineEqual(candidate, ''.join(golden_lines))
# Tests of proto2-only features (MessageSet, extensions, etc.).
class Proto2Tests(TextFormatBase):
  def testPrintMessageSet(self):
    """MessageSet extensions print with bracketed full extension names."""
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    self.CompareToGoldenText(
        text_format.MessageToString(message), 'message_set {\n'
        '  [protobuf_unittest.TestMessageSetExtension1] {\n'
        '    i: 23\n'
        '  }\n'
        '  [protobuf_unittest.TestMessageSetExtension2] {\n'
        '    str: \"foo\"\n'
        '  }\n'
        '}\n')

    message = message_set_extensions_pb2.TestMessageSet()
    ext = message_set_extensions_pb2.message_set_extension3
    message.Extensions[ext].text = 'bar'
    self.CompareToGoldenText(
        text_format.MessageToString(message),
        '[google.protobuf.internal.TestMessageSetExtension3] {\n'
        '  text: \"bar\"\n'
        '}\n')
  def testPrintMessageSetByFieldNumber(self):
    """use_field_number=True prints numeric tags instead of names."""
    out = text_format.TextWriter(False)
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    text_format.PrintMessage(message, out, use_field_number=True)
    self.CompareToGoldenText(out.getvalue(), '1 {\n'
                             '  1545008 {\n'
                             '    15: 23\n'
                             '  }\n'
                             '  1547769 {\n'
                             '    25: \"foo\"\n'
                             '  }\n'
                             '}\n')
    out.close()
  def testPrintMessageSetAsOneLine(self):
    """MessageSet extensions render correctly in as_one_line mode."""
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    self.CompareToGoldenText(
        text_format.MessageToString(message, as_one_line=True),
        'message_set {'
        ' [protobuf_unittest.TestMessageSetExtension1] {'
        ' i: 23'
        ' }'
        ' [protobuf_unittest.TestMessageSetExtension2] {'
        ' str: \"foo\"'
        ' }'
        ' }')
  def testParseMessageSet(self):
    """Parse reads bracketed MessageSet extension blocks back in."""
    message = unittest_pb2.TestAllTypes()
    text = ('repeated_uint64: 1\n' 'repeated_uint64: 2\n')
    text_format.Parse(text, message)
    self.assertEqual(1, message.repeated_uint64[0])
    self.assertEqual(2, message.repeated_uint64[1])

    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [protobuf_unittest.TestMessageSetExtension1] {\n'
            '    i: 23\n'
            '  }\n'
            '  [protobuf_unittest.TestMessageSetExtension2] {\n'
            '    str: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)
  def testParseMessageByFieldNumber(self):
    """Numeric field tags parse only when allow_field_number=True."""
    message = unittest_pb2.TestAllTypes()
    text = ('34: 1\n' 'repeated_uint64: 2\n')
    text_format.Parse(text, message, allow_field_number=True)
    self.assertEqual(1, message.repeated_uint64[0])
    self.assertEqual(2, message.repeated_uint64[1])

    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('1 {\n'
            '  1545008 {\n'
            '    15: 23\n'
            '  }\n'
            '  1547769 {\n'
            '    25: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message, allow_field_number=True)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)

    # Can't parse field number without set allow_field_number=True.
    message = unittest_pb2.TestAllTypes()
    text = '34:1\n'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"34".'), text_format.Parse, text, message)

    # Can't parse if field number is not found.
    text = '1234:1\n'
    six.assertRaisesRegex(
        self,
        text_format.ParseError,
        (r'1:1 : Message type "\w+.TestAllTypes" has no field named '
         r'"1234".'),
        text_format.Parse,
        text,
        message,
        allow_field_number=True)
  def testPrintAllExtensions(self):
    """Printing a message with every extension set matches the golden file."""
    message = unittest_pb2.TestAllExtensions()
    test_util.SetAllExtensions(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'text_format_unittest_extensions_data.txt')
def testPrintAllExtensionsPointy(self):
message = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(message)
self.CompareToGoldenFile(
self.RemoveRedundantZeros(text_format.MessageToString(
message, pointy_brackets=True)),
'text_format_unittest_extensions_data_pointy.txt')
def testParseGoldenExtensions(self):
golden_text = '\n'.join(self.ReadGolden(
'text_format_unittest_extensions_data.txt'))
parsed_message = unittest_pb2.TestAllExtensions()
text_format.Parse(golden_text, parsed_message)
message = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(message)
self.assertEqual(message, parsed_message)
def testParseAllExtensions(self):
message = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(message)
ascii_text = text_format.MessageToString(message)
parsed_message = unittest_pb2.TestAllExtensions()
text_format.Parse(ascii_text, parsed_message)
self.assertEqual(message, parsed_message)
  def testParseAllowedUnknownExtension(self):
    """With allow_unknown_extension=True, unknown extensions are skipped, but
    malformed content inside them and unknown plain fields still fail."""
    # Skip over unknown extension correctly.
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [unknown_extension] {\n'
            '    i: 23\n'
            '    bin: "\xe0"'
            '    [nested_unknown_ext]: {\n'
            '      i: 23\n'
            '      test: "test_string"\n'
            '      floaty_float: -0.315\n'
            '      num: -inf\n'
            '      multiline_str: "abc"\n'
            '          "def"\n'
            '          "xyz."\n'
            '      [nested_unknown_ext]: <\n'
            '        i: 23\n'
            '        i: 24\n'
            '        pointfloat: .3\n'
            '        test: "test_string"\n'
            '        floaty_float: -0.315\n'
            '        num: -inf\n'
            '        long_string: "test" "test2" \n'
            '      >\n'
            '    }\n'
            '  }\n'
            '  [unknown_extension]: 5\n'
            '}\n')
    text_format.Parse(text, message, allow_unknown_extension=True)
    # The skipped content leaves the message_set present but empty.
    golden = 'message_set {\n}\n'
    self.CompareToGoldenText(text_format.MessageToString(message), golden)
    # Catch parse errors in unknown extension.
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    i:\n'  # Missing value.
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: }',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    str: "malformed string\n'  # Missing closing quote.
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: "',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    str: "malformed\n multiline\n string\n'
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: "',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [malformed_extension] <\n'
                 '    i: -5\n'
                 '  \n'  # Missing '>' here.
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          '5:1 : Expected ">".',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    # Don't allow unknown fields with allow_unknown_extension=True.
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  unknown_field: true\n'
                 '  \n'  # Missing '>' here.
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          ('2:3 : Message type '
                           '"proto2_wireformat_unittest.TestMessageSet" has no'
                           ' field named "unknown_field".'),
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    # Parse known extension correctly.
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [protobuf_unittest.TestMessageSetExtension1] {\n'
            '    i: 23\n'
            '  }\n'
            '  [protobuf_unittest.TestMessageSetExtension2] {\n'
            '    str: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message, allow_unknown_extension=True)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)
  def testParseBadExtension(self):
    """Unregistered extensions, and extensions on extension-less message
    types, raise ParseError with position-annotated messages."""
    message = unittest_pb2.TestAllExtensions()
    text = '[unknown_extension]: 8\n'
    six.assertRaisesRegex(self, text_format.ParseError,
                          '1:2 : Extension "unknown_extension" not registered.',
                          text_format.Parse, text, message)
    message = unittest_pb2.TestAllTypes()
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
        'extensions.'), text_format.Parse, text, message)
def testMergeDuplicateExtensionScalars(self):
message = unittest_pb2.TestAllExtensions()
text = ('[protobuf_unittest.optional_int32_extension]: 42 '
'[protobuf_unittest.optional_int32_extension]: 67')
text_format.Merge(text, message)
self.assertEqual(67,
message.Extensions[unittest_pb2.optional_int32_extension])
  def testParseDuplicateExtensionScalars(self):
    """Unlike Merge, Parse rejects a duplicated scalar extension."""
    message = unittest_pb2.TestAllExtensions()
    text = ('[protobuf_unittest.optional_int32_extension]: 42 '
            '[protobuf_unittest.optional_int32_extension]: 67')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:96 : Message type "protobuf_unittest.TestAllExtensions" '
        'should not have multiple '
        '"protobuf_unittest.optional_int32_extension" extensions.'),
                          text_format.Parse, text, message)
  def testParseDuplicateNestedMessageScalars(self):
    """Parse rejects a scalar set twice inside a nested message."""
    message = unittest_pb2.TestAllTypes()
    text = ('optional_nested_message { bb: 1 } '
            'optional_nested_message { bb: 2 }')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
        'should not have multiple "bb" fields.'), text_format.Parse, text,
                          message)
  def testParseDuplicateScalars(self):
    """Parse rejects a duplicated top-level optional scalar."""
    message = unittest_pb2.TestAllTypes()
    text = ('optional_int32: 42 ' 'optional_int32: 67')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
        'have multiple "optional_int32" fields.'), text_format.Parse, text,
                          message)
def testParseGroupNotClosed(self):
message = unittest_pb2.TestAllTypes()
text = 'RepeatedGroup: <'
six.assertRaisesRegex(self, text_format.ParseError, '1:16 : Expected ">".',
text_format.Parse, text, message)
text = 'RepeatedGroup: {'
six.assertRaisesRegex(self, text_format.ParseError, '1:16 : Expected "}".',
text_format.Parse, text, message)
def testParseEmptyGroup(self):
message = unittest_pb2.TestAllTypes()
text = 'OptionalGroup: {}'
text_format.Parse(text, message)
self.assertTrue(message.HasField('optionalgroup'))
message.Clear()
message = unittest_pb2.TestAllTypes()
text = 'OptionalGroup: <>'
text_format.Parse(text, message)
self.assertTrue(message.HasField('optionalgroup'))
# Maps aren't really proto2-only, but our test schema only has maps for
# proto2.
  def testParseMap(self):
    """Map entries written as key/value submessages parse into map fields."""
    text = ('map_int32_int32 {\n'
            '  key: -123\n'
            '  value: -456\n'
            '}\n'
            'map_int64_int64 {\n'
            '  key: -8589934592\n'
            '  value: -17179869184\n'
            '}\n'
            'map_uint32_uint32 {\n'
            '  key: 123\n'
            '  value: 456\n'
            '}\n'
            'map_uint64_uint64 {\n'
            '  key: 8589934592\n'
            '  value: 17179869184\n'
            '}\n'
            'map_string_string {\n'
            '  key: "abc"\n'
            '  value: "123"\n'
            '}\n'
            'map_int32_foreign_message {\n'
            '  key: 111\n'
            '  value {\n'
            '    c: 5\n'
            '  }\n'
            '}\n')
    message = map_unittest_pb2.TestMap()
    text_format.Parse(text, message)
    self.assertEqual(-456, message.map_int32_int32[-123])
    # -8589934592 == -2**33, -17179869184 == -2**34, etc.
    self.assertEqual(-2**34, message.map_int64_int64[-2**33])
    self.assertEqual(456, message.map_uint32_uint32[123])
    self.assertEqual(2**34, message.map_uint64_uint64[2**33])
    self.assertEqual('123', message.map_string_string['abc'])
    self.assertEqual(5, message.map_int32_foreign_message[111].c)
class Proto3Tests(unittest.TestCase):
  """Tests for printing and merging expanded google.protobuf.Any fields."""
  def testPrintMessageExpandAny(self):
    """With a descriptor pool, a packed Any prints in expanded [type.url] form."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string"\n'
        '  }\n'
        '}\n')
  def testPrintMessageExpandAnyRepeated(self):
    """Each element of a repeated Any field is expanded separately."""
    packed_message = unittest_pb2.OneString()
    message = any_test_pb2.TestAny()
    packed_message.data = 'string0'
    message.repeated_any_value.add().Pack(packed_message)
    packed_message.data = 'string1'
    message.repeated_any_value.add().Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    descriptor_pool=descriptor_pool.Default()),
        'repeated_any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string0"\n'
        '  }\n'
        '}\n'
        'repeated_any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string1"\n'
        '  }\n'
        '}\n')
  def testPrintMessageExpandAnyNoDescriptorPool(self):
    """Without a pool the Any stays unexpanded: raw type_url + value bytes."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message, descriptor_pool=None),
        'any_value {\n'
        '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
        '  value: "\\n\\006string"\n'
        '}\n')
  def testPrintMessageExpandAnyDescriptorPoolMissingType(self):
    """A pool that lacks the packed type falls back to unexpanded printing."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    empty_pool = descriptor_pool.DescriptorPool()
    self.assertEqual(
        text_format.MessageToString(message, descriptor_pool=empty_pool),
        'any_value {\n'
        '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
        '  value: "\\n\\006string"\n'
        '}\n')
  def testPrintMessageExpandAnyPointyBrackets(self):
    """Expanded Any respects the pointy_brackets option."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    pointy_brackets=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value <\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] <\n'
        '    data: "string"\n'
        '  >\n'
        '>\n')
  def testPrintMessageExpandAnyAsOneLine(self):
    """Expanded Any respects the as_one_line option."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    as_one_line=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value {'
        ' [type.googleapis.com/protobuf_unittest.OneString]'
        ' { data: "string" } '
        '}')
  def testPrintMessageExpandAnyAsOneLinePointyBrackets(self):
    """Expanded Any honors as_one_line and pointy_brackets together."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    as_one_line=True,
                                    pointy_brackets=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value <'
        ' [type.googleapis.com/protobuf_unittest.OneString]'
        ' < data: "string" > '
        '>')
  def testMergeExpandedAny(self):
    """Merging expanded [type.url] syntax re-packs the Any correctly."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
  def testMergeExpandedAnyRepeated(self):
    """Merging repeated expanded Any values preserves order and content."""
    message = any_test_pb2.TestAny()
    text = ('repeated_any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string0"\n'
            '  }\n'
            '}\n'
            'repeated_any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string1"\n'
            '  }\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.repeated_any_value[0].Unpack(packed_message)
    self.assertEqual('string0', packed_message.data)
    message.repeated_any_value[1].Unpack(packed_message)
    self.assertEqual('string1', packed_message.data)
  def testMergeExpandedAnyPointyBrackets(self):
    """Expanded Any merges regardless of bracket style."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] <\n'
            '    data: "string"\n'
            '  >\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
  def testMergeExpandedAnyNoDescriptorPool(self):
    """Merging expanded Any without a pool is a ParseError."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    with self.assertRaises(text_format.ParseError) as e:
      text_format.Merge(text, message, descriptor_pool=None)
    self.assertEqual(str(e.exception),
                     'Descriptor pool required to parse expanded Any field')
  def testMergeExpandedAnyDescriptorPoolMissingType(self):
    """Merging fails with a clear error if the pool lacks the packed type."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    with self.assertRaises(text_format.ParseError) as e:
      empty_pool = descriptor_pool.DescriptorPool()
      text_format.Merge(text, message, descriptor_pool=empty_pool)
    self.assertEqual(
        str(e.exception),
        'Type protobuf_unittest.OneString not found in descriptor pool')
  def testMergeUnexpandedAny(self):
    """Raw type_url/value syntax merges without needing a descriptor pool."""
    text = ('any_value {\n'
            '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
            '  value: "\\n\\006string"\n'
            '}\n')
    message = any_test_pb2.TestAny()
    text_format.Merge(text, message)
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
class TokenizerTest(unittest.TestCase):
  """Tests for text_format.Tokenizer's Consume* methods and comment handling."""
  def testSimpleTokenCases(self):
    """Walks a mixed token stream, checking each Consume* result in order."""
    text = ('identifier1:"string1"\n \n\n'
            'identifier2 : \n \n123  \n  identifier3 :\'string\'\n'
            'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n'
            'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
            'ID9: 22 ID10: -111111111111111111 ID11: -22\n'
            'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f '
            'false_bool:  0 true_BOOL:t \n true_bool1:  1 false_BOOL1:f ')
    tokenizer = text_format.Tokenizer(text.splitlines())
    # Each entry is either a (consume_method, expected_value) pair or a
    # literal punctuation token that must appear next in the stream.
    methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), ':',
               (tokenizer.ConsumeString, 'string1'),
               (tokenizer.ConsumeIdentifier, 'identifier2'), ':',
               (tokenizer.ConsumeInteger, 123),
               (tokenizer.ConsumeIdentifier, 'identifier3'), ':',
               (tokenizer.ConsumeString, 'string'),
               (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':',
               (tokenizer.ConsumeFloat, 1.1e+2),
               (tokenizer.ConsumeIdentifier, 'ID5'), ':',
               (tokenizer.ConsumeFloat, -0.23),
               (tokenizer.ConsumeIdentifier, 'ID6'), ':',
               (tokenizer.ConsumeString, 'aaaa\'bbbb'),
               (tokenizer.ConsumeIdentifier, 'ID7'), ':',
               (tokenizer.ConsumeString, 'aa\"bb'),
               (tokenizer.ConsumeIdentifier, 'ID8'), ':', '{',
               (tokenizer.ConsumeIdentifier, 'A'), ':',
               (tokenizer.ConsumeFloat, float('inf')),
               (tokenizer.ConsumeIdentifier, 'B'), ':',
               (tokenizer.ConsumeFloat, -float('inf')),
               (tokenizer.ConsumeIdentifier, 'C'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'D'), ':',
               (tokenizer.ConsumeBool, False), '}',
               (tokenizer.ConsumeIdentifier, 'ID9'), ':',
               (tokenizer.ConsumeInteger, 22),
               (tokenizer.ConsumeIdentifier, 'ID10'), ':',
               (tokenizer.ConsumeInteger, -111111111111111111),
               (tokenizer.ConsumeIdentifier, 'ID11'), ':',
               (tokenizer.ConsumeInteger, -22),
               (tokenizer.ConsumeIdentifier, 'ID12'), ':',
               (tokenizer.ConsumeInteger, 2222222222222222222),
               (tokenizer.ConsumeIdentifier, 'ID13'), ':',
               (tokenizer.ConsumeFloat, 1.23456),
               (tokenizer.ConsumeIdentifier, 'ID14'), ':',
               (tokenizer.ConsumeFloat, 1.2e+2),
               (tokenizer.ConsumeIdentifier, 'false_bool'), ':',
               (tokenizer.ConsumeBool, False),
               (tokenizer.ConsumeIdentifier, 'true_BOOL'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'true_bool1'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'false_BOOL1'), ':',
               (tokenizer.ConsumeBool, False)]
    i = 0
    while not tokenizer.AtEnd():
      m = methods[i]
      if isinstance(m, str):
        # Plain punctuation: compare the raw token and advance manually.
        token = tokenizer.token
        self.assertEqual(token, m)
        tokenizer.NextToken()
      else:
        self.assertEqual(m[1], m[0]())
      i += 1
  def testConsumeAbstractIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())
    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())
    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())
  def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    # -1 is out of range for any unsigned type.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))
    # uint32_max + 1 overflows 32-bit types, signed or unsigned.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))
    # int64_max + 1 only fits in uint64.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())
    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())
  def testConsumeByteString(self):
    """Malformed byte-string literals all raise ParseError."""
    text = '"string1\''  # Mismatched quote characters.
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
    text = 'string1"'  # Missing opening quote.
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
    text = '\n"\\xt"'  # Invalid hex escape.
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
    text = '\n"\\"'  # Dangling backslash.
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
    text = '\n"\\x"'  # Hex escape with no digits.
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
  def testConsumeBool(self):
    """A non-boolean token fails ConsumeBool."""
    text = 'not-a-bool'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
  def testSkipComment(self):
    """By default comments are skipped, so a comment-only input is at end."""
    tokenizer = text_format.Tokenizer('# some comment'.splitlines())
    self.assertTrue(tokenizer.AtEnd())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
  def testConsumeComment(self):
    """With skip_comments=False a comment is a consumable token."""
    tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
                                      skip_comments=False)
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd())
  def testConsumeTwoComments(self):
    """Consecutive comment lines are consumed one at a time."""
    text = '# some comment\n# another comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# another comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd())
  def testConsumeTrailingComment(self):
    """ConsumeComment fails on non-comment tokens but succeeds at the end."""
    text = 'some_number: 4\n# some comment'
    tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
    self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
    self.assertEqual(tokenizer.token, ':')
    tokenizer.NextToken()
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
    self.assertEqual(4, tokenizer.ConsumeInteger())
    self.assertFalse(tokenizer.AtEnd())
    self.assertEqual('# some comment', tokenizer.ConsumeComment())
    self.assertTrue(tokenizer.AtEnd())
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
  unittest.main()
| 41.405147 | 80 | 0.649447 |
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = 'kenton@google.com (Kenton Varda)'
import re
import six
import string
try:
import unittest2 as unittest # PY26, pylint: disable=g-import-not-at-top
except ImportError:
import unittest # pylint: disable=g-import-not-at-top
from google.protobuf.internal import _parameterized
from google.protobuf import any_test_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import test_util
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf import descriptor_pool
from google.protobuf import text_format
# Low-level nuts-n-bolts tests.
class SimpleTextFormatTests(unittest.TestCase):
  """Low-level sanity checks on text_format module internals."""
  # The members of _QUOTES are formatted into a regexp template that
  # expects single characters. Therefore it's an error (in addition to being
  # nonsensical) for any quote mark to be longer than one character.
  def testQuoteMarksAreSingleChars(self):
    for quote in text_format._QUOTES:
      self.assertEqual(1, len(quote))
class TextFormatBase(unittest.TestCase):
  """Shared helpers for comparing text-format output against golden data."""
  def ReadGolden(self, golden_filename):
    """Returns the golden file's lines, decoded to text on Python 3."""
    with test_util.GoldenFile(golden_filename) as f:
      return (f.readlines() if str is bytes else
              [golden_line.decode('utf-8') for golden_line in f])
  def CompareToGoldenFile(self, text, golden_filename):
    """Asserts that `text` equals the entire golden file's content."""
    golden_lines = self.ReadGolden(golden_filename)
    self.assertMultiLineEqual(text, ''.join(golden_lines))
  def CompareToGoldenText(self, text, golden_text):
    """Asserts exact equality against an inline golden string."""
    self.assertEqual(text, golden_text)
  def RemoveRedundantZeros(self, text):
    """Strips zero-padding from exponents ('e+05' -> 'e+5') and trailing '.0'."""
    # Each replace is applied twice so doubly-padded exponents ('e+005')
    # also collapse fully.
    text = text.replace('e+0','e+').replace('e+0','e+') \
               .replace('e-0','e-').replace('e-0','e-')
    # Floating-point fields print with a '.0' suffix even for whole numbers.
    text = re.compile(r'\.0$', re.MULTILINE).sub('', text)
    return text
@_parameterized.Parameters((unittest_pb2), (unittest_proto3_arena_pb2))
class TextFormatTest(TextFormatBase):
  def testPrintExotic(self, message_module):
    """Extreme ints, exponent doubles, and escaped strings print canonically."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'repeated_int64: -9223372036854775808\n'
        'repeated_uint64: 18446744073709551615\n'
        'repeated_double: 123.456\n'
        'repeated_double: 1.23e+22\n'
        'repeated_double: 1.23e-18\n'
        'repeated_string:'
        ' "\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
        'repeated_string: "\\303\\274\\352\\234\\237"\n')
  def testPrintExoticUnicodeSubclass(self, message_module):
    """A text-type subclass instance prints the same as a plain string."""
    class UnicodeSub(six.text_type):
      pass
    message = message_module.TestAllTypes()
    message.repeated_string.append(UnicodeSub('\u00fc\ua71f'))
    self.CompareToGoldenText(
        text_format.MessageToString(message),
        'repeated_string: "\\303\\274\\352\\234\\237"\n')
def testPrintNestedMessageAsOneLine(self, message_module):
message = message_module.TestAllTypes()
msg = message.repeated_nested_message.add()
msg.bb = 42
self.CompareToGoldenText(
text_format.MessageToString(message, as_one_line=True),
'repeated_nested_message { bb: 42 }')
def testPrintRepeatedFieldsAsOneLine(self, message_module):
message = message_module.TestAllTypes()
message.repeated_int32.append(1)
message.repeated_int32.append(1)
message.repeated_int32.append(3)
message.repeated_string.append('Google')
message.repeated_string.append('Zurich')
self.CompareToGoldenText(
text_format.MessageToString(message, as_one_line=True),
'repeated_int32: 1 repeated_int32: 1 repeated_int32: 3 '
'repeated_string: "Google" repeated_string: "Zurich"')
def testPrintNestedNewLineInStringAsOneLine(self, message_module):
message = message_module.TestAllTypes()
message.optional_string = 'a\nnew\nline'
self.CompareToGoldenText(
text_format.MessageToString(message, as_one_line=True),
'optional_string: "a\\nnew\\nline"')
  def testPrintExoticAsOneLine(self, message_module):
    """The exotic-values fixture also prints correctly with as_one_line=True."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, as_one_line=True)),
        'repeated_int64: -9223372036854775808'
        ' repeated_uint64: 18446744073709551615'
        ' repeated_double: 123.456'
        ' repeated_double: 1.23e+22'
        ' repeated_double: 1.23e-18'
        ' repeated_string: '
        '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""'
        ' repeated_string: "\\303\\274\\352\\234\\237"')
  def testRoundTripExoticAsOneLine(self, message_module):
    """One-line output of exotic values round-trips through Parse, for both
    as_utf8 settings."""
    message = message_module.TestAllTypes()
    message.repeated_int64.append(-9223372036854775808)
    message.repeated_uint64.append(18446744073709551615)
    message.repeated_double.append(123.456)
    message.repeated_double.append(1.23e22)
    message.repeated_double.append(1.23e-18)
    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
    message.repeated_string.append('\u00fc\ua71f')
    # Test as_utf8 = False.
    wire_text = text_format.MessageToString(message,
                                            as_one_line=True,
                                            as_utf8=False)
    parsed_message = message_module.TestAllTypes()
    r = text_format.Parse(wire_text, parsed_message)
    self.assertIs(r, parsed_message)
    self.assertEqual(message, parsed_message)
    # Test as_utf8 = True.
    wire_text = text_format.MessageToString(message,
                                            as_one_line=True,
                                            as_utf8=True)
    parsed_message = message_module.TestAllTypes()
    r = text_format.Parse(wire_text, parsed_message)
    self.assertIs(r, parsed_message)
    self.assertEqual(message, parsed_message,
                     '\n%s != %s' % (message, parsed_message))
  def testPrintRawUtf8String(self, message_module):
    """as_utf8=True emits raw UTF-8 bytes, and the result still re-parses."""
    message = message_module.TestAllTypes()
    message.repeated_string.append('\u00fc\ua71f')
    text = text_format.MessageToString(message, as_utf8=True)
    self.CompareToGoldenText(text, 'repeated_string: "\303\274\352\234\237"\n')
    parsed_message = message_module.TestAllTypes()
    text_format.Parse(text, parsed_message)
    self.assertEqual(message, parsed_message,
                     '\n%s != %s' % (message, parsed_message))
  def testPrintFloatFormat(self, message_module):
    """float_format is honored, including inside nested sub-messages."""
    # Check that float_format argument is passed to sub-message formatting.
    message = message_module.NestedTestAllTypes()
    # We use 1.25 as it is a round number in binary. The proto 32-bit float
    # will not gain additional imprecise digits as a 64-bit Python float and
    # show up in its str. 32-bit 1.2 is noisy when extended to 64-bit:
    # >>> struct.unpack('f', struct.pack('f', 1.2))[0]
    # 1.2000000476837158
    # >>> struct.unpack('f', struct.pack('f', 1.25))[0]
    # 1.25
    message.payload.optional_float = 1.25
    # Check rounding at 15 significant digits
    message.payload.optional_double = -.000003456789012345678
    # Check no decimal point.
    message.payload.repeated_float.append(-5642)
    # Check no trailing zeros.
    message.payload.repeated_double.append(.000078900)
    formatted_fields = ['optional_float: 1.25',
                        'optional_double: -3.45678901234568e-6',
                        'repeated_float: -5642', 'repeated_double: 7.89e-5']
    text_message = text_format.MessageToString(message, float_format='.15g')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_message),
        'payload {{\n  {0}\n  {1}\n  {2}\n  {3}\n}}\n'.format(
            *formatted_fields))
    # as_one_line=True is a separate code branch where float_format is passed.
    text_message = text_format.MessageToString(message,
                                               as_one_line=True,
                                               float_format='.15g')
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_message),
        'payload {{ {0} {1} {2} {3} }}'.format(*formatted_fields))
def testMessageToString(self, message_module):
message = message_module.ForeignMessage()
message.c = 123
self.assertEqual('c: 123\n', str(message))
  def testPrintField(self, message_module):
    """Both the module-level PrintField and _Printer.PrintField write
    'name: value\\n' for a single field."""
    message = message_module.TestAllTypes()
    field = message.DESCRIPTOR.fields_by_name['optional_float']
    value = message.optional_float
    out = text_format.TextWriter(False)
    text_format.PrintField(field, value, out)
    self.assertEqual('optional_float: 0.0\n', out.getvalue())
    out.close()
    # Test Printer
    out = text_format.TextWriter(False)
    printer = text_format._Printer(out)
    printer.PrintField(field, value)
    self.assertEqual('optional_float: 0.0\n', out.getvalue())
    out.close()
  def testPrintFieldValue(self, message_module):
    """PrintFieldValue (module-level and via _Printer) writes the bare value
    with no field name or trailing newline."""
    message = message_module.TestAllTypes()
    field = message.DESCRIPTOR.fields_by_name['optional_float']
    value = message.optional_float
    out = text_format.TextWriter(False)
    text_format.PrintFieldValue(field, value, out)
    self.assertEqual('0.0', out.getvalue())
    out.close()
    # Test Printer
    out = text_format.TextWriter(False)
    printer = text_format._Printer(out)
    printer.PrintFieldValue(field, value)
    self.assertEqual('0.0', out.getvalue())
    out.close()
def testParseAllFields(self, message_module):
message = message_module.TestAllTypes()
test_util.SetAllFields(message)
ascii_text = text_format.MessageToString(message)
parsed_message = message_module.TestAllTypes()
text_format.Parse(ascii_text, parsed_message)
self.assertEqual(message, parsed_message)
if message_module is unittest_pb2:
test_util.ExpectAllFieldsSet(self, message)
  def testParseExotic(self, message_module):
    """Extreme numeric values, escape sequences, adjacent string literals,
    and UTF-8 (escaped and raw) all parse to the expected Python values."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: -9223372036854775808\n'
            'repeated_uint64: 18446744073709551615\n'
            'repeated_double: 123.456\n'
            'repeated_double: 1.23e+22\n'
            'repeated_double: 1.23e-18\n'
            'repeated_string: \n'
            '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
            'repeated_string: "foo" \'corge\' "grault"\n'
            'repeated_string: "\\303\\274\\352\\234\\237"\n'
            'repeated_string: "\\xc3\\xbc"\n'
            'repeated_string: "\xc3\xbc"\n')
    text_format.Parse(text, message)
    self.assertEqual(-9223372036854775808, message.repeated_int64[0])
    self.assertEqual(18446744073709551615, message.repeated_uint64[0])
    self.assertEqual(123.456, message.repeated_double[0])
    self.assertEqual(1.23e22, message.repeated_double[1])
    self.assertEqual(1.23e-18, message.repeated_double[2])
    self.assertEqual('\000\001\a\b\f\n\r\t\v\\\'"', message.repeated_string[0])
    # Adjacent quoted fragments concatenate, C-style.
    self.assertEqual('foocorgegrault', message.repeated_string[1])
    self.assertEqual('\u00fc\ua71f', message.repeated_string[2])
    self.assertEqual('\u00fc', message.repeated_string[3])
  def testParseTrailingCommas(self, message_module):
    """A trailing ';' or ',' after a field value is accepted by the parser."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: 100;\n'
            'repeated_int64: 200;\n'
            'repeated_int64: 300,\n'
            'repeated_string: "one",\n'
            'repeated_string: "two";\n')
    text_format.Parse(text, message)
    self.assertEqual(100, message.repeated_int64[0])
    self.assertEqual(200, message.repeated_int64[1])
    self.assertEqual(300, message.repeated_int64[2])
    self.assertEqual('one', message.repeated_string[0])
    self.assertEqual('two', message.repeated_string[1])
  def testParseRepeatedScalarShortFormat(self, message_module):
    """Repeated scalars may be given as a bracketed list: field: [a, b]."""
    message = message_module.TestAllTypes()
    text = ('repeated_int64: [100, 200];\n'
            'repeated_int64: 300,\n'
            'repeated_string: ["one", "two"];\n')
    text_format.Parse(text, message)
    # List entries and individually repeated entries accumulate together.
    self.assertEqual(100, message.repeated_int64[0])
    self.assertEqual(200, message.repeated_int64[1])
    self.assertEqual(300, message.repeated_int64[2])
    self.assertEqual('one', message.repeated_string[0])
    self.assertEqual('two', message.repeated_string[1])
  def testParseRepeatedMessageShortFormat(self, message_module):
    """Repeated messages may be given as a bracketed list of {...} blocks."""
    message = message_module.TestAllTypes()
    text = ('repeated_nested_message: [{bb: 100}, {bb: 200}],\n'
            'repeated_nested_message: {bb: 300}\n'
            'repeated_nested_message [{bb: 400}];\n')
    text_format.Parse(text, message)
    self.assertEqual(100, message.repeated_nested_message[0].bb)
    self.assertEqual(200, message.repeated_nested_message[1].bb)
    self.assertEqual(300, message.repeated_nested_message[2].bb)
    self.assertEqual(400, message.repeated_nested_message[3].bb)
def testParseEmptyText(self, message_module):
message = message_module.TestAllTypes()
text = ''
text_format.Parse(text, message)
self.assertEqual(message_module.TestAllTypes(), message)
def testParseInvalidUtf8(self, message_module):
message = message_module.TestAllTypes()
text = 'repeated_string: "\\xc3\\xc3"'
self.assertRaises(text_format.ParseError, text_format.Parse, text, message)
  def testParseSingleWord(self, message_module):
    """A bare identifier that is not a field name raises ParseError."""
    message = message_module.TestAllTypes()
    text = 'foo'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"foo".'), text_format.Parse, text, message)
  def testParseUnknownField(self, message_module):
    """An unknown field name raises ParseError with line/column info."""
    message = message_module.TestAllTypes()
    text = 'unknown_field: 8\n'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"unknown_field".'), text_format.Parse, text, message)
  def testParseBadEnumValue(self, message_module):
    """Unknown enum names and out-of-range enum numbers raise ParseError."""
    # Unknown symbolic enum value.
    message = message_module.TestAllTypes()
    text = 'optional_nested_enum: BARR'
    six.assertRaisesRegex(self, text_format.ParseError,
                          (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                           r'has no value named BARR.'), text_format.Parse,
                          text, message)
    # Numeric value that maps to no declared enum member.
    message = message_module.TestAllTypes()
    text = 'optional_nested_enum: 100'
    six.assertRaisesRegex(self, text_format.ParseError,
                          (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                           r'has no value with number 100.'), text_format.Parse,
                          text, message)
  def testParseBadIntValue(self, message_module):
    """A non-numeric token for an integer field raises ParseError."""
    message = message_module.TestAllTypes()
    text = 'optional_int32: bork'
    six.assertRaisesRegex(self, text_format.ParseError,
                          ('1:17 : Couldn\'t parse integer: bork'),
                          text_format.Parse, text, message)
  def testParseStringFieldUnescape(self, message_module):
    """Each level of quoting resolves exactly one layer of backslash escapes."""
    message = message_module.TestAllTypes()
    text = r'''repeated_string: "\xf\x62"
               repeated_string: "\\xf\\x62"
               repeated_string: "\\\xf\\\x62"
               repeated_string: "\\\\xf\\\\x62"
               repeated_string: "\\\\\xf\\\\\x62"
               repeated_string: "\x5cx20"'''
    text_format.Parse(text, message)
    # SLASH keeps the expected values legible below.
    SLASH = '\\'
    self.assertEqual('\x0fb', message.repeated_string[0])
    self.assertEqual(SLASH + 'xf' + SLASH + 'x62', message.repeated_string[1])
    self.assertEqual(SLASH + '\x0f' + SLASH + 'b', message.repeated_string[2])
    self.assertEqual(SLASH + SLASH + 'xf' + SLASH + SLASH + 'x62',
                     message.repeated_string[3])
    self.assertEqual(SLASH + SLASH + '\x0f' + SLASH + SLASH + 'b',
                     message.repeated_string[4])
    self.assertEqual(SLASH + 'x20', message.repeated_string[5])
def testMergeDuplicateScalars(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
r = text_format.Merge(text, message)
self.assertIs(r, message)
self.assertEqual(67, message.optional_int32)
def testMergeDuplicateNestedMessageScalars(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
r = text_format.Merge(text, message)
self.assertTrue(r is message)
self.assertEqual(2, message.optional_nested_message.bb)
def testParseOneof(self, message_module):
m = message_module.TestAllTypes()
m.oneof_uint32 = 11
m2 = message_module.TestAllTypes()
text_format.Parse(text_format.MessageToString(m), m2)
self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field'))
  def testParseMultipleOneof(self, message_module):
    """Setting two members of one oneof: proto2 rejects it, proto3 keeps last."""
    m_string = '\n'.join(['oneof_uint32: 11', 'oneof_string: "foo"'])
    m2 = message_module.TestAllTypes()
    if message_module is unittest_pb2:
      # The proto2 parser raises when a second member of the same oneof
      # appears in the input.
      with self.assertRaisesRegex(text_format.ParseError,
                                  ' is specified along with field '):
        text_format.Parse(m_string, m2)
    else:
      # The proto3 parser accepts it; the last field wins.
      text_format.Parse(m_string, m2)
      self.assertEqual('oneof_string', m2.WhichOneof('oneof_field'))
# These are tests that aren't fundamentally specific to proto2, but are at
# the moment because of differences between the proto2 and proto3 test schemas.
# Ideally the schemas would be made more similar so these tests could pass.
class OnlyWorksWithProto2RightNowTests(TextFormatBase):
  """Golden-file and map tests that currently depend on the proto2 schema."""
  def testPrintAllFieldsPointy(self):
    """Prints every field with pointy brackets; compares to a golden file."""
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, pointy_brackets=True)),
        'text_format_unittest_data_pointy_oneof.txt')
  def testParseGolden(self):
    """Parses the golden file and checks it equals a fully set message."""
    golden_text = '\n'.join(self.ReadGolden(
        'text_format_unittest_data_oneof_implemented.txt'))
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.Parse(golden_text, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)
  def testPrintAllFields(self):
    """Prints every field with curly brackets; compares to a golden file."""
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'text_format_unittest_data_oneof_implemented.txt')
  def testPrintInIndexOrder(self):
    """use_index_order prints fields in declaration order, not number order."""
    message = unittest_pb2.TestFieldOrderings()
    message.my_string = '115'
    message.my_int = 101
    message.my_float = 111
    message.optional_nested_message.oo = 0
    message.optional_nested_message.bb = 1
    # Declaration (index) order.
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, use_index_order=True)),
        'my_string: \"115\"\nmy_int: 101\nmy_float: 111\n'
        'optional_nested_message {\n  oo: 0\n  bb: 1\n}\n')
    # Default: field-number order.
    self.CompareToGoldenText(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'my_int: 101\nmy_string: \"115\"\nmy_float: 111\n'
        'optional_nested_message {\n  bb: 1\n  oo: 0\n}\n')
  def testMergeLinesGolden(self):
    """MergeLines over the golden file reconstructs a fully set message."""
    opened = self.ReadGolden('text_format_unittest_data_oneof_implemented.txt')
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.MergeLines(opened, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)
  def testParseLinesGolden(self):
    """ParseLines over the golden file reconstructs a fully set message."""
    opened = self.ReadGolden('text_format_unittest_data_oneof_implemented.txt')
    parsed_message = unittest_pb2.TestAllTypes()
    r = text_format.ParseLines(opened, parsed_message)
    self.assertIs(r, parsed_message)
    message = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(message)
    self.assertEqual(message, parsed_message)
  def testPrintMap(self):
    """Map fields print as repeated key/value entry messages."""
    message = map_unittest_pb2.TestMap()
    message.map_int32_int32[-123] = -456
    message.map_int64_int64[-2**33] = -2**34
    message.map_uint32_uint32[123] = 456
    message.map_uint64_uint64[2**33] = 2**34
    message.map_string_string['abc'] = '123'
    message.map_int32_foreign_message[111].c = 5
    # Maps are serialized to text format using their underlying repeated
    # representation.
    self.CompareToGoldenText(
        text_format.MessageToString(message), 'map_int32_int32 {\n'
        '  key: -123\n'
        '  value: -456\n'
        '}\n'
        'map_int64_int64 {\n'
        '  key: -8589934592\n'
        '  value: -17179869184\n'
        '}\n'
        'map_uint32_uint32 {\n'
        '  key: 123\n'
        '  value: 456\n'
        '}\n'
        'map_uint64_uint64 {\n'
        '  key: 8589934592\n'
        '  value: 17179869184\n'
        '}\n'
        'map_string_string {\n'
        '  key: "abc"\n'
        '  value: "123"\n'
        '}\n'
        'map_int32_foreign_message {\n'
        '  key: 111\n'
        '  value {\n'
        '    c: 5\n'
        '  }\n'
        '}\n')
  def testMapOrderEnforcement(self):
    """Entries serialize in sorted key order regardless of insertion order."""
    message = map_unittest_pb2.TestMap()
    for letter in string.ascii_uppercase[13:26]:
      message.map_string_string[letter] = 'dummy'
    for letter in reversed(string.ascii_uppercase[0:13]):
      message.map_string_string[letter] = 'dummy'
    golden = ''.join(('map_string_string {\n  key: "%c"\n  value: "dummy"\n}\n'
                      % (letter,) for letter in string.ascii_uppercase))
    self.CompareToGoldenText(text_format.MessageToString(message), golden)
  def testMapOrderSemantics(self):
    """Round-trips map golden data, adjusting for C++/Python print differences."""
    golden_lines = self.ReadGolden('map_test_data.txt')
    # The C++ implementation emits defaulted-value fields, while the Python
    # implementation does not.  Adjusting for this is awkward, but it is
    # valuable to test against a common golden file.
    line_blacklist = ('  key: 0\n', '  value: 0\n', '  key: false\n',
                      '  value: false\n')
    golden_lines = [line for line in golden_lines if line not in line_blacklist]
    message = map_unittest_pb2.TestMap()
    text_format.ParseLines(golden_lines, message)
    candidate = text_format.MessageToString(message)
    # The Python implementation emits "1.0" for the double value that the C++
    # implementation emits as "1".
    candidate = candidate.replace('1.0', '1', 2)
    self.assertMultiLineEqual(candidate, ''.join(golden_lines))
# Tests of proto2-only features (MessageSet, extensions, etc.).
class Proto2Tests(TextFormatBase):
  """Tests of proto2-only features: MessageSet, extensions, groups, maps."""
  def testPrintMessageSet(self):
    """MessageSet extensions print with bracketed full type names."""
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    self.CompareToGoldenText(
        text_format.MessageToString(message), 'message_set {\n'
        '  [protobuf_unittest.TestMessageSetExtension1] {\n'
        '    i: 23\n'
        '  }\n'
        '  [protobuf_unittest.TestMessageSetExtension2] {\n'
        '    str: \"foo\"\n'
        '  }\n'
        '}\n')
    message = message_set_extensions_pb2.TestMessageSet()
    ext = message_set_extensions_pb2.message_set_extension3
    message.Extensions[ext].text = 'bar'
    self.CompareToGoldenText(
        text_format.MessageToString(message),
        '[google.protobuf.internal.TestMessageSetExtension3] {\n'
        '  text: \"bar\"\n'
        '}\n')
  def testPrintMessageSetByFieldNumber(self):
    """use_field_number replaces names with raw field numbers."""
    out = text_format.TextWriter(False)
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    text_format.PrintMessage(message, out, use_field_number=True)
    self.CompareToGoldenText(out.getvalue(), '1 {\n'
                             '  1545008 {\n'
                             '    15: 23\n'
                             '  }\n'
                             '  1547769 {\n'
                             '    25: \"foo\"\n'
                             '  }\n'
                             '}\n')
    out.close()
  def testPrintMessageSetAsOneLine(self):
    """as_one_line collapses a MessageSet onto a single line."""
    message = unittest_mset_pb2.TestMessageSetContainer()
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    message.message_set.Extensions[ext1].i = 23
    message.message_set.Extensions[ext2].str = 'foo'
    self.CompareToGoldenText(
        text_format.MessageToString(message, as_one_line=True),
        'message_set {'
        ' [protobuf_unittest.TestMessageSetExtension1] {'
        ' i: 23'
        ' }'
        ' [protobuf_unittest.TestMessageSetExtension2] {'
        ' str: \"foo\"'
        ' }'
        ' }')
  def testParseMessageSet(self):
    """Parses bracketed extension syntax back into a MessageSet."""
    message = unittest_pb2.TestAllTypes()
    text = ('repeated_uint64: 1\n' 'repeated_uint64: 2\n')
    text_format.Parse(text, message)
    self.assertEqual(1, message.repeated_uint64[0])
    self.assertEqual(2, message.repeated_uint64[1])
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [protobuf_unittest.TestMessageSetExtension1] {\n'
            '    i: 23\n'
            '  }\n'
            '  [protobuf_unittest.TestMessageSetExtension2] {\n'
            '    str: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)
  def testParseMessageByFieldNumber(self):
    """allow_field_number=True lets the parser accept raw field numbers."""
    message = unittest_pb2.TestAllTypes()
    text = ('34: 1\n' 'repeated_uint64: 2\n')
    text_format.Parse(text, message, allow_field_number=True)
    self.assertEqual(1, message.repeated_uint64[0])
    self.assertEqual(2, message.repeated_uint64[1])
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('1 {\n'
            '  1545008 {\n'
            '    15: 23\n'
            '  }\n'
            '  1547769 {\n'
            '    25: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message, allow_field_number=True)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)
    # Can't parse field number without set allow_field_number=True.
    message = unittest_pb2.TestAllTypes()
    text = '34:1\n'
    six.assertRaisesRegex(self, text_format.ParseError, (
        r'1:1 : Message type "\w+.TestAllTypes" has no field named '
        r'"34".'), text_format.Parse, text, message)
    # Can't parse if field number is not found.
    text = '1234:1\n'
    six.assertRaisesRegex(
        self,
        text_format.ParseError,
        (r'1:1 : Message type "\w+.TestAllTypes" has no field named '
         r'"1234".'),
        text_format.Parse,
        text,
        message,
        allow_field_number=True)
  def testPrintAllExtensions(self):
    """Prints a message with every extension set; compares to a golden file."""
    message = unittest_pb2.TestAllExtensions()
    test_util.SetAllExtensions(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(message)),
        'text_format_unittest_extensions_data.txt')
  def testPrintAllExtensionsPointy(self):
    """Same as above but with pointy brackets."""
    message = unittest_pb2.TestAllExtensions()
    test_util.SetAllExtensions(message)
    self.CompareToGoldenFile(
        self.RemoveRedundantZeros(text_format.MessageToString(
            message, pointy_brackets=True)),
        'text_format_unittest_extensions_data_pointy.txt')
  def testParseGoldenExtensions(self):
    """Parses the extensions golden file into an equal message."""
    golden_text = '\n'.join(self.ReadGolden(
        'text_format_unittest_extensions_data.txt'))
    parsed_message = unittest_pb2.TestAllExtensions()
    text_format.Parse(golden_text, parsed_message)
    message = unittest_pb2.TestAllExtensions()
    test_util.SetAllExtensions(message)
    self.assertEqual(message, parsed_message)
  def testParseAllExtensions(self):
    """Round-trips a message with all extensions through text format."""
    message = unittest_pb2.TestAllExtensions()
    test_util.SetAllExtensions(message)
    ascii_text = text_format.MessageToString(message)
    parsed_message = unittest_pb2.TestAllExtensions()
    text_format.Parse(ascii_text, parsed_message)
    self.assertEqual(message, parsed_message)
  def testParseAllowedUnknownExtension(self):
    """allow_unknown_extension=True skips unknown extensions but still
    reports malformed content inside them."""
    # Skip over unknown extension correctly.
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [unknown_extension] {\n'
            '    i: 23\n'
            '    bin: "\xe0"'
            '    [nested_unknown_ext]: {\n'
            '      i: 23\n'
            '      test: "test_string"\n'
            '      floaty_float: -0.315\n'
            '      num: -inf\n'
            '      multiline_str: "abc"\n'
            '          "def"\n'
            '          "xyz."\n'
            '      [nested_unknown_ext]: <\n'
            '        i: 23\n'
            '        i: 24\n'
            '        pointfloat: .3\n'
            '        test: "test_string"\n'
            '        floaty_float: -0.315\n'
            '        num: -inf\n'
            '        long_string: "test" "test2" \n'
            '      >\n'
            '    }\n'
            '  }\n'
            '  [unknown_extension]: 5\n'
            '}\n')
    text_format.Parse(text, message, allow_unknown_extension=True)
    golden = 'message_set {\n}\n'
    self.CompareToGoldenText(text_format.MessageToString(message), golden)
    # Catch parse errors in unknown extension.
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    i:\n'  # Missing value.
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: }',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    str: "malformed string\n'  # Missing closing quote.
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: "',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [unknown_extension] {\n'
                 '    str: "malformed\n multiline\n string\n'
                 '  }\n'
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          'Invalid field value: "',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  [malformed_extension] <\n'
                 '    i: -5\n'
                 '  \n'  # Missing '>' here.
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          '5:1 : Expected ">".',
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    # Don't allow unknown fields with allow_unknown_extension=True.
    message = unittest_mset_pb2.TestMessageSetContainer()
    malformed = ('message_set {\n'
                 '  unknown_field: true\n'
                 '  \n'  # Missing '>' here.
                 '}\n')
    six.assertRaisesRegex(self,
                          text_format.ParseError,
                          ('2:3 : Message type '
                           '"proto2_wireformat_unittest.TestMessageSet" has no'
                           ' field named "unknown_field".'),
                          text_format.Parse,
                          malformed,
                          message,
                          allow_unknown_extension=True)
    # Parse known extension correctly.
    message = unittest_mset_pb2.TestMessageSetContainer()
    text = ('message_set {\n'
            '  [protobuf_unittest.TestMessageSetExtension1] {\n'
            '    i: 23\n'
            '  }\n'
            '  [protobuf_unittest.TestMessageSetExtension2] {\n'
            '    str: \"foo\"\n'
            '  }\n'
            '}\n')
    text_format.Parse(text, message, allow_unknown_extension=True)
    ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
    ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
    self.assertEqual(23, message.message_set.Extensions[ext1].i)
    self.assertEqual('foo', message.message_set.Extensions[ext2].str)
  def testParseBadExtension(self):
    """Unregistered extensions and extensions on non-extendable types fail."""
    message = unittest_pb2.TestAllExtensions()
    text = '[unknown_extension]: 8\n'
    six.assertRaisesRegex(self, text_format.ParseError,
                          '1:2 : Extension "unknown_extension" not registered.',
                          text_format.Parse, text, message)
    message = unittest_pb2.TestAllTypes()
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
        'extensions.'), text_format.Parse, text, message)
  def testMergeDuplicateExtensionScalars(self):
    """Merge keeps the last of two duplicate extension scalar values."""
    message = unittest_pb2.TestAllExtensions()
    text = ('[protobuf_unittest.optional_int32_extension]: 42 '
            '[protobuf_unittest.optional_int32_extension]: 67')
    text_format.Merge(text, message)
    self.assertEqual(67,
                     message.Extensions[unittest_pb2.optional_int32_extension])
  def testParseDuplicateExtensionScalars(self):
    """Parse (unlike Merge) rejects a duplicated extension scalar."""
    message = unittest_pb2.TestAllExtensions()
    text = ('[protobuf_unittest.optional_int32_extension]: 42 '
            '[protobuf_unittest.optional_int32_extension]: 67')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:96 : Message type "protobuf_unittest.TestAllExtensions" '
        'should not have multiple '
        '"protobuf_unittest.optional_int32_extension" extensions.'),
                          text_format.Parse, text, message)
  def testParseDuplicateNestedMessageScalars(self):
    """Parse rejects a duplicated scalar inside a nested message."""
    message = unittest_pb2.TestAllTypes()
    text = ('optional_nested_message { bb: 1 } '
            'optional_nested_message { bb: 2 }')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
        'should not have multiple "bb" fields.'), text_format.Parse, text,
                          message)
  def testParseDuplicateScalars(self):
    """Parse rejects a duplicated singular scalar field."""
    message = unittest_pb2.TestAllTypes()
    text = ('optional_int32: 42 ' 'optional_int32: 67')
    six.assertRaisesRegex(self, text_format.ParseError, (
        '1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
        'have multiple "optional_int32" fields.'), text_format.Parse, text,
                          message)
  def testParseGroupNotClosed(self):
    """An unterminated group raises ParseError naming the expected bracket."""
    message = unittest_pb2.TestAllTypes()
    text = 'RepeatedGroup: <'
    six.assertRaisesRegex(self, text_format.ParseError, '1:16 : Expected ">".',
                          text_format.Parse, text, message)
    text = 'RepeatedGroup: {'
    six.assertRaisesRegex(self, text_format.ParseError, '1:16 : Expected "}".',
                          text_format.Parse, text, message)
  def testParseEmptyGroup(self):
    """An empty group body still marks the group field as present."""
    message = unittest_pb2.TestAllTypes()
    text = 'OptionalGroup: {}'
    text_format.Parse(text, message)
    self.assertTrue(message.HasField('optionalgroup'))
    message.Clear()
    message = unittest_pb2.TestAllTypes()
    text = 'OptionalGroup: <>'
    text_format.Parse(text, message)
    self.assertTrue(message.HasField('optionalgroup'))
  # Maps aren't really proto2-only, but our test schema only has maps for
  # proto2.
  def testParseMap(self):
    """Parses the repeated key/value entry representation of map fields."""
    text = ('map_int32_int32 {\n'
            '  key: -123\n'
            '  value: -456\n'
            '}\n'
            'map_int64_int64 {\n'
            '  key: -8589934592\n'
            '  value: -17179869184\n'
            '}\n'
            'map_uint32_uint32 {\n'
            '  key: 123\n'
            '  value: 456\n'
            '}\n'
            'map_uint64_uint64 {\n'
            '  key: 8589934592\n'
            '  value: 17179869184\n'
            '}\n'
            'map_string_string {\n'
            '  key: "abc"\n'
            '  value: "123"\n'
            '}\n'
            'map_int32_foreign_message {\n'
            '  key: 111\n'
            '  value {\n'
            '    c: 5\n'
            '  }\n'
            '}\n')
    message = map_unittest_pb2.TestMap()
    text_format.Parse(text, message)
    self.assertEqual(-456, message.map_int32_int32[-123])
    self.assertEqual(-2**34, message.map_int64_int64[-2**33])
    self.assertEqual(456, message.map_uint32_uint32[123])
    self.assertEqual(2**34, message.map_uint64_uint64[2**33])
    self.assertEqual('123', message.map_string_string['abc'])
    self.assertEqual(5, message.map_int32_foreign_message[111].c)
class Proto3Tests(unittest.TestCase):
  """Tests for printing and parsing expanded google.protobuf.Any fields."""
  def testPrintMessageExpandAny(self):
    """With a descriptor pool, Any prints as a bracketed expanded message."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string"\n'
        '  }\n'
        '}\n')
  def testPrintMessageExpandAnyRepeated(self):
    """Each element of a repeated Any field expands independently."""
    packed_message = unittest_pb2.OneString()
    message = any_test_pb2.TestAny()
    packed_message.data = 'string0'
    message.repeated_any_value.add().Pack(packed_message)
    packed_message.data = 'string1'
    message.repeated_any_value.add().Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    descriptor_pool=descriptor_pool.Default()),
        'repeated_any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string0"\n'
        '  }\n'
        '}\n'
        'repeated_any_value {\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
        '    data: "string1"\n'
        '  }\n'
        '}\n')
  def testPrintMessageExpandAnyNoDescriptorPool(self):
    """Without a pool, Any prints its raw type_url and serialized value."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message, descriptor_pool=None),
        'any_value {\n'
        '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
        '  value: "\\n\\006string"\n'
        '}\n')
  def testPrintMessageExpandAnyDescriptorPoolMissingType(self):
    """If the pool lacks the packed type, Any falls back to the raw form."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    empty_pool = descriptor_pool.DescriptorPool()
    self.assertEqual(
        text_format.MessageToString(message, descriptor_pool=empty_pool),
        'any_value {\n'
        '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
        '  value: "\\n\\006string"\n'
        '}\n')
  def testPrintMessageExpandAnyPointyBrackets(self):
    """Expanded Any respects the pointy_brackets option."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    pointy_brackets=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value <\n'
        '  [type.googleapis.com/protobuf_unittest.OneString] <\n'
        '    data: "string"\n'
        '  >\n'
        '>\n')
  def testPrintMessageExpandAnyAsOneLine(self):
    """Expanded Any respects the as_one_line option."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    as_one_line=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value {'
        ' [type.googleapis.com/protobuf_unittest.OneString]'
        ' { data: "string" } '
        '}')
  def testPrintMessageExpandAnyAsOneLinePointyBrackets(self):
    """Expanded Any with both as_one_line and pointy_brackets."""
    packed_message = unittest_pb2.OneString()
    packed_message.data = 'string'
    message = any_test_pb2.TestAny()
    message.any_value.Pack(packed_message)
    self.assertEqual(
        text_format.MessageToString(message,
                                    as_one_line=True,
                                    pointy_brackets=True,
                                    descriptor_pool=descriptor_pool.Default()),
        'any_value <'
        ' [type.googleapis.com/protobuf_unittest.OneString]'
        ' < data: "string" > '
        '>')
  def testMergeExpandedAny(self):
    """Merge re-packs an expanded Any back into the field."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
  def testMergeExpandedAnyRepeated(self):
    """Merge handles several expanded Any entries in a repeated field."""
    message = any_test_pb2.TestAny()
    text = ('repeated_any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string0"\n'
            '  }\n'
            '}\n'
            'repeated_any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string1"\n'
            '  }\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.repeated_any_value[0].Unpack(packed_message)
    self.assertEqual('string0', packed_message.data)
    message.repeated_any_value[1].Unpack(packed_message)
    self.assertEqual('string1', packed_message.data)
  def testMergeExpandedAnyPointyBrackets(self):
    """Merge accepts pointy brackets around the expanded payload."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] <\n'
            '    data: "string"\n'
            '  >\n'
            '}\n')
    text_format.Merge(text, message, descriptor_pool=descriptor_pool.Default())
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
  def testMergeExpandedAnyNoDescriptorPool(self):
    """Merging an expanded Any without a pool raises ParseError."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    with self.assertRaises(text_format.ParseError) as e:
      text_format.Merge(text, message, descriptor_pool=None)
    self.assertEqual(str(e.exception),
                     'Descriptor pool required to parse expanded Any field')
  def testMergeExpandedAnyDescriptorPoolMissingType(self):
    """Merging fails with a clear error when the pool lacks the type."""
    message = any_test_pb2.TestAny()
    text = ('any_value {\n'
            '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
            '    data: "string"\n'
            '  }\n'
            '}\n')
    with self.assertRaises(text_format.ParseError) as e:
      empty_pool = descriptor_pool.DescriptorPool()
      text_format.Merge(text, message, descriptor_pool=empty_pool)
    self.assertEqual(
        str(e.exception),
        'Type protobuf_unittest.OneString not found in descriptor pool')
  def testMergeUnexpandedAny(self):
    """The raw type_url/value form of Any merges without a pool."""
    text = ('any_value {\n'
            '  type_url: "type.googleapis.com/protobuf_unittest.OneString"\n'
            '  value: "\\n\\006string"\n'
            '}\n')
    message = any_test_pb2.TestAny()
    text_format.Merge(text, message)
    packed_message = unittest_pb2.OneString()
    message.any_value.Unpack(packed_message)
    self.assertEqual('string', packed_message.data)
class TokenizerTest(unittest.TestCase):
  def testSimpleTokenCases(self):
    """Drives the tokenizer across identifiers, numbers, strings and bools."""
    text = ('identifier1:"string1"\n \n\n'
            'identifier2 : \n \n123 \n identifier3 :\'string\'\n'
            'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n'
            'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
            'ID9: 22 ID10: -111111111111111111 ID11: -22\n'
            'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f '
            'false_bool: 0 true_BOOL:t \n true_bool1: 1 false_BOOL1:f ')
    tokenizer = text_format.Tokenizer(text.splitlines())
    # Expectation table: either a bare punctuation string to match literally,
    # or a (consume_method, expected_value) pair.
    methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), ':',
               (tokenizer.ConsumeString, 'string1'),
               (tokenizer.ConsumeIdentifier, 'identifier2'), ':',
               (tokenizer.ConsumeInteger, 123),
               (tokenizer.ConsumeIdentifier, 'identifier3'), ':',
               (tokenizer.ConsumeString, 'string'),
               (tokenizer.ConsumeIdentifier, 'identifiER_4'), ':',
               (tokenizer.ConsumeFloat, 1.1e+2),
               (tokenizer.ConsumeIdentifier, 'ID5'), ':',
               (tokenizer.ConsumeFloat, -0.23),
               (tokenizer.ConsumeIdentifier, 'ID6'), ':',
               (tokenizer.ConsumeString, 'aaaa\'bbbb'),
               (tokenizer.ConsumeIdentifier, 'ID7'), ':',
               (tokenizer.ConsumeString, 'aa\"bb'),
               (tokenizer.ConsumeIdentifier, 'ID8'), ':', '{',
               (tokenizer.ConsumeIdentifier, 'A'), ':',
               (tokenizer.ConsumeFloat, float('inf')),
               (tokenizer.ConsumeIdentifier, 'B'), ':',
               (tokenizer.ConsumeFloat, -float('inf')),
               (tokenizer.ConsumeIdentifier, 'C'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'D'), ':',
               (tokenizer.ConsumeBool, False), '}',
               (tokenizer.ConsumeIdentifier, 'ID9'), ':',
               (tokenizer.ConsumeInteger, 22),
               (tokenizer.ConsumeIdentifier, 'ID10'), ':',
               (tokenizer.ConsumeInteger, -111111111111111111),
               (tokenizer.ConsumeIdentifier, 'ID11'), ':',
               (tokenizer.ConsumeInteger, -22),
               (tokenizer.ConsumeIdentifier, 'ID12'), ':',
               (tokenizer.ConsumeInteger, 2222222222222222222),
               (tokenizer.ConsumeIdentifier, 'ID13'), ':',
               (tokenizer.ConsumeFloat, 1.23456),
               (tokenizer.ConsumeIdentifier, 'ID14'), ':',
               (tokenizer.ConsumeFloat, 1.2e+2),
               (tokenizer.ConsumeIdentifier, 'false_bool'), ':',
               (tokenizer.ConsumeBool, False),
               (tokenizer.ConsumeIdentifier, 'true_BOOL'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'true_bool1'), ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'false_BOOL1'), ':',
               (tokenizer.ConsumeBool, False)]
    i = 0
    while not tokenizer.AtEnd():
      m = methods[i]
      if isinstance(m, str):
        # Literal punctuation token.
        token = tokenizer.token
        self.assertEqual(token, m)
        tokenizer.NextToken()
      else:
        # Consume method plus its expected return value.
        self.assertEqual(m[1], m[0]())
      i += 1
  def testConsumeAbstractIntegers(self):
    """ConsumeInteger accepts any-width signed values, plus '0' and '-0'."""
    # ConsumeInteger has no width or sign restriction, so values beyond the
    # uint32/int64 limits and negative numbers all parse successfully.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(-1, tokenizer.ConsumeInteger())
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInteger())
    self.assertEqual(int64_max + 1, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())
    # '0' special cases.
    text = '-0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertEqual(0, tokenizer.ConsumeInteger())
    self.assertTrue(tokenizer.AtEnd())
  def testConsumeIntegers(self):
    """Width/sign-restricted consume helpers reject out-of-range values."""
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format.Tokenizer(text.splitlines())
    # -1 is not a valid unsigned value but is a valid int32.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint64, tokenizer)
    self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))
    # uint32_max + 1 fits neither uint32 nor int32, but fits int64.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeUint32, tokenizer)
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt32, tokenizer)
    self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))
    # int64_max + 1 only fits uint64.
    self.assertRaises(text_format.ParseError,
                      text_format._ConsumeInt64, tokenizer)
    self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())
    # '-0' and '0' parse as 0 for both unsigned widths.
    text = '-0 -0 0 0'
    tokenizer = text_format.Tokenizer(text.splitlines())
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
    self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
    self.assertTrue(tokenizer.AtEnd())
def testConsumeByteString(self):
text = '"string1\''
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
text = 'string1"'
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
text = '\n"\\xt"'
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
text = '\n"\\"'
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
text = '\n"\\x"'
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)
def testConsumeBool(self):
text = 'not-a-bool'
tokenizer = text_format.Tokenizer(text.splitlines())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
def testSkipComment(self):
tokenizer = text_format.Tokenizer('# some comment'.splitlines())
self.assertTrue(tokenizer.AtEnd())
self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
def testConsumeComment(self):
tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
skip_comments=False)
self.assertFalse(tokenizer.AtEnd())
self.assertEqual('# some comment', tokenizer.ConsumeComment())
self.assertTrue(tokenizer.AtEnd())
def testConsumeTwoComments(self):
text = '# some comment\n# another comment'
tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
self.assertEqual('# some comment', tokenizer.ConsumeComment())
self.assertFalse(tokenizer.AtEnd())
self.assertEqual('# another comment', tokenizer.ConsumeComment())
self.assertTrue(tokenizer.AtEnd())
def testConsumeTrailingComment(self):
text = 'some_number: 4\n# some comment'
tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
self.assertEqual(tokenizer.token, ':')
tokenizer.NextToken()
self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
self.assertEqual(4, tokenizer.ConsumeInteger())
self.assertFalse(tokenizer.AtEnd())
self.assertEqual('# some comment', tokenizer.ConsumeComment())
self.assertTrue(tokenizer.AtEnd())
# Allow running this test module directly with `python <module>`.
if __name__ == '__main__':
  unittest.main()
| true | true |
1c3d489c9cdb8fe48bef6bd7695b7af927df066f | 1,026 | py | Python | adoptions/views.py | Pruthviraj98/Wisdompets_linkedInLearning | 8bfcce297245503a543157eca2d23448fce3bf6f | [
"MIT"
] | null | null | null | adoptions/views.py | Pruthviraj98/Wisdompets_linkedInLearning | 8bfcce297245503a543157eca2d23448fce3bf6f | [
"MIT"
] | null | null | null | adoptions/views.py | Pruthviraj98/Wisdompets_linkedInLearning | 8bfcce297245503a543157eca2d23448fce3bf6f | [
"MIT"
] | null | null | null | from django.shortcuts import render
#HttpRespose class creates the response objects that views expected to return
from django.http import HttpResponse, Http404
#we can use thi above function instead of the render function because in the case of render functon, it needs template files to execute.
#we need the home view to show up some pets and their details. So import the model of Pet first
from .models import Pet
def home(request):
    """Landing page: render every pet currently in the database."""
    # A single ORM query; the queryset is evaluated lazily by the template.
    context = {'pets': Pet.objects.all()}
    return render(request, 'home.html', context)
def pet_detail(request, pet_id):
    """Render the detail page for a single Pet; 404 when the id is unknown."""
    try:
        pet = Pet.objects.get(id=pet_id)
    except Pet.DoesNotExist:
        # Turn the missing row into a proper HTTP 404 response.
        raise Http404('pet not found')
    return render(request, 'pet_detail.html', {'pet': pet})
from django.http import HttpResponse, Http404
from .models import Pet
def home(request):
    """List view: fetch every Pet and render the landing page."""
    pets = Pet.objects.all()
    return render(request, 'home.html', {'pets':pets,})
def pet_detail(request, pet_id):
    """Detail view for one Pet; raises Http404 when the id does not exist."""
    try:
        pet = Pet.objects.get(id=pet_id)
    except Pet.DoesNotExist:
        raise Http404('pet not found')
return render(request, 'pet_detail.html', {'pet': pet}) | true | true |
1c3d49b00167dece0fbe4e234e4f4c6dd16eac8e | 640 | py | Python | capybre/__init__.py | digitaltembo/capybre | 0066436ce6670436569685f8fdf82c61ec0ea1f5 | [
"MIT"
] | 1 | 2022-01-24T03:42:35.000Z | 2022-01-24T03:42:35.000Z | capybre/__init__.py | digitaltembo/capybre | 0066436ce6670436569685f8fdf82c61ec0ea1f5 | [
"MIT"
] | null | null | null | capybre/__init__.py | digitaltembo/capybre | 0066436ce6670436569685f8fdf82c61ec0ea1f5 | [
"MIT"
] | 1 | 2021-01-24T19:45:54.000Z | 2021-01-24T19:45:54.000Z | from .convert import convert, converted_fileobj
from .ebook_format import EbookFormat
from .metadata import (
Metadata,
extract_metadata,
extract_metadata_map,
extract_cover,
extracted_cover_fileobj
)
from .fetch_metadata import (
fetch_metadata,
fetch_metadata_map,
fetch_cover,
fetched_metadata_and_cover
)
# Public API of the package, re-exported from the submodules imported above
# (controls `from capybre import *` and documents the supported surface).
__all__ = [
    'convert',
    'converted_fileobj',
    'EbookFormat',
    'extract_cover',
    'extract_metadata',
    'extract_metadata_map',
    'extracted_cover_fileobj',
    'fetch_cover',
    'fetch_metadata',
    'fetch_metadata_map',
    'fetched_metadata_and_cover',
    'Metadata',
]
| 20.645161 | 47 | 0.7125 | from .convert import convert, converted_fileobj
from .ebook_format import EbookFormat
from .metadata import (
Metadata,
extract_metadata,
extract_metadata_map,
extract_cover,
extracted_cover_fileobj
)
from .fetch_metadata import (
fetch_metadata,
fetch_metadata_map,
fetch_cover,
fetched_metadata_and_cover
)
# Names exported at package level; mirrors the imports above.
__all__ = [
    'convert',
    'converted_fileobj',
    'EbookFormat',
    'extract_cover',
    'extract_metadata',
    'extract_metadata_map',
    'extracted_cover_fileobj',
    'fetch_cover',
    'fetch_metadata',
    'fetch_metadata_map',
    'fetched_metadata_and_cover',
    'Metadata',
]
| true | true |
1c3d4a7ac2a9ff1f2451b6a1ed8a8d891d361ccc | 734 | py | Python | configs/gdrn/lmCropBlenderSO/resnest50d_a6_AugCosyAAEGray_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_SO/resnest50d_a6_AugCosyAAEGary_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_lamp.py | THU-DA-6D-Pose-Group/self6dpp | c267cfa55e440e212136a5e9940598720fa21d16 | [
"Apache-2.0"
] | 33 | 2021-12-15T07:11:47.000Z | 2022-03-29T08:58:32.000Z | configs/gdrn/lmCropBlenderSO/resnest50d_a6_AugCosyAAEGray_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_SO/resnest50d_a6_AugCosyAAEGary_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_lamp.py | THU-DA-6D-Pose-Group/self6dpp | c267cfa55e440e212136a5e9940598720fa21d16 | [
"Apache-2.0"
] | 3 | 2021-12-15T11:39:54.000Z | 2022-03-29T07:24:23.000Z | configs/gdrn/lmCropBlenderSO/resnest50d_a6_AugCosyAAEGray_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_SO/resnest50d_a6_AugCosyAAEGary_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_lamp.py | THU-DA-6D-Pose-Group/self6dpp | c267cfa55e440e212136a5e9940598720fa21d16 | [
"Apache-2.0"
] | null | null | null | _base_ = ["./resnest50d_a6_AugCosyAAEGary_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_ape.py"]
# Where checkpoints/logs for this per-object run are written.
OUTPUT_DIR = "output/gdrn/lm_crop_blender/resnest50d_a6_AugCosyAAEGray_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e/lamp"
# Train on synthetic (Blender) renders of "lamp"; evaluate on the real
# LM-crop test split for the same object.
DATASETS = dict(
    TRAIN=("lm_blender_lamp_train",),
    TEST=("lm_crop_lamp_test",),
)
# Recorded evaluation results for this config
# (rows: metric at threshold; columns: lamp score, average over 1 object).
# objects     lamp   Avg(1)
# ad_2        6.10   6.10
# ad_5       36.99  36.99
# ad_10      70.73  70.73
# rete_2     10.98  10.98
# rete_5     71.95  71.95
# rete_10    91.87  91.87
# re_2       15.45  15.45
# re_5       72.36  72.36
# re_10      93.09  93.09
# te_2       54.47  54.47
# te_5       88.62  88.62
# te_10      95.12  95.12
# proj_2      6.91   6.91
# proj_5     78.46  78.46
# proj_10    94.31  94.31
# re          5.42   5.42
# te          0.03   0.03
| 26.214286 | 118 | 0.645777 | _base_ = ["./resnest50d_a6_AugCosyAAEGary_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e_ape.py"]
# Per-object GDR-Net config for "lamp": output location plus train/test splits.
OUTPUT_DIR = "output/gdrn/lm_crop_blender/resnest50d_a6_AugCosyAAEGray_BG05_mlBCE_bboxCrop_DZI10_lm_blender_100e/lamp"
DATASETS = dict(
    TRAIN=("lm_blender_lamp_train",),
    TEST=("lm_crop_lamp_test",),
)
| true | true |
1c3d4acf43575526856e6058c374b358ba99a083 | 3,867 | py | Python | image_generation/core/gan.py | drboog/FPK | 7e79cfcede41dd7ed65987acce5a7617977fc9be | [
"MIT"
] | 1 | 2021-08-19T00:08:30.000Z | 2021-08-19T00:08:30.000Z | image_generation/core/gan.py | drboog/FPK | 7e79cfcede41dd7ed65987acce5a7617977fc9be | [
"MIT"
] | null | null | null | image_generation/core/gan.py | drboog/FPK | 7e79cfcede41dd7ed65987acce5a7617977fc9be | [
"MIT"
] | null | null | null | from .model import MMD_GAN, tf
from . import mmd
class GAN(MMD_GAN):
    """Softplus-loss GAN built on the MMD_GAN base class.

    ``dof_dim`` is forced to 1 so the critic emits a scalar score per
    sample.  When ``config.with_fp > 0`` an extra kernel-density penalty
    (the "fp" term) is added to the discriminator loss.
    """
    def __init__(self, sess, config, **kwargs):
        # One output unit: the critic produces a single logit per sample.
        config.dof_dim = 1
        super(GAN, self).__init__(sess, config, **kwargs)
    def delete_diag(self, matrix):
        """Return ``matrix`` with its main diagonal zeroed (so k_ii = 0)."""
        # NOTE(review): tf.matrix_diag/tf.matrix_diag_part is the TF1.x API
        # (tf.linalg.diag/diag_part in TF2) -- this module targets TF1.
        return matrix - tf.matrix_diag(tf.matrix_diag_part(matrix))  # return matrix, while k_ii is 0
    def mmd_loss(self, G, images):
        """Compute MMD^2 between generated ``G`` and real ``images``.

        For the learned 'imp_*' kernels, the implicit-kernel loss and kernel
        matrices are linearly blended with a plain RBF kernel using
        ``self.warm_up`` (1 -> implicit kernel only, 0 -> RBF only).

        Returns, for 'imp_*' kernels, the 9-tuple
        ``(mmd, K_XX, K_YY, K_XY, noise_norm, scale_norm,
        K_XX_sin, K_YY_sin, K_XY_sin)``; otherwise ``(mmd, K_XX, K_YY, K_XY)``.
        """
        if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
            kernel = getattr(mmd, '_%s_kernel' % self.config.kernel)
            K_XX, K_XY, K_YY, T_or_F, noise_norm, scale_norm, K_XX_sin, K_XY_sin, K_YY_sin = kernel(G, images)
            mmd_loss = mmd.mmd2([K_XX, K_XY, K_YY, T_or_F])
            kernel_2 = getattr(mmd, '_rbf_kernel')
            K_XX_, K_XY_, K_YY_, T_or_F_ = kernel_2(G, images)
            mmd_loss_ = mmd.mmd2([K_XX_, K_XY_, K_YY_, T_or_F_])
            # Warm-up blend between the implicit and the RBF kernel losses.
            mmd_loss = self.warm_up*mmd_loss + (1 - self.warm_up) * mmd_loss_
            K_XX = self.warm_up*K_XX + (1 - self.warm_up) * K_XX_
            K_XY = self.warm_up*K_XY + (1 - self.warm_up) * K_XY_
            K_YY = self.warm_up*K_YY + (1 - self.warm_up) * K_YY_
            # The "_sin" variants are blended against the same RBF matrices.
            K_XX_sin = self.warm_up*K_XX_sin + (1 - self.warm_up)*K_XX_
            K_YY_sin = self.warm_up*K_YY_sin + (1 - self.warm_up)*K_YY_
            K_XY_sin = self.warm_up*K_XY_sin + (1 - self.warm_up)*K_XY_
            return mmd_loss, K_XX, K_YY, K_XY, noise_norm, scale_norm, K_XX_sin, K_YY_sin, K_XY_sin
        else:
            kernel = getattr(mmd, '_%s_kernel' % self.config.kernel)
            K_XX, K_XY, K_YY, T_or_F = kernel(G, images)
            mmd_loss = mmd.mmd2([K_XX, K_XY, K_YY, T_or_F])
            return mmd_loss, K_XX, K_YY, K_XY
    def set_loss(self, G, images):
        """Build the generator/discriminator losses and summary ops.

        ``G`` and ``images`` are consumed directly by softplus, so they are
        presumably the critic outputs for fake and real batches (TODO
        confirm against MMD_GAN).  Base losses:
        ``d_loss = E[softplus(-D(real)) + softplus(D(fake))]`` and
        ``g_loss = E[softplus(-D(fake))]``.
        """
        self.d_loss = tf.reduce_mean(tf.nn.softplus(-images) + tf.nn.softplus(G))
        self.g_loss = tf.reduce_mean(tf.nn.softplus(-G))
        if self.config.with_fp > 0:
            # k_gg / k_ii / k_gi: kernel matrices generated-generated,
            # image-image and generated-image respectively.
            if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
                mmd_1, k_gg, k_ii, k_gi, noise_norm, scale_norm, k_gg_sin, k_ii_sin, k_gi_sin = self.mmd_loss(G, images)
                self.k_gg_sin = k_gg_sin
                self.k_ii_sin = k_ii_sin
                self.k_gi_sin = k_gi_sin
            else:
                mmd_1, k_gg, k_ii, k_gi = self.mmd_loss(G, images)  # k_gg means generated images kernel matrix ...
                self.k_gg_sin = k_gg
                self.k_ii_sin = k_ii
                self.k_gi_sin = k_gi
            self.k_gg = k_gg
            self.k_ii = k_ii
            self.k_gi = k_gi
            self.mmd_1 = mmd_1
            # Alternative penalties kept for reference:
            # self.g_loss += self.config.with_fp * self.mmd_1
            # when lam_1 = 2*lam_2, then it's simply (kde - 1/n)**2
            # square_g = 0.5*self.config.lam_1*tf.reduce_mean(tf.square(tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1)/(self.batch_size-1)))
            # Leave-one-out average kernel value (a KDE estimate) over the
            # generated samples; diagonal removed via delete_diag.
            square_g = self.config.lam_2 * tf.reduce_mean(
                tf.square(tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1) / (self.batch_size - 1)))
            sim_g = self.config.lam_1 * tf.reduce_mean(
                tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1) / (self.batch_size - 1))
            sim_gi = self.config.lam_3 * tf.reduce_mean(self.k_gi)
            self.d_loss += self.config.with_fp * (square_g - sim_g + sim_gi)
            if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
                # Regularize the implicit kernel's noise magnitude.
                self.d_loss += self.config.with_fp * (self.config.ker_lam * noise_norm)
            self.k_loss = self.d_loss
        self.optim_name = 'gan%d_loss' % int(self.config.gradient_penalty)
        tf.summary.scalar(self.optim_name + ' G', self.g_loss)
        tf.summary.scalar(self.optim_name + ' D', self.d_loss)
| 51.56 | 153 | 0.595552 | from .model import MMD_GAN, tf
from . import mmd
class GAN(MMD_GAN):
    """Softplus-loss GAN variant of MMD_GAN with a scalar critic output.

    With ``config.with_fp > 0`` the discriminator loss is augmented with a
    kernel-density penalty computed from the MMD kernel matrices.
    """
    def __init__(self, sess, config, **kwargs):
        # Force a single critic output per sample.
        config.dof_dim = 1
        super(GAN, self).__init__(sess, config, **kwargs)
    def delete_diag(self, matrix):
        """Zero the main diagonal of ``matrix`` (TF1 ``tf.matrix_diag`` API)."""
        return matrix - tf.matrix_diag(tf.matrix_diag_part(matrix))
    def mmd_loss(self, G, images):
        """MMD^2 between ``G`` and ``images``.

        'imp_*' kernels are blended with an RBF kernel via ``self.warm_up``
        and return a 9-tuple with extra norms and "_sin" matrices; other
        kernels return the 4-tuple ``(mmd, K_XX, K_YY, K_XY)``.
        """
        if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
            kernel = getattr(mmd, '_%s_kernel' % self.config.kernel)
            K_XX, K_XY, K_YY, T_or_F, noise_norm, scale_norm, K_XX_sin, K_XY_sin, K_YY_sin = kernel(G, images)
            mmd_loss = mmd.mmd2([K_XX, K_XY, K_YY, T_or_F])
            kernel_2 = getattr(mmd, '_rbf_kernel')
            K_XX_, K_XY_, K_YY_, T_or_F_ = kernel_2(G, images)
            mmd_loss_ = mmd.mmd2([K_XX_, K_XY_, K_YY_, T_or_F_])
            mmd_loss = self.warm_up*mmd_loss + (1 - self.warm_up) * mmd_loss_
            K_XX = self.warm_up*K_XX + (1 - self.warm_up) * K_XX_
            K_XY = self.warm_up*K_XY + (1 - self.warm_up) * K_XY_
            K_YY = self.warm_up*K_YY + (1 - self.warm_up) * K_YY_
            K_XX_sin = self.warm_up*K_XX_sin + (1 - self.warm_up)*K_XX_
            K_YY_sin = self.warm_up*K_YY_sin + (1 - self.warm_up)*K_YY_
            K_XY_sin = self.warm_up*K_XY_sin + (1 - self.warm_up)*K_XY_
            return mmd_loss, K_XX, K_YY, K_XY, noise_norm, scale_norm, K_XX_sin, K_YY_sin, K_XY_sin
        else:
            kernel = getattr(mmd, '_%s_kernel' % self.config.kernel)
            K_XX, K_XY, K_YY, T_or_F = kernel(G, images)
            mmd_loss = mmd.mmd2([K_XX, K_XY, K_YY, T_or_F])
            return mmd_loss, K_XX, K_YY, K_XY
    def set_loss(self, G, images):
        """Build g/d losses from critic outputs and add the optional
        ``with_fp`` kernel-density penalty to ``d_loss``."""
        self.d_loss = tf.reduce_mean(tf.nn.softplus(-images) + tf.nn.softplus(G))
        self.g_loss = tf.reduce_mean(tf.nn.softplus(-G))
        if self.config.with_fp > 0:
            if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
                mmd_1, k_gg, k_ii, k_gi, noise_norm, scale_norm, k_gg_sin, k_ii_sin, k_gi_sin = self.mmd_loss(G, images)
                self.k_gg_sin = k_gg_sin
                self.k_ii_sin = k_ii_sin
                self.k_gi_sin = k_gi_sin
            else:
                mmd_1, k_gg, k_ii, k_gi = self.mmd_loss(G, images)
                self.k_gg_sin = k_gg
                self.k_ii_sin = k_ii
                self.k_gi_sin = k_gi
            self.k_gg = k_gg
            self.k_ii = k_ii
            self.k_gi = k_gi
            self.mmd_1 = mmd_1
            # square_g = 0.5*self.config.lam_1*tf.reduce_mean(tf.square(tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1)/(self.batch_size-1)))
            square_g = self.config.lam_2 * tf.reduce_mean(
                tf.square(tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1) / (self.batch_size - 1)))
            sim_g = self.config.lam_1 * tf.reduce_mean(
                tf.reduce_sum(self.delete_diag(self.k_gg), axis=-1) / (self.batch_size - 1))
            sim_gi = self.config.lam_3 * tf.reduce_mean(self.k_gi)
            self.d_loss += self.config.with_fp * (square_g - sim_g + sim_gi)
            if self.config.kernel == 'imp_1' or self.config.kernel == 'imp_2' or self.config.kernel == 'imp_3':
                self.d_loss += self.config.with_fp * (self.config.ker_lam * noise_norm)
            self.k_loss = self.d_loss
        self.optim_name = 'gan%d_loss' % int(self.config.gradient_penalty)
        tf.summary.scalar(self.optim_name + ' G', self.g_loss)
        tf.summary.scalar(self.optim_name + ' D', self.d_loss)
| true | true |
1c3d4c2ed54ce2e0ed54a95ec8d7a7dc8f7fbf41 | 759 | py | Python | September-2020-Challange/RobotBoundedInCircle.py | Hemant-60/leetcode-solutions | 2e0a96a148424288bad55a699137a8da16d756d8 | [
"MIT"
] | null | null | null | September-2020-Challange/RobotBoundedInCircle.py | Hemant-60/leetcode-solutions | 2e0a96a148424288bad55a699137a8da16d756d8 | [
"MIT"
] | null | null | null | September-2020-Challange/RobotBoundedInCircle.py | Hemant-60/leetcode-solutions | 2e0a96a148424288bad55a699137a8da16d756d8 | [
"MIT"
] | null | null | null | '''
https://leetcode.com/explore/challenge/card/september-leetcoding-challenge/556/week-3-september-15th-september-21st/3463/
'''
class Solution:
    """LeetCode 1041 - Robot Bounded In Circle.

    A robot starts at the origin facing north and repeats ``instructions``
    ('G' = move one step forward, 'L' = turn left, 'R' = turn right)
    forever.  It is bounded iff, after ONE pass, it is back at the origin
    or is no longer facing north (a rotated displacement cancels out within
    at most four repetitions).

    Bug fix: the previous version checked "at origin, facing north" after
    EVERY instruction of four repetitions, so an input such as "GLLGLLG"
    (which merely passes through the origin mid-cycle but drifts north by
    one unit per full cycle) was wrongly reported as bounded.
    """

    def isRobotBounded(self, instructions: str) -> bool:
        # Heading encoded as an index into unit moves: 0=N, 1=E, 2=S, 3=W.
        moves = ((0, 1), (1, 0), (0, -1), (-1, 0))
        x = y = d = 0
        for step in instructions:
            if step == "G":
                dx, dy = moves[d]
                x += dx
                y += dy
            elif step == "L":
                d = (d - 1) % 4
            else:  # "R"
                d = (d + 1) % 4
        # Bounded iff the net displacement is zero, or the heading changed
        # (the displacement then cancels within 2 or 4 repetitions).
        return (x == 0 and y == 0) or d != 0
| 26.172414 | 121 | 0.386034 | class Solution:
def isRobotBounded(self, instructions: str) -> bool:
instructions*=4
x,y=0,0
d=0
for i in instructions:
if i=="G":
if d==0:
y+=1
elif d==1:
x+=1
elif d==2:
y-=1
else:
x-=1
elif i=="L":
d= 3 if d==0 else (d-1)
else:
d= 0 if d==3 else (d+1)
if x==0 and y==0 and d==0:
return True
return False
| true | true |
1c3d4c3f4b64985968dbd1fd640769278c1a42f4 | 167 | py | Python | Requests.py | Guilehm/python | ce6f8b44623cc25e9b18b2dbf8e0528096f0de96 | [
"MIT"
] | null | null | null | Requests.py | Guilehm/python | ce6f8b44623cc25e9b18b2dbf8e0528096f0de96 | [
"MIT"
] | null | null | null | Requests.py | Guilehm/python | ce6f8b44623cc25e9b18b2dbf8e0528096f0de96 | [
"MIT"
] | null | null | null | import bs4
import requests
# Download the No Starch Press homepage; raise_for_status() aborts with an
# HTTPError on any 4xx/5xx response.
res = requests.get("http://nostarch.com")
res.raise_for_status()
# Parse the HTML body with BeautifulSoup's lxml backend.
no_starch_soup = bs4.BeautifulSoup(res.text, "lxml")
# Inspect the resulting type; only informative in an interactive session.
type(no_starch_soup)
| 20.875 | 52 | 0.778443 | import bs4
import requests
# Fetch the page (failing fast on HTTP errors) and parse it with lxml.
res = requests.get("http://nostarch.com")
res.raise_for_status()
no_starch_soup = bs4.BeautifulSoup(res.text, "lxml")
# No-op outside an interactive session.
type(no_starch_soup)
| true | true |
1c3d4cc98e4b238d4eaca37734936299ca1d1d4a | 7,241 | py | Python | lackey/SettingsDebug.py | Inobitec/lackey | 1ea404a3e003f0ef4dcaa8879ab02b1f568fa0a2 | [
"MIT"
] | 599 | 2016-08-15T22:51:53.000Z | 2022-03-29T16:16:34.000Z | lackey/SettingsDebug.py | Inobitec/lackey | 1ea404a3e003f0ef4dcaa8879ab02b1f568fa0a2 | [
"MIT"
] | 129 | 2016-08-16T11:11:52.000Z | 2021-10-20T20:55:40.000Z | lackey/SettingsDebug.py | Inobitec/lackey | 1ea404a3e003f0ef4dcaa8879ab02b1f568fa0a2 | [
"MIT"
] | 89 | 2016-08-16T01:33:27.000Z | 2021-12-09T15:40:58.000Z | """ Defines Settings and Debug objects """
import datetime
import os
import __main__
from io import open # For Python 2 native line endings compatibility
from ._version import __version__, __sikuli_version__
class DebugMaster(object):
    """Implementation behind the global ``Debug`` logging object.

    Messages are routed, in order of preference, to a user-supplied logger
    object (``setLogger*``), to a log file (``setLogFile``), or to STDOUT.
    Reads its enable/disable switches from the module-level ``Settings``.
    """
    # Absolute path of the log file, or None to print to STDOUT.
    _log_file = None
    # Debug verbosity threshold used by log(); 0 disables debug messages.
    _debug_level = 0
    # Optional external logger object and the method-name dispatch table.
    _logger = None
    _logger_no_prefix = False
    _logger_methods = {
        "user": None,
        "info": None,
        "action": None,
        "error": None,
        "debug": None
    }
    def user(self, message):
        """Record a user-level log message (if ``Settings.UserLogs`` is on).

        Note: does *not* use Java-style string formatting like Sikuli;
        format your message with Python ``str.format()`` instead.
        """
        # NOTE(review): the log type is Settings.UserLogPrefix; if a user
        # changes that prefix away from "user", _write_log will hit a
        # KeyError in _logger_methods -- confirm and guard.
        if Settings.UserLogs:
            self._write_log(Settings.UserLogPrefix, Settings.UserLogTime, message)
    def history(self, message):
        """Record an action-level log message (if ``Settings.ActionLogs``)."""
        if Settings.ActionLogs:
            self._write_log("action", Settings.LogTime, message)
    def error(self, message):
        """Record an error-level log message (if ``Settings.ErrorLogs``)."""
        if Settings.ErrorLogs:
            self._write_log("error", Settings.LogTime, message)
    def info(self, message):
        """Record an info-level log message (if ``Settings.InfoLogs``)."""
        if Settings.InfoLogs:
            self._write_log("info", Settings.LogTime, message)
    def on(self, level):
        """Enable debug messages up to ``level`` (an int in 0..3)."""
        if isinstance(level, int) and level >= 0 and level <= 3:
            self._debug_level = level
    def off(self):
        """Disable all debug messages (resets the level to 0)."""
        self._debug_level = 0
    def log(self, level, message):
        """Record a debug message if ``level`` <= the current debug level."""
        if level <= self._debug_level:
            self._write_log("debug", Settings.LogTime, message)
    def setLogger(self, logger_obj):
        """Route log output to ``logger_obj`` (messages keep their prefix)."""
        self._logger = logger_obj
    def setLoggerNoPrefix(self, logger_obj):
        """Route log output to ``logger_obj``, passing the bare message."""
        self._logger = logger_obj
        self._logger_no_prefix = True
    def setLoggerAll(self, mthd):
        """Send every message type to ``logger.<mthd>()``."""
        for key in self._logger_methods:
            self._logger_methods[key] = mthd
    def setLoggerUser(self, mthd):
        """Send user messages to ``logger.<mthd>()``."""
        self._logger_methods["user"] = mthd
    def setLoggerInfo(self, mthd):
        """Send info messages to ``logger.<mthd>()``."""
        self._logger_methods["info"] = mthd
    def setLoggerAction(self, mthd):
        """Send action messages to ``logger.<mthd>()``."""
        self._logger_methods["action"] = mthd
    def setLoggerError(self, mthd):
        """Send error messages to ``logger.<mthd>()``."""
        self._logger_methods["error"] = mthd
    def setLoggerDebug(self, mthd):
        """Send debug messages to ``logger.<mthd>()``."""
        self._logger_methods["debug"] = mthd
    def setLogFile(self, filepath):
        """Set the output log file; ``None`` reverts to STDOUT.

        :raises IOError: if the parent directory does not exist or the
            path itself is a directory.
        """
        if filepath is None:
            self._log_file = None
            return
        parsed_path = os.path.abspath(filepath)
        # The parent directory must exist and the target must not itself
        # be a directory.
        if os.path.isdir(os.path.dirname(parsed_path)) and not os.path.isdir(parsed_path):
            self._log_file = parsed_path
        else:
            raise IOError("File not found: " + filepath)
    def _write_log(self, log_type, log_time, message):
        """Dispatch one log entry to the logger object, file, or STDOUT."""
        timestamp = datetime.datetime.now().strftime(" %Y-%m-%d %H:%M:%S")
        log_entry = "[{}{}] {}".format(log_type, timestamp if log_time else "", message)
        # NOTE(review): if a logger is set but the method name for this
        # log_type is still None, getattr(obj, None, None) raises TypeError
        # rather than falling through -- confirm and guard.
        if self._logger and callable(getattr(self._logger, self._logger_methods[log_type], None)):
            # A handler object is registered for this message type.
            getattr(
                self._logger,
                self._logger_methods[log_type],
                None
            )(message if self._logger_no_prefix else log_entry)
        elif self._log_file:
            # Otherwise append to the configured log file.
            with open(self._log_file, 'a') as logfile:
                try:
                    logfile.write(unicode(log_entry + "\n"))
                except NameError:  # `unicode` only exists in Python 2
                    logfile.write(log_entry + "\n")
        else:
            # Fall back to STDOUT.
            print(log_entry)
class SettingsMaster(object):
    """Global settings that Lackey refers to by default (class attributes
    are read/written directly through the module-level ``Settings``)."""
    ## Logging Settings (enable flags per message type)
    ActionLogs = True # Message prefix: [action]
    InfoLogs = True # Message prefix: [info]
    DebugLogs = False # Message prefix: [debug]
    ErrorLogs = False # Message prefix: [error]
    LogTime = False
    ### User Logging
    UserLogs = True
    UserLogPrefix = "user"
    UserLogTime = True
    ## Region Settings
    MinSimilarity = 0.7
    SlowMotionDelay = 3 # Extra duration of slowed-down visual effects (seconds)
    WaitScanRate = 3 # Searches per second
    ObserveScanRate = 3 # Searches per second (observers)
    # NOTE(review): attribute name has a typo ("Oberve"); renaming would
    # break callers, so it is kept as-is.
    OberveMinChangedPixels = 50 # Threshold to trigger onChange() (not implemented yet)
    ## Keyboard/Mouse Settings
    MoveMouseDelay = 0.3 # Time to take moving mouse to target location
    DelayBeforeMouseDown = 0.3
    DelayBeforeDrag = 0.3
    DelayBeforeDrop = 0.3
    ClickDelay = 0.0 # Resets to 0 after next click
    TypeDelay = 0.0 # Resets to 0 after next keypress
    ## Action Settings
    ShowActions = False
    ## File Settings
    # Path to the Sikuli project -- might not be the current directory.
    # __main__ has no __file__ in interactive sessions, hence the fallback.
    try:
        BundlePath = os.path.dirname(os.path.abspath(os.path.join(os.getcwd(), __main__.__file__)))
    except AttributeError:
        BundlePath = os.path.dirname(os.path.abspath(os.getcwd()))
    ImagePaths = []
    OcrDataPath = None
    ## Popup settings
    PopupLocation = None
    ## OCR Settings
    SwitchToText = False
    # Environment methods
    def getSikuliVersion(self):
        """Return the Lackey/SikuliX compatibility version string."""
        return "Lackey {} (compatible with SikuliX {})".format(__version__, __sikuli_version__)
# Module-level singletons shared by the rest of Lackey; DebugMaster reads
# its enable flags from this Settings instance at call time.
Debug = DebugMaster()
Settings = SettingsMaster()
| 37.518135 | 99 | 0.622704 | import datetime
import os
import __main__
from io import open
from ._version import __version__, __sikuli_version__
class DebugMaster(object):
_log_file = None
_debug_level = 0
_logger = None
_logger_no_prefix = False
_logger_methods = {
"user": None,
"info": None,
"action": None,
"error": None,
"debug": None
}
def user(self, message):
if Settings.UserLogs:
self._write_log(Settings.UserLogPrefix, Settings.UserLogTime, message)
def history(self, message):
if Settings.ActionLogs:
self._write_log("action", Settings.LogTime, message)
def error(self, message):
if Settings.ErrorLogs:
self._write_log("error", Settings.LogTime, message)
def info(self, message):
if Settings.InfoLogs:
self._write_log("info", Settings.LogTime, message)
def on(self, level):
if isinstance(level, int) and level >= 0 and level <= 3:
self._debug_level = level
def off(self):
self._debug_level = 0
def log(self, level, message):
if level <= self._debug_level:
self._write_log("debug", Settings.LogTime, message)
def setLogger(self, logger_obj):
self._logger = logger_obj
def setLoggerNoPrefix(self, logger_obj):
self._logger = logger_obj
self._logger_no_prefix = True
def setLoggerAll(self, mthd):
for key in self._logger_methods:
self._logger_methods[key] = mthd
def setLoggerUser(self, mthd):
self._logger_methods["user"] = mthd
def setLoggerInfo(self, mthd):
self._logger_methods["info"] = mthd
def setLoggerAction(self, mthd):
self._logger_methods["action"] = mthd
def setLoggerError(self, mthd):
self._logger_methods["error"] = mthd
def setLoggerDebug(self, mthd):
self._logger_methods["debug"] = mthd
def setLogFile(self, filepath):
if filepath is None:
self._log_file = None
return
parsed_path = os.path.abspath(filepath)
if os.path.isdir(os.path.dirname(parsed_path)) and not os.path.isdir(parsed_path):
self._log_file = parsed_path
else:
raise IOError("File not found: " + filepath)
def _write_log(self, log_type, log_time, message):
timestamp = datetime.datetime.now().strftime(" %Y-%m-%d %H:%M:%S")
log_entry = "[{}{}] {}".format(log_type, timestamp if log_time else "", message)
if self._logger and callable(getattr(self._logger, self._logger_methods[log_type], None)):
getattr(
self._logger,
self._logger_methods[log_type],
None
)(message if self._logger_no_prefix else log_entry)
elif self._log_file:
with open(self._log_file, 'a') as logfile:
try:
logfile.write(unicode(log_entry + "\n"))
except NameError:
logfile.write(log_entry + "\n")
else:
print(log_entry)
class SettingsMaster(object):
True
InfoLogs = True
DebugLogs = False
ErrorLogs = False
LogTime = False
LogPrefix = "user"
UserLogTime = True
y = 0.7
SlowMotionDelay = 3
WaitScanRate = 3
ObserveScanRate = 3
OberveMinChangedPixels = 50
DelayBeforeMouseDown = 0.3
DelayBeforeDrag = 0.3
DelayBeforeDrop = 0.3
ClickDelay = 0.0
TypeDelay = 0.0
= False
BundlePath = os.path.dirname(os.path.abspath(os.path.join(os.getcwd(), __main__.__file__)))
except AttributeError:
BundlePath = os.path.dirname(os.path.abspath(os.getcwd()))
ImagePaths = []
OcrDataPath = None
on = None
ext = False
def getSikuliVersion(self):
return "Lackey {} (compatible with SikuliX {})".format(__version__, __sikuli_version__)
Debug = DebugMaster()
Settings = SettingsMaster()
| true | true |
1c3d4ec652a2568c320b45c563710338231997a6 | 4,843 | py | Python | coopihc/space/State.py | jgori-ouistiti/interaction-agents | 922d9bddb2b14784e32c4639b66cec302e80e13a | [
"MIT"
] | null | null | null | coopihc/space/State.py | jgori-ouistiti/interaction-agents | 922d9bddb2b14784e32c4639b66cec302e80e13a | [
"MIT"
] | 5 | 2021-11-22T12:46:09.000Z | 2021-11-23T08:50:28.000Z | coopihc/space/State.py | jgori-ouistiti/interaction-agents | 922d9bddb2b14784e32c4639b66cec302e80e13a | [
"MIT"
] | null | null | null | from collections import OrderedDict
import copy
import json
from tabulate import tabulate
from coopihc.helpers import flatten
from coopihc.space.StateElement import StateElement
class State(OrderedDict):
    """Ordered container for the substates that make up a state.

    Values are either :class:`StateElement` leaves or nested ``State``
    objects.  ``*args`` / ``**kwargs`` are forwarded verbatim to
    ``collections.OrderedDict``.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def __bool__(self):
        # Truthy iff the state holds at least one substate.
        return bool(self.items())

    def reset(self, dic=None):
        """Initialize every substate (see ``StateElement.reset``).

        :param dict dic: optional per-substate reset dictionaries keyed by
            substate name; substates without an entry reset to defaults.
            (The signature previously used a mutable ``dic={}`` default; it
            was never mutated, but ``None`` is the safe idiom.)
        """
        if dic is None:
            dic = {}
        for key, value in self.items():
            reset_dic = dic.get(key)
            if reset_dic is None:
                reset_dic = {}
            value.reset(reset_dic)

    def _flat(self):
        """Recursively flatten into parallel (values, spaces, labels) lists.

        Labels of nested substates are joined with ``|`` (e.g. ``sub|leaf``).
        """
        values = []
        spaces = []
        labels = []
        l, k = list(self.values()), list(self.keys())
        for n, item in enumerate(l):
            _values, _spaces, _labels = item._flat()
            values.extend(_values)
            spaces.extend(_spaces)
            labels.extend([k[n] + "|" + label for label in _labels])
        return values, spaces, labels

    def filter(self, mode, filterdict=None):
        """Retain only parts of the state.

        An example for filterdict's structure is as follows:

        .. code-block:: python

            ordereddict = OrderedDict(
                {"substate1": OrderedDict({"substate_x": 0, "substate_w": 0})}
            )

        will filter out every component but the first component (index 0)
        for substates x and w contained in substate_1.

        :param str mode: 'values' or 'spaces'
        :param collections.OrderedDict filterdict: specifies which substates
            to keep and which to leave out (defaults to the whole state).
        :return: The filtered state
        :rtype: collections.OrderedDict
        """
        new_state = OrderedDict()
        if filterdict is None:
            filterdict = self
        for key, values in filterdict.items():
            if isinstance(self[key], State):
                new_state[key] = self[key].filter(mode, values)
            elif isinstance(self[key], StateElement):
                # to make S.filter("values", S) possible.
                # Warning: Contrary to what one would expect values != self[key]
                if isinstance(values, StateElement):
                    values = slice(0, len(values), 1)
                if mode == "spaces":
                    new_state[key] = flatten([self[key][mode][values]])
                else:
                    new_state[key] = self[key][mode][values]
            else:
                new_state[key] = self[key]
        return new_state

    def __content__(self):
        return list(self.keys())

    # copy/deepcopy are overridden because the default deepcopy showed some
    # overhead; the measured gain is almost none, but this is kept as a
    # reminder that deepcopy needs speeding up (adapted from StateElement).
    def __copy__(self):
        cls = self.__class__
        copy_object = cls.__new__(cls)
        copy_object.__dict__.update(self.__dict__)
        copy_object.update(self)
        return copy_object

    def __deepcopy__(self, memodict=None):
        # Bug fix: ``memodict={}`` was a mutable default, so direct
        # back-to-back ``__deepcopy__()`` calls shared one memo dict and
        # could return stale copies of previously-seen values.
        if memodict is None:
            memodict = {}
        cls = self.__class__
        deepcopy_object = cls.__new__(cls)
        memodict[id(self)] = deepcopy_object
        deepcopy_object.__dict__.update(self.__dict__)
        for k, v in self.items():
            deepcopy_object[k] = copy.deepcopy(v, memodict)
        return deepcopy_object

    def serialize(self):
        """Serialize state --> JSON output.

        Values that are not JSON serializable fall back to their own
        ``serialize()`` method, then to their ``__dict__`` (with a warning).

        :return: JSON-like blob
        :rtype: dict
        """
        ret_dict = {}
        for key, value in dict(self).items():
            try:
                value_ = json.dumps(value)
            except TypeError:
                try:
                    value_ = value.serialize()
                except AttributeError:
                    print(
                        "warning: I don't know how to serialize {}. I'm sending the whole internal dictionnary of the object. Consider adding a serialize() method to your custom object".format(
                            value.__str__()
                        )
                    )
                    value_ = value.__dict__
            ret_dict[key] = value_
        return ret_dict

    def __str__(self):
        """Tabular representation: one row per flattened substate."""
        # Bug fix: the header listed a fifth column ("Possible Value") that
        # no row ever produced, misaligning tabulate's output.
        table_header = ["Index", "Label", "Value", "Space"]
        table_rows = []
        for i, (v, s, l) in enumerate(zip(*self._flat())):
            table_rows.append([str(i), l, str(v), str(s)])
        _str = tabulate(table_rows, table_header)
        return _str
| 34.347518 | 261 | 0.57898 | from collections import OrderedDict
import copy
import json
from tabulate import tabulate
from coopihc.helpers import flatten
from coopihc.space.StateElement import StateElement
class State(OrderedDict):
    """Ordered container of substates (StateElement leaves or nested States).

    ``*args`` / ``**kwargs`` are forwarded to ``collections.OrderedDict``.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def __bool__(self):
        # Truthy iff at least one substate is present.
        return bool(self.items())
    def reset(self, dic={}):
        """Reset every substate; ``dic`` optionally maps substate name to
        the reset dictionary forwarded to it."""
        # NOTE(review): mutable default argument; benign here because dic is
        # only read, but ``dic=None`` would be the safe idiom.
        for key, value in self.items():
            reset_dic = dic.get(key)
            if reset_dic is None:
                reset_dic = {}
            value.reset(reset_dic)
    def _flat(self):
        """Recursively flatten into parallel (values, spaces, labels) lists;
        nested labels are joined with ``|``."""
        values = []
        spaces = []
        labels = []
        l, k = list(self.values()), list(self.keys())
        for n, item in enumerate(l):
            _values, _spaces, _labels = item._flat()
            values.extend(_values)
            spaces.extend(_spaces)
            labels.extend([k[n] + "|" + label for label in _labels])
        return values, spaces, labels
    def filter(self, mode, filterdict=None):
        """Return an OrderedDict retaining only the parts of the state named
        in ``filterdict`` (the whole state when None).

        :param str mode: 'values' or 'spaces'
        """
        new_state = OrderedDict()
        if filterdict is None:
            filterdict = self
        for key, values in filterdict.items():
            if isinstance(self[key], State):
                new_state[key] = self[key].filter(mode, values)
            elif isinstance(self[key], StateElement):
                # Passing a StateElement selects all of its components.
                if isinstance(values, StateElement):
                    values = slice(0, len(values), 1)
                if mode == "spaces":
                    new_state[key] = flatten([self[key][mode][values]])
                else:
                    new_state[key] = self[key][mode][values]
            else:
                new_state[key] = self[key]
        return new_state
    def __content__(self):
        """Return the list of substate names."""
        return list(self.keys())
    def __copy__(self):
        # Shallow copy: new container, same substate objects.
        cls = self.__class__
        copy_object = cls.__new__(cls)
        copy_object.__dict__.update(self.__dict__)
        copy_object.update(self)
        return copy_object
    def __deepcopy__(self, memodict={}):
        # NOTE(review): mutable default memo dict -- direct back-to-back
        # __deepcopy__() calls share it and can return stale copies of
        # previously-seen values; copy.deepcopy() always passes its own memo.
        cls = self.__class__
        deepcopy_object = cls.__new__(cls)
        memodict[id(self)] = deepcopy_object
        deepcopy_object.__dict__.update(self.__dict__)
        for k, v in self.items():
            deepcopy_object[k] = copy.deepcopy(v, memodict)
        return deepcopy_object
    def serialize(self):
        """Serialize the state to a JSON-like dict; non-serializable values
        fall back to their own serialize(), then to their __dict__."""
        ret_dict = {}
        for key, value in dict(self).items():
            try:
                value_ = json.dumps(value)
            except TypeError:
                try:
                    value_ = value.serialize()
                except AttributeError:
                    print(
                        "warning: I don't know how to serialize {}. I'm sending the whole internal dictionnary of the object. Consider adding a serialize() method to your custom object".format(
                            value.__str__()
                        )
                    )
                    value_ = value.__dict__
            ret_dict[key] = value_
        return ret_dict
    def __str__(self):
        """Tabular view of the flattened state."""
        # NOTE(review): five headers but only four row fields -- the
        # "Possible Value" column is never populated; confirm intent.
        table_header = ["Index", "Label", "Value", "Space", "Possible Value"]
        table_rows = []
        for i, (v, s, l) in enumerate(zip(*self._flat())):
            table_rows.append([str(i), l, str(v), str(s)])
        _str = tabulate(table_rows, table_header)
        return _str
| true | true |
1c3d4f9866486f59fb4018f6f042524170113483 | 2,699 | py | Python | encoder.py | EthanHolleman/ImageToBlink | fcb6a98881fc619053df771b867204ef68663671 | [
"MIT"
] | null | null | null | encoder.py | EthanHolleman/ImageToBlink | fcb6a98881fc619053df771b867204ef68663671 | [
"MIT"
] | null | null | null | encoder.py | EthanHolleman/ImageToBlink | fcb6a98881fc619053df771b867204ef68663671 | [
"MIT"
] | null | null | null | from pathlib import Path
from PIL import Image
import argparse
# International Morse code table: letters, digits, and common punctuation.
MORSE_CODE_DICT = { 'A':'.-', 'B':'-...',
                    'C':'-.-.', 'D':'-..', 'E':'.',
                    'F':'..-.', 'G':'--.', 'H':'....',
                    'I':'..', 'J':'.---', 'K':'-.-',
                    'L':'.-..', 'M':'--', 'N':'-.',
                    'O':'---', 'P':'.--.', 'Q':'--.-',
                    'R':'.-.', 'S':'...', 'T':'-',
                    'U':'..-', 'V':'...-', 'W':'.--',
                    'X':'-..-', 'Y':'-.--', 'Z':'--..',
                    '1':'.----', '2':'..---', '3':'...--',
                    '4':'....-', '5':'.....', '6':'-....',
                    '7':'--...', '8':'---..', '9':'----.',
                    '0':'-----', ', ':'--..--', '.':'.-.-.-',
                    '?':'..--..', '/':'-..-.', '-':'-....-',
                    '(':'-.--.', ')':'-.--.-'}
# MicroPython preamble written into the generated main.py: configures the
# LED on pin 25 and embeds the Morse table above as `d`.
BLINK_HEADER = f'''
from machine import Pin
import time
led=Pin(25, Pin.OUT)
d={MORSE_CODE_DICT}
'''
def get_args():
    """Parse the command line; expects a single positional image path."""
    arg_parser = argparse.ArgumentParser(
        description='Convert images to blink micropython blink instructions.'
    )
    arg_parser.add_argument('image', help='Path to image to convert')
    return arg_parser.parse_args()
def add_image_hex_to_header(hex_string):
    """Return BLINK_HEADER with an ``image_hex`` assignment appended."""
    hex_assignment = f'\nimage_hex="{hex_string}"'
    return BLINK_HEADER + hex_assignment
def display_morse_string():
    """Return the MicroPython snippet that blinks ``image_hex`` in Morse.

    Dashes light the LED for 0.6 s, dots for 0.3 s, with 0.3 s between
    symbols and 1 s between characters.
    """
    blink_code = '''
def display_morse(image_hex):
    def get_time_on(dot_or_dash):
        if dot_or_dash == '-':
            time_on = 0.6
        else:
            time_on = 0.3
        return time_on
    for each_char in image_hex:
        morse = d[each_char.upper()]
        for each_symbol in morse:
            time_on = get_time_on(each_symbol)
            led.toggle()
            time.sleep(time_on)
            led.toggle()
            time.sleep(0.3)
        time.sleep(1)
display_morse(image_hex)
'''
    return blink_code
def resize_image(image_path):
    """Downscale the image to 128x128 and save it beside the original.

    :param image_path: path to the source image (str or Path).
    :return: ``Path`` of the resized copy, named ``<stem>.small<ext>``.
    """
    image_path = Path(image_path)
    image = Image.open(str(image_path))
    image = image.resize((128, 128))
    # Path.suffix already includes the leading dot, so the original
    # f'.small.{image_path.suffix}' produced names like 'img.small..png'.
    small_image_path = image_path.with_suffix(f'.small{image_path.suffix}')
    image.save(str(small_image_path))
    return small_image_path
def convert_image_to_hex(image_path):
    """Return the raw bytes of the file at *image_path* as a hex string."""
    return Path(str(image_path)).read_bytes().hex()
def write_main(hex_string):
    """Write main.py: header (with embedded hex) followed by the blink routine."""
    contents = add_image_hex_to_header(hex_string) + display_morse_string()
    with open('main.py', 'w') as out:
        out.write(contents)
def main():
    """CLI entry point: resize the input image, hex-encode it, emit main.py."""
    cli_args = get_args()
    resized_path = resize_image(cli_args.image)
    write_main(convert_image_to_hex(resized_path))


if __name__ == '__main__':
    main()
| 26.722772 | 107 | 0.479807 | from pathlib import Path
from PIL import Image
import argparse
MORSE_CODE_DICT = { 'A':'.-', 'B':'-...',
'C':'-.-.', 'D':'-..', 'E':'.',
'F':'..-.', 'G':'--.', 'H':'....',
'I':'..', 'J':'.---', 'K':'-.-',
'L':'.-..', 'M':'--', 'N':'-.',
'O':'---', 'P':'.--.', 'Q':'--.-',
'R':'.-.', 'S':'...', 'T':'-',
'U':'..-', 'V':'...-', 'W':'.--',
'X':'-..-', 'Y':'-.--', 'Z':'--..',
'1':'.----', '2':'..---', '3':'...--',
'4':'....-', '5':'.....', '6':'-....',
'7':'--...', '8':'---..', '9':'----.',
'0':'-----', ', ':'--..--', '.':'.-.-.-',
'?':'..--..', '/':'-..-.', '-':'-....-',
'(':'-.--.', ')':'-.--.-'}
BLINK_HEADER = f'''
from machine import Pin
import time
led=Pin(25, Pin.OUT)
d={MORSE_CODE_DICT}
'''
def get_args():
parser = argparse.ArgumentParser(description='Convert images to blink micropython blink instructions.')
parser.add_argument('image', help='Path to image to convert')
return parser.parse_args()
def add_image_hex_to_header(hex_string):
header = BLINK_HEADER + f'\nimage_hex="{hex_string}"'
return header
def display_morse_string():
return '''
def display_morse(image_hex):
def get_time_on(dot_or_dash):
if dot_or_dash == '-':
time_on = 0.6
else:
time_on = 0.3
return time_on
for each_char in image_hex:
morse = d[each_char.upper()]
for each_symbol in morse:
time_on = get_time_on(each_symbol)
led.toggle()
time.sleep(time_on)
led.toggle()
time.sleep(0.3)
time.sleep(1)
display_morse(image_hex)
'''
def resize_image(image_path):
image_path = Path(image_path)
image = Image.open(str(image_path))
image = image.resize((128, 128))
small_image_path = image_path.with_suffix(f'.small.{image_path.suffix}')
image.save(str(small_image_path))
return small_image_path
def convert_image_to_hex(image_path):
with open(str(image_path), 'rb') as handle:
return handle.read().hex()
def write_main(hex_string):
header = add_image_hex_to_header(hex_string)
with open('main.py', 'w') as handle:
handle.write(header)
handle.write(display_morse_string())
def main():
args = get_args()
image = args.image
small_image = resize_image(image)
hex_string = convert_image_to_hex(small_image)
write_main(hex_string)
if __name__ == '__main__':
main()
| true | true |
1c3d507588957cb770b1475e4e60a2940b5e89c9 | 1,888 | py | Python | tests/test_loadfile.py | HebertWP/star_tracker | 4d6fe9e353222f22d0b8e0cfc823be51a7110d43 | [
"MIT"
] | null | null | null | tests/test_loadfile.py | HebertWP/star_tracker | 4d6fe9e353222f22d0b8e0cfc823be51a7110d43 | [
"MIT"
] | null | null | null | tests/test_loadfile.py | HebertWP/star_tracker | 4d6fe9e353222f22d0b8e0cfc823be51a7110d43 | [
"MIT"
] | null | null | null | from numpy.core.fromnumeric import size
import pandas
import pytest
import star_tracker.modules.loadfile as loadfile
from star_tracker.modules.loadfile import Movements
import matplotlib.pyplot as plt
def test_dat2csv():
    """dat2csv() should emit a CSV file that pandas can parse."""
    loadfile.dat2csv("data/hip_main.dat", "data/stars.csv")
    try:
        pandas.read_csv("data/stars.csv")
    # The original encoded failures as `assert True == "message"` — an
    # always-false comparison that obscured intent; fail explicitly instead.
    except FileNotFoundError:
        raise AssertionError("File not found.")
    except pandas.errors.EmptyDataError:
        raise AssertionError("No data")
    except pandas.errors.ParserError:
        raise AssertionError("Parse error")
    except Exception:
        raise AssertionError("Some other exception")
class TestLoad:
    """Tests for catalog loading and scripted Movements playback.

    NOTE(review): relies on data/stars.csv and data/Movements.csv existing;
    test_loadCatalog implicitly depends on test_dat2csv having produced the
    CSV — confirm ordering assumptions.
    """
    # Helper (not a test): load catalog columns onto the instance.
    def loadData(self):
        self.n, self.v, self.ar, self.dec = loadfile.loadCatalog("data/stars.csv")
    # Helper (not a test): load the scripted movement sequence.
    def loadMovements(self):
        self._m = Movements('data/Movements.csv')
    def test_loadCatalog(self):
        self.loadData()
        # All catalog columns should be parallel arrays of equal length.
        assert size(self.n) == size(self.dec)
    def test_loadMovements(self):
        self.loadMovements()
        assert 4 == len(self._m)
    def test_Movements_play(self):
        self.loadMovements()
        assert self._m.playing == False
        self._m.play()
        assert self._m.playing == True
    def test_Movements_move(self):
        self.loadMovements()
        self._m.play()
        a = [self._m.move(),self._m.move()]
        # First scripted movement from data/Movements.csv.
        b = {'time': 3, 'ar':0, 'dec':0, 'roll':0}
        assert a[0] == b
    def test_Movements_stop(self):
        self.loadMovements()
        self._m.play()
        # Playback is expected to stop itself after these moves — TODO confirm
        # why two moves exhaust a 4-entry script.
        a = [self._m.move(),self._m.move()]
        assert self._m.playing == False
    def test_Movements_progress(self):
        self.loadMovements()
        self._m.play()
        assert 0 == self._m.progress
        self._m.move()
        assert 37.5 == self._m.progress
        self._m.move()
assert 100 == self._m.progress | 29.968254 | 82 | 0.613877 | from numpy.core.fromnumeric import size
import pandas
import pytest
import star_tracker.modules.loadfile as loadfile
from star_tracker.modules.loadfile import Movements
import matplotlib.pyplot as plt
def test_dat2csv():
loadfile.dat2csv("data/hip_main.dat","data/stars.csv")
try:
stars = pandas.read_csv("data/stars.csv")
assert True == True
except FileNotFoundError:
assert True == "File not found."
except pandas.errors.EmptyDataError:
assert True == "No data"
except pandas.errors.ParserError:
assert True == "Parse error"
except Exception:
assert True == "Some other exception"
class TestLoad:
def loadData(self):
self.n, self.v, self.ar, self.dec = loadfile.loadCatalog("data/stars.csv")
def loadMovements(self):
self._m = Movements('data/Movements.csv')
def test_loadCatalog(self):
self.loadData()
assert size(self.n) == size(self.dec)
def test_loadMovements(self):
self.loadMovements()
assert 4 == len(self._m)
def test_Movements_play(self):
self.loadMovements()
assert self._m.playing == False
self._m.play()
assert self._m.playing == True
def test_Movements_move(self):
self.loadMovements()
self._m.play()
a = [self._m.move(),self._m.move()]
b = {'time': 3, 'ar':0, 'dec':0, 'roll':0}
assert a[0] == b
def test_Movements_stop(self):
self.loadMovements()
self._m.play()
a = [self._m.move(),self._m.move()]
assert self._m.playing == False
def test_Movements_progress(self):
self.loadMovements()
self._m.play()
assert 0 == self._m.progress
self._m.move()
assert 37.5 == self._m.progress
self._m.move()
assert 100 == self._m.progress | true | true |
1c3d515dc82a173678d8019dedc9374c6cc4ecb4 | 835 | py | Python | qika/apps/apis/urls.py | XiYanXian/qikaACG | a465c211ed0f77263d8eca33a3422592b80010e4 | [
"Apache-2.0"
] | 6 | 2020-04-18T13:21:52.000Z | 2021-05-28T04:59:15.000Z | qika/apps/apis/urls.py | XiYanXian/qikaACG | a465c211ed0f77263d8eca33a3422592b80010e4 | [
"Apache-2.0"
] | 7 | 2020-06-05T22:36:33.000Z | 2022-03-11T23:57:38.000Z | qika/apps/apis/urls.py | XiYanXian/qikaACG | a465c211ed0f77263d8eca33a3422592b80010e4 | [
"Apache-2.0"
] | 1 | 2020-04-09T06:34:52.000Z | 2020-04-09T06:34:52.000Z | from django.conf.urls import url
from . import views
# Route table for the APIs app; each `name=` enables reverse() lookups.
urlpatterns = [
    url(r'^get_captcha/$', views.get_captcha, name='get_captcha'),
    url(r'^check_captcha/$', views.check_captcha, name='check_captcha'),
    url(r'^get_email_captcha/$', views.get_email_captcha, name='get_email_captcha'),
    # url(r'^get_tag/(?P<tag>\w+)/$', views.get_tag, name='get_tag'),
    url(r'^anime/judge/(?P<id>\d+)/$', views.JudgeCollection.as_view(), name='anime_judge'),
    url(r'^anime/collection/(?P<id>\d+)/$', views.AnimeCollectionView.as_view(), name='anime_collection'),
    # NOTE(review): `(?P<rank>)\w+` captures an empty string and leaves `\w+`
    # outside the group — presumably `(?P<rank>\w+)` was intended; confirm
    # against CommentLikeView before changing.
    url(r'^comment/like/(?P<id>\d+)/(?P<rank>)\w+$', views.CommentLikeView.as_view(), name='comment_like'),
    url(r'^anime/search/$', views.SearchAnime.as_view(), name='anime_search'),
    url(r'^change_avatar/$', views.ChangeAvatar.as_view(), name='change_avatar'),
] | 59.642857 | 107 | 0.68024 | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^get_captcha/$', views.get_captcha, name='get_captcha'),
url(r'^check_captcha/$', views.check_captcha, name='check_captcha'),
url(r'^get_email_captcha/$', views.get_email_captcha, name='get_email_captcha'),
url(r'^anime/judge/(?P<id>\d+)/$', views.JudgeCollection.as_view(), name='anime_judge'),
url(r'^anime/collection/(?P<id>\d+)/$', views.AnimeCollectionView.as_view(), name='anime_collection'),
url(r'^comment/like/(?P<id>\d+)/(?P<rank>)\w+$', views.CommentLikeView.as_view(), name='comment_like'),
url(r'^anime/search/$', views.SearchAnime.as_view(), name='anime_search'),
url(r'^change_avatar/$', views.ChangeAvatar.as_view(), name='change_avatar'),
] | true | true |
1c3d52e2d6e1a6c76243ded117172f26cd1054ad | 6,499 | py | Python | setup.py | bunjiboys/lemur | b5fd8020055d8af07bd6f82f4dd38246dca8d0c5 | [
"Apache-2.0"
] | null | null | null | setup.py | bunjiboys/lemur | b5fd8020055d8af07bd6f82f4dd38246dca8d0c5 | [
"Apache-2.0"
] | 2 | 2020-04-03T09:28:20.000Z | 2020-04-04T04:56:35.000Z | setup.py | bunjiboys/lemur | b5fd8020055d8af07bd6f82f4dd38246dca8d0c5 | [
"Apache-2.0"
] | null | null | null | """
Lemur
=====
Is a TLS management and orchestration tool.
:copyright: (c) 2015 by Netflix, see AUTHORS for more
:license: Apache, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import json
import os.path
import datetime
from distutils import log
from distutils.core import Command
from setuptools.command.develop import develop
from setuptools.command.install import install
from setuptools.command.sdist import sdist
from setuptools import setup, find_packages
from subprocess import check_output
# Absolute path of the project root (directory containing this setup.py).
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src/ directory to the sys.path.
sys.path.insert(0, ROOT)
# Load package metadata (__title__, __version__, ...) without importing lemur.
about = {}
with open(os.path.join(ROOT, "lemur", "__about__.py")) as f:
    exec(f.read(), about)  # nosec: about file is benign
# Runtime dependencies (pinned).
install_requires = [
    'Flask==0.12',
    'Flask-RESTful==0.3.6',
    'Flask-SQLAlchemy==2.1',
    'Flask-Script==2.0.5',
    'Flask-Migrate==2.0.4',
    'Flask-Bcrypt==0.7.1',
    'Flask-Principal==0.4.0',
    'Flask-Mail==0.9.1',
    'SQLAlchemy-Utils==0.32.14',
    'requests==2.11.1',
    'ndg-httpsclient==0.4.2',
    'psycopg2==2.7.1',
    'arrow==0.10.0',
    'six==1.10.0',
    'marshmallow-sqlalchemy==0.13.1',
    'gunicorn==19.7.1',
    'marshmallow==2.13.5',
    'cryptography==1.9',
    'xmltodict==0.11.0',
    'pyjwt==1.5.0',
    'lockfile==0.12.2',
    'inflection==0.3.1',
    'future==0.16.0',
    'boto3==1.4.4',
    'acme==0.15.0',
    'retrying==1.3.3',
    'tabulate==0.7.7',
    'pem==16.1.0',
    'paramiko==2.1.2'  # required for lemur_linuxdst plugin
]
# Extras: test-only dependencies (`pip install lemur[tests]`).
tests_require = [
    'pyflakes',
    'moto==1.0.1',
    'nose==1.3.7',
    'pytest==3.1.2',
    'factory-boy==2.8.1',
    'fake-factory==0.7.2',
    'pytest-flask==0.10.0',
    'freezegun==0.3.9',
    'requests-mock==1.3.0'
]
# Extras: documentation build dependencies (`pip install lemur[docs]`).
docs_require = [
    'sphinx',
    'sphinxcontrib-httpdomain',
    'sphinx-rtd-theme'
]
# Extras: developer tooling (`pip install lemur[dev]`).
dev_requires = [
    'flake8>=3.2,<4.0',
    'pre-commit',
    'invoke',
    'twine'
]
class SmartInstall(install):
    """
    Installs Lemur into the Python environment.
    If the package indicator is missing, this will also force a run of
    `build_static` which is required for JavaScript assets and other things.
    """
    def _needs_static(self):
        # Static assets are considered built once lemur/static/dist exists.
        return not os.path.exists(os.path.join(ROOT, 'lemur/static/dist'))
    def run(self):
        # Build JS assets first if they are missing, then do a normal install.
        if self._needs_static():
            self.run_command('build_static')
        install.run(self)
class DevelopWithBuildStatic(develop):
    """`develop` command variant that builds static assets first."""
    def install_for_development(self):
        # Ensure JS/static assets exist before setting up the dev install.
        self.run_command('build_static')
        return develop.install_for_development(self)
class SdistWithBuildStatic(sdist):
    """`sdist` command variant that builds static assets into the release tree."""
    def make_release_tree(self, *a, **kw):
        dist_path = self.distribution.get_fullname()
        sdist.make_release_tree(self, *a, **kw)
        # Re-point build_static at the release tree so assets land in the sdist.
        self.reinitialize_command('build_static', work_path=dist_path)
        self.run_command('build_static')
        # Stamp the sdist with a build timestamp marker file.
        with open(os.path.join(dist_path, 'lemur-package.json'), 'w') as fp:
            json.dump({
                'createdAt': datetime.datetime.utcnow().isoformat() + 'Z',
            }, fp)
class BuildStatic(Command):
    """Custom distutils command: build the JavaScript/static assets via npm + gulp."""

    description = "build static assets with npm and gulp"
    # Required attribute for distutils commands; this command takes no options.
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Run `npm install`, then `gulp build` and `gulp package`, in ROOT."""
        log.info("running [npm install --quiet] in {0}".format(ROOT))
        try:
            check_output(['npm', 'install', '--quiet'], cwd=ROOT)
            log.info("running [gulp build]")
            check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'build'], cwd=ROOT)
            log.info("running [gulp package]")
            check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'package'], cwd=ROOT)
        except Exception as e:
            # Deliberately best-effort (installs without node still succeed),
            # but include the cause instead of discarding `e`.
            log.warn("Unable to build static content: %s", e)
setup(
    # Core metadata comes from lemur/__about__.py (loaded via exec above).
    name=about["__title__"],
    version=about["__version__"],
    author=about["__author__"],
    author_email=about["__email__"],
    url=about["__uri__"],
    description=about["__summary__"],
    long_description=open(os.path.join(ROOT, 'README.rst')).read(),
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=install_requires,
    extras_require={
        'tests': tests_require,
        'docs': docs_require,
        'dev': dev_requires,
    },
    # Custom commands defined above: asset-aware sdist/install.
    cmdclass={
        'build_static': BuildStatic,
        'sdist': SdistWithBuildStatic,
        'install': SmartInstall
    },
    entry_points={
        'console_scripts': [
            'lemur = lemur.manage:main',
        ],
        # Built-in plugins registered on Lemur's plugin entry-point group.
        'lemur.plugins': [
            'verisign_issuer = lemur.plugins.lemur_verisign.plugin:VerisignIssuerPlugin',
            'acme_issuer = lemur.plugins.lemur_acme.plugin:ACMEIssuerPlugin',
            'aws_destination = lemur.plugins.lemur_aws.plugin:AWSDestinationPlugin',
            'aws_source = lemur.plugins.lemur_aws.plugin:AWSSourcePlugin',
            'aws_s3 = lemur.plugins.lemur_aws.plugin:S3DestinationPlugin',
            'email_notification = lemur.plugins.lemur_email.plugin:EmailNotificationPlugin',
            'slack_notification = lemur.plugins.lemur_slack.plugin:SlackNotificationPlugin',
            'java_truststore_export = lemur.plugins.lemur_java.plugin:JavaTruststoreExportPlugin',
            'java_keystore_export = lemur.plugins.lemur_java.plugin:JavaKeystoreExportPlugin',
            'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin',
            'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin',
            'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin',
            'cryptography_issuer = lemur.plugins.lemur_cryptography.plugin:CryptographyIssuerPlugin',
            'cfssl_issuer = lemur.plugins.lemur_cfssl.plugin:CfsslIssuerPlugin',
            'digicert_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertIssuerPlugin',
            'digicert_cis_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertCISIssuerPlugin',
        ],
    },
    classifiers=[
        'Framework :: Flask',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: OS Independent',
        'Topic :: Software Development',
        "Programming Language :: Python :: 3.5",
        "Natural Language :: English",
        "License :: OSI Approved :: Apache Software License"
    ]
)
| 31.548544 | 105 | 0.646561 | from __future__ import absolute_import
import sys
import json
import os.path
import datetime
from distutils import log
from distutils.core import Command
from setuptools.command.develop import develop
from setuptools.command.install import install
from setuptools.command.sdist import sdist
from setuptools import setup, find_packages
from subprocess import check_output
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
sys.path.insert(0, ROOT)
about = {}
with open(os.path.join(ROOT, "lemur", "__about__.py")) as f:
exec(f.read(), about)
install_requires = [
'Flask==0.12',
'Flask-RESTful==0.3.6',
'Flask-SQLAlchemy==2.1',
'Flask-Script==2.0.5',
'Flask-Migrate==2.0.4',
'Flask-Bcrypt==0.7.1',
'Flask-Principal==0.4.0',
'Flask-Mail==0.9.1',
'SQLAlchemy-Utils==0.32.14',
'requests==2.11.1',
'ndg-httpsclient==0.4.2',
'psycopg2==2.7.1',
'arrow==0.10.0',
'six==1.10.0',
'marshmallow-sqlalchemy==0.13.1',
'gunicorn==19.7.1',
'marshmallow==2.13.5',
'cryptography==1.9',
'xmltodict==0.11.0',
'pyjwt==1.5.0',
'lockfile==0.12.2',
'inflection==0.3.1',
'future==0.16.0',
'boto3==1.4.4',
'acme==0.15.0',
'retrying==1.3.3',
'tabulate==0.7.7',
'pem==16.1.0',
'paramiko==2.1.2'
]
tests_require = [
'pyflakes',
'moto==1.0.1',
'nose==1.3.7',
'pytest==3.1.2',
'factory-boy==2.8.1',
'fake-factory==0.7.2',
'pytest-flask==0.10.0',
'freezegun==0.3.9',
'requests-mock==1.3.0'
]
docs_require = [
'sphinx',
'sphinxcontrib-httpdomain',
'sphinx-rtd-theme'
]
dev_requires = [
'flake8>=3.2,<4.0',
'pre-commit',
'invoke',
'twine'
]
class SmartInstall(install):
def _needs_static(self):
return not os.path.exists(os.path.join(ROOT, 'lemur/static/dist'))
def run(self):
if self._needs_static():
self.run_command('build_static')
install.run(self)
class DevelopWithBuildStatic(develop):
def install_for_development(self):
self.run_command('build_static')
return develop.install_for_development(self)
class SdistWithBuildStatic(sdist):
def make_release_tree(self, *a, **kw):
dist_path = self.distribution.get_fullname()
sdist.make_release_tree(self, *a, **kw)
self.reinitialize_command('build_static', work_path=dist_path)
self.run_command('build_static')
with open(os.path.join(dist_path, 'lemur-package.json'), 'w') as fp:
json.dump({
'createdAt': datetime.datetime.utcnow().isoformat() + 'Z',
}, fp)
class BuildStatic(Command):
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
log.info("running [npm install --quiet] in {0}".format(ROOT))
try:
check_output(['npm', 'install', '--quiet'], cwd=ROOT)
log.info("running [gulp build]")
check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'build'], cwd=ROOT)
log.info("running [gulp package]")
check_output([os.path.join(ROOT, 'node_modules', '.bin', 'gulp'), 'package'], cwd=ROOT)
except Exception as e:
log.warn("Unable to build static content")
setup(
name=about["__title__"],
version=about["__version__"],
author=about["__author__"],
author_email=about["__email__"],
url=about["__uri__"],
description=about["__summary__"],
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
extras_require={
'tests': tests_require,
'docs': docs_require,
'dev': dev_requires,
},
cmdclass={
'build_static': BuildStatic,
'sdist': SdistWithBuildStatic,
'install': SmartInstall
},
entry_points={
'console_scripts': [
'lemur = lemur.manage:main',
],
'lemur.plugins': [
'verisign_issuer = lemur.plugins.lemur_verisign.plugin:VerisignIssuerPlugin',
'acme_issuer = lemur.plugins.lemur_acme.plugin:ACMEIssuerPlugin',
'aws_destination = lemur.plugins.lemur_aws.plugin:AWSDestinationPlugin',
'aws_source = lemur.plugins.lemur_aws.plugin:AWSSourcePlugin',
'aws_s3 = lemur.plugins.lemur_aws.plugin:S3DestinationPlugin',
'email_notification = lemur.plugins.lemur_email.plugin:EmailNotificationPlugin',
'slack_notification = lemur.plugins.lemur_slack.plugin:SlackNotificationPlugin',
'java_truststore_export = lemur.plugins.lemur_java.plugin:JavaTruststoreExportPlugin',
'java_keystore_export = lemur.plugins.lemur_java.plugin:JavaKeystoreExportPlugin',
'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin',
'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin',
'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin',
'cryptography_issuer = lemur.plugins.lemur_cryptography.plugin:CryptographyIssuerPlugin',
'cfssl_issuer = lemur.plugins.lemur_cfssl.plugin:CfsslIssuerPlugin',
'digicert_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertIssuerPlugin',
'digicert_cis_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertCISIssuerPlugin',
],
},
classifiers=[
'Framework :: Flask',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development',
"Programming Language :: Python :: 3.5",
"Natural Language :: English",
"License :: OSI Approved :: Apache Software License"
]
)
| true | true |
1c3d53f64d0e29b0ac2bdfffd3f13563d7105b61 | 7,660 | py | Python | src/python/pants/backend/python/util_rules/lockfile_metadata.py | ArthV/pants | 0aa7f290ed390ab3ceba0c44e4b38354d7a41d3d | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/python/util_rules/lockfile_metadata.py | ArthV/pants | 0aa7f290ed390ab3ceba0c44e4b38354d7a41d3d | [
"Apache-2.0"
] | 1 | 2022-02-22T18:15:03.000Z | 2022-02-22T18:15:03.000Z | src/python/pants/backend/python/util_rules/lockfile_metadata.py | ryanking/pants | e45b00d2eb467b599966bca262405a5d74d27bdd | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, Set, cast
from pants.backend.python.pip_requirement import PipRequirement
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.core.util_rules.lockfile_metadata import (
LockfileMetadata,
LockfileMetadataValidation,
LockfileScope,
_get_metadata,
lockfile_metadata_registrar,
)
# Decorator factory that registers versioned metadata classes under the
# Python lockfile scope (see @_python_lockfile_metadata(N) below).
_python_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.PYTHON)


class InvalidPythonLockfileReason(Enum):
    """Reasons a Python lockfile can fail validation for the current config."""

    INVALIDATION_DIGEST_MISMATCH = "invalidation_digest_mismatch"
    INTERPRETER_CONSTRAINTS_MISMATCH = "interpreter_constraints_mismatch"
    REQUIREMENTS_MISMATCH = "requirements_mismatch"
@dataclass(frozen=True)
class PythonLockfileMetadata(LockfileMetadata):
    """Base class for versioned Python lockfile header metadata."""

    scope = LockfileScope.PYTHON

    # Interpreter constraints the lockfile was generated against.
    valid_for_interpreter_constraints: InterpreterConstraints

    @staticmethod
    def new(
        valid_for_interpreter_constraints: InterpreterConstraints,
        requirements: set[PipRequirement],
    ) -> LockfileMetadata:
        """Call the most recent version of the `LockfileMetadata` class to construct a concrete
        instance.

        This static method should be used in place of the `LockfileMetadata` constructor. This gives
        calling sites a predictable method to call to construct a new `LockfileMetadata` for
        writing, while still allowing us to support _reading_ older, deprecated metadata versions.
        """
        return PythonLockfileMetadataV2(valid_for_interpreter_constraints, requirements)

    @classmethod
    def from_lockfile(
        cls, resolve_name: str, lockfile: bytes, lockfile_path: str | None = None
    ) -> PythonLockfileMetadata:
        """Parse the metadata header out of lockfile bytes (any known version)."""
        return cast(
            PythonLockfileMetadata,
            LockfileMetadata.from_lockfile_for_scope(
                LockfileScope.PYTHON, lockfile, lockfile_path, resolve_name
            ),
        )

    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        # Serialize interpreter constraints as strings for the lockfile header.
        instance = cast(PythonLockfileMetadata, instance)
        return {
            "valid_for_interpreter_constraints": [
                str(ic) for ic in instance.valid_for_interpreter_constraints
            ]
        }

    def is_valid_for(
        self,
        *,
        is_tool: bool,
        expected_invalidation_digest: str | None,
        user_interpreter_constraints: InterpreterConstraints,
        interpreter_universe: Iterable[str],
        user_requirements: Iterable[PipRequirement],
    ) -> LockfileMetadataValidation:
        """Returns Truthy if this `PythonLockfileMetadata` can be used in the current execution
        context."""
        raise NotImplementedError("call `is_valid_for` on subclasses only")
@_python_lockfile_metadata(1)
@dataclass(frozen=True)
class PythonLockfileMetadataV1(PythonLockfileMetadata):
    """Legacy (version 1) header: validity keyed on a requirements digest."""

    # Hash of the requirements the lockfile was generated from.
    requirements_invalidation_digest: str

    @classmethod
    def _from_json_dict(
        cls: type[PythonLockfileMetadataV1],
        json_dict: dict[Any, Any],
        lockfile_description: str,
        error_suffix: str,
    ) -> PythonLockfileMetadataV1:
        """Reconstruct a V1 header from its parsed-JSON form."""
        metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
        interpreter_constraints = metadata(
            "valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
        )
        requirements_digest = metadata("requirements_invalidation_digest", str, None)
        return PythonLockfileMetadataV1(interpreter_constraints, requirements_digest)

    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        instance = cast(PythonLockfileMetadataV1, instance)
        return {"requirements_invalidation_digest": instance.requirements_invalidation_digest}

    def is_valid_for(
        self,
        *,
        is_tool: bool,
        expected_invalidation_digest: str | None,
        user_interpreter_constraints: InterpreterConstraints,
        interpreter_universe: Iterable[str],
        user_requirements: Iterable[PipRequirement],  # User requirements are not used by V1
    ) -> LockfileMetadataValidation:
        """Valid iff digest matches and constraints cover the user's constraints."""
        failure_reasons: set[InvalidPythonLockfileReason] = set()
        # No expected digest means digest-based validation is skipped entirely.
        if expected_invalidation_digest is None:
            return LockfileMetadataValidation(failure_reasons)
        if self.requirements_invalidation_digest != expected_invalidation_digest:
            failure_reasons.add(InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH)
        if not self.valid_for_interpreter_constraints.contains(
            user_interpreter_constraints, interpreter_universe
        ):
            failure_reasons.add(InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
        return LockfileMetadataValidation(failure_reasons)
@_python_lockfile_metadata(2)
@dataclass(frozen=True)
class PythonLockfileMetadataV2(PythonLockfileMetadata):
    """Lockfile version that permits specifying a requirements as a set rather than a digest.

    Validity is tested by the set of requirements strings being the same in the user requirements as
    those in the stored requirements.
    """

    # Exact requirements the lockfile was generated with.
    requirements: set[PipRequirement]

    @classmethod
    def _from_json_dict(
        cls: type[PythonLockfileMetadataV2],
        json_dict: dict[Any, Any],
        lockfile_description: str,
        error_suffix: str,
    ) -> PythonLockfileMetadataV2:
        """Reconstruct a V2 header from its parsed-JSON form."""
        metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
        requirements = metadata(
            "generated_with_requirements",
            Set[PipRequirement],
            lambda l: {PipRequirement.parse(i) for i in l},
        )
        interpreter_constraints = metadata(
            "valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
        )
        return PythonLockfileMetadataV2(interpreter_constraints, requirements)

    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        instance = cast(PythonLockfileMetadataV2, instance)
        # Requirements need to be stringified then sorted so that tests are deterministic. Sorting
        # followed by stringifying does not produce a meaningful result.
        return {
            "generated_with_requirements": (
                sorted(str(i) for i in instance.requirements)
                if instance.requirements is not None
                else None
            )
        }

    def is_valid_for(
        self,
        *,
        is_tool: bool,
        expected_invalidation_digest: str | None,  # Validation digests are not used by V2.
        user_interpreter_constraints: InterpreterConstraints,
        interpreter_universe: Iterable[str],
        user_requirements: Iterable[PipRequirement],
    ) -> LockfileMetadataValidation:
        """Tools require an exact requirements match; user resolves only need a subset."""
        failure_reasons = set()
        invalid_reqs = (
            self.requirements != set(user_requirements)
            if is_tool
            else not set(user_requirements).issubset(self.requirements)
        )
        if invalid_reqs:
            failure_reasons.add(InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH)
        if not self.valid_for_interpreter_constraints.contains(
            user_interpreter_constraints, interpreter_universe
        ):
            failure_reasons.add(InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
        return LockfileMetadataValidation(failure_reasons)
| 37.004831 | 100 | 0.716841 |
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, Set, cast
from pants.backend.python.pip_requirement import PipRequirement
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.core.util_rules.lockfile_metadata import (
LockfileMetadata,
LockfileMetadataValidation,
LockfileScope,
_get_metadata,
lockfile_metadata_registrar,
)
_python_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.PYTHON)
class InvalidPythonLockfileReason(Enum):
INVALIDATION_DIGEST_MISMATCH = "invalidation_digest_mismatch"
INTERPRETER_CONSTRAINTS_MISMATCH = "interpreter_constraints_mismatch"
REQUIREMENTS_MISMATCH = "requirements_mismatch"
@dataclass(frozen=True)
class PythonLockfileMetadata(LockfileMetadata):
scope = LockfileScope.PYTHON
valid_for_interpreter_constraints: InterpreterConstraints
@staticmethod
def new(
valid_for_interpreter_constraints: InterpreterConstraints,
requirements: set[PipRequirement],
) -> LockfileMetadata:
return PythonLockfileMetadataV2(valid_for_interpreter_constraints, requirements)
@classmethod
def from_lockfile(
cls, resolve_name: str, lockfile: bytes, lockfile_path: str | None = None
) -> PythonLockfileMetadata:
return cast(
PythonLockfileMetadata,
LockfileMetadata.from_lockfile_for_scope(
LockfileScope.PYTHON, lockfile, lockfile_path, resolve_name
),
)
@classmethod
def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
instance = cast(PythonLockfileMetadata, instance)
return {
"valid_for_interpreter_constraints": [
str(ic) for ic in instance.valid_for_interpreter_constraints
]
}
def is_valid_for(
self,
*,
is_tool: bool,
expected_invalidation_digest: str | None,
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
user_requirements: Iterable[PipRequirement],
) -> LockfileMetadataValidation:
raise NotImplementedError("call `is_valid_for` on subclasses only")
@_python_lockfile_metadata(1)
@dataclass(frozen=True)
class PythonLockfileMetadataV1(PythonLockfileMetadata):
    """Legacy (v1) format: requirements are summarized by one invalidation
    digest rather than stored explicitly."""
    # Hash of the requirements the lockfile was generated from.
    requirements_invalidation_digest: str
    @classmethod
    def _from_json_dict(
        cls: type[PythonLockfileMetadataV1],
        json_dict: dict[Any, Any],
        lockfile_description: str,
        error_suffix: str,
    ) -> PythonLockfileMetadataV1:
        """Deserialize a v1 metadata header from its JSON dict."""
        read = _get_metadata(json_dict, lockfile_description, error_suffix)
        constraints = read(
            "valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
        )
        digest = read("requirements_invalidation_digest", str, None)
        return PythonLockfileMetadataV1(constraints, digest)
    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        """Header attributes specific to the v1 format."""
        v1_instance = cast(PythonLockfileMetadataV1, instance)
        return {
            "requirements_invalidation_digest": v1_instance.requirements_invalidation_digest
        }
    def is_valid_for(
        self,
        *,
        is_tool: bool,
        expected_invalidation_digest: str | None,
        user_interpreter_constraints: InterpreterConstraints,
        interpreter_universe: Iterable[str],
        user_requirements: Iterable[PipRequirement],
    ) -> LockfileMetadataValidation:
        """Compare the stored digest and interpreter constraints against the
        current configuration.

        When no digest is expected there is nothing to compare, so the
        lockfile is reported valid without checking constraints either
        (mirrors the original early-return behaviour).
        """
        reasons: set[InvalidPythonLockfileReason] = set()
        if expected_invalidation_digest is None:
            return LockfileMetadataValidation(reasons)
        if expected_invalidation_digest != self.requirements_invalidation_digest:
            reasons.add(InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH)
        constraints_ok = self.valid_for_interpreter_constraints.contains(
            user_interpreter_constraints, interpreter_universe
        )
        if not constraints_ok:
            reasons.add(InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
        return LockfileMetadataValidation(reasons)
@_python_lockfile_metadata(2)
@dataclass(frozen=True)
class PythonLockfileMetadataV2(PythonLockfileMetadata):
    """Current (v2) format: the generating requirements are stored verbatim,
    enabling requirement-level validation."""
    # Requirements the lockfile was generated with.
    requirements: set[PipRequirement]
    @classmethod
    def _from_json_dict(
        cls: type[PythonLockfileMetadataV2],
        json_dict: dict[Any, Any],
        lockfile_description: str,
        error_suffix: str,
    ) -> PythonLockfileMetadataV2:
        """Deserialize a v2 metadata header from its JSON dict."""
        read = _get_metadata(json_dict, lockfile_description, error_suffix)
        parsed_requirements = read(
            "generated_with_requirements",
            Set[PipRequirement],
            lambda raw: {PipRequirement.parse(entry) for entry in raw},
        )
        constraints = read(
            "valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
        )
        return PythonLockfileMetadataV2(constraints, parsed_requirements)
    @classmethod
    def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
        """Header attributes specific to the v2 format (sorted for stable output)."""
        v2_instance = cast(PythonLockfileMetadataV2, instance)
        if v2_instance.requirements is None:
            serialized = None
        else:
            serialized = sorted(str(req) for req in v2_instance.requirements)
        return {"generated_with_requirements": serialized}
    def is_valid_for(
        self,
        *,
        is_tool: bool,
        expected_invalidation_digest: str | None,
        user_interpreter_constraints: InterpreterConstraints,
        interpreter_universe: Iterable[str],
        user_requirements: Iterable[PipRequirement],
    ) -> LockfileMetadataValidation:
        """Validate stored requirements and interpreter constraints.

        Tool lockfiles must match the user requirements exactly; user
        lockfiles only need to contain them as a superset.
        """
        reasons: set[InvalidPythonLockfileReason] = set()
        user_reqs = set(user_requirements)
        if is_tool:
            requirements_mismatch = self.requirements != user_reqs
        else:
            requirements_mismatch = not user_reqs.issubset(self.requirements)
        if requirements_mismatch:
            reasons.add(InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH)
        if not self.valid_for_interpreter_constraints.contains(
            user_interpreter_constraints, interpreter_universe
        ):
            reasons.add(InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
        return LockfileMetadataValidation(reasons)
| true | true |
1c3d547ec6954483f16bc8fd0fe87d1b95355936 | 1,602 | py | Python | DailyProgrammer/DP20150102.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | 2 | 2020-12-23T18:59:22.000Z | 2021-04-14T13:16:09.000Z | DailyProgrammer/DP20150102.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | DailyProgrammer/DP20150102.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | """
[2015-01-02] Challenge #195 [All] 2015 Prep Work
https://www.reddit.com/r/dailyprogrammer/comments/2r4wal/20150102_challenge_195_all_2015_prep_work/
#Description:
As we enter a new year it is a good time to get organized and be ready. One thing I have noticed as you use this
subreddit and finish challenges you repeat lots of code in solutions. This is true in the area of reading in data.
One thing I have done is develop some standard code I use in reading and parsing data.
For today's challenge you will be doing some prep work for yourself.
#Tool Development
Develop a tool or several tools you can use in the coming year for completing challenges. The tool is up to you. It can
be anything that you find you repeat in your code.
An example will be shown below that I use. But some basic ideas
* Read input from user
* Input from a file
* Output to user
* Output to a file
Do not limit yourself to these. Look at your previous code and find the pieces of code you repeat a lot and develop
your own library for handling that part of your challenges. Having this for your use will make solutions easier to
develop as you already have that code done.
#Example:
I tend to do a lot of work in C/objective C -- so I have this code I use a lot for getting input from the user and
parsing it. It can be further developed and added on by me which I will.
(https://github.com/coderd00d/standard-objects)
#Solutions:
Can be your code/link to your github/posting of it -- Also can just be ideas of tools you or others can develop.
"""
def main():
    """Placeholder entry point: this open-ended challenge has no solution code."""
    pass
if __name__ == "__main__":
    main()
| 43.297297 | 119 | 0.764669 |
def main():
pass
if __name__ == "__main__":
main()
| true | true |
1c3d54a0f76b5eb3ede1d76a0f91483a3678702e | 11,037 | py | Python | aliyun/log/es_migration/mapping_index_converter.py | topdown618/aliyun-log-python-sdk | 395949a5c307722e8223d926b366c50dacd32126 | [
"MIT"
] | 130 | 2017-03-31T07:41:46.000Z | 2022-03-27T14:31:22.000Z | aliyun/log/es_migration/mapping_index_converter.py | topdown618/aliyun-log-python-sdk | 395949a5c307722e8223d926b366c50dacd32126 | [
"MIT"
] | 170 | 2017-02-17T06:07:31.000Z | 2022-03-17T02:32:42.000Z | aliyun/log/es_migration/mapping_index_converter.py | topdown618/aliyun-log-python-sdk | 395949a5c307722e8223d926b366c50dacd32126 | [
"MIT"
] | 111 | 2017-01-16T07:35:01.000Z | 2022-03-18T03:31:32.000Z | #!/usr/bin/env python
# encoding: utf-8
# Copyright (C) Alibaba Cloud Computing
# All rights reserved.
from .. import IndexConfig, IndexKeyConfig, IndexLineConfig
from ..index_config import IndexJsonKeyConfig
class AliyunLogFieldType(object):
    """Index field type names accepted by Aliyun Log Service index configs."""
    TEXT = "text"
    LONG = "long"
    DOUBLE = "double"
    JSON = "json"
class DocRangeComparator(object):
    """Sub-key names of Elasticsearch range-type fields (gt/lt/gte/lte)."""
    GT = "gt"
    LT = "lt"
    GTE = "gte"
    LTE = "lte"
class MappingIndexConverter(object):
    """Converts an Elasticsearch index mapping into an Aliyun Log Service
    index configuration.

    Each supported Elasticsearch field type is translated by a dedicated
    ``handle_*`` classmethod, dispatched through the module-level
    ``field_type_handlers`` table; nested ``properties`` objects are
    flattened into a single JSON-typed index key.
    """

    # Tokenizer characters used for full-text fields.
    DEFAULT_TOKEN_LIST = [",", " ", "'", "\"", ";", "=", "(", ")", "[", "]", "{", "}", "?", "@", "&", "<", ">", "/",
                          ":", "\\n", "\\t", "\\r"]

    @classmethod
    def to_index_config(cls, mapping):
        """Return an IndexConfig built from *mapping*, or None if it is falsy.

        *mapping* is expected to be an Elasticsearch mapping dict with a
        top-level "properties" key.
        """
        if not mapping:
            return None
        line_config = IndexLineConfig(token_list=cls.DEFAULT_TOKEN_LIST, chinese=True)
        key_config_list = cls.to_key_config_list(properties=mapping["properties"])
        index_config = IndexConfig(line_config=line_config, key_config_list=key_config_list)
        return index_config

    @classmethod
    def to_key_config_list(cls, properties):
        """Build the per-field key configs for *properties*.

        Field names are de-duplicated case-insensitively (first occurrence
        wins) and fields with unsupported types are skipped. A synthetic
        "_id" key is always included.

        Raises:
            Exception: if a field description has neither "type" nor
                "properties".
        """
        key_config_list = {"_id": cls.handle_id()}
        if not properties:
            return key_config_list
        fields = set()
        for field_name, field_desc in properties.items():
            if field_name.lower() in fields:
                continue
            fields.add(field_name.lower())
            if "type" in field_desc:
                field_type = field_desc["type"]
                if field_type not in field_type_handlers:
                    # Unsupported Elasticsearch type: skip silently.
                    continue
                key_config = field_type_handlers[field_type]()
                key_config_list[field_name] = key_config
            elif "properties" in field_desc:
                # Nested object: flatten into one JSON-typed key.
                key_config = cls.handle_properties(field_desc["properties"])
                key_config_list[field_name] = key_config
            else:
                raise Exception("invalid field_desc '%s'" % field_desc)
        return key_config_list

    @classmethod
    def handle_id(cls):
        """Key config for the synthetic document-id field."""
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True
        )

    @classmethod
    def handle_text(cls):
        """ES ``text`` -> tokenized full-text key."""
        return IndexKeyConfig(
            token_list=cls.DEFAULT_TOKEN_LIST,
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
            chinese=True,
        )

    @classmethod
    def handle_keyword(cls):
        """ES ``keyword`` -> untokenized text key."""
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
            chinese=True,
        )

    @classmethod
    def handle_long(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.LONG,
            doc_value=True,
        )

    @classmethod
    def handle_integer(cls):
        # All ES integer widths map onto SLS "long".
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.LONG,
            doc_value=True,
        )

    @classmethod
    def handle_short(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.LONG,
            doc_value=True,
        )

    @classmethod
    def handle_byte(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.LONG,
            doc_value=True,
        )

    @classmethod
    def handle_double(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.DOUBLE,
            doc_value=True,
        )

    @classmethod
    def handle_float(cls):
        # All ES float widths map onto SLS "double".
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.DOUBLE,
            doc_value=True,
        )

    @classmethod
    def handle_half_float(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.DOUBLE,
            doc_value=True,
        )

    @classmethod
    def handle_scaled_float(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.DOUBLE,
            doc_value=True,
        )

    @classmethod
    def handle_date(cls):
        # Dates are indexed as tokenized text.
        return IndexKeyConfig(
            token_list=cls.DEFAULT_TOKEN_LIST,
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def handle_boolean(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def _range_key_config(cls, key_type):
        """Build a JSON key config exposing gt/lt/gte/lte sub-keys of *key_type*.

        Shared by all ES range-type handlers (they only differ in the
        sub-key value type).
        """
        json_key_config = IndexJsonKeyConfig()
        for comparator in (
            DocRangeComparator.GT,
            DocRangeComparator.LT,
            DocRangeComparator.GTE,
            DocRangeComparator.LTE,
        ):
            json_key_config.add_key(key_name=comparator, key_type=key_type, doc_value=True)
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.JSON,
            json_key_config=json_key_config
        )

    @classmethod
    def handle_integer_range(cls):
        return cls._range_key_config(AliyunLogFieldType.LONG)

    @classmethod
    def handle_float_range(cls):
        return cls._range_key_config(AliyunLogFieldType.DOUBLE)

    @classmethod
    def handle_long_range(cls):
        return cls._range_key_config(AliyunLogFieldType.LONG)

    @classmethod
    def handle_double_range(cls):
        return cls._range_key_config(AliyunLogFieldType.DOUBLE)

    @classmethod
    def handle_date_range(cls):
        # Date-range endpoints are stored as text.
        return cls._range_key_config(AliyunLogFieldType.TEXT)

    @classmethod
    def handle_ip_range(cls):
        # Unlike the other range types, ES serializes ip_range as a plain string.
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def handle_geo_point(cls):
        return IndexKeyConfig(
            token_list=cls.DEFAULT_TOKEN_LIST,
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def handle_geo_shape(cls):
        return IndexKeyConfig(
            token_list=cls.DEFAULT_TOKEN_LIST,
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def handle_ip(cls):
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.TEXT,
            doc_value=True,
        )

    @classmethod
    def handle_properties(cls, properties):
        """Flatten a nested ``properties`` object into one JSON key config.

        At most 50 dotted sub-keys are indexed (the loop stops once the
        limit is reached, matching the original behaviour).
        """
        json_key_config = IndexJsonKeyConfig()
        key_configs = cls.parse_properties(None, properties)
        for key_name, key_config in key_configs.items():
            json_key_config.add_key(key_name=key_name, key_type=key_config.index_type, doc_value=key_config.doc_value)
            if len(json_key_config.json_keys) >= 50:
                break
        return IndexKeyConfig(
            index_type=AliyunLogFieldType.JSON,
            json_key_config=json_key_config
        )

    @classmethod
    def parse_properties(cls, prefix, properties):
        """Recursively collect {dotted_field_name: key_config} for *properties*.

        *prefix* is the dotted path of the enclosing object (None at the top
        level). Unsupported field types are skipped.

        Raises:
            Exception: if a field description has neither "type" nor
                "properties".
        """
        key_config_list = {}
        for field_name, field_desc in properties.items():
            if prefix:
                field_name = prefix + "." + field_name
            if "type" in field_desc:
                field_type = field_desc["type"]
                if field_type not in field_type_handlers:
                    continue
                key_config = field_type_handlers[field_type]()
                key_config_list[field_name] = key_config
            elif "properties" in field_desc:
                sub_key_config_list = cls.parse_properties(field_name, field_desc["properties"])
                key_config_list.update(sub_key_config_list)
            else:
                raise Exception("invalid field_desc '%s'" % field_desc)
        return key_config_list
# Dispatch table: Elasticsearch field type name -> handler classmethod that
# produces the corresponding Aliyun Log index key configuration. Types absent
# from this table are silently skipped by MappingIndexConverter.
field_type_handlers = {
    "text": MappingIndexConverter.handle_text,
    "keyword": MappingIndexConverter.handle_keyword,
    "long": MappingIndexConverter.handle_long,
    "integer": MappingIndexConverter.handle_integer,
    "short": MappingIndexConverter.handle_short,
    "byte": MappingIndexConverter.handle_byte,
    "double": MappingIndexConverter.handle_double,
    "float": MappingIndexConverter.handle_float,
    "half_float": MappingIndexConverter.handle_half_float,
    "scaled_float": MappingIndexConverter.handle_scaled_float,
    "date": MappingIndexConverter.handle_date,
    "boolean": MappingIndexConverter.handle_boolean,
    "integer_range": MappingIndexConverter.handle_integer_range,
    "float_range": MappingIndexConverter.handle_float_range,
    "long_range": MappingIndexConverter.handle_long_range,
    "double_range": MappingIndexConverter.handle_double_range,
    "date_range": MappingIndexConverter.handle_date_range,
    "ip_range": MappingIndexConverter.handle_ip_range,
    "geo_point": MappingIndexConverter.handle_geo_point,
    "geo_shape": MappingIndexConverter.handle_geo_shape,
    "ip": MappingIndexConverter.handle_ip,
}
| 36.425743 | 118 | 0.668388 |
from .. import IndexConfig, IndexKeyConfig, IndexLineConfig
from ..index_config import IndexJsonKeyConfig
class AliyunLogFieldType(object):
TEXT = "text"
LONG = "long"
DOUBLE = "double"
JSON = "json"
class DocRangeComparator(object):
GT = "gt"
LT = "lt"
GTE = "gte"
LTE = "lte"
class MappingIndexConverter(object):
DEFAULT_TOKEN_LIST = [",", " ", "'", "\"", ";", "=", "(", ")", "[", "]", "{", "}", "?", "@", "&", "<", ">", "/",
":", "\\n", "\\t", "\\r"]
@classmethod
def to_index_config(cls, mapping):
if not mapping:
return None
line_config = IndexLineConfig(token_list=cls.DEFAULT_TOKEN_LIST, chinese=True)
key_config_list = cls.to_key_config_list(properties=mapping["properties"])
index_config = IndexConfig(line_config=line_config, key_config_list=key_config_list)
return index_config
@classmethod
def to_key_config_list(cls, properties):
key_config_list = {"_id": cls.handle_id()}
if not properties:
return key_config_list
fields = set()
for field_name, field_desc in properties.items():
if field_name.lower() in fields:
continue
fields.add(field_name.lower())
if "type" in field_desc:
field_type = field_desc["type"]
if field_type not in field_type_handlers:
continue
key_config = field_type_handlers[field_type]()
key_config_list[field_name] = key_config
elif "properties" in field_desc:
key_config = cls.handle_properties(field_desc["properties"])
key_config_list[field_name] = key_config
else:
raise Exception("invalid field_desc '%s'" % field_desc)
return key_config_list
@classmethod
def handle_id(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.TEXT,
doc_value=True
)
@classmethod
def handle_text(cls):
return IndexKeyConfig(
token_list=cls.DEFAULT_TOKEN_LIST,
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
chinese=True,
)
@classmethod
def handle_keyword(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
chinese=True,
)
@classmethod
def handle_long(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.LONG,
doc_value=True,
)
@classmethod
def handle_integer(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.LONG,
doc_value=True,
)
@classmethod
def handle_short(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.LONG,
doc_value=True,
)
@classmethod
def handle_byte(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.LONG,
doc_value=True,
)
@classmethod
def handle_double(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.DOUBLE,
doc_value=True,
)
@classmethod
def handle_float(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.DOUBLE,
doc_value=True,
)
@classmethod
def handle_half_float(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.DOUBLE,
doc_value=True,
)
@classmethod
def handle_scaled_float(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.DOUBLE,
doc_value=True,
)
@classmethod
def handle_date(cls):
return IndexKeyConfig(
token_list=cls.DEFAULT_TOKEN_LIST,
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_boolean(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_integer_range(cls):
json_key_config = IndexJsonKeyConfig()
json_key_config.add_key(key_name=DocRangeComparator.GT, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LT, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.GTE, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LTE, key_type=AliyunLogFieldType.LONG, doc_value=True)
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def handle_float_range(cls):
json_key_config = IndexJsonKeyConfig()
json_key_config.add_key(key_name=DocRangeComparator.GT, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LT, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.GTE, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LTE, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def handle_long_range(cls):
json_key_config = IndexJsonKeyConfig()
json_key_config.add_key(key_name=DocRangeComparator.GT, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LT, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.GTE, key_type=AliyunLogFieldType.LONG, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LTE, key_type=AliyunLogFieldType.LONG, doc_value=True)
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def handle_double_range(cls):
json_key_config = IndexJsonKeyConfig()
json_key_config.add_key(key_name=DocRangeComparator.GT, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LT, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.GTE, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LTE, key_type=AliyunLogFieldType.DOUBLE, doc_value=True)
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def handle_date_range(cls):
json_key_config = IndexJsonKeyConfig()
json_key_config.add_key(key_name=DocRangeComparator.GT, key_type=AliyunLogFieldType.TEXT, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LT, key_type=AliyunLogFieldType.TEXT, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.GTE, key_type=AliyunLogFieldType.TEXT, doc_value=True)
json_key_config.add_key(key_name=DocRangeComparator.LTE, key_type=AliyunLogFieldType.TEXT, doc_value=True)
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def handle_ip_range(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_geo_point(cls):
return IndexKeyConfig(
token_list=cls.DEFAULT_TOKEN_LIST,
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_geo_shape(cls):
return IndexKeyConfig(
token_list=cls.DEFAULT_TOKEN_LIST,
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_ip(cls):
return IndexKeyConfig(
index_type=AliyunLogFieldType.TEXT,
doc_value=True,
)
@classmethod
def handle_properties(cls, properties):
json_key_config = IndexJsonKeyConfig()
key_configs = cls.parse_properties(None, properties)
for key_name, key_config in key_configs.items():
json_key_config.add_key(key_name=key_name, key_type=key_config.index_type, doc_value=key_config.doc_value)
if len(json_key_config.json_keys) >= 50:
break
return IndexKeyConfig(
index_type=AliyunLogFieldType.JSON,
json_key_config=json_key_config
)
@classmethod
def parse_properties(cls, prefix, properties):
key_config_list = {}
for field_name, field_desc in properties.items():
if prefix:
field_name = prefix + "." + field_name
if "type" in field_desc:
field_type = field_desc["type"]
if field_type not in field_type_handlers:
continue
key_config = field_type_handlers[field_type]()
key_config_list[field_name] = key_config
elif "properties" in field_desc:
sub_key_config_list = cls.parse_properties(field_name, field_desc["properties"])
key_config_list.update(sub_key_config_list)
else:
raise Exception("invalid field_desc '%s'" % field_desc)
return key_config_list
field_type_handlers = {
"text": MappingIndexConverter.handle_text,
"keyword": MappingIndexConverter.handle_keyword,
"long": MappingIndexConverter.handle_long,
"integer": MappingIndexConverter.handle_integer,
"short": MappingIndexConverter.handle_short,
"byte": MappingIndexConverter.handle_byte,
"double": MappingIndexConverter.handle_double,
"float": MappingIndexConverter.handle_float,
"half_float": MappingIndexConverter.handle_half_float,
"scaled_float": MappingIndexConverter.handle_scaled_float,
"date": MappingIndexConverter.handle_date,
"boolean": MappingIndexConverter.handle_boolean,
"integer_range": MappingIndexConverter.handle_integer_range,
"float_range": MappingIndexConverter.handle_float_range,
"long_range": MappingIndexConverter.handle_long_range,
"double_range": MappingIndexConverter.handle_double_range,
"date_range": MappingIndexConverter.handle_date_range,
"ip_range": MappingIndexConverter.handle_ip_range,
"geo_point": MappingIndexConverter.handle_geo_point,
"geo_shape": MappingIndexConverter.handle_geo_shape,
"ip": MappingIndexConverter.handle_ip,
}
| true | true |
1c3d54b4dc476b0348ec7eb0d061e0cb6a18ca07 | 1,514 | py | Python | raid/models.py | valdergallo/raidmanager | 8e4877cd2b10de98ba7b1e2c51719b7ff0532858 | [
"MIT"
] | null | null | null | raid/models.py | valdergallo/raidmanager | 8e4877cd2b10de98ba7b1e2c51719b7ff0532858 | [
"MIT"
] | 4 | 2016-04-03T12:32:30.000Z | 2016-10-31T09:54:42.000Z | raid/models.py | valdergallo/raidmanager | 8e4877cd2b10de98ba7b1e2c51719b7ff0532858 | [
"MIT"
] | null | null | null | # encoding: utf-8
from django.db import models
from core.models import DifficultType
from core.models import Game
class Raid(models.Model):
    """A raid of a game at a given difficulty."""
    # Fixed: upload_to must be relative to MEDIA_ROOT; the original "/raid/"
    # leading slash produced an absolute path escaping the media directory.
    image = models.ImageField(upload_to="raid/", null=True, blank=True)
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 — confirm.
    game = models.ForeignKey(Game)
    name = models.CharField(blank=True, max_length=100)
    lvl = models.IntegerField(blank=True, null=True)
    difficult_type = models.ForeignKey(DifficultType)
    def __str__(self):
        return self.name
class Boss(models.Model):
    """A boss encounter, with display stats and a difficulty."""
    # Fixed: upload_to must be relative to MEDIA_ROOT; the original "/boss/"
    # leading slash produced an absolute path escaping the media directory.
    image = models.ImageField(upload_to="boss/", null=True, blank=True)
    name = models.CharField(blank=True, max_length=100)
    lvl = models.IntegerField(blank=True, null=True)
    # Stored as free text (e.g. formatted numbers), not an integer.
    health = models.CharField(blank=True, max_length=100)
    difficult_type = models.ForeignKey(DifficultType)
    def __str__(self):
        return self.name
class RaidGroupAvaliable(models.Model):
    """Through model linking a RaidGroup to a raid/boss at a difficulty,
    tracking whether that encounter was completed and when.

    NOTE(review): class name misspells "Available"; renaming would require a
    migration and updating the ManyToManyField through reference below.
    """
    game = models.ForeignKey(Game)
    raid = models.ForeignKey(Raid)
    boss = models.ForeignKey(Boss)
    difficult_type = models.ForeignKey(DifficultType)
    raid_group = models.ForeignKey('RaidGroup')
    # True once the encounter has been cleared by the group.
    status = models.BooleanField(default=False)
    execution_date = models.DateField(null=True, blank=True)
class RaidGroup(models.Model):
    """A named group of players tracking raids for a game."""
    # Fixed: upload_to must be relative to MEDIA_ROOT; the original
    # "/raid_group/" leading slash escaped the media directory.
    image = models.ImageField(upload_to="raid_group/", null=True, blank=True)
    game = models.ForeignKey(Game)
    name = models.CharField(blank=True, max_length=100)
    raids = models.ManyToManyField(Raid, through='RaidGroupAvaliable')
    def __str__(self):
        return self.name
from django.db import models
from core.models import DifficultType
from core.models import Game
class Raid(models.Model):
image = models.ImageField(upload_to="/raid/", null=True, blank=True)
game = models.ForeignKey(Game)
name = models.CharField(blank=True, max_length=100)
lvl = models.IntegerField(blank=True, null=True)
difficult_type = models.ForeignKey(DifficultType)
def __str__(self):
return self.name
class Boss(models.Model):
image = models.ImageField(upload_to="/boss/", null=True, blank=True)
name = models.CharField(blank=True, max_length=100)
lvl = models.IntegerField(blank=True, null=True)
health = models.CharField(blank=True, max_length=100)
difficult_type = models.ForeignKey(DifficultType)
def __str__(self):
return self.name
class RaidGroupAvaliable(models.Model):
game = models.ForeignKey(Game)
raid = models.ForeignKey(Raid)
boss = models.ForeignKey(Boss)
difficult_type = models.ForeignKey(DifficultType)
raid_group = models.ForeignKey('RaidGroup')
status = models.BooleanField(default=False)
execution_date = models.DateField(null=True, blank=True)
class RaidGroup(models.Model):
image = models.ImageField(upload_to="/raid_group/", null=True, blank=True)
game = models.ForeignKey(Game)
name = models.CharField(blank=True, max_length=100)
raids = models.ManyToManyField(Raid, through='RaidGroupAvaliable')
def __str__(self):
return self.name
| true | true |
1c3d567557b7171435429acec713b61a7ee8c9ac | 5,303 | py | Python | tests/func/experiments/test_checkpoints.py | Honzys/dvc | 8205c84077a7e07b48c768d07710108078a78149 | [
"Apache-2.0"
] | null | null | null | tests/func/experiments/test_checkpoints.py | Honzys/dvc | 8205c84077a7e07b48c768d07710108078a78149 | [
"Apache-2.0"
] | null | null | null | tests/func/experiments/test_checkpoints.py | Honzys/dvc | 8205c84077a7e07b48c768d07710108078a78149 | [
"Apache-2.0"
] | null | null | null | import pytest
from funcy import first
from dvc.exceptions import DvcException
from dvc.repo.experiments import MultipleBranchError
from dvc.repo.experiments.base import EXEC_APPLY, EXEC_CHECKPOINT
@pytest.mark.parametrize("workspace", [True, False])
def test_new_checkpoint(
tmp_dir, scm, dvc, checkpoint_stage, mocker, workspace
):
new_mock = mocker.spy(dvc.experiments, "new")
results = dvc.experiments.run(
checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
)
exp = first(results)
new_mock.assert_called_once()
for rev in dvc.brancher([exp]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
if workspace:
assert scm.get_ref(EXEC_APPLY) == exp
assert scm.get_ref(EXEC_CHECKPOINT) == exp
if workspace:
assert (tmp_dir / "foo").read_text().strip() == str(
checkpoint_stage.iterations
)
assert (tmp_dir / "metrics.yaml").read_text().strip() == "foo: 2"
@pytest.mark.parametrize(
    "checkpoint_resume, workspace",
    [(None, True), (None, False), ("foo", True), ("foo", False)],
)
def test_resume_checkpoint(
    tmp_dir, scm, dvc, checkpoint_stage, checkpoint_resume, workspace
):
    """Resuming a checkpoint experiment continues from the prior state,
    doubling the iteration counter; resuming an unknown rev fails.

    The "foo" parameter value is a placeholder meaning "resume from the
    rev produced by the first run" (substituted below).
    """
    results = dvc.experiments.run(
        checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
    )
    # An unknown revision cannot be resumed.
    with pytest.raises(DvcException):
        dvc.experiments.run(
            checkpoint_stage.addressing,
            checkpoint_resume="abc1234",
            tmp_dir=not workspace,
        )
    if checkpoint_resume:
        # Replace the placeholder with the actual rev from the first run.
        checkpoint_resume = first(results)
    if not workspace:
        # tmp-dir runs don't touch the workspace; apply before resuming.
        dvc.experiments.apply(first(results))
    results = dvc.experiments.run(
        checkpoint_stage.addressing,
        checkpoint_resume=checkpoint_resume,
        tmp_dir=not workspace,
    )
    exp = first(results)
    for rev in dvc.brancher([exp]):
        if rev == "workspace":
            continue
        tree = dvc.repo_tree
        with tree.open(tmp_dir / "foo") as fobj:
            # Two full runs back to back -> twice the iteration count.
            assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
        with tree.open(tmp_dir / "metrics.yaml") as fobj:
            assert fobj.read().strip() == "foo: 2"
    if workspace:
        assert scm.get_ref(EXEC_APPLY) == exp
        assert scm.get_ref(EXEC_CHECKPOINT) == exp
@pytest.mark.parametrize("workspace", [True, False])
def test_reset_checkpoint(
tmp_dir, scm, dvc, checkpoint_stage, caplog, workspace
):
dvc.experiments.run(
checkpoint_stage.addressing, name="foo", tmp_dir=not workspace,
)
if workspace:
scm.reset(hard=True)
scm.gitpython.repo.git.clean(force=True)
results = dvc.experiments.run(
checkpoint_stage.addressing,
params=["foo=2"],
name="foo",
tmp_dir=not workspace,
reset=True,
)
exp = first(results)
for rev in dvc.brancher([exp]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
if workspace:
assert scm.get_ref(EXEC_APPLY) == exp
assert scm.get_ref(EXEC_CHECKPOINT) == exp
@pytest.mark.parametrize("workspace", [True, False])
def test_resume_branch(tmp_dir, scm, dvc, checkpoint_stage, workspace):
results = dvc.experiments.run(
checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
)
branch_rev = first(results)
if not workspace:
dvc.experiments.apply(branch_rev)
results = dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume=branch_rev,
tmp_dir=not workspace,
)
checkpoint_a = first(results)
dvc.experiments.apply(branch_rev, force=True)
results = dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume=branch_rev,
params=["foo=100"],
tmp_dir=not workspace,
)
checkpoint_b = first(results)
for rev in dvc.brancher([checkpoint_a]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
for rev in dvc.brancher([checkpoint_b]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 100"
with pytest.raises(MultipleBranchError):
dvc.experiments.get_branch_by_rev(branch_rev)
assert branch_rev == dvc.experiments.scm.gitpython.repo.git.merge_base(
checkpoint_a, checkpoint_b
)
| 31.754491 | 78 | 0.633981 | import pytest
from funcy import first
from dvc.exceptions import DvcException
from dvc.repo.experiments import MultipleBranchError
from dvc.repo.experiments.base import EXEC_APPLY, EXEC_CHECKPOINT
@pytest.mark.parametrize("workspace", [True, False])
def test_new_checkpoint(
tmp_dir, scm, dvc, checkpoint_stage, mocker, workspace
):
new_mock = mocker.spy(dvc.experiments, "new")
results = dvc.experiments.run(
checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
)
exp = first(results)
new_mock.assert_called_once()
for rev in dvc.brancher([exp]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
if workspace:
assert scm.get_ref(EXEC_APPLY) == exp
assert scm.get_ref(EXEC_CHECKPOINT) == exp
if workspace:
assert (tmp_dir / "foo").read_text().strip() == str(
checkpoint_stage.iterations
)
assert (tmp_dir / "metrics.yaml").read_text().strip() == "foo: 2"
@pytest.mark.parametrize(
"checkpoint_resume, workspace",
[(None, True), (None, False), ("foo", True), ("foo", False)],
)
def test_resume_checkpoint(
tmp_dir, scm, dvc, checkpoint_stage, checkpoint_resume, workspace
):
results = dvc.experiments.run(
checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
)
with pytest.raises(DvcException):
dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume="abc1234",
tmp_dir=not workspace,
)
if checkpoint_resume:
checkpoint_resume = first(results)
if not workspace:
dvc.experiments.apply(first(results))
results = dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume=checkpoint_resume,
tmp_dir=not workspace,
)
exp = first(results)
for rev in dvc.brancher([exp]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
if workspace:
assert scm.get_ref(EXEC_APPLY) == exp
assert scm.get_ref(EXEC_CHECKPOINT) == exp
@pytest.mark.parametrize("workspace", [True, False])
def test_reset_checkpoint(
tmp_dir, scm, dvc, checkpoint_stage, caplog, workspace
):
dvc.experiments.run(
checkpoint_stage.addressing, name="foo", tmp_dir=not workspace,
)
if workspace:
scm.reset(hard=True)
scm.gitpython.repo.git.clean(force=True)
results = dvc.experiments.run(
checkpoint_stage.addressing,
params=["foo=2"],
name="foo",
tmp_dir=not workspace,
reset=True,
)
exp = first(results)
for rev in dvc.brancher([exp]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
if workspace:
assert scm.get_ref(EXEC_APPLY) == exp
assert scm.get_ref(EXEC_CHECKPOINT) == exp
@pytest.mark.parametrize("workspace", [True, False])
def test_resume_branch(tmp_dir, scm, dvc, checkpoint_stage, workspace):
results = dvc.experiments.run(
checkpoint_stage.addressing, params=["foo=2"], tmp_dir=not workspace
)
branch_rev = first(results)
if not workspace:
dvc.experiments.apply(branch_rev)
results = dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume=branch_rev,
tmp_dir=not workspace,
)
checkpoint_a = first(results)
dvc.experiments.apply(branch_rev, force=True)
results = dvc.experiments.run(
checkpoint_stage.addressing,
checkpoint_resume=branch_rev,
params=["foo=100"],
tmp_dir=not workspace,
)
checkpoint_b = first(results)
for rev in dvc.brancher([checkpoint_a]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 2"
for rev in dvc.brancher([checkpoint_b]):
if rev == "workspace":
continue
tree = dvc.repo_tree
with tree.open(tmp_dir / "foo") as fobj:
assert fobj.read().strip() == str(2 * checkpoint_stage.iterations)
with tree.open(tmp_dir / "metrics.yaml") as fobj:
assert fobj.read().strip() == "foo: 100"
with pytest.raises(MultipleBranchError):
dvc.experiments.get_branch_by_rev(branch_rev)
assert branch_rev == dvc.experiments.scm.gitpython.repo.git.merge_base(
checkpoint_a, checkpoint_b
)
| true | true |
1c3d56a96ec47934370b4f3ce4d6b0b21d197197 | 9,334 | py | Python | stix2elevator/ids.py | sthagen/oasis-open-cti-stix-elevator | e41f0870b3d77f4023c1a30224e681d6759b32e9 | [
"BSD-3-Clause"
] | null | null | null | stix2elevator/ids.py | sthagen/oasis-open-cti-stix-elevator | e41f0870b3d77f4023c1a30224e681d6759b32e9 | [
"BSD-3-Clause"
] | null | null | null | stix2elevator/ids.py | sthagen/oasis-open-cti-stix-elevator | e41f0870b3d77f4023c1a30224e681d6759b32e9 | [
"BSD-3-Clause"
] | null | null | null | # Standard Library
import importlib
import inspect
import re
import uuid
# external
from stix2.base import SCO_DET_ID_NAMESPACE
from stix2.canonicalization.Canonicalize import canonicalize
# internal
from stix2elevator.options import error, info, warn
from stix2elevator.utils import (find_key_in_dict_case_insensitive,
find_string_in_list_case_insensitive,
map_1x_type_to_20)
def record_ids(stix_id, new_id):
    """Associate *new_id* (a STIX 2.x id) with *stix_id* (a STIX 1.x id).

    Logs an info message when *stix_id* already maps to other ids, and an
    error (recording nothing) when *new_id* is None.
    """
    if stix_id in _IDS_TO_NEW_IDS:
        existing = tuple(_IDS_TO_NEW_IDS[stix_id])
        info("%s is already associated other ids: %s", 703, str(stix_id), existing)
    if new_id is None:
        error("Can not associate %s with None", 611, stix_id)
    else:
        add_id_value(stix_id, new_id)
_SDO_ID_WITH_NO_1X_OBJECT = []
def clear_ids_with_no_1x_object():
    # Reset the registry of 2.x ids generated without a 1.x source object.
    global _SDO_ID_WITH_NO_1X_OBJECT
    _SDO_ID_WITH_NO_1X_OBJECT = []
def exists_ids_with_no_1x_object(sdo_id):
    # True if *sdo_id* was generated without a corresponding 1.x object.
    return sdo_id in _SDO_ID_WITH_NO_1X_OBJECT
def add_ids_with_no_1x_object(sdo_id):
    # Record *sdo_id* as having no 1.x source object (no duplicates kept).
    if not exists_ids_with_no_1x_object(sdo_id):
        _SDO_ID_WITH_NO_1X_OBJECT.append(sdo_id)
# arguments:
# stix20SOName - the name of the type of object in 2.0
# stix12ID - the ID on the STIX 1.x object. In STIX 1.x, embedded objects might not have an ID. Additionally
# some objects in STIX 1.x didn't have IDs, but the corresponding object in STIX 2.0 does
# id_used - sometimes (with TTPs and ETs), more than one object in 2.0 is created from a 1.x object - this flag
# indicates that the 1.x's ID has been used for another 2.0 object, so a new one must be created
#
# algorithm:
# if a stix12ID is given, and it hasn't been used already, then
# split the stix12ID into its type and UUID parts.
# if the stix20SOName has been given, create the new id from it and the UUID
# otherwise, unless the stix12ID's type is ttp or et (which don't exist in 2.0) use the mapped 1.x type
#
# if a stix12ID isn't given or it has been used already (STIX 1.x TTPs, etc can generate multiple STIX 2.0 objects)
# generated a new UUID
# create the new id using stix20SOName and the new UUID
def generate_stix2x_id(stix2x_so_name, stix12_id=None, id_used=False):
    """Create a STIX 2.x id, reusing the UUID of *stix12_id* when possible.

    stix2x_so_name -- 2.x type name used as the id prefix, or None to
        derive the prefix from the 1.x id's type part.
    stix12_id -- the original STIX 1.x id, if any.
    id_used -- True when *stix12_id* was already consumed by another 2.x
        object, forcing a freshly generated UUID.
    Returns the new id string, or None when no 2.x type can be determined.
    """
    if not stix12_id or id_used:
        new_id = stix2x_so_name + "--" + str(uuid.uuid4())
        add_ids_with_no_1x_object(new_id)
        if id_used and stix12_id:
            warn("%s already used, generated new id %s", 726, stix12_id, new_id)
        return new_id
    else:
        # this works for all versions of UUID
        result = re.search('^(.+)-([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})',
                           stix12_id)
        if result:
            current_uuid = result.group(2)
            if stix2x_so_name is None:
                # Derive the 2.x type from the "<ns>:<type>-<uuid>" 1.x id.
                stx1x_type = result.group(1).split(":")
                if stx1x_type[1].lower() == "ttp" or stx1x_type[1].lower() == "et":
                    # ttp/et have no single STIX 2.x equivalent type.
                    error("Unable to determine the STIX 2.x type for %s", 604, stix12_id)
                    return None
                else:
                    return map_1x_type_to_20(stx1x_type[1]) + "--" + current_uuid
            else:
                return stix2x_so_name + "--" + current_uuid
        else:
            if stix2x_so_name:
                warn("Malformed id %s. Generated a new uuid", 605, stix12_id)
                return stix2x_so_name + "--" + str(uuid.uuid4())
            else:
                error("Unable to determine the STIX 2.x type for %s, which is malformed", 629, stix12_id)
                return None
_SCO_CLASSES = {}
def _choose_one_hash(hash_dict):
if "MD5" in hash_dict:
return {"MD5": hash_dict["MD5"]}
elif "SHA-1" in hash_dict:
return {"SHA-1": hash_dict["SHA-1"]}
elif "SHA-256" in hash_dict:
return {"SHA-256": hash_dict["SHA-256"]}
elif "SHA-512" in hash_dict:
return {"SHA-512": hash_dict["SHA-512"]}
else:
k = next(iter(hash_dict), None)
if k is not None:
return {k: hash_dict[k]}
def generate_sco_id(type, instance):
    """Generate a STIX 2.1 id for the SCO dict *instance* of *type*.

    Follows the STIX 2.1 deterministic-id scheme: when the registered
    class for *type* declares id-contributing properties and *instance*
    supplies at least one of them, the id is a UUIDv5 over the
    canonicalized JSON of those properties; otherwise a random UUIDv4
    is used.  (Dead commented-out code from an earlier list-based
    canonicalization was removed.)
    """
    required_prefix = type + "--"
    if not _SCO_CLASSES:
        # Lazily build the type-name -> class map once from stix2.v21.
        module = importlib.import_module("stix2.v21")
        for k, c in inspect.getmembers(module, inspect.isclass):
            if hasattr(c, "_properties") and "type" in c._properties:
                _SCO_CLASSES[c._properties["type"]._fixed_value] = c
    if type in _SCO_CLASSES:
        klass = _SCO_CLASSES[type]
        if klass and hasattr(klass, "_id_contributing_properties") and klass._id_contributing_properties:
            contributing_properties = klass._id_contributing_properties
            streamlined_object = {}
            possible_hash = None
            if "hashes" in instance and "hashes" in contributing_properties:
                # Only a single hash value may contribute to the id.
                possible_hash = _choose_one_hash(instance["hashes"])
                if possible_hash:
                    streamlined_object["hashes"] = possible_hash
            for key in contributing_properties:
                if key != "hashes" and key in instance:
                    # The elevator represents content with plain Python
                    # containers, so values can be used as-is.
                    streamlined_object[key] = instance[key]
            if streamlined_object:
                data = canonicalize(streamlined_object, utf8=False)
                # try/except here to enable python 2 compatibility
                try:
                    return required_prefix + str(uuid.uuid5(SCO_DET_ID_NAMESPACE, data))
                except UnicodeDecodeError:
                    return required_prefix + str(uuid.uuid5(SCO_DET_ID_NAMESPACE, data.encode("utf-8")))
    return required_prefix + str(uuid.uuid4())
_IDS_TO_NEW_IDS = {}
def exists_id_key(key):
    # Case-insensitive membership test against the 1.x -> 2.x id map.
    return find_key_in_dict_case_insensitive(key, _IDS_TO_NEW_IDS)
def get_id_value(key):
    # Return the list of 2.x ids recorded for *key*, or [] if none.
    # NOTE(review): the existence test is case-insensitive but the lookup
    # is exact -- presumably keys only differ in case when the exact key
    # is also present; confirm, otherwise a KeyError is possible here.
    if exists_id_key(key):
        return _IDS_TO_NEW_IDS[key]
    else:
        return []
def get_id_values():
    # All recorded lists of 2.x ids (a dict view over the map).
    return _IDS_TO_NEW_IDS.values()
def add_id_value(key, value):
    # Append *value* to the ids mapped from *key*, creating the entry on
    # first use.  A falsy value is still stored, but warned about.
    if not value:
        warn("No object mapped to %s", 610, key)
    if exists_id_key(key):
        _IDS_TO_NEW_IDS[key].append(value)
    else:
        _IDS_TO_NEW_IDS[key] = [value]
def clear_id_mapping():
    # Reset the 1.x -> 2.x id map.
    global _IDS_TO_NEW_IDS
    _IDS_TO_NEW_IDS = {}
_IDS_TO_CYBER_OBSERVABLES = {}
def clear_object_id_mapping():
    # Reset the observable-id -> cyber-observable map.
    global _IDS_TO_CYBER_OBSERVABLES
    _IDS_TO_CYBER_OBSERVABLES = {}
def exists_object_id_key(key):
    # Case-insensitive membership test against the observable map.
    return find_key_in_dict_case_insensitive(key, _IDS_TO_CYBER_OBSERVABLES)
def get_object_id_value(key):
    # Return the cyber observable recorded for *key*, or [] if none.
    # NOTE(review): same case-insensitive-exists / exact-lookup pattern
    # as get_id_value -- confirm exact keys are always present.
    if exists_object_id_key(key):
        return _IDS_TO_CYBER_OBSERVABLES[key]
    else:
        return []
def get_object_id_values():
    # All recorded cyber observables (a dict view over the map).
    return _IDS_TO_CYBER_OBSERVABLES.values()
def add_object_id_value(key, value):
    # Record *value* for *key*; an existing entry is kept (with a warning),
    # and a falsy value is warned about after being stored.
    if exists_object_id_key(key):
        warn("This observable %s already is associated with cyber observables", 639, key)
    else:
        _IDS_TO_CYBER_OBSERVABLES[key] = value
    if not value:
        warn("Can not associate %s with None", 611, key)
_ID_OF_OBSERVABLES_IN_SIGHTINGS = []
def clear_id_of_obs_in_sightings():
    # Reset the list of observable ids referenced from sightings.
    global _ID_OF_OBSERVABLES_IN_SIGHTINGS
    _ID_OF_OBSERVABLES_IN_SIGHTINGS = []
def exists_id_of_obs_in_sightings(id):
    # Case-insensitive membership test against the sightings list.
    return find_string_in_list_case_insensitive(id, _ID_OF_OBSERVABLES_IN_SIGHTINGS)
def add_id_of_obs_in_sightings(id):
    # Record *id* as referenced from a sighting (no duplicates kept).
    global _ID_OF_OBSERVABLES_IN_SIGHTINGS
    if not exists_id_of_obs_in_sightings(id):
        _ID_OF_OBSERVABLES_IN_SIGHTINGS.append(id)
def clear_id_of_obs_in_characterizations():
    # Reset the list of observable ids referenced from characterizations.
    global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
    _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS = []
def exists_id_of_obs_in_characterizations(id):
    # Case-insensitive membership test against the characterizations list.
    return find_string_in_list_case_insensitive(id, _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS)
def add_id_of_obs_in_characterizations(id):
    # Record *id* as referenced from a characterization (no duplicates).
    global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
    if not exists_id_of_obs_in_characterizations(id):
        _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS.append(id)
def fix_ids_in_characterizations():
    """Replace each recorded 1.x observable id with its mapped 2.x ids.

    Ids with no recorded mapping are kept unchanged.
    """
    global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
    rewritten = []
    for obs_id in _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS:
        mapped = get_id_value(obs_id) if exists_id_key(obs_id) else [obs_id]
        rewritten.extend(mapped)
    _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS = rewritten
def get_uuid_from_id(id, separator="--"):
    """Return the UUID portion of *id* (the text after *separator*)."""
    return id.split(separator)[1]
def get_type_from_id(id, separator="--"):
    """Return the type portion of *id* (the text before *separator*)."""
    return id.partition(separator)[0]
def is_stix1x_id(id):
    """True when *id* looks like a STIX 1.x id: single dashes, no '--'.

    Falsy inputs (None, '') are passed through unchanged, matching the
    original short-circuit behavior.
    """
    if not id:
        return id
    return ("--" not in id) and ("-" in id)
def property_contains_stix1x_id(obj, property):
    """Return whether obj[property] is a string holding a STIX 1.x id.

    Fix: previously the function fell off the end (returning None) when
    *property* was absent from *obj*; it now returns False explicitly.
    Both values are falsy, so callers are unaffected.
    """
    if property not in obj:
        return False
    value = obj[property]
    return isinstance(value, str) and is_stix1x_id(value)
| 33.217082 | 117 | 0.666702 |
import importlib
import inspect
import re
import uuid
from stix2.base import SCO_DET_ID_NAMESPACE
from stix2.canonicalization.Canonicalize import canonicalize
from stix2elevator.options import error, info, warn
from stix2elevator.utils import (find_key_in_dict_case_insensitive,
find_string_in_list_case_insensitive,
map_1x_type_to_20)
def record_ids(stix_id, new_id):
if stix_id in _IDS_TO_NEW_IDS:
info("%s is already associated other ids: %s", 703, str(stix_id), tuple(_IDS_TO_NEW_IDS[stix_id]))
if new_id is None:
error("Can not associate %s with None", 611, stix_id)
return
add_id_value(stix_id, new_id)
_SDO_ID_WITH_NO_1X_OBJECT = []
def clear_ids_with_no_1x_object():
global _SDO_ID_WITH_NO_1X_OBJECT
_SDO_ID_WITH_NO_1X_OBJECT = []
def exists_ids_with_no_1x_object(sdo_id):
return sdo_id in _SDO_ID_WITH_NO_1X_OBJECT
def add_ids_with_no_1x_object(sdo_id):
if not exists_ids_with_no_1x_object(sdo_id):
_SDO_ID_WITH_NO_1X_OBJECT.append(sdo_id)
# id_used - sometimes (with TTPs and ETs), more than one object in 2.0 is created from a 1.x object - this flag
# indicates that the 1.x's ID has been used for another 2.0 object, so a new one must be created
# split the stix12ID into its type and UUID parts.
# if the stix20SOName has been given, create the new id from it and the UUID
# otherwise, unless the stix12ID's type is ttp or et (which don't exist in 2.0) use the mapped 1.x type
#
# if a stix12ID isn't given or it has been used already (STIX 1.x TTPs, etc can generate multiple STIX 2.0 objects)
def generate_stix2x_id(stix2x_so_name, stix12_id=None, id_used=False):
if not stix12_id or id_used:
new_id = stix2x_so_name + "--" + str(uuid.uuid4())
add_ids_with_no_1x_object(new_id)
if id_used and stix12_id:
warn("%s already used, generated new id %s", 726, stix12_id, new_id)
return new_id
else:
result = re.search('^(.+)-([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})',
stix12_id)
if result:
current_uuid = result.group(2)
if stix2x_so_name is None:
stx1x_type = result.group(1).split(":")
if stx1x_type[1].lower() == "ttp" or stx1x_type[1].lower() == "et":
error("Unable to determine the STIX 2.x type for %s", 604, stix12_id)
return None
else:
return map_1x_type_to_20(stx1x_type[1]) + "--" + current_uuid
else:
return stix2x_so_name + "--" + current_uuid
else:
if stix2x_so_name:
warn("Malformed id %s. Generated a new uuid", 605, stix12_id)
return stix2x_so_name + "--" + str(uuid.uuid4())
else:
error("Unable to determine the STIX 2.x type for %s, which is malformed", 629, stix12_id)
return None
_SCO_CLASSES = {}
def _choose_one_hash(hash_dict):
if "MD5" in hash_dict:
return {"MD5": hash_dict["MD5"]}
elif "SHA-1" in hash_dict:
return {"SHA-1": hash_dict["SHA-1"]}
elif "SHA-256" in hash_dict:
return {"SHA-256": hash_dict["SHA-256"]}
elif "SHA-512" in hash_dict:
return {"SHA-512": hash_dict["SHA-512"]}
else:
k = next(iter(hash_dict), None)
if k is not None:
return {k: hash_dict[k]}
def generate_sco_id(type, instance):
required_prefix = type + "--"
if not _SCO_CLASSES:
module = importlib.import_module("stix2.v21")
for k, c in inspect.getmembers(module, inspect.isclass):
if hasattr(c, "_properties") and "type" in c._properties:
_SCO_CLASSES[c._properties["type"]._fixed_value] = c
if type in _SCO_CLASSES:
klass = _SCO_CLASSES[type]
if klass and hasattr(klass, "_id_contributing_properties") and klass._id_contributing_properties:
contributing_properties = klass._id_contributing_properties
streamlined_object = {}
possible_hash = None
if "hashes" in instance and "hashes" in contributing_properties:
possible_hash = _choose_one_hash(instance["hashes"])
if possible_hash:
streamlined_object["hashes"] = possible_hash
for key in contributing_properties:
if key != "hashes" and key in instance:
# because the elevator uses Python default containers
# to represent its content.
# streamlined_obj_vals.append(instance[key])
streamlined_object[key] = instance[key]
# if streamlined_obj_vals:
if streamlined_object:
# data = canonicalize(streamlined_obj_vals, utf8=False)
data = canonicalize(streamlined_object, utf8=False)
# try/except here to enable python 2 compatibility
try:
return required_prefix + str(uuid.uuid5(SCO_DET_ID_NAMESPACE, data))
except UnicodeDecodeError:
return required_prefix + str(uuid.uuid5(SCO_DET_ID_NAMESPACE, data.encode("utf-8")))
return required_prefix + str(uuid.uuid4())
_IDS_TO_NEW_IDS = {}
def exists_id_key(key):
return find_key_in_dict_case_insensitive(key, _IDS_TO_NEW_IDS)
def get_id_value(key):
if exists_id_key(key):
return _IDS_TO_NEW_IDS[key]
else:
return []
def get_id_values():
return _IDS_TO_NEW_IDS.values()
def add_id_value(key, value):
if not value:
warn("No object mapped to %s", 610, key)
if exists_id_key(key):
_IDS_TO_NEW_IDS[key].append(value)
else:
_IDS_TO_NEW_IDS[key] = [value]
def clear_id_mapping():
global _IDS_TO_NEW_IDS
_IDS_TO_NEW_IDS = {}
_IDS_TO_CYBER_OBSERVABLES = {}
def clear_object_id_mapping():
global _IDS_TO_CYBER_OBSERVABLES
_IDS_TO_CYBER_OBSERVABLES = {}
def exists_object_id_key(key):
return find_key_in_dict_case_insensitive(key, _IDS_TO_CYBER_OBSERVABLES)
def get_object_id_value(key):
if exists_object_id_key(key):
return _IDS_TO_CYBER_OBSERVABLES[key]
else:
return []
def get_object_id_values():
return _IDS_TO_CYBER_OBSERVABLES.values()
def add_object_id_value(key, value):
if exists_object_id_key(key):
warn("This observable %s already is associated with cyber observables", 639, key)
else:
_IDS_TO_CYBER_OBSERVABLES[key] = value
if not value:
warn("Can not associate %s with None", 611, key)
_ID_OF_OBSERVABLES_IN_SIGHTINGS = []
def clear_id_of_obs_in_sightings():
global _ID_OF_OBSERVABLES_IN_SIGHTINGS
_ID_OF_OBSERVABLES_IN_SIGHTINGS = []
def exists_id_of_obs_in_sightings(id):
return find_string_in_list_case_insensitive(id, _ID_OF_OBSERVABLES_IN_SIGHTINGS)
def add_id_of_obs_in_sightings(id):
global _ID_OF_OBSERVABLES_IN_SIGHTINGS
if not exists_id_of_obs_in_sightings(id):
_ID_OF_OBSERVABLES_IN_SIGHTINGS.append(id)
_ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS = []
def clear_id_of_obs_in_characterizations():
global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
_ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS = []
def exists_id_of_obs_in_characterizations(id):
return find_string_in_list_case_insensitive(id, _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS)
def add_id_of_obs_in_characterizations(id):
global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
if not exists_id_of_obs_in_characterizations(id):
_ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS.append(id)
def fix_ids_in_characterizations():
global _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS
remaining_ids = []
for id in _ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS:
if exists_id_key(id):
remaining_ids.extend(get_id_value(id))
else:
remaining_ids.append(id)
_ID_OF_OBSERVABLES_IN_CHARACTERIZATIONS = remaining_ids
def get_uuid_from_id(id, separator="--"):
type_and_uuid = id.split(separator)
return type_and_uuid[1]
def get_type_from_id(id, separator="--"):
type_and_uuid = id.split(separator)
return type_and_uuid[0]
def is_stix1x_id(id):
return id and id.find("--") == -1 and id.find("-") != -1
def property_contains_stix1x_id(obj, property):
if property in obj:
value = obj[property]
return isinstance(value, str) and is_stix1x_id(value)
| true | true |
1c3d5854ce1b5e633e52d9736a355665d451aa44 | 11,805 | py | Python | py3.7/multiprocess/synchronize.py | UniverseFly/multiprocess | 97f67493eccfb893ac1bba7285cf452bfc640211 | [
"BSD-3-Clause"
] | 356 | 2015-06-21T21:05:10.000Z | 2022-03-30T11:57:08.000Z | py3.7/multiprocess/synchronize.py | UniverseFly/multiprocess | 97f67493eccfb893ac1bba7285cf452bfc640211 | [
"BSD-3-Clause"
] | 103 | 2015-06-22T01:44:14.000Z | 2022-03-01T03:44:25.000Z | py3.7/multiprocess/synchronize.py | UniverseFly/multiprocess | 97f67493eccfb893ac1bba7285cf452bfc640211 | [
"BSD-3-Clause"
] | 72 | 2015-09-02T14:10:24.000Z | 2022-03-25T06:49:43.000Z | #
# Module implementing synchronization primitives
#
# multiprocessing/synchronize.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
__all__ = [
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event'
]
import threading
import sys
import tempfile
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
import time
from . import context
from . import process
from . import util
# Try to import the mp.synchronize module cleanly, if it fails
# raise ImportError for platforms lacking a working sem_open implementation.
# See issue 3770
try:
from _multiprocess import SemLock, sem_unlink
except ImportError:
try:
from _multiprocessing import SemLock, sem_unlink
except (ImportError):
raise ImportError("This platform lacks a functioning sem_open" +
" implementation, therefore, the required" +
" synchronization primitives needed will not" +
" function, see issue 3770.")
#
# Constants
#
RECURSIVE_MUTEX, SEMAPHORE = list(range(2))
SEM_VALUE_MAX = _multiprocessing.SemLock.SEM_VALUE_MAX
#
# Base class for semaphores and mutexes; wraps `_multiprocessing.SemLock`
#
class SemLock(object):
    """Base class wrapping ``_multiprocessing.SemLock``.

    Handles unique name generation, pickling across process spawn,
    fork bookkeeping, and cleanup of the named OS-level semaphore.
    """
    # Shared generator of random semaphore-name suffixes.
    _rand = tempfile._RandomNameSequence()
    def __init__(self, kind, value, maxvalue, *, ctx):
        if ctx is None:
            ctx = context._default_context.get_context()
        name = ctx.get_start_method()
        # With win32 or fork, no other process needs to open the name,
        # so the OS-level name can be unlinked right after creation.
        unlink_now = sys.platform == 'win32' or name == 'fork'
        # Retry in case a randomly chosen name already exists.
        for i in range(100):
            try:
                sl = self._semlock = _multiprocessing.SemLock(
                    kind, value, maxvalue, self._make_name(),
                    unlink_now)
            except FileExistsError:
                pass
            else:
                break
        else:
            raise FileExistsError('cannot find name for semaphore')
        util.debug('created semlock with handle %s' % sl.handle)
        self._make_methods()
        if sys.platform != 'win32':
            def _after_fork(obj):
                obj._semlock._after_fork()
            util.register_after_fork(self, _after_fork)
        if self._semlock.name is not None:
            # We only get here if we are on Unix with forking
            # disabled. When the object is garbage collected or the
            # process shuts down we unlink the semaphore name
            from .semaphore_tracker import register
            register(self._semlock.name)
            util.Finalize(self, SemLock._cleanup, (self._semlock.name,),
                          exitpriority=0)
    @staticmethod
    def _cleanup(name):
        # Unlink the named semaphore and drop it from the tracker.
        from .semaphore_tracker import unregister
        sem_unlink(name)
        unregister(name)
    def _make_methods(self):
        # Bind acquire/release directly; re-run after unpickling since
        # the underlying semlock object is rebuilt there.
        self.acquire = self._semlock.acquire
        self.release = self._semlock.release
    def __enter__(self):
        return self._semlock.__enter__()
    def __exit__(self, *args):
        return self._semlock.__exit__(*args)
    def __getstate__(self):
        # Only picklable while spawning a child process.
        context.assert_spawning(self)
        sl = self._semlock
        if sys.platform == 'win32':
            h = context.get_spawning_popen().duplicate_for_child(sl.handle)
        else:
            h = sl.handle
        return (h, sl.kind, sl.maxvalue, sl.name)
    def __setstate__(self, state):
        self._semlock = _multiprocessing.SemLock._rebuild(*state)
        util.debug('recreated blocker with handle %r' % state[0])
        self._make_methods()
    @staticmethod
    def _make_name():
        # Names are prefixed per-process to avoid cross-process clashes.
        return '%s-%s' % (process.current_process()._config['semprefix'],
                          next(SemLock._rand))
#
# Semaphore
#
class Semaphore(SemLock):
    """Semaphore whose value is shared between processes."""
    def __init__(self, value=1, *, ctx):
        SemLock.__init__(self, SEMAPHORE, value, SEM_VALUE_MAX, ctx=ctx)
    def get_value(self):
        # Current value of the underlying OS semaphore.
        return self._semlock._get_value()
    def __repr__(self):
        try:
            value = self._semlock._get_value()
        except Exception:
            value = 'unknown'
        return '<%s(value=%s)>' % (self.__class__.__name__, value)
#
# Bounded semaphore
#
class BoundedSemaphore(Semaphore):
    """Semaphore that cannot be released above its initial value."""
    def __init__(self, value=1, *, ctx):
        # maxvalue == initial value enforces the bound in the semlock.
        SemLock.__init__(self, SEMAPHORE, value, value, ctx=ctx)
    def __repr__(self):
        try:
            value = self._semlock._get_value()
        except Exception:
            value = 'unknown'
        return '<%s(value=%s, maxvalue=%s)>' % \
               (self.__class__.__name__, value, self._semlock.maxvalue)
#
# Non-recursive lock
#
class Lock(SemLock):
    """Non-recursive lock shared between processes."""
    def __init__(self, *, ctx):
        # A binary semaphore (value 1, max 1) acts as the mutex.
        SemLock.__init__(self, SEMAPHORE, 1, 1, ctx=ctx)
    def __repr__(self):
        # Best-effort description of the current owner for debugging.
        try:
            if self._semlock._is_mine():
                name = process.current_process().name
                if threading.current_thread().name != 'MainThread':
                    name += '|' + threading.current_thread().name
            elif self._semlock._get_value() == 1:
                name = 'None'
            elif self._semlock._count() > 0:
                name = 'SomeOtherThread'
            else:
                name = 'SomeOtherProcess'
        except Exception:
            name = 'unknown'
        return '<%s(owner=%s)>' % (self.__class__.__name__, name)
#
# Recursive lock
#
class RLock(SemLock):
    """Recursive lock: the owner may acquire it multiple times."""
    def __init__(self, *, ctx):
        SemLock.__init__(self, RECURSIVE_MUTEX, 1, 1, ctx=ctx)
    def __repr__(self):
        # Best-effort owner name and recursion count for debugging.
        try:
            if self._semlock._is_mine():
                name = process.current_process().name
                if threading.current_thread().name != 'MainThread':
                    name += '|' + threading.current_thread().name
                count = self._semlock._count()
            elif self._semlock._get_value() == 1:
                name, count = 'None', 0
            elif self._semlock._count() > 0:
                name, count = 'SomeOtherThread', 'nonzero'
            else:
                name, count = 'SomeOtherProcess', 'nonzero'
        except Exception:
            name, count = 'unknown', 'unknown'
        return '<%s(%s, %s)>' % (self.__class__.__name__, name, count)
#
# Condition variable
#
class Condition(object):
    """Condition variable shared between processes.

    Built from a lock plus three semaphores that count sleeping and
    woken waiters; the exact acquire/release ordering below implements
    the wait/notify handshake and must not be reordered.
    """
    def __init__(self, lock=None, *, ctx):
        self._lock = lock or ctx.RLock()
        self._sleeping_count = ctx.Semaphore(0)
        self._woken_count = ctx.Semaphore(0)
        self._wait_semaphore = ctx.Semaphore(0)
        self._make_methods()
    def __getstate__(self):
        # Only picklable while spawning a child process.
        context.assert_spawning(self)
        return (self._lock, self._sleeping_count,
                self._woken_count, self._wait_semaphore)
    def __setstate__(self, state):
        (self._lock, self._sleeping_count,
         self._woken_count, self._wait_semaphore) = state
        self._make_methods()
    def __enter__(self):
        return self._lock.__enter__()
    def __exit__(self, *args):
        return self._lock.__exit__(*args)
    def _make_methods(self):
        self.acquire = self._lock.acquire
        self.release = self._lock.release
    def __repr__(self):
        try:
            num_waiters = (self._sleeping_count._semlock._get_value() -
                           self._woken_count._semlock._get_value())
        except Exception:
            num_waiters = 'unknown'
        return '<%s(%s, %s)>' % (self.__class__.__name__, self._lock, num_waiters)
    def wait(self, timeout=None):
        assert self._lock._semlock._is_mine(), \
               'must acquire() condition before using wait()'
        # indicate that this thread is going to sleep
        self._sleeping_count.release()
        # release lock (fully, even if recursively held)
        count = self._lock._semlock._count()
        for i in range(count):
            self._lock.release()
        try:
            # wait for notification or timeout
            return self._wait_semaphore.acquire(True, timeout)
        finally:
            # indicate that this thread has woken
            self._woken_count.release()
            # reacquire lock to the same recursion depth
            for i in range(count):
                self._lock.acquire()
    def notify(self, n=1):
        assert self._lock._semlock._is_mine(), 'lock is not owned'
        assert not self._wait_semaphore.acquire(
            False), ('notify: Should not have been able to acquire'
                     + '_wait_semaphore')
        # to take account of timeouts since last notify*() we subtract
        # woken_count from sleeping_count and rezero woken_count
        while self._woken_count.acquire(False):
            res = self._sleeping_count.acquire(False)
            assert res, ('notify: Bug in sleeping_count.acquire'
                         + '- res should not be False')
        sleepers = 0
        while sleepers < n and self._sleeping_count.acquire(False):
            self._wait_semaphore.release()  # wake up one sleeper
            sleepers += 1
        if sleepers:
            for i in range(sleepers):
                self._woken_count.acquire()  # wait for a sleeper to wake
            # rezero wait_semaphore in case some timeouts just happened
            while self._wait_semaphore.acquire(False):
                pass
    def notify_all(self):
        self.notify(n=sys.maxsize)
    def wait_for(self, predicate, timeout=None):
        # Wait until predicate() is truthy or the timeout elapses;
        # returns the last value of predicate().
        result = predicate()
        if result:
            return result
        if timeout is not None:
            # getattr fallback keeps compatibility where monotonic is absent.
            endtime = getattr(time,'monotonic',time.time)() + timeout
        else:
            endtime = None
        waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - getattr(time,'monotonic',time.time)()
                if waittime <= 0:
                    break
            self.wait(waittime)
            result = predicate()
        return result
#
# Event
#
class Event(object):
    """Event flag shared between processes.

    A condition variable guards a semaphore whose value (0 or 1)
    encodes the flag state.
    """
    def __init__(self, *, ctx):
        self._cond = ctx.Condition(ctx.Lock())
        self._flag = ctx.Semaphore(0)
    def is_set(self):
        with self._cond:
            # Non-blocking acquire succeeds only when the flag is set;
            # release immediately to leave the state unchanged.
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
    def set(self):
        with self._cond:
            # Drain first so the semaphore value never exceeds 1.
            self._flag.acquire(False)
            self._flag.release()
            self._cond.notify_all()
    def clear(self):
        with self._cond:
            self._flag.acquire(False)
    def wait(self, timeout=None):
        with self._cond:
            if self._flag.acquire(False):
                self._flag.release()
            else:
                self._cond.wait(timeout)
            # Re-check: the flag may have been cleared again, or the
            # wait may simply have timed out.
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
#
# Barrier
#
class Barrier(threading.Barrier):
    """Barrier usable across processes.

    Reuses threading.Barrier's algorithm but keeps the mutable state
    (_state, _count) in a shared-memory buffer via the properties below.
    """
    def __init__(self, parties, action=None, timeout=None, *, ctx):
        import struct
        from .heap import BufferWrapper
        # Shared buffer holding two C ints: barrier state and count.
        wrapper = BufferWrapper(struct.calcsize('i') * 2)
        cond = ctx.Condition()
        self.__setstate__((parties, action, timeout, cond, wrapper))
        self._state = 0
        self._count = 0
    def __setstate__(self, state):
        (self._parties, self._action, self._timeout,
         self._cond, self._wrapper) = state
        # Expose the shared buffer as an int array for the properties.
        self._array = self._wrapper.create_memoryview().cast('i')
    def __getstate__(self):
        return (self._parties, self._action, self._timeout,
                self._cond, self._wrapper)
    @property
    def _state(self):
        return self._array[0]
    @_state.setter
    def _state(self, value):
        self._array[0] = value
    @property
    def _count(self):
        return self._array[1]
    @_count.setter
    def _count(self, value):
        self._array[1] = value
| 29.365672 | 82 | 0.587802 |
__all__ = [
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event'
]
import threading
import sys
import tempfile
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
import time
from . import context
from . import process
from . import util
try:
from _multiprocess import SemLock, sem_unlink
except ImportError:
try:
from _multiprocessing import SemLock, sem_unlink
except (ImportError):
raise ImportError("This platform lacks a functioning sem_open" +
" implementation, therefore, the required" +
" synchronization primitives needed will not" +
" function, see issue 3770.")
RECURSIVE_MUTEX, SEMAPHORE = list(range(2))
SEM_VALUE_MAX = _multiprocessing.SemLock.SEM_VALUE_MAX
class SemLock(object):
    """Base wrapper around an OS-level semaphore/mutex (_multiprocessing.SemLock)."""
    # Shared generator of random name suffixes used by _make_name().
    _rand = tempfile._RandomNameSequence()
    def __init__(self, kind, value, maxvalue, *, ctx):
        """
        kind: RECURSIVE_MUTEX or SEMAPHORE.
        value: initial semaphore value.
        maxvalue: maximum value the semaphore may take.
        ctx: multiprocessing context; falls back to the default context when None.
        """
        if ctx is None:
            ctx = context._default_context.get_context()
        name = ctx.get_start_method()
        # On Windows, or with the 'fork' start method, the child inherits the
        # handle directly, so the semaphore name can be unlinked right away.
        unlink_now = sys.platform == 'win32' or name == 'fork'
        # A randomly chosen name may collide with an existing semaphore;
        # retry a bounded number of times before giving up.
        for i in range(100):
            try:
                sl = self._semlock = _multiprocessing.SemLock(
                    kind, value, maxvalue, self._make_name(),
                    unlink_now)
            except FileExistsError:
                pass
            else:
                break
        else:
            raise FileExistsError('cannot find name for semaphore')
        util.debug('created semlock with handle %s' % sl.handle)
        self._make_methods()
        if sys.platform != 'win32':
            def _after_fork(obj):
                obj._semlock._after_fork()
            util.register_after_fork(self, _after_fork)
        if self._semlock.name is not None:
            # The semaphore name is still live; register it with the tracker
            # and arrange for sem_unlink at finalization so it is cleaned up.
            from .semaphore_tracker import register
            register(self._semlock.name)
            util.Finalize(self, SemLock._cleanup, (self._semlock.name,),
                          exitpriority=0)
    @staticmethod
    def _cleanup(name):
        """Unlink the named semaphore and remove it from the tracker."""
        from .semaphore_tracker import unregister
        sem_unlink(name)
        unregister(name)
    def _make_methods(self):
        # Expose the C-level acquire/release directly (fast path, no wrapper).
        self.acquire = self._semlock.acquire
        self.release = self._semlock.release
    def __enter__(self):
        return self._semlock.__enter__()
    def __exit__(self, *args):
        return self._semlock.__exit__(*args)
    def __getstate__(self):
        # Only picklable while spawning a child process.
        context.assert_spawning(self)
        sl = self._semlock
        if sys.platform == 'win32':
            h = context.get_spawning_popen().duplicate_for_child(sl.handle)
        else:
            h = sl.handle
        return (h, sl.kind, sl.maxvalue, sl.name)
    def __setstate__(self, state):
        """Rebuild the wrapper from (handle, kind, maxvalue, name)."""
        self._semlock = _multiprocessing.SemLock._rebuild(*state)
        util.debug('recreated blocker with handle %r' % state[0])
        self._make_methods()
    @staticmethod
    def _make_name():
        # Per-process prefix combined with a random suffix.
        return '%s-%s' % (process.current_process()._config['semprefix'],
                          next(SemLock._rand))
class Semaphore(SemLock):
    """A semaphore: a SemLock with an effectively unbounded maximum value."""

    def __init__(self, value=1, *, ctx):
        """Initialise the semaphore with the given starting *value*."""
        SemLock.__init__(self, SEMAPHORE, value, SEM_VALUE_MAX, ctx=ctx)

    def get_value(self):
        """Return the current value of the underlying semaphore."""
        return self._semlock._get_value()

    def __repr__(self):
        try:
            current = self._semlock._get_value()
        except Exception:
            current = 'unknown'
        return '<%s(value=%s)>' % (self.__class__.__name__, current)
class BoundedSemaphore(Semaphore):
    """A semaphore whose value can never be released above its initial value."""

    def __init__(self, value=1, *, ctx):
        # Passing value as maxvalue too is what enforces the bound.
        SemLock.__init__(self, SEMAPHORE, value, value, ctx=ctx)

    def __repr__(self):
        try:
            current = self._semlock._get_value()
        except Exception:
            current = 'unknown'
        return '<%s(value=%s, maxvalue=%s)>' % (
            self.__class__.__name__, current, self._semlock.maxvalue)
class Lock(SemLock):
    """A non-recursive lock: a semaphore with initial and maximum value 1."""

    def __init__(self, *, ctx):
        SemLock.__init__(self, SEMAPHORE, 1, 1, ctx=ctx)

    def _describe_owner(self):
        # Best-effort description of the current holder, for repr() only.
        if self._semlock._is_mine():
            owner = process.current_process().name
            thread_name = threading.current_thread().name
            if thread_name != 'MainThread':
                owner += '|' + thread_name
            return owner
        if self._semlock._get_value() == 1:
            return 'None'
        if self._semlock._count() > 0:
            return 'SomeOtherThread'
        return 'SomeOtherProcess'

    def __repr__(self):
        try:
            owner = self._describe_owner()
        except Exception:
            owner = 'unknown'
        return '<%s(owner=%s)>' % (self.__class__.__name__, owner)
class RLock(SemLock):
    """A recursive lock: may be acquired multiple times by its current owner."""

    def __init__(self, *, ctx):
        SemLock.__init__(self, RECURSIVE_MUTEX, 1, 1, ctx=ctx)

    def _describe_owner(self):
        # Best-effort (owner, recursion-count) pair, for repr() only.
        if self._semlock._is_mine():
            owner = process.current_process().name
            thread_name = threading.current_thread().name
            if thread_name != 'MainThread':
                owner += '|' + thread_name
            return owner, self._semlock._count()
        if self._semlock._get_value() == 1:
            return 'None', 0
        if self._semlock._count() > 0:
            return 'SomeOtherThread', 'nonzero'
        return 'SomeOtherProcess', 'nonzero'

    def __repr__(self):
        try:
            owner, count = self._describe_owner()
        except Exception:
            owner, count = 'unknown', 'unknown'
        return '<%s(%s, %s)>' % (self.__class__.__name__, owner, count)
class Condition(object):
    """Condition variable built from a lock plus three counting semaphores."""
    def __init__(self, lock=None, *, ctx):
        self._lock = lock or ctx.RLock()
        # Incremented by waiters as they go to sleep.
        self._sleeping_count = ctx.Semaphore(0)
        # Incremented by waiters once they have woken up again.
        self._woken_count = ctx.Semaphore(0)
        # Released by notify() to wake sleeping waiters.
        self._wait_semaphore = ctx.Semaphore(0)
        self._make_methods()
    def __getstate__(self):
        # Only picklable while spawning a child process.
        context.assert_spawning(self)
        return (self._lock, self._sleeping_count,
                self._woken_count, self._wait_semaphore)
    def __setstate__(self, state):
        (self._lock, self._sleeping_count,
         self._woken_count, self._wait_semaphore) = state
        self._make_methods()
    def __enter__(self):
        return self._lock.__enter__()
    def __exit__(self, *args):
        return self._lock.__exit__(*args)
    def _make_methods(self):
        # Delegate acquire/release straight to the underlying lock.
        self.acquire = self._lock.acquire
        self.release = self._lock.release
    def __repr__(self):
        try:
            # Waiters that went to sleep minus those already woken.
            num_waiters = (self._sleeping_count._semlock._get_value() -
                           self._woken_count._semlock._get_value())
        except Exception:
            num_waiters = 'unknown'
        return '<%s(%s, %s)>' % (self.__class__.__name__, self._lock, num_waiters)
    def wait(self, timeout=None):
        """Release the lock, block until notified (or timeout), then reacquire."""
        assert self._lock._semlock._is_mine(), \
               'must acquire() condition before using wait()'
        # Announce that we are about to sleep.
        self._sleeping_count.release()
        # Fully release the (possibly recursive) lock, remembering the depth.
        count = self._lock._semlock._count()
        for i in range(count):
            self._lock.release()
        try:
            # Block until notify()/notify_all() releases the wait semaphore.
            return self._wait_semaphore.acquire(True, timeout)
        finally:
            # Report that we have woken, then restore the original lock depth.
            self._woken_count.release()
            for i in range(count):
                self._lock.acquire()
    def notify(self, n=1):
        """Wake up to *n* waiters. The condition's lock must be held."""
        assert self._lock._semlock._is_mine(), 'lock is not owned'
        assert not self._wait_semaphore.acquire(
            False), ('notify: Should not have been able to acquire'
                     + '_wait_semaphore')
        # Account for waiters that woke since the last notify (e.g. timeouts):
        # rezero woken_count and subtract the same amount from sleeping_count.
        while self._woken_count.acquire(False):
            res = self._sleeping_count.acquire(False)
            assert res, ('notify: Bug in sleeping_count.acquire'
                         + '- res should not be False')
        sleepers = 0
        # Hand out one wake-up token per sleeping waiter, up to n.
        while sleepers < n and self._sleeping_count.acquire(False):
            self._wait_semaphore.release()
            sleepers += 1
        if sleepers:
            # Wait for the woken waiters to report in, then drain any wake-up
            # tokens that went unclaimed (e.g. a waiter timed out meanwhile).
            for i in range(sleepers):
                self._woken_count.acquire()
            while self._wait_semaphore.acquire(False):
                pass
    def notify_all(self):
        self.notify(n=sys.maxsize)
    def wait_for(self, predicate, timeout=None):
        """Repeatedly wait until *predicate* returns a true value or timeout expires."""
        result = predicate()
        if result:
            return result
        if timeout is not None:
            # Prefer a monotonic clock when the platform provides one.
            endtime = getattr(time,'monotonic',time.time)() + timeout
        else:
            endtime = None
            waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - getattr(time,'monotonic',time.time)()
                if waittime <= 0:
                    break
            self.wait(waittime)
            result = predicate()
        return result
class Event(object):
    """Event implemented with a condition plus a semaphore used as a flag.

    The flag semaphore's value is 1 when the event is set and 0 when clear.
    """
    def __init__(self, *, ctx):
        self._cond = ctx.Condition(ctx.Lock())
        self._flag = ctx.Semaphore(0)
    def is_set(self):
        with self._cond:
            # A non-blocking acquire succeeds only when the flag is set;
            # release immediately to restore the value.
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
    def set(self):
        with self._cond:
            # Drain then release so the flag ends at exactly 1 regardless
            # of its previous state.
            self._flag.acquire(False)
            self._flag.release()
            self._cond.notify_all()
    def clear(self):
        with self._cond:
            # Drain the flag back to 0; a no-op if it was already clear.
            self._flag.acquire(False)
    def wait(self, timeout=None):
        """Block until the event is set (or timeout); return whether it is set."""
        with self._cond:
            if self._flag.acquire(False):
                self._flag.release()
            else:
                self._cond.wait(timeout)
            if self._flag.acquire(False):
                self._flag.release()
                return True
            return False
class Barrier(threading.Barrier):
    """Barrier whose mutable state lives in shared memory so it works across
    processes; the wait logic itself is inherited from threading.Barrier."""
    def __init__(self, parties, action=None, timeout=None, *, ctx):
        import struct
        from .heap import BufferWrapper
        # Two C ints in a shared buffer: slot [0] = state, slot [1] = count.
        wrapper = BufferWrapper(struct.calcsize('i') * 2)
        cond = ctx.Condition()
        self.__setstate__((parties, action, timeout, cond, wrapper))
        self._state = 0
        self._count = 0
    def __setstate__(self, state):
        (self._parties, self._action, self._timeout,
         self._cond, self._wrapper) = state
        # Re-create the int view over the shared buffer after unpickling.
        self._array = self._wrapper.create_memoryview().cast('i')
    def __getstate__(self):
        return (self._parties, self._action, self._timeout,
                self._cond, self._wrapper)
    # _state/_count shadow threading.Barrier's instance attributes with
    # properties backed by the shared-memory array, so all processes see
    # the same values.
    @property
    def _state(self):
        return self._array[0]
    @_state.setter
    def _state(self, value):
        self._array[0] = value
    @property
    def _count(self):
        return self._array[1]
    @_count.setter
    def _count(self, value):
        self._array[1] = value
| true | true |
1c3d59573d7b8a95a19ab3576338b1d3c75031c9 | 374 | py | Python | data/syn_project/types/syn_project.py | ki-tools/sls_ki_synapse | 8c726a9ec568e3d416049a8813c21bbe87740f16 | [
"Apache-2.0"
] | 1 | 2018-11-21T19:54:34.000Z | 2018-11-21T19:54:34.000Z | data/syn_project/types/syn_project.py | pcstout/sls_ki_synapse | 8c726a9ec568e3d416049a8813c21bbe87740f16 | [
"Apache-2.0"
] | 5 | 2019-03-12T16:44:35.000Z | 2019-03-15T21:46:00.000Z | data/syn_project/types/syn_project.py | ki-tools/sls_ki_synapse | 8c726a9ec568e3d416049a8813c21bbe87740f16 | [
"Apache-2.0"
] | 2 | 2019-02-28T23:16:32.000Z | 2019-03-05T22:16:39.000Z | import graphene
class SynProject(graphene.ObjectType):
    """
    Defines the SynProject GraphQL type, exposing a project's id and name.
    """
    id = graphene.String()
    name = graphene.String()
    @staticmethod
    def from_project(project):
        """
        Converts a Project to a SynProject.

        Only the ``id`` and ``name`` attributes of *project* are copied.
        """
        return SynProject(
            id=project.id,
            name=project.name
        )
| 18.7 | 43 | 0.561497 | import graphene
class SynProject(graphene.ObjectType):
    """GraphQL object type exposing a project's id and name as String fields."""
    id = graphene.String()
    name = graphene.String()
    @staticmethod
    def from_project(project):
        """Build a SynProject from *project*, copying only its id and name."""
        return SynProject(
            id=project.id,
            name=project.name
        )
| true | true |
1c3d5a6ebd1c0006e61e5a5b298a6d7a1a73038f | 37,683 | py | Python | locust/stats.py | sachajw/locust-template | 27a6aca87865f68073245e48851b41f55cd7580e | [
"MIT"
] | 1 | 2021-10-22T02:58:36.000Z | 2021-10-22T02:58:36.000Z | locust/stats.py | sachajw/locust-template | 27a6aca87865f68073245e48851b41f55cd7580e | [
"MIT"
] | null | null | null | locust/stats.py | sachajw/locust-template | 27a6aca87865f68073245e48851b41f55cd7580e | [
"MIT"
] | 1 | 2020-07-21T10:49:05.000Z | 2020-07-21T10:49:05.000Z | import datetime
import hashlib
import time
from collections import namedtuple, OrderedDict
from copy import copy
from itertools import chain
import csv
import gevent
from .exception import StopUser, CatchResponseError
import logging
# Dedicated logger for the human-readable stats tables printed to the console.
console_logger = logging.getLogger("locust.stats_logger")
# Column widths used by the console table formatters below.
STATS_NAME_WIDTH = 60
STATS_TYPE_WIDTH = 8
"""Default interval for how frequently results are written to console."""
CONSOLE_STATS_INTERVAL_SEC = 2
"""Default interval for how frequently results are written to history."""
HISTORY_STATS_INTERVAL_SEC = 5
"""Default interval for how frequently CSV files are written if this option is configured."""
CSV_STATS_INTERVAL_SEC = 1
# How often buffered CSV output is flushed to disk.
CSV_STATS_FLUSH_INTERVAL_SEC = 10
"""
Default window size/resolution - in seconds - when calculating the current
response time percentile
"""
CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW = 10
# Snapshot of a StatsEntry's response_times dict plus its request count,
# stored per second in StatsEntry.response_times_cache.
CachedResponseTimes = namedtuple("CachedResponseTimes", ["response_times", "num_requests"])
# Percentiles (as 0-1 fractions) rendered in console tables and CSV output.
PERCENTILES_TO_REPORT = [0.50, 0.66, 0.75, 0.80, 0.90, 0.95, 0.98, 0.99, 0.999, 0.9999, 1.0]
class RequestStatsAdditionError(Exception):
    """Module-specific exception type (not raised within this chunk of the file)."""
def get_readable_percentiles(percentile_list):
    """
    Convert percentiles expressed as 0-1 fractions into "N%" display strings.

    :param percentile_list: The list of percentiles in range 0-1
    :return: list of string labels; exact integers are shown without decimals,
             others are rounded to at most 6 decimal places
    """
    labels = []
    for fraction in percentile_list:
        scaled = fraction * 100
        if scaled.is_integer():
            labels.append(f"{int(scaled)}%")
        else:
            labels.append(f"{round(scaled, 6)}%")
    return labels
def calculate_response_time_percentile(response_times, num_requests, percent):
    """
    Get the response time that a certain number of percent of the requests
    finished within.

    response_times: a {rounded_response_time: count} dict (StatsEntry.response_times)
    num_requests: number of requests made (kept separately to save CPU cycles)
    percent: the percentile to calculate, in the range 0.0 - 1.0
    """
    allowed_slower = int(num_requests * percent)
    seen = 0
    # Walk response times from slowest to fastest, counting requests as we
    # go, until the remaining (faster) requests fit under the percentile.
    for rt in sorted(response_times.keys(), reverse=True):
        seen += response_times[rt]
        if num_requests - seen <= allowed_slower:
            return rt
    # Empty histogram: every response time was None.
    return 0
def diff_response_time_dicts(latest, old):
    """
    Return the delta between two {response_time: request_count} dicts.

    Used together with the response_times cache to get the response times for
    the last X seconds, which in turn is used to calculate the current
    response time percentiles.
    """
    delta = {}
    for response_time, count in latest.items():
        changed = count - old.get(response_time, 0)
        if changed:
            delta[response_time] = changed
    return delta
class RequestStats:
    """
    Class that holds the request statistics.

    Aggregates one StatsEntry per (name, method) pair, an "Aggregated" total
    entry, a dict of StatsError objects keyed by a hash of (method, name,
    error), and a history list used for charting.
    """
    def __init__(self, use_response_times_cache=True):
        """
        :param use_response_times_cache: The value of use_response_times_cache will be set for each StatsEntry()
                                         when they are created. Setting it to False saves some memory and CPU
                                         cycles which we can do on Worker nodes where the response_times_cache
                                         is not needed.
        """
        self.use_response_times_cache = use_response_times_cache
        self.entries = {}
        self.errors = {}
        self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
        self.history = []
    @property
    def num_requests(self):
        # Totals below all delegate to the aggregated entry.
        return self.total.num_requests
    @property
    def num_none_requests(self):
        return self.total.num_none_requests
    @property
    def num_failures(self):
        return self.total.num_failures
    @property
    def last_request_timestamp(self):
        return self.total.last_request_timestamp
    @property
    def start_time(self):
        return self.total.start_time
    def log_request(self, method, name, response_time, content_length):
        """Record a completed request in both the total and the per-entry stats."""
        self.total.log(response_time, content_length)
        self.get(name, method).log(response_time, content_length)
    def log_error(self, method, name, error):
        """Record a failed request and count the error occurrence."""
        self.total.log_error(error)
        self.get(name, method).log_error(error)
        # store error in errors dict
        key = StatsError.create_key(method, name, error)
        entry = self.errors.get(key)
        if not entry:
            entry = StatsError(method, name, error)
            self.errors[key] = entry
        entry.occurred()
    def get(self, name, method):
        """
        Retrieve a StatsEntry instance by name and method,
        creating (and registering) a new one on first access.
        """
        entry = self.entries.get((name, method))
        if not entry:
            entry = StatsEntry(self, name, method, use_response_times_cache=self.use_response_times_cache)
            self.entries[(name, method)] = entry
        return entry
    def reset_all(self):
        """
        Go through all stats entries and reset them to zero
        """
        self.total.reset()
        self.errors = {}
        for r in self.entries.values():
            r.reset()
        self.history = []
    def clear_all(self):
        """
        Remove all stats entries and errors
        """
        self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
        self.entries = {}
        self.errors = {}
        self.history = []
    def serialize_stats(self):
        # Entries with no recorded activity are skipped to keep payloads small;
        # get_stripped_report() also resets each serialized entry.
        return [
            self.entries[key].get_stripped_report()
            for key in self.entries.keys()
            if not (self.entries[key].num_requests == 0 and self.entries[key].num_failures == 0)
        ]
    def serialize_errors(self):
        # {error_key: error-dict}, suitable for sending between nodes.
        return dict([(k, e.to_dict()) for k, e in self.errors.items()])
class StatsEntry:
    """
    Represents a single stats entry (name and method)
    """
    name = None
    """ Name (URL) of this stats entry """
    method = None
    """ Method (GET, POST, PUT, etc.) """
    num_requests = None
    """ The number of requests made """
    num_none_requests = None
    """ The number of requests made with a None response time (typically async requests) """
    num_failures = None
    """ Number of failed request """
    total_response_time = None
    """ Total sum of the response times """
    min_response_time = None
    """ Minimum response time """
    max_response_time = None
    """ Maximum response time """
    num_reqs_per_sec = None
    """ A {second => request_count} dict that holds the number of requests made per second """
    num_fail_per_sec = None
    """ A (second => failure_count) dict that hold the number of failures per second """
    response_times = None
    """
    A {response_time => count} dict that holds the response time distribution of all
    the requests.
    The keys (the response time in ms) are rounded to store 1, 2, ... 9, 10, 20. .. 90,
    100, 200 .. 900, 1000, 2000 ... 9000, in order to save memory.
    This dict is used to calculate the median and percentile response times.
    """
    use_response_times_cache = False
    """
    If set to True, the copy of the response_time dict will be stored in response_times_cache
    every second, and kept for 20 seconds (by default, will be CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10).
    We can use this dict to calculate the *current* median response time, as well as other response
    time percentiles.
    """
    response_times_cache = None
    """
    If use_response_times_cache is set to True, this will be a {timestamp => CachedResponseTimes()}
    OrderedDict that holds a copy of the response_times dict for each of the last 20 seconds.
    """
    total_content_length = None
    """ The sum of the content length of all the requests for this entry """
    start_time = None
    """ Time of the first request for this entry """
    last_request_timestamp = None
    """ Time of the last request for this entry """
    def __init__(self, stats, name, method, use_response_times_cache=False):
        # stats may be None when the entry is rebuilt via unserialize().
        self.stats = stats
        self.name = name
        self.method = method
        self.use_response_times_cache = use_response_times_cache
        self.reset()
    def reset(self):
        """Zero out all counters and restart the measurement window."""
        self.start_time = time.time()
        self.num_requests = 0
        self.num_none_requests = 0
        self.num_failures = 0
        self.total_response_time = 0
        self.response_times = {}
        self.min_response_time = None
        self.max_response_time = 0
        self.last_request_timestamp = None
        self.num_reqs_per_sec = {}
        self.num_fail_per_sec = {}
        self.total_content_length = 0
        if self.use_response_times_cache:
            self.response_times_cache = OrderedDict()
            self._cache_response_times(int(time.time()))
    def log(self, response_time, content_length):
        """Record one completed request (response_time may be None)."""
        # get the time
        current_time = time.time()
        t = int(current_time)
        if self.use_response_times_cache and self.last_request_timestamp and t > int(self.last_request_timestamp):
            # see if we shall make a copy of the response_times dict and store in the cache
            self._cache_response_times(t - 1)
        self.num_requests += 1
        self._log_time_of_request(current_time)
        self._log_response_time(response_time)
        # increase total content-length
        self.total_content_length += content_length
    def _log_time_of_request(self, current_time):
        # Bump the per-second request counter and remember the latest timestamp.
        t = int(current_time)
        self.num_reqs_per_sec[t] = self.num_reqs_per_sec.setdefault(t, 0) + 1
        self.last_request_timestamp = current_time
    def _log_response_time(self, response_time):
        # A None response time is counted separately and excluded from the
        # response time statistics.
        if response_time is None:
            self.num_none_requests += 1
            return
        self.total_response_time += response_time
        if self.min_response_time is None:
            self.min_response_time = response_time
        self.min_response_time = min(self.min_response_time, response_time)
        self.max_response_time = max(self.max_response_time, response_time)
        # to avoid to much data that has to be transferred to the master node when
        # running in distributed mode, we save the response time rounded in a dict
        # so that 147 becomes 150, 3432 becomes 3400 and 58760 becomes 59000
        if response_time < 100:
            rounded_response_time = round(response_time)
        elif response_time < 1000:
            rounded_response_time = round(response_time, -1)
        elif response_time < 10000:
            rounded_response_time = round(response_time, -2)
        else:
            rounded_response_time = round(response_time, -3)
        # increase request count for the rounded key in response time dict
        self.response_times.setdefault(rounded_response_time, 0)
        self.response_times[rounded_response_time] += 1
    def log_error(self, error):
        """Record one failed request in the failure counters."""
        self.num_failures += 1
        t = int(time.time())
        self.num_fail_per_sec[t] = self.num_fail_per_sec.setdefault(t, 0) + 1
    @property
    def fail_ratio(self):
        # Fraction of requests that failed (1.0 when only failures exist).
        try:
            return float(self.num_failures) / self.num_requests
        except ZeroDivisionError:
            if self.num_failures > 0:
                return 1.0
            else:
                return 0.0
    @property
    def avg_response_time(self):
        # Mean response time, excluding None-response-time requests.
        try:
            return float(self.total_response_time) / (self.num_requests - self.num_none_requests)
        except ZeroDivisionError:
            return 0
    @property
    def median_response_time(self):
        if not self.response_times:
            return 0
        median = median_from_dict(self.num_requests - self.num_none_requests, self.response_times) or 0
        # Since we only use two digits of precision when calculating the median response time
        # while still using the exact values for min and max response times, the following checks
        # makes sure that we don't report a median > max or median < min when a StatsEntry only
        # have one (or very few) really slow requests
        if median > self.max_response_time:
            median = self.max_response_time
        elif median < self.min_response_time:
            median = self.min_response_time
        return median
    @property
    def current_rps(self):
        # Requests/second averaged over roughly the last 10 seconds,
        # excluding the 2 most recent (possibly incomplete) seconds.
        if self.stats.last_request_timestamp is None:
            return 0
        slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 0))
        reqs = [
            self.num_reqs_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2)
        ]
        return avg(reqs)
    @property
    def current_fail_per_sec(self):
        # Failures/second over the same sliding window as current_rps.
        if self.stats.last_request_timestamp is None:
            return 0
        slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 0))
        reqs = [
            self.num_fail_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2)
        ]
        return avg(reqs)
    @property
    def total_rps(self):
        # Requests/second averaged over the whole run.
        if not self.stats.last_request_timestamp or not self.stats.start_time:
            return 0.0
        try:
            return self.num_requests / (self.stats.last_request_timestamp - self.stats.start_time)
        except ZeroDivisionError:
            return 0.0
    @property
    def total_fail_per_sec(self):
        # Failures/second averaged over the whole run.
        if not self.stats.last_request_timestamp or not self.stats.start_time:
            return 0.0
        try:
            return self.num_failures / (self.stats.last_request_timestamp - self.stats.start_time)
        except ZeroDivisionError:
            return 0.0
    @property
    def avg_content_length(self):
        try:
            return self.total_content_length / self.num_requests
        except ZeroDivisionError:
            return 0
    def extend(self, other):
        """
        Extend the data from the current StatsEntry with the stats from another
        StatsEntry instance.
        """
        # save the old last_request_timestamp, to see if we should store a new copy
        # of the response times in the response times cache
        old_last_request_timestamp = self.last_request_timestamp
        if self.last_request_timestamp is not None and other.last_request_timestamp is not None:
            self.last_request_timestamp = max(self.last_request_timestamp, other.last_request_timestamp)
        elif other.last_request_timestamp is not None:
            self.last_request_timestamp = other.last_request_timestamp
        self.start_time = min(self.start_time, other.start_time)
        self.num_requests = self.num_requests + other.num_requests
        self.num_none_requests = self.num_none_requests + other.num_none_requests
        self.num_failures = self.num_failures + other.num_failures
        self.total_response_time = self.total_response_time + other.total_response_time
        self.max_response_time = max(self.max_response_time, other.max_response_time)
        if self.min_response_time is not None and other.min_response_time is not None:
            self.min_response_time = min(self.min_response_time, other.min_response_time)
        elif other.min_response_time is not None:
            # this means self.min_response_time is None, so we can safely replace it
            self.min_response_time = other.min_response_time
        self.total_content_length = self.total_content_length + other.total_content_length
        for key in other.response_times:
            self.response_times[key] = self.response_times.get(key, 0) + other.response_times[key]
        for key in other.num_reqs_per_sec:
            self.num_reqs_per_sec[key] = self.num_reqs_per_sec.get(key, 0) + other.num_reqs_per_sec[key]
        for key in other.num_fail_per_sec:
            self.num_fail_per_sec[key] = self.num_fail_per_sec.get(key, 0) + other.num_fail_per_sec[key]
        if self.use_response_times_cache:
            # If we've entered a new second, we'll cache the response times. Note that there
            # might still be reports from other worker nodes - that contains requests for the same
            # time periods - that hasn't been received/accounted for yet. This will cause the cache to
            # lag behind a second or two, but since StatsEntry.current_response_time_percentile()
            # (which is what the response times cache is used for) uses an approximation of the
            # last 10 seconds anyway, it should be fine to ignore this.
            last_time = self.last_request_timestamp and int(self.last_request_timestamp) or None
            if last_time and last_time > (old_last_request_timestamp and int(old_last_request_timestamp) or 0):
                self._cache_response_times(last_time)
    def serialize(self):
        """Return a plain-dict representation suitable for transfer between nodes."""
        return {
            "name": self.name,
            "method": self.method,
            "last_request_timestamp": self.last_request_timestamp,
            "start_time": self.start_time,
            "num_requests": self.num_requests,
            "num_none_requests": self.num_none_requests,
            "num_failures": self.num_failures,
            "total_response_time": self.total_response_time,
            "max_response_time": self.max_response_time,
            "min_response_time": self.min_response_time,
            "total_content_length": self.total_content_length,
            "response_times": self.response_times,
            "num_reqs_per_sec": self.num_reqs_per_sec,
            "num_fail_per_sec": self.num_fail_per_sec,
        }
    @classmethod
    def unserialize(cls, data):
        """Rebuild a StatsEntry from a serialize() dict (with stats set to None)."""
        obj = cls(None, data["name"], data["method"])
        for key in [
            "last_request_timestamp",
            "start_time",
            "num_requests",
            "num_none_requests",
            "num_failures",
            "total_response_time",
            "max_response_time",
            "min_response_time",
            "total_content_length",
            "response_times",
            "num_reqs_per_sec",
            "num_fail_per_sec",
        ]:
            setattr(obj, key, data[key])
        return obj
    def get_stripped_report(self):
        """
        Return the serialized version of this StatsEntry, and then clear the current stats.
        """
        report = self.serialize()
        self.reset()
        return report
    def to_string(self, current=True):
        """
        Return the stats as a string suitable for console output. If current is True, it'll show
        the RPS and failure rate for the last 10 seconds. If it's false, it'll show the total stats
        for the whole run.
        """
        if current:
            rps = self.current_rps
            fail_per_sec = self.current_fail_per_sec
        else:
            rps = self.total_rps
            fail_per_sec = self.total_fail_per_sec
        return (" %-" + str(STATS_NAME_WIDTH) + "s %7d %12s | %7d %7d %7d %7d | %7.2f %7.2f") % (
            (self.method and self.method + " " or "") + self.name,
            self.num_requests,
            "%d(%.2f%%)" % (self.num_failures, self.fail_ratio * 100),
            self.avg_response_time,
            self.min_response_time or 0,
            self.max_response_time,
            self.median_response_time or 0,
            rps or 0,
            fail_per_sec or 0,
        )
    def __str__(self):
        return self.to_string(current=True)
    def get_response_time_percentile(self, percent):
        """
        Get the response time that a certain number of percent of the requests
        finished within.
        Percent specified in range: 0.0 - 1.0
        """
        return calculate_response_time_percentile(self.response_times, self.num_requests, percent)
    def get_current_response_time_percentile(self, percent):
        """
        Calculate the *current* response time for a certain percentile. We use a sliding
        window of (approximately) the last 10 seconds (specified by CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW)
        when calculating this.

        Returns None when no suitable cached snapshot is available.
        """
        if not self.use_response_times_cache:
            raise ValueError(
                "StatsEntry.use_response_times_cache must be set to True if we should be able to calculate the _current_ response time percentile"
            )
        # First, we want to determine which of the cached response_times dicts we should
        # use to get response_times for approximately 10 seconds ago.
        t = int(time.time())
        # Since we can't be sure that the cache contains an entry for every second.
        # We'll construct a list of timestamps which we consider acceptable keys to be used
        # when trying to fetch the cached response_times. We construct this list in such a way
        # that it's ordered by preference by starting to add t-10, then t-11, t-9, t-12, t-8,
        # and so on
        acceptable_timestamps = []
        acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW)
        for i in range(1, 9):
            acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW - i)
            acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + i)
        cached = None
        for ts in acceptable_timestamps:
            if ts in self.response_times_cache:
                cached = self.response_times_cache[ts]
                break
        if cached:
            # If we fond an acceptable cached response times, we'll calculate a new response
            # times dict of the last 10 seconds (approximately) by diffing it with the current
            # total response times. Then we'll use that to calculate a response time percentile
            # for that timeframe
            return calculate_response_time_percentile(
                diff_response_time_dicts(self.response_times, cached.response_times),
                self.num_requests - cached.num_requests,
                percent,
            )
    def percentile(self):
        """Return one formatted console row with all PERCENTILES_TO_REPORT values."""
        if not self.num_requests:
            raise ValueError("Can't calculate percentile on url with no successful requests")
        tpl = f" %-{str(STATS_TYPE_WIDTH)}s %-{str(STATS_NAME_WIDTH)}s %8d {' '.join(['%6d'] * len(PERCENTILES_TO_REPORT))}"
        return tpl % (
            (self.method, self.name)
            + tuple([self.get_response_time_percentile(p) for p in PERCENTILES_TO_REPORT])
            + (self.num_requests,)
        )
    def _cache_response_times(self, t):
        # Snapshot the current histogram + request count under timestamp t.
        self.response_times_cache[t] = CachedResponseTimes(
            response_times=copy(self.response_times),
            num_requests=self.num_requests,
        )
        # We'll use a cache size of CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10 since - in the extreme case -
        # we might still use response times (from the cache) for t-CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW-10
        # to calculate the current response time percentile, if we're missing cached values for the subsequent
        # 20 seconds
        cache_size = CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10
        if len(self.response_times_cache) > cache_size:
            # only keep the latest 20 response_times dicts
            for i in range(len(self.response_times_cache) - cache_size):
                self.response_times_cache.popitem(last=False)
class StatsError:
    """One distinct error (method, name, error) with an occurrence counter."""

    def __init__(self, method, name, error, occurrences=0):
        self.method = method
        self.name = name
        self.error = error
        self.occurrences = occurrences

    @classmethod
    def parse_error(cls, error):
        """Return repr(error) with any "object at 0x..." address masked,
        so that equal errors from different objects compare equal."""
        string_error = repr(error)
        target = "object at 0x"
        target_index = string_error.find(target)
        if target_index < 0:
            return string_error
        start = target_index + len(target) - 2
        end = string_error.find(">", start)
        if end < 0:
            return string_error
        hex_address = string_error[start:end]
        return string_error.replace(hex_address, "0x....")

    @classmethod
    def create_key(cls, method, name, error):
        """Return a stable md5 key identifying this (method, name, error) triple."""
        key = "%s.%s.%r" % (method, name, StatsError.parse_error(error))
        return hashlib.md5(key.encode("utf-8")).hexdigest()

    def occurred(self):
        """Count one more occurrence of this error."""
        self.occurrences += 1

    def to_name(self):
        """Return a human-readable "METHOD name: error" string.

        Bug fix: the original used two independent ``if`` statements, so the
        ``else`` of the second one overwrote the unwrapped CatchResponseError
        message with ``repr(error)``. Chained as if/elif/else, each case is
        now mutually exclusive.
        """
        error = self.error
        if isinstance(error, str) and error.startswith("CatchResponseError("):
            # distributed mode: the error arrives pre-stringified via repr()
            length = len("CatchResponseError(")
            unwrapped_error = error[length:-1]
        elif isinstance(error, CatchResponseError):
            # standalone mode: unwrap the original failure message
            unwrapped_error = error.args[0]
        else:
            # standalone, unwrapped exception
            unwrapped_error = repr(error)
        return "%s %s: %s" % (self.method, self.name, unwrapped_error)

    def to_dict(self):
        """Serialize to a plain dict (error is normalized via parse_error)."""
        return {
            "method": self.method,
            "name": self.name,
            "error": StatsError.parse_error(self.error),
            "occurrences": self.occurrences,
        }

    @classmethod
    def from_dict(cls, data):
        """Rebuild a StatsError from a to_dict() payload."""
        return cls(data["method"], data["name"], data["error"], data["occurrences"])
def avg(values):
    """Return the arithmetic mean of *values*; an empty sequence yields 0.0."""
    count = len(values)
    if not count:
        return 0.0
    return sum(values, 0.0) / count
def median_from_dict(total, count):
    """
    Return the median key of a {response_time: count} histogram.

    total is the number of requests made;
    count is a dict {response_time: count}.
    Returns None implicitly when the histogram holds fewer than *total* entries.
    """
    remaining = (total - 1) / 2
    for response_time in sorted(count):
        if remaining < count[response_time]:
            return response_time
        remaining -= count[response_time]
def setup_distributed_stats_event_listeners(events, stats):
    """Wire up the event listeners that ship stats between worker and master.

    :param events: event-hook container providing ``report_to_master`` and
        ``worker_report`` hooks.
    :param stats: the RequestStats instance to serialize from / merge into.
    """

    def on_report_to_master(client_id, data):
        # Runs on a worker: attach (and reset) this node's stats to the
        # payload sent to the master. serialize_stats()/get_stripped_report()
        # reset the entries as a side effect; errors are cleared explicitly.
        data["stats"] = stats.serialize_stats()
        data["stats_total"] = stats.total.get_stripped_report()
        data["errors"] = stats.serialize_errors()
        stats.errors = {}

    def on_worker_report(client_id, data):
        # Runs on the master: merge a worker's reported stats into ours.
        for stats_data in data["stats"]:
            entry = StatsEntry.unserialize(stats_data)
            request_key = (entry.name, entry.method)
            # idiom fix: "request_key not in" instead of "not request_key in"
            if request_key not in stats.entries:
                stats.entries[request_key] = StatsEntry(stats, entry.name, entry.method, use_response_times_cache=True)
            stats.entries[request_key].extend(entry)

        for error_key, error in data["errors"].items():
            if error_key not in stats.errors:
                stats.errors[error_key] = StatsError.from_dict(error)
            else:
                stats.errors[error_key].occurrences += error["occurrences"]

        stats.total.extend(StatsEntry.unserialize(data["stats_total"]))

    events.report_to_master.add_listener(on_report_to_master)
    events.worker_report.add_listener(on_worker_report)
def print_stats(stats, current=True):
    """Log a console table of per-entry request stats followed by the total.

    current=True shows the sliding-window rates; False shows whole-run rates.
    """
    separator = "-" * (80 + STATS_NAME_WIDTH)
    header = (" %-" + str(STATS_NAME_WIDTH) + "s %7s %12s | %7s %7s %7s %7s | %7s %7s") % (
        "Name", "# reqs", "# fails", "Avg", "Min", "Max", "Median", "req/s", "failures/s"
    )
    console_logger.info(header)
    console_logger.info(separator)
    for key in sorted(stats.entries.keys()):
        console_logger.info(stats.entries[key].to_string(current=current))
    console_logger.info(separator)
    console_logger.info(stats.total.to_string(current=current))
    console_logger.info("")
def print_percentile_stats(stats):
    """Log a console table of response time percentiles per entry plus the total."""
    console_logger.info("Response time percentiles (approximated)")
    headers = ("Type", "Name") + tuple(get_readable_percentiles(PERCENTILES_TO_REPORT)) + ("# reqs",)
    console_logger.info(
        (
            f" %-{str(STATS_TYPE_WIDTH)}s %-{str(STATS_NAME_WIDTH)}s %8s "
            f"{' '.join(['%6s'] * len(PERCENTILES_TO_REPORT))}"
        )
        % headers
    )
    separator = (
        f'{"-" * STATS_TYPE_WIDTH}|{"-" * STATS_NAME_WIDTH}|{"-" * 9}|{("-" * 6 + "|") * len(PERCENTILES_TO_REPORT)}'
    )
    console_logger.info(separator)
    for key in sorted(stats.entries.keys()):
        r = stats.entries[key]
        # Entries without any recorded response times would raise in percentile().
        if r.response_times:
            console_logger.info(r.percentile())
    console_logger.info(separator)
    if stats.total.response_times:
        console_logger.info(stats.total.percentile())
    console_logger.info("")
def print_error_report(stats):
    """Log a table of all recorded errors; does nothing when there are none."""
    if not len(stats.errors):
        return
    separator = "-" * (80 + STATS_NAME_WIDTH)
    console_logger.info("Error report")
    console_logger.info(" %-18s %-100s" % ("# occurrences", "Error"))
    console_logger.info(separator)
    for error in stats.errors.values():
        console_logger.info(" %-18i %-100s" % (error.occurrences, error.to_name()))
    console_logger.info(separator)
    console_logger.info("")
def stats_printer(stats):
    """Return a zero-argument function that logs *stats* to the console
    every CONSOLE_STATS_INTERVAL_SEC seconds, forever (until the greenlet
    running it is killed)."""
    def stats_printer_func():
        while True:
            print_stats(stats)
            gevent.sleep(CONSOLE_STATS_INTERVAL_SEC)
    return stats_printer_func
def sort_stats(stats):
    """Return the values of the *stats* dict ordered by their keys."""
    ordered_keys = sorted(stats.keys())
    return [stats[key] for key in ordered_keys]
def stats_history(runner):
    """Save current stats info to history for charts of report."""
    while True:
        stats = runner.stats
        # Current percentiles require the response times cache; without it
        # (use_response_times_cache=False, e.g. worker nodes) stop collecting.
        if not stats.total.use_response_times_cache:
            break
        r = {
            "time": datetime.datetime.now().strftime("%H:%M:%S"),
            "current_rps": stats.total.current_rps or 0,
            "current_fail_per_sec": stats.total.current_fail_per_sec or 0,
            "response_time_percentile_95": stats.total.get_current_response_time_percentile(0.95) or 0,
            "response_time_percentile_50": stats.total.get_current_response_time_percentile(0.5) or 0,
            "user_count": runner.user_count or 0,
        }
        stats.history.append(r)
        gevent.sleep(HISTORY_STATS_INTERVAL_SEC)
class StatsCSV:
    """Write statistics to csv_writer stream."""

    def __init__(self, environment, percentiles_to_report):
        super().__init__()
        self.environment = environment
        self.percentiles_to_report = percentiles_to_report
        # Placeholder cells used for entries that have no requests yet.
        self.percentiles_na = ["N/A"] * len(self.percentiles_to_report)
        # Header row of the requests CSV; percentile columns come last.
        self.requests_csv_columns = [
            "Type",
            "Name",
            "Request Count",
            "Failure Count",
            "Median Response Time",
            "Average Response Time",
            "Min Response Time",
            "Max Response Time",
            "Average Content Size",
            "Requests/s",
            "Failures/s",
        ] + get_readable_percentiles(self.percentiles_to_report)
        # Header row of the failures CSV.
        self.failures_columns = [
            "Method",
            "Name",
            "Error",
            "Occurrences",
        ]

    def _percentile_fields(self, stats_entry):
        # Percentiles are undefined without data, hence the "N/A" fallback.
        return (
            [int(stats_entry.get_response_time_percentile(x) or 0) for x in self.percentiles_to_report]
            if stats_entry.num_requests
            else self.percentiles_na
        )

    def requests_csv(self, csv_writer):
        """Write requests csv with header and data rows."""
        csv_writer.writerow(self.requests_csv_columns)
        self._requests_data_rows(csv_writer)

    def _requests_data_rows(self, csv_writer):
        """Write requests csv data row, excluding header."""
        stats = self.environment.stats
        # One row per entry (sorted for deterministic output) plus the total.
        for stats_entry in chain(sort_stats(stats.entries), [stats.total]):
            csv_writer.writerow(
                chain(
                    [
                        stats_entry.method,
                        stats_entry.name,
                        stats_entry.num_requests,
                        stats_entry.num_failures,
                        stats_entry.median_response_time,
                        stats_entry.avg_response_time,
                        stats_entry.min_response_time or 0,
                        stats_entry.max_response_time,
                        stats_entry.avg_content_length,
                        stats_entry.total_rps,
                        stats_entry.total_fail_per_sec,
                    ],
                    self._percentile_fields(stats_entry),
                )
            )

    def failures_csv(self, csv_writer):
        """Write failures csv with header and data rows."""
        csv_writer.writerow(self.failures_columns)
        self._failures_data_rows(csv_writer)

    def _failures_data_rows(self, csv_writer):
        """Write failures csv data rows, excluding header."""
        for stats_error in sort_stats(self.environment.stats.errors):
            csv_writer.writerow(
                [
                    stats_error.method,
                    stats_error.name,
                    stats_error.error,
                    stats_error.occurrences,
                ]
            )
class StatsCSVFileWriter(StatsCSV):
    """Periodically write statistics to a set of CSV files on disk.

    Produces three files based on *base_filepath*: "<base>_stats.csv"
    (current stats, rewritten in place), "<base>_stats_history.csv"
    (time-stamped rows, appended) and "<base>_failures.csv" (rewritten).
    """

    def __init__(self, environment, percentiles_to_report, base_filepath, full_history=False):
        super().__init__(environment, percentiles_to_report)
        self.base_filepath = base_filepath
        # When True, the history file gets a row per entry instead of only
        # the aggregated total.
        self.full_history = full_history

        self.requests_csv_filehandle = open(self.base_filepath + "_stats.csv", "w")
        self.requests_csv_writer = csv.writer(self.requests_csv_filehandle)
        self.stats_history_csv_filehandle = open(self.stats_history_file_name(), "w")
        self.stats_history_csv_writer = csv.writer(self.stats_history_csv_filehandle)
        self.failures_csv_filehandle = open(self.base_filepath + "_failures.csv", "w")
        self.failures_csv_writer = csv.writer(self.failures_csv_filehandle)
        self.failures_csv_data_start = 0

        # Header row of the history CSV.
        self.stats_history_csv_columns = [
            "Timestamp",
            "User Count",
            "Type",
            "Name",
            "Requests/s",
            "Failures/s",
            *get_readable_percentiles(self.percentiles_to_report),
            "Total Request Count",
            "Total Failure Count",
            "Total Median Response Time",
            "Total Average Response Time",
            "Total Min Response Time",
            "Total Max Response Time",
            "Total Average Content Size",
        ]

    def __call__(self):
        # Allows an instance to be used directly as a greenlet target.
        self.stats_writer()

    def stats_writer(self):
        """Writes all the csv files for the locust run.

        Runs forever; rewrites the stats/failures files in place (seek back
        to just after the header and truncate), while appending to the
        history file.
        """
        # Write header row for all files and save position for non-append files
        self.requests_csv_writer.writerow(self.requests_csv_columns)
        requests_csv_data_start = self.requests_csv_filehandle.tell()
        self.stats_history_csv_writer.writerow(self.stats_history_csv_columns)
        self.failures_csv_writer.writerow(self.failures_columns)
        self.failures_csv_data_start = self.failures_csv_filehandle.tell()
        # Continuously write data rows for all files
        last_flush_time = 0
        while True:
            now = time.time()

            self.requests_csv_filehandle.seek(requests_csv_data_start)
            self._requests_data_rows(self.requests_csv_writer)
            self.requests_csv_filehandle.truncate()

            self._stats_history_data_rows(self.stats_history_csv_writer, now)

            self.failures_csv_filehandle.seek(self.failures_csv_data_start)
            self._failures_data_rows(self.failures_csv_writer)
            self.failures_csv_filehandle.truncate()

            # Flush less often than we write, to limit disk syncs.
            if now - last_flush_time > CSV_STATS_FLUSH_INTERVAL_SEC:
                self.requests_flush()
                self.stats_history_flush()
                self.failures_flush()
                last_flush_time = now

            gevent.sleep(CSV_STATS_INTERVAL_SEC)

    def _stats_history_data_rows(self, csv_writer, now):
        """
        Write CSV rows with the *current* stats. By default only includes the
        Aggregated stats entry, but if self.full_history is set to True, a row
        for each entry will be included.

        Note that this method differs from the other methods as it appends
        time-stamped data to the file, whereas the other methods overwrite
        the data.
        """
        stats = self.environment.stats
        timestamp = int(now)
        stats_entries = []
        if self.full_history:
            stats_entries = sort_stats(stats.entries)

        for stats_entry in chain(stats_entries, [stats.total]):
            csv_writer.writerow(
                chain(
                    (
                        timestamp,
                        self.environment.runner.user_count,
                        stats_entry.method or "",
                        stats_entry.name,
                        f"{stats_entry.current_rps:2f}",
                        f"{stats_entry.current_fail_per_sec:2f}",
                    ),
                    self._percentile_fields(stats_entry),
                    (
                        stats_entry.num_requests,
                        stats_entry.num_failures,
                        stats_entry.median_response_time,
                        stats_entry.avg_response_time,
                        stats_entry.min_response_time or 0,
                        stats_entry.max_response_time,
                        stats_entry.avg_content_length,
                    ),
                )
            )

    def requests_flush(self):
        self.requests_csv_filehandle.flush()

    def stats_history_flush(self):
        self.stats_history_csv_filehandle.flush()

    def failures_flush(self):
        self.failures_csv_filehandle.flush()

    def close_files(self):
        self.requests_csv_filehandle.close()
        self.stats_history_csv_filehandle.close()
        self.failures_csv_filehandle.close()

    def stats_history_file_name(self):
        return self.base_filepath + "_stats_history.csv"
| 37.346878 | 152 | 0.637343 | import datetime
import hashlib
import time
from collections import namedtuple, OrderedDict
from copy import copy
from itertools import chain
import csv
import gevent
from .exception import StopUser, CatchResponseError
import logging
console_logger = logging.getLogger("locust.stats_logger")

# Column widths used when rendering the console stats tables.
STATS_NAME_WIDTH = 60
STATS_TYPE_WIDTH = 8

# Intervals (seconds) for console printing, history recording, CSV rewriting
# and CSV flushing to disk.
CONSOLE_STATS_INTERVAL_SEC = 2
HISTORY_STATS_INTERVAL_SEC = 5
CSV_STATS_INTERVAL_SEC = 1
CSV_STATS_FLUSH_INTERVAL_SEC = 10

# Window (seconds) used when approximating the "current" response time percentile.
CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW = 10

# Per-second snapshot of (response_times dict, num_requests), kept in
# StatsEntry.response_times_cache.
CachedResponseTimes = namedtuple("CachedResponseTimes", ["response_times", "num_requests"])

# Percentiles rendered in console reports and CSV output.
PERCENTILES_TO_REPORT = [0.50, 0.66, 0.75, 0.80, 0.90, 0.95, 0.98, 0.99, 0.999, 0.9999, 1.0]


class RequestStatsAdditionError(Exception):
    # NOTE(review): never raised in this module — presumably reserved for
    # callers merging incompatible stats objects; confirm before removing.
    pass
def get_readable_percentiles(percentile_list):
    """Convert fractional percentiles (e.g. ``0.95``) to display labels (e.g. ``"95%"``).

    Whole percentages are rendered without a decimal point; fractional ones
    are rounded to at most six decimals.
    """
    labels = []
    for fraction in percentile_list:
        scaled = fraction * 100
        if scaled.is_integer():
            display_value = int(scaled)
        else:
            display_value = round(100 * fraction, 6)
        labels.append(f"{display_value}%")
    return labels
def calculate_response_time_percentile(response_times, num_requests, percent):
    """Approximate the *percent* percentile of a ``{rounded_time: count}`` dict.

    Walks the recorded times from slowest to fastest and returns the first
    one where the number of strictly faster requests is at or below the
    percentile threshold. Returns 0 when there are no requests at all.
    """
    threshold = int(num_requests * percent)
    slower_seen = 0
    for response_time in sorted(response_times, reverse=True):
        slower_seen += response_times[response_time]
        if num_requests - slower_seen <= threshold:
            return response_time
    return 0
def diff_response_time_dicts(latest, old):
    """Return the per-bucket count increase of *latest* relative to *old*.

    Buckets whose count did not change are omitted from the result.
    """
    diffed = {}
    for bucket, latest_count in latest.items():
        delta = latest_count - old.get(bucket, 0)
        if delta:
            diffed[bucket] = delta
    return diffed
class RequestStats:
    """Top-level container holding one StatsEntry per (name, method) pair,
    an aggregated total entry, recorded errors, and the charting history."""

    def __init__(self, use_response_times_cache=True):
        # The cache is needed for current-percentile calculations (web UI /
        # history); worker-side stats can disable it to save memory.
        self.use_response_times_cache = use_response_times_cache
        self.entries = {}  # {(name, method): StatsEntry}
        self.errors = {}   # {error key: StatsError}
        self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
        self.history = []

    @property
    def num_requests(self):
        return self.total.num_requests

    @property
    def num_none_requests(self):
        # Requests that were logged without a response time.
        return self.total.num_none_requests

    @property
    def num_failures(self):
        return self.total.num_failures

    @property
    def last_request_timestamp(self):
        return self.total.last_request_timestamp

    @property
    def start_time(self):
        return self.total.start_time

    def log_request(self, method, name, response_time, content_length):
        """Record one request in both the aggregated total and its own entry."""
        self.total.log(response_time, content_length)
        self.get(name, method).log(response_time, content_length)

    def log_error(self, method, name, error):
        """Record one failure, creating/updating its StatsError bucket."""
        self.total.log_error(error)
        self.get(name, method).log_error(error)

        # Store/update the error in the errors dict, keyed by a hash of
        # (method, name, parsed error message).
        key = StatsError.create_key(method, name, error)
        entry = self.errors.get(key)
        if not entry:
            entry = StatsError(method, name, error)
            self.errors[key] = entry
        entry.occurred()

    def get(self, name, method):
        """Return the StatsEntry for (name, method), creating it on first use."""
        entry = self.entries.get((name, method))
        if not entry:
            entry = StatsEntry(self, name, method, use_response_times_cache=self.use_response_times_cache)
            self.entries[(name, method)] = entry
        return entry

    def reset_all(self):
        """Reset counters of all entries (keeping the entry objects) and drop errors/history."""
        self.total.reset()
        self.errors = {}
        for r in self.entries.values():
            r.reset()
        self.history = []

    def clear_all(self):
        """Discard all entries, errors and history entirely."""
        self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
        self.entries = {}
        self.errors = {}
        self.history = []

    def serialize_stats(self):
        """Return stripped (serialized + reset) reports for all non-empty entries."""
        return [
            self.entries[key].get_stripped_report()
            for key in self.entries.keys()
            if not (self.entries[key].num_requests == 0 and self.entries[key].num_failures == 0)
        ]

    def serialize_errors(self):
        """Return all errors as plain dicts, keyed by their error key."""
        return dict([(k, e.to_dict()) for k, e in self.errors.items()])
class StatsEntry:
    """Statistics for a single (name, method) pair — or the aggregated total.

    Response times are bucketed with decreasing precision (see
    _log_response_time) so percentiles are approximations; units are
    presumably milliseconds — confirm with callers.
    """

    # Class-level defaults; all are (re)initialized per instance in reset().
    name = None
    method = None
    num_requests = None
    num_none_requests = None  # requests logged with response_time=None
    num_failures = None
    total_response_time = None
    min_response_time = None
    max_response_time = None
    num_reqs_per_sec = None   # {unix second: request count}
    num_fail_per_sec = None   # {unix second: failure count}
    response_times = None     # {rounded response time: count}
    use_response_times_cache = False
    response_times_cache = None  # OrderedDict of per-second CachedResponseTimes
    total_content_length = None
    start_time = None
    last_request_timestamp = None

    def __init__(self, stats, name, method, use_response_times_cache=False):
        self.stats = stats
        self.name = name
        self.method = method
        self.use_response_times_cache = use_response_times_cache
        self.reset()

    def reset(self):
        """Zero all counters and restart the response-times cache."""
        self.start_time = time.time()
        self.num_requests = 0
        self.num_none_requests = 0
        self.num_failures = 0
        self.total_response_time = 0
        self.response_times = {}
        self.min_response_time = None
        self.max_response_time = 0
        self.last_request_timestamp = None
        self.num_reqs_per_sec = {}
        self.num_fail_per_sec = {}
        self.total_content_length = 0
        if self.use_response_times_cache:
            self.response_times_cache = OrderedDict()
            self._cache_response_times(int(time.time()))

    def log(self, response_time, content_length):
        """Record one request with its response time and payload size."""
        # get the time
        current_time = time.time()
        t = int(current_time)

        # A new second has started since the last request: snapshot the
        # response times as they were at the end of the previous second.
        if self.use_response_times_cache and self.last_request_timestamp and t > int(self.last_request_timestamp):
            self._cache_response_times(t - 1)

        self.num_requests += 1
        self._log_time_of_request(current_time)
        self._log_response_time(response_time)
        self.total_content_length += content_length

    def _log_time_of_request(self, current_time):
        # Per-second request counter used for RPS calculations.
        t = int(current_time)
        self.num_reqs_per_sec[t] = self.num_reqs_per_sec.setdefault(t, 0) + 1
        self.last_request_timestamp = current_time

    def _log_response_time(self, response_time):
        if response_time is None:
            self.num_none_requests += 1
            return

        self.total_response_time += response_time

        if self.min_response_time is None:
            self.min_response_time = response_time

        self.min_response_time = min(self.min_response_time, response_time)
        self.max_response_time = max(self.max_response_time, response_time)

        # Bucket with decreasing precision (1/10/100/1000) so the
        # response_times dict stays small regardless of run length.
        if response_time < 100:
            rounded_response_time = round(response_time)
        elif response_time < 1000:
            rounded_response_time = round(response_time, -1)
        elif response_time < 10000:
            rounded_response_time = round(response_time, -2)
        else:
            rounded_response_time = round(response_time, -3)

        self.response_times.setdefault(rounded_response_time, 0)
        self.response_times[rounded_response_time] += 1

    def log_error(self, error):
        """Record one failure occurrence in the per-second failure counter."""
        self.num_failures += 1
        t = int(time.time())
        self.num_fail_per_sec[t] = self.num_fail_per_sec.setdefault(t, 0) + 1

    @property
    def fail_ratio(self):
        # Fraction of failed requests; defined as 1.0 when there were
        # failures but no requests (e.g. connection errors only).
        try:
            return float(self.num_failures) / self.num_requests
        except ZeroDivisionError:
            if self.num_failures > 0:
                return 1.0
            else:
                return 0.0

    @property
    def avg_response_time(self):
        # Average over requests that actually had a response time.
        try:
            return float(self.total_response_time) / (self.num_requests - self.num_none_requests)
        except ZeroDivisionError:
            return 0

    @property
    def median_response_time(self):
        if not self.response_times:
            return 0
        median = median_from_dict(self.num_requests - self.num_none_requests, self.response_times) or 0

        # Since the dict holds *rounded* times, the computed median can fall
        # outside the true [min, max] range when there are one (or very few)
        # really slow requests; clamp it back in.
        if median > self.max_response_time:
            median = self.max_response_time
        elif median < self.min_response_time:
            median = self.min_response_time

        return median

    @property
    def current_rps(self):
        # Average requests/second over a sliding window ending 2 seconds ago
        # (the most recent seconds may still be incomplete).
        if self.stats.last_request_timestamp is None:
            return 0
        slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 0))

        reqs = [
            self.num_reqs_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2)
        ]
        return avg(reqs)

    @property
    def current_fail_per_sec(self):
        # Same sliding-window average as current_rps, but for failures.
        if self.stats.last_request_timestamp is None:
            return 0
        slice_start_time = max(int(self.stats.last_request_timestamp) - 12, int(self.stats.start_time or 0))

        reqs = [
            self.num_fail_per_sec.get(t, 0) for t in range(slice_start_time, int(self.stats.last_request_timestamp) - 2)
        ]
        return avg(reqs)

    @property
    def total_rps(self):
        # Requests/second averaged over the whole run.
        if not self.stats.last_request_timestamp or not self.stats.start_time:
            return 0.0
        try:
            return self.num_requests / (self.stats.last_request_timestamp - self.stats.start_time)
        except ZeroDivisionError:
            return 0.0

    @property
    def total_fail_per_sec(self):
        # Failures/second averaged over the whole run.
        if not self.stats.last_request_timestamp or not self.stats.start_time:
            return 0.0
        try:
            return self.num_failures / (self.stats.last_request_timestamp - self.stats.start_time)
        except ZeroDivisionError:
            return 0.0

    @property
    def avg_content_length(self):
        try:
            return self.total_content_length / self.num_requests
        except ZeroDivisionError:
            return 0

    def extend(self, other):
        """Merge another StatsEntry's counters into this one (distributed mode)."""
        # save the old last_request_timestamp, to see if we should store a new copy
        # of the response times in the response times cache
        old_last_request_timestamp = self.last_request_timestamp
        # update the last_request_timestamp if the entry we're extending with has a newer timestamp
        if self.last_request_timestamp is not None and other.last_request_timestamp is not None:
            self.last_request_timestamp = max(self.last_request_timestamp, other.last_request_timestamp)
        elif other.last_request_timestamp is not None:
            self.last_request_timestamp = other.last_request_timestamp
        self.start_time = min(self.start_time, other.start_time)

        self.num_requests = self.num_requests + other.num_requests
        self.num_none_requests = self.num_none_requests + other.num_none_requests
        self.num_failures = self.num_failures + other.num_failures
        self.total_response_time = self.total_response_time + other.total_response_time
        self.max_response_time = max(self.max_response_time, other.max_response_time)
        if self.min_response_time is not None and other.min_response_time is not None:
            self.min_response_time = min(self.min_response_time, other.min_response_time)
        elif other.min_response_time is not None:
            # this means self.min_response_time is None, so we can safely replace it
            self.min_response_time = other.min_response_time
        self.total_content_length = self.total_content_length + other.total_content_length

        for key in other.response_times:
            self.response_times[key] = self.response_times.get(key, 0) + other.response_times[key]
        for key in other.num_reqs_per_sec:
            self.num_reqs_per_sec[key] = self.num_reqs_per_sec.get(key, 0) + other.num_reqs_per_sec[key]
        for key in other.num_fail_per_sec:
            self.num_fail_per_sec[key] = self.num_fail_per_sec.get(key, 0) + other.num_fail_per_sec[key]

        if self.use_response_times_cache:
            # If we've entered a new second, we'll cache the response times. Note that there
            # might still be reports from other worker nodes - that contains requests for the same
            # time periods - that hasn't been received/accounted for yet. This will cause the cache
            # to be slightly inaccurate, which is accepted here.
            last_time = self.last_request_timestamp and int(self.last_request_timestamp) or None
            if last_time and last_time > (old_last_request_timestamp and int(old_last_request_timestamp) or 0):
                self._cache_response_times(last_time)

    def serialize(self):
        """Return a plain-dict representation suitable for sending between nodes."""
        return {
            "name": self.name,
            "method": self.method,
            "last_request_timestamp": self.last_request_timestamp,
            "start_time": self.start_time,
            "num_requests": self.num_requests,
            "num_none_requests": self.num_none_requests,
            "num_failures": self.num_failures,
            "total_response_time": self.total_response_time,
            "max_response_time": self.max_response_time,
            "min_response_time": self.min_response_time,
            "total_content_length": self.total_content_length,
            "response_times": self.response_times,
            "num_reqs_per_sec": self.num_reqs_per_sec,
            "num_fail_per_sec": self.num_fail_per_sec,
        }

    @classmethod
    def unserialize(cls, data):
        """Recreate a StatsEntry from serialize() output (without a parent stats object)."""
        obj = cls(None, data["name"], data["method"])
        for key in [
            "last_request_timestamp",
            "start_time",
            "num_requests",
            "num_none_requests",
            "num_failures",
            "total_response_time",
            "max_response_time",
            "min_response_time",
            "total_content_length",
            "response_times",
            "num_reqs_per_sec",
            "num_fail_per_sec",
        ]:
            setattr(obj, key, data[key])
        return obj

    def get_stripped_report(self):
        """Serialize this entry and reset its counters (used for worker reports)."""
        report = self.serialize()
        self.reset()
        return report

    def to_string(self, current=True):
        """Render one console table row; *current* selects windowed vs total rates."""
        if current:
            rps = self.current_rps
            fail_per_sec = self.current_fail_per_sec
        else:
            rps = self.total_rps
            fail_per_sec = self.total_fail_per_sec
        return (" %-" + str(STATS_NAME_WIDTH) + "s %7d %12s | %7d %7d %7d %7d | %7.2f %7.2f") % (
            (self.method and self.method + " " or "") + self.name,
            self.num_requests,
            "%d(%.2f%%)" % (self.num_failures, self.fail_ratio * 100),
            self.avg_response_time,
            self.min_response_time or 0,
            self.max_response_time,
            self.median_response_time or 0,
            rps or 0,
            fail_per_sec or 0,
        )

    def __str__(self):
        return self.to_string(current=True)

    def get_response_time_percentile(self, percent):
        """Percentile over all recorded (bucketed) response times."""
        return calculate_response_time_percentile(self.response_times, self.num_requests, percent)

    def get_current_response_time_percentile(self, percent):
        """Percentile over (approximately) the last
        CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW seconds, using the per-second
        cache. Returns None when no suitable cache snapshot exists."""
        if not self.use_response_times_cache:
            raise ValueError(
                "StatsEntry.use_response_times_cache must be set to True if we should be able to calculate the _current_ response time percentile"
            )
        # First, we want to determine which of the cached response_times dicts we should
        # use to get response_times for approximately 10 seconds ago.
        t = int(time.time())
        # Since we can't be sure that the cache contains an entry for every second,
        # we'll construct a list of timestamps which we consider acceptable keys to be used
        # when trying to fetch the cached response_times. We construct this list in such a way
        # that it's ordered by preference by starting to add t-10, then t-11, t-9, t-12, t-8, etc.
        acceptable_timestamps = []
        acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW)
        for i in range(1, 9):
            acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW - i)
            acceptable_timestamps.append(t - CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + i)

        cached = None
        for ts in acceptable_timestamps:
            if ts in self.response_times_cache:
                cached = self.response_times_cache[ts]
                break

        if cached:
            # If we found an acceptable cached response-times snapshot, the
            # current percentile is computed over the *difference* between
            # now and that snapshot.
            return calculate_response_time_percentile(
                diff_response_time_dicts(self.response_times, cached.response_times),
                self.num_requests - cached.num_requests,
                percent,
            )
        # Implicit None return when no snapshot is close enough.

    def percentile(self):
        """Render the percentile-table row for this entry.

        Raises ValueError when no requests have been recorded.
        """
        if not self.num_requests:
            raise ValueError("Can't calculate percentile on url with no successful requests")

        tpl = f" %-{str(STATS_TYPE_WIDTH)}s %-{str(STATS_NAME_WIDTH)}s %8d {' '.join(['%6d'] * len(PERCENTILES_TO_REPORT))}"

        return tpl % (
            (self.method, self.name)
            + tuple([self.get_response_time_percentile(p) for p in PERCENTILES_TO_REPORT])
            + (self.num_requests,)
        )

    def _cache_response_times(self, t):
        # Snapshot the current bucketed response times under second *t*.
        self.response_times_cache[t] = CachedResponseTimes(
            response_times=copy(self.response_times),
            num_requests=self.num_requests,
        )

        # We'll use a cache size of CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10
        # since - in the extreme case - we might still use response times (from
        # the cache) for t-CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW-10 to
        # calculate the current response time percentile, if we're missing
        # cached values for the subsequent seconds.
        cache_size = CURRENT_RESPONSE_TIME_PERCENTILE_WINDOW + 10

        if len(self.response_times_cache) > cache_size:
            # only keep the latest cache_size entries (OrderedDict → oldest first)
            for i in range(len(self.response_times_cache) - cache_size):
                self.response_times_cache.popitem(last=False)
class StatsError:
def __init__(self, method, name, error, occurrences=0):
self.method = method
self.name = name
self.error = error
self.occurrences = occurrences
@classmethod
def parse_error(cls, error):
string_error = repr(error)
target = "object at 0x"
target_index = string_error.find(target)
if target_index < 0:
return string_error
start = target_index + len(target) - 2
end = string_error.find(">", start)
if end < 0:
return string_error
hex_address = string_error[start:end]
return string_error.replace(hex_address, "0x....")
@classmethod
def create_key(cls, method, name, error):
key = "%s.%s.%r" % (method, name, StatsError.parse_error(error))
return hashlib.md5(key.encode("utf-8")).hexdigest()
def occurred(self):
self.occurrences += 1
def to_name(self):
error = self.error
if isinstance(error, CatchResponseError):
unwrapped_error = error.args[0]
if isinstance(error, str) and error.startswith("CatchResponseError("):
length = len("CatchResponseError(")
unwrapped_error = error[length:-1]
else:
unwrapped_error = repr(error)
return "%s %s: %s" % (self.method, self.name, unwrapped_error)
def to_dict(self):
return {
"method": self.method,
"name": self.name,
"error": StatsError.parse_error(self.error),
"occurrences": self.occurrences,
}
@classmethod
def from_dict(cls, data):
return cls(data["method"], data["name"], data["error"], data["occurrences"])
def avg(values):
    """Arithmetic mean of *values*; returns 0.0 for an empty sequence."""
    count = len(values)
    if not count:
        return 0.0
    return sum(values, 0.0) / count
def median_from_dict(total, count):
    """Return the median value of a ``{value: occurrences}`` dict with *total* samples.

    Returns None (implicitly) when *count* is exhausted before the median
    position is reached.
    """
    remaining = (total - 1) / 2
    for value in sorted(count):
        occurrences = count[value]
        if remaining < occurrences:
            return value
        remaining -= occurrences
def setup_distributed_stats_event_listeners(events, stats):
    """Register event listeners that ship stats from workers and merge them on the master."""

    def on_report_to_master(client_id, data):
        # Worker side: attach serialized stats/errors to the outgoing report
        # and reset the local error dict (counters are reset by the
        # get_stripped_report calls inside serialize_stats).
        data["stats"] = stats.serialize_stats()
        data["stats_total"] = stats.total.get_stripped_report()
        data["errors"] = stats.serialize_errors()
        stats.errors = {}

    def on_worker_report(client_id, data):
        # Master side: merge the incoming per-entry stats into our own.
        for stats_data in data["stats"]:
            entry = StatsEntry.unserialize(stats_data)
            request_key = (entry.name, entry.method)
            if not request_key in stats.entries:
                stats.entries[request_key] = StatsEntry(stats, entry.name, entry.method, use_response_times_cache=True)
            stats.entries[request_key].extend(entry)

        # Merge errors by their stable key, summing occurrence counts.
        for error_key, error in data["errors"].items():
            if error_key not in stats.errors:
                stats.errors[error_key] = StatsError.from_dict(error)
            else:
                stats.errors[error_key].occurrences += error["occurrences"]

        stats.total.extend(StatsEntry.unserialize(data["stats_total"]))

    events.report_to_master.add_listener(on_report_to_master)
    events.worker_report.add_listener(on_worker_report)
def print_stats(stats, current=True):
    """Log the main request-statistics table: one row per entry plus the total.

    *current* selects windowed rates (req/s over the last seconds) versus
    whole-run totals.
    """
    header_fmt = " %-" + str(STATS_NAME_WIDTH) + "s %7s %12s | %7s %7s %7s %7s | %7s %7s"
    console_logger.info(
        header_fmt % ("Name", "# reqs", "# fails", "Avg", "Min", "Max", "Median", "req/s", "failures/s")
    )
    rule = "-" * (80 + STATS_NAME_WIDTH)
    console_logger.info(rule)
    for entry_key in sorted(stats.entries.keys()):
        console_logger.info(stats.entries[entry_key].to_string(current=current))
    console_logger.info(rule)
    console_logger.info(stats.total.to_string(current=current))
    console_logger.info("")
def print_percentile_stats(stats):
    """Log an approximated response-time percentile table for every stats entry."""
    console_logger.info("Response time percentiles (approximated)")
    headers = ("Type", "Name") + tuple(get_readable_percentiles(PERCENTILES_TO_REPORT)) + ("# reqs",)
    console_logger.info(
        (
            f" %-{str(STATS_TYPE_WIDTH)}s %-{str(STATS_NAME_WIDTH)}s %8s "
            f"{' '.join(['%6s'] * len(PERCENTILES_TO_REPORT))}"
        )
        % headers
    )
    separator = (
        f'{"-" * STATS_TYPE_WIDTH}|{"-" * STATS_NAME_WIDTH}|{"-" * 9}|{("-" * 6 + "|") * len(PERCENTILES_TO_REPORT)}'
    )
    console_logger.info(separator)
    for key in sorted(stats.entries.keys()):
        r = stats.entries[key]
        # Entries without any recorded response time cannot produce percentiles.
        if r.response_times:
            console_logger.info(r.percentile())
    console_logger.info(separator)
    if stats.total.response_times:
        console_logger.info(stats.total.percentile())
    console_logger.info("")
def print_error_report(stats):
    """Log a table of all recorded errors with their occurrence counts; no-op when empty."""
    if not len(stats.errors):
        return
    console_logger.info("Error report")
    console_logger.info(" %-18s %-100s" % ("# occurrences", "Error"))
    console_logger.info("-" * (80 + STATS_NAME_WIDTH))
    for error in stats.errors.values():
        console_logger.info(" %-18i %-100s" % (error.occurrences, error.to_name()))
    console_logger.info("-" * (80 + STATS_NAME_WIDTH))
    console_logger.info("")
def stats_printer(stats):
    """Return a zero-argument function (intended as a greenlet target) that
    logs *stats* forever at CONSOLE_STATS_INTERVAL_SEC intervals."""
    def stats_printer_func():
        while True:
            print_stats(stats)
            gevent.sleep(CONSOLE_STATS_INTERVAL_SEC)

    return stats_printer_func
def sort_stats(stats):
    """Return the values of the *stats* mapping, ordered by their sorted keys."""
    return [stats[key] for key in sorted(stats.keys())]
def stats_history(runner):
    """Append a stats snapshot to ``runner.stats.history`` every
    HISTORY_STATS_INTERVAL_SEC seconds (greenlet loop), for charting."""
    while True:
        stats = runner.stats
        # Current-percentile values below require the response-times cache;
        # when it is disabled there is nothing to record, so stop the loop.
        if not stats.total.use_response_times_cache:
            break
        r = {
            "time": datetime.datetime.now().strftime("%H:%M:%S"),
            "current_rps": stats.total.current_rps or 0,
            "current_fail_per_sec": stats.total.current_fail_per_sec or 0,
            "response_time_percentile_95": stats.total.get_current_response_time_percentile(0.95) or 0,
            "response_time_percentile_50": stats.total.get_current_response_time_percentile(0.5) or 0,
            "user_count": runner.user_count or 0,
        }
        stats.history.append(r)
        gevent.sleep(HISTORY_STATS_INTERVAL_SEC)
class StatsCSV:
    """Write statistics to a csv.writer stream (in-memory formatting layer;
    file handling lives in StatsCSVFileWriter)."""

    def __init__(self, environment, percentiles_to_report):
        super().__init__()
        self.environment = environment
        self.percentiles_to_report = percentiles_to_report
        # Placeholder cells used for entries that have no requests yet.
        self.percentiles_na = ["N/A"] * len(self.percentiles_to_report)
        # Header row of the requests CSV; percentile columns come last.
        self.requests_csv_columns = [
            "Type",
            "Name",
            "Request Count",
            "Failure Count",
            "Median Response Time",
            "Average Response Time",
            "Min Response Time",
            "Max Response Time",
            "Average Content Size",
            "Requests/s",
            "Failures/s",
        ] + get_readable_percentiles(self.percentiles_to_report)
        # Header row of the failures CSV.
        self.failures_columns = [
            "Method",
            "Name",
            "Error",
            "Occurrences",
        ]

    def _percentile_fields(self, stats_entry):
        # Percentiles are undefined without data, hence the "N/A" fallback.
        return (
            [int(stats_entry.get_response_time_percentile(x) or 0) for x in self.percentiles_to_report]
            if stats_entry.num_requests
            else self.percentiles_na
        )

    def requests_csv(self, csv_writer):
        """Write the requests CSV: header plus data rows."""
        csv_writer.writerow(self.requests_csv_columns)
        self._requests_data_rows(csv_writer)

    def _requests_data_rows(self, csv_writer):
        """Write the requests CSV data rows (one per entry plus the total), no header."""
        stats = self.environment.stats
        for stats_entry in chain(sort_stats(stats.entries), [stats.total]):
            csv_writer.writerow(
                chain(
                    [
                        stats_entry.method,
                        stats_entry.name,
                        stats_entry.num_requests,
                        stats_entry.num_failures,
                        stats_entry.median_response_time,
                        stats_entry.avg_response_time,
                        stats_entry.min_response_time or 0,
                        stats_entry.max_response_time,
                        stats_entry.avg_content_length,
                        stats_entry.total_rps,
                        stats_entry.total_fail_per_sec,
                    ],
                    self._percentile_fields(stats_entry),
                )
            )

    def failures_csv(self, csv_writer):
        """Write the failures CSV: header plus data rows."""
        csv_writer.writerow(self.failures_columns)
        self._failures_data_rows(csv_writer)

    def _failures_data_rows(self, csv_writer):
        """Write the failures CSV data rows, no header."""
        for stats_error in sort_stats(self.environment.stats.errors):
            csv_writer.writerow(
                [
                    stats_error.method,
                    stats_error.name,
                    stats_error.error,
                    stats_error.occurrences,
                ]
            )
class StatsCSVFileWriter(StatsCSV):
    """Periodically write statistics to CSV files on disk.

    Produces three files based on *base_filepath*: "<base>_stats.csv"
    (rewritten in place), "<base>_stats_history.csv" (time-stamped rows,
    appended) and "<base>_failures.csv" (rewritten in place).
    """

    def __init__(self, environment, percentiles_to_report, base_filepath, full_history=False):
        super().__init__(environment, percentiles_to_report)
        self.base_filepath = base_filepath
        # When True, the history file gets a row per entry instead of only
        # the aggregated total.
        self.full_history = full_history

        self.requests_csv_filehandle = open(self.base_filepath + "_stats.csv", "w")
        self.requests_csv_writer = csv.writer(self.requests_csv_filehandle)
        self.stats_history_csv_filehandle = open(self.stats_history_file_name(), "w")
        self.stats_history_csv_writer = csv.writer(self.stats_history_csv_filehandle)
        self.failures_csv_filehandle = open(self.base_filepath + "_failures.csv", "w")
        self.failures_csv_writer = csv.writer(self.failures_csv_filehandle)
        self.failures_csv_data_start = 0

        # Header row of the history CSV.
        self.stats_history_csv_columns = [
            "Timestamp",
            "User Count",
            "Type",
            "Name",
            "Requests/s",
            "Failures/s",
            *get_readable_percentiles(self.percentiles_to_report),
            "Total Request Count",
            "Total Failure Count",
            "Total Median Response Time",
            "Total Average Response Time",
            "Total Min Response Time",
            "Total Max Response Time",
            "Total Average Content Size",
        ]

    def __call__(self):
        # Allows an instance to be used directly as a greenlet target.
        self.stats_writer()

    def stats_writer(self):
        """Write all CSV files for the locust run, forever.

        The stats and failures files are rewritten in place each interval
        (seek back to just after the header, write, truncate); the history
        file is append-only.
        """
        # Write header rows and remember the data-start offsets for the
        # files that get rewritten in place.
        self.requests_csv_writer.writerow(self.requests_csv_columns)
        requests_csv_data_start = self.requests_csv_filehandle.tell()
        self.stats_history_csv_writer.writerow(self.stats_history_csv_columns)
        self.failures_csv_writer.writerow(self.failures_columns)
        self.failures_csv_data_start = self.failures_csv_filehandle.tell()

        last_flush_time = 0
        while True:
            now = time.time()

            self.requests_csv_filehandle.seek(requests_csv_data_start)
            self._requests_data_rows(self.requests_csv_writer)
            self.requests_csv_filehandle.truncate()

            self._stats_history_data_rows(self.stats_history_csv_writer, now)

            self.failures_csv_filehandle.seek(self.failures_csv_data_start)
            self._failures_data_rows(self.failures_csv_writer)
            self.failures_csv_filehandle.truncate()

            # Flush less often than we write, to limit disk syncs.
            if now - last_flush_time > CSV_STATS_FLUSH_INTERVAL_SEC:
                self.requests_flush()
                self.stats_history_flush()
                self.failures_flush()
                last_flush_time = now

            gevent.sleep(CSV_STATS_INTERVAL_SEC)

    def _stats_history_data_rows(self, csv_writer, now):
        """Append time-stamped rows with the *current* stats.

        By default only the Aggregated entry is written; with
        self.full_history a row per entry is written as well. Unlike the
        other writers this appends rather than overwriting.
        """
        stats = self.environment.stats
        timestamp = int(now)
        stats_entries = []
        if self.full_history:
            stats_entries = sort_stats(stats.entries)

        for stats_entry in chain(stats_entries, [stats.total]):
            csv_writer.writerow(
                chain(
                    (
                        timestamp,
                        self.environment.runner.user_count,
                        stats_entry.method or "",
                        stats_entry.name,
                        f"{stats_entry.current_rps:2f}",
                        f"{stats_entry.current_fail_per_sec:2f}",
                    ),
                    self._percentile_fields(stats_entry),
                    (
                        stats_entry.num_requests,
                        stats_entry.num_failures,
                        stats_entry.median_response_time,
                        stats_entry.avg_response_time,
                        stats_entry.min_response_time or 0,
                        stats_entry.max_response_time,
                        stats_entry.avg_content_length,
                    ),
                )
            )

    def requests_flush(self):
        self.requests_csv_filehandle.flush()

    def stats_history_flush(self):
        self.stats_history_csv_filehandle.flush()

    def failures_flush(self):
        self.failures_csv_filehandle.flush()

    def close_files(self):
        self.requests_csv_filehandle.close()
        self.stats_history_csv_filehandle.close()
        self.failures_csv_filehandle.close()

    def stats_history_file_name(self):
        return self.base_filepath + "_stats_history.csv"
| true | true |
1c3d5b63e29d303fe4319d611e1cb9cb2f223b5f | 17,264 | py | Python | python3-virtualenv/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py | GinaJame/Portfolio_MLH | 541709dcf034ddca885a8b08f9922dc352c113f8 | [
"MIT"
] | null | null | null | python3-virtualenv/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py | GinaJame/Portfolio_MLH | 541709dcf034ddca885a8b08f9922dc352c113f8 | [
"MIT"
] | null | null | null | python3-virtualenv/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py | GinaJame/Portfolio_MLH | 541709dcf034ddca885a8b08f9922dc352c113f8 | [
"MIT"
] | null | null | null | import collections
import operator
from .providers import AbstractResolver
from .structs import DirectedGraph, IteratorMapping, build_iter_view
# A requirement together with the candidate that introduced it (None for
# user-supplied root requirements).
RequirementInformation = collections.namedtuple(
    "RequirementInformation", ["requirement", "parent"]
)
class ResolverException(Exception):
    """A base class for all exceptions raised by this module.

    Exceptions derived by this class should all be handled in this module. Any
    bubbling past the resolver should be treated as a bug.
    """
class RequirementsConflicted(ResolverException):
    """Raised when a criterion's requirements cannot all be satisfied at once."""

    def __init__(self, criterion):
        super(RequirementsConflicted, self).__init__(criterion)
        self.criterion = criterion

    def __str__(self):
        requirement_reprs = ", ".join(
            repr(r) for r in self.criterion.iter_requirement()
        )
        return "Requirements conflict: {}".format(requirement_reprs)
class InconsistentCandidate(ResolverException):
    """Raised when a provider-supplied candidate fails to satisfy the
    requirements it was matched against."""

    def __init__(self, candidate, criterion):
        super(InconsistentCandidate, self).__init__(candidate, criterion)
        self.candidate = candidate
        self.criterion = criterion

    def __str__(self):
        requirement_reprs = ", ".join(
            repr(r) for r in self.criterion.iter_requirement()
        )
        return "Provided candidate {!r} does not satisfy {}".format(
            self.candidate, requirement_reprs
        )
class Criterion(object):
    """Representation of possible resolution results of a package.

    This holds three attributes:

    * `information` is a collection of `RequirementInformation` pairs, each a
      requirement contributing to this criterion together with the candidate
      that introduced it.
    * `incompatibilities` is a collection of all known not-to-work candidates
      to exclude from consideration.
    * `candidates` is a collection containing all possible candidates deduced
      from the union of contributing requirements and known
      incompatibilities. It should never be empty, except when the criterion
      is an attribute of a raised `RequirementsConflicted` (in which case it
      is always empty).

    .. note::
        This class is intended to be externally immutable. **Do not** mutate
        any of its attribute containers.
    """

    def __init__(self, candidates, information, incompatibilities):
        self.candidates = candidates
        self.information = information
        self.incompatibilities = incompatibilities

    def __repr__(self):
        formatted_pairs = [
            "({!r}, via={!r})".format(requirement, parent)
            for requirement, parent in self.information
        ]
        return "Criterion({})".format(", ".join(formatted_pairs))

    def iter_requirement(self):
        """Iterate over all contributing requirements."""
        for info in self.information:
            yield info.requirement

    def iter_parent(self):
        """Iterate over the parent (dependent) candidate of each requirement."""
        for info in self.information:
            yield info.parent
class ResolutionError(ResolverException):
pass
class ResolutionImpossible(ResolutionError):
def __init__(self, causes):
super(ResolutionImpossible, self).__init__(causes)
# causes is a list of RequirementInformation objects
self.causes = causes
class ResolutionTooDeep(ResolutionError):
def __init__(self, round_count):
super(ResolutionTooDeep, self).__init__(round_count)
self.round_count = round_count
# Resolution state in a round.
State = collections.namedtuple("State", "mapping criteria")
class Resolution(object):
"""Stateful resolution object.
This is designed as a one-off object that holds information to kick start
the resolution process, and holds the results afterwards.
"""
def __init__(self, provider, reporter):
self._p = provider
self._r = reporter
self._states = []
@property
def state(self):
try:
return self._states[-1]
except IndexError:
raise AttributeError("state")
def _push_new_state(self):
"""Push a new state into history.
This new state will be used to hold resolution results of the next
coming round.
"""
base = self._states[-1]
state = State(
mapping=base.mapping.copy(),
criteria=base.criteria.copy(),
)
self._states.append(state)
def _merge_into_criterion(self, requirement, parent):
self._r.adding_requirement(requirement=requirement, parent=parent)
identifier = self._p.identify(requirement_or_candidate=requirement)
criterion = self.state.criteria.get(identifier)
if criterion:
incompatibilities = list(criterion.incompatibilities)
else:
incompatibilities = []
matches = self._p.find_matches(
identifier=identifier,
requirements=IteratorMapping(
self.state.criteria,
operator.methodcaller("iter_requirement"),
{identifier: [requirement]},
),
incompatibilities=IteratorMapping(
self.state.criteria,
operator.attrgetter("incompatibilities"),
{identifier: incompatibilities},
),
)
if criterion:
information = list(criterion.information)
information.append(RequirementInformation(requirement, parent))
else:
information = [RequirementInformation(requirement, parent)]
criterion = Criterion(
candidates=build_iter_view(matches),
information=information,
incompatibilities=incompatibilities,
)
if not criterion.candidates:
raise RequirementsConflicted(criterion)
return identifier, criterion
def _get_preference(self, name):
return self._p.get_preference(
identifier=name,
resolutions=self.state.mapping,
candidates=IteratorMapping(
self.state.criteria,
operator.attrgetter("candidates"),
),
information=IteratorMapping(
self.state.criteria,
operator.attrgetter("information"),
),
)
def _is_current_pin_satisfying(self, name, criterion):
try:
current_pin = self.state.mapping[name]
except KeyError:
return False
return all(
self._p.is_satisfied_by(requirement=r, candidate=current_pin)
for r in criterion.iter_requirement()
)
def _get_criteria_to_update(self, candidate):
criteria = {}
for r in self._p.get_dependencies(candidate=candidate):
name, crit = self._merge_into_criterion(r, parent=candidate)
criteria[name] = crit
return criteria
def _attempt_to_pin_criterion(self, name):
criterion = self.state.criteria[name]
causes = []
for candidate in criterion.candidates:
try:
criteria = self._get_criteria_to_update(candidate)
except RequirementsConflicted as e:
causes.append(e.criterion)
continue
# Check the newly-pinned candidate actually works. This should
# always pass under normal circumstances, but in the case of a
# faulty provider, we will raise an error to notify the implementer
# to fix find_matches() and/or is_satisfied_by().
satisfied = all(
self._p.is_satisfied_by(requirement=r, candidate=candidate)
for r in criterion.iter_requirement()
)
if not satisfied:
raise InconsistentCandidate(candidate, criterion)
# Put newly-pinned candidate at the end. This is essential because
# backtracking looks at this mapping to get the last pin.
self._r.pinning(candidate=candidate)
self.state.mapping.pop(name, None)
self.state.mapping[name] = candidate
self.state.criteria.update(criteria)
return []
# All candidates tried, nothing works. This criterion is a dead
# end, signal for backtracking.
return causes
def _backtrack(self):
"""Perform backtracking.
When we enter here, the stack is like this::
[ state Z ]
[ state Y ]
[ state X ]
.... earlier states are irrelevant.
1. No pins worked for Z, so it does not have a pin.
2. We want to reset state Y to unpinned, and pin another candidate.
3. State X holds what state Y was before the pin, but does not
have the incompatibility information gathered in state Y.
Each iteration of the loop will:
1. Discard Z.
2. Discard Y but remember its incompatibility information gathered
previously, and the failure we're dealing with right now.
3. Push a new state Y' based on X, and apply the incompatibility
information from Y to Y'.
4a. If this causes Y' to conflict, we need to backtrack again. Make Y'
the new Z and go back to step 2.
4b. If the incompatibilities apply cleanly, end backtracking.
"""
while len(self._states) >= 3:
# Remove the state that triggered backtracking.
del self._states[-1]
# Retrieve the last candidate pin and known incompatibilities.
broken_state = self._states.pop()
name, candidate = broken_state.mapping.popitem()
incompatibilities_from_broken = [
(k, list(v.incompatibilities)) for k, v in broken_state.criteria.items()
]
# Also mark the newly known incompatibility.
incompatibilities_from_broken.append((name, [candidate]))
self._r.backtracking(candidate=candidate)
# Create a new state from the last known-to-work one, and apply
# the previously gathered incompatibility information.
def _patch_criteria():
for k, incompatibilities in incompatibilities_from_broken:
if not incompatibilities:
continue
try:
criterion = self.state.criteria[k]
except KeyError:
continue
matches = self._p.find_matches(
identifier=k,
requirements=IteratorMapping(
self.state.criteria,
operator.methodcaller("iter_requirement"),
),
incompatibilities=IteratorMapping(
self.state.criteria,
operator.attrgetter("incompatibilities"),
{k: incompatibilities},
),
)
candidates = build_iter_view(matches)
if not candidates:
return False
incompatibilities.extend(criterion.incompatibilities)
self.state.criteria[k] = Criterion(
candidates=candidates,
information=list(criterion.information),
incompatibilities=incompatibilities,
)
return True
self._push_new_state()
success = _patch_criteria()
# It works! Let's work on this new state.
if success:
return True
# State does not work after applying known incompatibilities.
# Try the still previous state.
# No way to backtrack anymore.
return False
def resolve(self, requirements, max_rounds):
if self._states:
raise RuntimeError("already resolved")
self._r.starting()
# Initialize the root state.
self._states = [State(mapping=collections.OrderedDict(), criteria={})]
for r in requirements:
try:
name, crit = self._merge_into_criterion(r, parent=None)
except RequirementsConflicted as e:
raise ResolutionImpossible(e.criterion.information)
self.state.criteria[name] = crit
# The root state is saved as a sentinel so the first ever pin can have
# something to backtrack to if it fails. The root state is basically
# pinning the virtual "root" package in the graph.
self._push_new_state()
for round_index in range(max_rounds):
self._r.starting_round(index=round_index)
unsatisfied_names = [
key
for key, criterion in self.state.criteria.items()
if not self._is_current_pin_satisfying(key, criterion)
]
# All criteria are accounted for. Nothing more to pin, we are done!
if not unsatisfied_names:
self._r.ending(state=self.state)
return self.state
# Choose the most preferred unpinned criterion to try.
name = min(unsatisfied_names, key=self._get_preference)
failure_causes = self._attempt_to_pin_criterion(name)
if failure_causes:
# Backtrack if pinning fails. The backtrack process puts us in
# an unpinned state, so we can work on it in the next round.
success = self._backtrack()
# Dead ends everywhere. Give up.
if not success:
causes = [i for c in failure_causes for i in c.information]
raise ResolutionImpossible(causes)
else:
# Pinning was successful. Push a new state to do another pin.
self._push_new_state()
self._r.ending_round(index=round_index, state=self.state)
raise ResolutionTooDeep(max_rounds)
def _has_route_to_root(criteria, key, all_keys, connected):
if key in connected:
return True
if key not in criteria:
return False
for p in criteria[key].iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey in connected:
connected.add(key)
return True
if _has_route_to_root(criteria, pkey, all_keys, connected):
connected.add(key)
return True
return False
Result = collections.namedtuple("Result", "mapping graph criteria")
def _build_result(state):
mapping = state.mapping
all_keys = {id(v): k for k, v in mapping.items()}
all_keys[id(None)] = None
graph = DirectedGraph()
graph.add(None) # Sentinel as root dependencies' parent.
connected = {None}
for key, criterion in state.criteria.items():
if not _has_route_to_root(state.criteria, key, all_keys, connected):
continue
if key not in graph:
graph.add(key)
for p in criterion.iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey not in graph:
graph.add(pkey)
graph.connect(pkey, key)
return Result(
mapping={k: v for k, v in mapping.items() if k in connected},
graph=graph,
criteria=state.criteria,
)
class Resolver(AbstractResolver):
"""The thing that performs the actual resolution work."""
base_exception = ResolverException
def resolve(self, requirements, max_rounds=100):
"""Take a collection of constraints, spit out the resolution result.
The return value is a representation to the final resolution result. It
is a tuple subclass with three public members:
* `mapping`: A dict of resolved candidates. Each key is an identifier
of a requirement (as returned by the provider's `identify` method),
and the value is the resolved candidate.
* `graph`: A `DirectedGraph` instance representing the dependency tree.
The vertices are keys of `mapping`, and each edge represents *why*
a particular package is included. A special vertex `None` is
included to represent parents of user-supplied requirements.
* `criteria`: A dict of "criteria" that hold detailed information on
how edges in the graph are derived. Each key is an identifier of a
requirement, and the value is a `Criterion` instance.
The following exceptions may be raised if a resolution cannot be found:
* `ResolutionImpossible`: A resolution cannot be found for the given
combination of requirements. The `causes` attribute of the
exception is a list of (requirement, parent), giving the
requirements that could not be satisfied.
* `ResolutionTooDeep`: The dependency tree is too deeply nested and
the resolver gave up. This is usually caused by a circular
dependency, but you can try to resolve this by increasing the
`max_rounds` argument.
"""
resolution = Resolution(self.provider, self.reporter)
state = resolution.resolve(requirements, max_rounds=max_rounds)
return _build_result(state)
| 36.498943 | 88 | 0.61301 | import collections
import operator
from .providers import AbstractResolver
from .structs import DirectedGraph, IteratorMapping, build_iter_view
RequirementInformation = collections.namedtuple(
"RequirementInformation", ["requirement", "parent"]
)
class ResolverException(Exception):
class RequirementsConflicted(ResolverException):
def __init__(self, criterion):
super(RequirementsConflicted, self).__init__(criterion)
self.criterion = criterion
def __str__(self):
return "Requirements conflict: {}".format(
", ".join(repr(r) for r in self.criterion.iter_requirement()),
)
class InconsistentCandidate(ResolverException):
def __init__(self, candidate, criterion):
super(InconsistentCandidate, self).__init__(candidate, criterion)
self.candidate = candidate
self.criterion = criterion
def __str__(self):
return "Provided candidate {!r} does not satisfy {}".format(
self.candidate,
", ".join(repr(r) for r in self.criterion.iter_requirement()),
)
class Criterion(object):
def __init__(self, candidates, information, incompatibilities):
self.candidates = candidates
self.information = information
self.incompatibilities = incompatibilities
def __repr__(self):
requirements = ", ".join(
"({!r}, via={!r})".format(req, parent) for req, parent in self.information
)
return "Criterion({})".format(requirements)
def iter_requirement(self):
return (i.requirement for i in self.information)
def iter_parent(self):
return (i.parent for i in self.information)
class ResolutionError(ResolverException):
pass
class ResolutionImpossible(ResolutionError):
def __init__(self, causes):
super(ResolutionImpossible, self).__init__(causes)
self.causes = causes
class ResolutionTooDeep(ResolutionError):
def __init__(self, round_count):
super(ResolutionTooDeep, self).__init__(round_count)
self.round_count = round_count
State = collections.namedtuple("State", "mapping criteria")
class Resolution(object):
def __init__(self, provider, reporter):
self._p = provider
self._r = reporter
self._states = []
@property
def state(self):
try:
return self._states[-1]
except IndexError:
raise AttributeError("state")
def _push_new_state(self):
base = self._states[-1]
state = State(
mapping=base.mapping.copy(),
criteria=base.criteria.copy(),
)
self._states.append(state)
def _merge_into_criterion(self, requirement, parent):
self._r.adding_requirement(requirement=requirement, parent=parent)
identifier = self._p.identify(requirement_or_candidate=requirement)
criterion = self.state.criteria.get(identifier)
if criterion:
incompatibilities = list(criterion.incompatibilities)
else:
incompatibilities = []
matches = self._p.find_matches(
identifier=identifier,
requirements=IteratorMapping(
self.state.criteria,
operator.methodcaller("iter_requirement"),
{identifier: [requirement]},
),
incompatibilities=IteratorMapping(
self.state.criteria,
operator.attrgetter("incompatibilities"),
{identifier: incompatibilities},
),
)
if criterion:
information = list(criterion.information)
information.append(RequirementInformation(requirement, parent))
else:
information = [RequirementInformation(requirement, parent)]
criterion = Criterion(
candidates=build_iter_view(matches),
information=information,
incompatibilities=incompatibilities,
)
if not criterion.candidates:
raise RequirementsConflicted(criterion)
return identifier, criterion
def _get_preference(self, name):
return self._p.get_preference(
identifier=name,
resolutions=self.state.mapping,
candidates=IteratorMapping(
self.state.criteria,
operator.attrgetter("candidates"),
),
information=IteratorMapping(
self.state.criteria,
operator.attrgetter("information"),
),
)
def _is_current_pin_satisfying(self, name, criterion):
try:
current_pin = self.state.mapping[name]
except KeyError:
return False
return all(
self._p.is_satisfied_by(requirement=r, candidate=current_pin)
for r in criterion.iter_requirement()
)
def _get_criteria_to_update(self, candidate):
criteria = {}
for r in self._p.get_dependencies(candidate=candidate):
name, crit = self._merge_into_criterion(r, parent=candidate)
criteria[name] = crit
return criteria
def _attempt_to_pin_criterion(self, name):
criterion = self.state.criteria[name]
causes = []
for candidate in criterion.candidates:
try:
criteria = self._get_criteria_to_update(candidate)
except RequirementsConflicted as e:
causes.append(e.criterion)
continue
satisfied = all(
self._p.is_satisfied_by(requirement=r, candidate=candidate)
for r in criterion.iter_requirement()
)
if not satisfied:
raise InconsistentCandidate(candidate, criterion)
self._r.pinning(candidate=candidate)
self.state.mapping.pop(name, None)
self.state.mapping[name] = candidate
self.state.criteria.update(criteria)
return []
return causes
def _backtrack(self):
while len(self._states) >= 3:
del self._states[-1]
broken_state = self._states.pop()
name, candidate = broken_state.mapping.popitem()
incompatibilities_from_broken = [
(k, list(v.incompatibilities)) for k, v in broken_state.criteria.items()
]
incompatibilities_from_broken.append((name, [candidate]))
self._r.backtracking(candidate=candidate)
def _patch_criteria():
for k, incompatibilities in incompatibilities_from_broken:
if not incompatibilities:
continue
try:
criterion = self.state.criteria[k]
except KeyError:
continue
matches = self._p.find_matches(
identifier=k,
requirements=IteratorMapping(
self.state.criteria,
operator.methodcaller("iter_requirement"),
),
incompatibilities=IteratorMapping(
self.state.criteria,
operator.attrgetter("incompatibilities"),
{k: incompatibilities},
),
)
candidates = build_iter_view(matches)
if not candidates:
return False
incompatibilities.extend(criterion.incompatibilities)
self.state.criteria[k] = Criterion(
candidates=candidates,
information=list(criterion.information),
incompatibilities=incompatibilities,
)
return True
self._push_new_state()
success = _patch_criteria()
if success:
return True
# State does not work after applying known incompatibilities.
# Try the still previous state.
# No way to backtrack anymore.
return False
def resolve(self, requirements, max_rounds):
if self._states:
raise RuntimeError("already resolved")
self._r.starting()
# Initialize the root state.
self._states = [State(mapping=collections.OrderedDict(), criteria={})]
for r in requirements:
try:
name, crit = self._merge_into_criterion(r, parent=None)
except RequirementsConflicted as e:
raise ResolutionImpossible(e.criterion.information)
self.state.criteria[name] = crit
# The root state is saved as a sentinel so the first ever pin can have
# something to backtrack to if it fails. The root state is basically
# pinning the virtual "root" package in the graph.
self._push_new_state()
for round_index in range(max_rounds):
self._r.starting_round(index=round_index)
unsatisfied_names = [
key
for key, criterion in self.state.criteria.items()
if not self._is_current_pin_satisfying(key, criterion)
]
# All criteria are accounted for. Nothing more to pin, we are done!
if not unsatisfied_names:
self._r.ending(state=self.state)
return self.state
# Choose the most preferred unpinned criterion to try.
name = min(unsatisfied_names, key=self._get_preference)
failure_causes = self._attempt_to_pin_criterion(name)
if failure_causes:
# Backtrack if pinning fails. The backtrack process puts us in
# an unpinned state, so we can work on it in the next round.
success = self._backtrack()
# Dead ends everywhere. Give up.
if not success:
causes = [i for c in failure_causes for i in c.information]
raise ResolutionImpossible(causes)
else:
# Pinning was successful. Push a new state to do another pin.
self._push_new_state()
self._r.ending_round(index=round_index, state=self.state)
raise ResolutionTooDeep(max_rounds)
def _has_route_to_root(criteria, key, all_keys, connected):
if key in connected:
return True
if key not in criteria:
return False
for p in criteria[key].iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey in connected:
connected.add(key)
return True
if _has_route_to_root(criteria, pkey, all_keys, connected):
connected.add(key)
return True
return False
Result = collections.namedtuple("Result", "mapping graph criteria")
def _build_result(state):
mapping = state.mapping
all_keys = {id(v): k for k, v in mapping.items()}
all_keys[id(None)] = None
graph = DirectedGraph()
graph.add(None) # Sentinel as root dependencies' parent.
connected = {None}
for key, criterion in state.criteria.items():
if not _has_route_to_root(state.criteria, key, all_keys, connected):
continue
if key not in graph:
graph.add(key)
for p in criterion.iter_parent():
try:
pkey = all_keys[id(p)]
except KeyError:
continue
if pkey not in graph:
graph.add(pkey)
graph.connect(pkey, key)
return Result(
mapping={k: v for k, v in mapping.items() if k in connected},
graph=graph,
criteria=state.criteria,
)
class Resolver(AbstractResolver):
base_exception = ResolverException
def resolve(self, requirements, max_rounds=100):
resolution = Resolution(self.provider, self.reporter)
state = resolution.resolve(requirements, max_rounds=max_rounds)
return _build_result(state)
| true | true |
1c3d5c3404bc6866d1ac21408009487b6f2e969d | 27,540 | py | Python | test/integration/run_gadgetron_test.py | roopchansinghv/gadgetron | fb6c56b643911152c27834a754a7b6ee2dd912da | [
"MIT"
] | 1 | 2022-02-22T21:06:36.000Z | 2022-02-22T21:06:36.000Z | test/integration/run_gadgetron_test.py | apd47/gadgetron | 073e84dabe77d2dae3b3dd9aa4bf9edbf1f890f2 | [
"MIT"
] | null | null | null | test/integration/run_gadgetron_test.py | apd47/gadgetron | 073e84dabe77d2dae3b3dd9aa4bf9edbf1f890f2 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import os
# Importing h5py on windows will mess with your environment. When we pass the messed up environment to gadgetron
# child processes, they won't load properly. We're saving our environment here to spare our children from the
# crimes of h5py.
environment = dict(os.environ)
import sys
import glob
import shutil
import argparse
import configparser
import re
import time
import functools
import json
import h5py
import numpy
import string
import ismrmrd
import pathlib
import tempfile
import itertools
import subprocess
import urllib.request
import urllib.error
# Fallback settings for test-case configuration files. All values are strings,
# as configparser expects; any individual test section may override them.
default_config_values = {
    "DEFAULT": {
        'parameter_xml': 'IsmrmrdParameterMap_Siemens.xml',
        'parameter_xsl': 'IsmrmrdParameterMap_Siemens.xsl',
        'value_comparison_threshold': '0.01',
        'scale_comparison_threshold': '0.01',
        'node_port_base': '9050',
        'dataset_group': 'dataset',
        'reference_group': 'dataset',
        'disable_image_header_test': 'false',
        'disable_image_meta_test': 'false',
    }
}
# Outcome sentinels: (label, process exit code) pairs. Elsewhere in this script
# a falsy result (None) means success, so these truthy tuples read as failures
# in `if result:` checks.
Passed = "Passed", 0
Failure = "Failure", 1
# ANSI terminal escape sequences used for colored console output.
_codes = {
    'red': '\033[91m',
    'green': '\033[92m',
    'cyan': '\033[96m',
    'end': '\033[0m',  # reset attributes
}
def _colors_disabled(text, color):
return text
def _colors_enabled(text, color):
    """Color handler wrapping *text* in the ANSI escape sequence for *color*."""
    prefix = _codes.get(color)
    suffix = _codes.get('end')
    return "{}{}{}".format(prefix, text, suffix)
def enabled(option):
    """Interpret a configuration string as a boolean flag (case-insensitive)."""
    return option.lower() in ('true', 'yes', '1', 'enabled')
def report_test(*, color_handler, section, result, reason):
    """Print a one-line pass/fail summary for a test section.

    Follows this script's convention: a falsy *result* (None) means success,
    a truthy one means failure.
    """
    status = color_handler("FAILURE", 'red') if result else color_handler("OK", 'green')
    print("{section:<26} [{status}] ({reason})".format(section=section,
                                                      status=status,
                                                      reason=reason))
def siemens_to_ismrmrd(echo_handler, *, input, output, parameters, schema, measurement, flag=None):
    """Convert a Siemens raw data file to ISMRMRD format via siemens_to_ismrmrd.

    The command line is passed to *echo_handler* before execution so the test
    runner can log it. Output of the converter is captured (not shown).

    Raises subprocess.CalledProcessError if the converter exits non-zero;
    this is handled upstream by handle_subprocess_errors. (Previously the exit
    status was ignored, so a failed conversion went unnoticed until a later
    stage failed on the missing output file.)
    """
    command = ["siemens_to_ismrmrd", "-X",
               "-f", input,
               "-m", parameters,
               "-x", schema,
               "-o", output,
               "-z", measurement] + ([flag] if flag else [])

    echo_handler(command)
    subprocess.run(command,
                   stdout=subprocess.PIPE,
                   stderr=subprocess.PIPE,
                   check=True)
def send_data_to_gadgetron(echo_handler, gadgetron, *, input, output, configuration, group, log, additional_arguments):
    """Send an ISMRMRD file through a Gadgetron instance using the ismrmrd client.

    *configuration* is a list of extra command-line arguments selecting the
    reconstruction chain; *additional_arguments* is an optional string of
    further whitespace-separated options. Client output goes to *log*.
    """
    print("Passing data to Gadgetron: {} -> {}".format(input, output))
    command = ["gadgetron_ismrmrd_client",
               "-a", gadgetron.host,
               "-p", gadgetron.port,
               "-f", input,
               "-o", output,
               "-G", group]
    command.extend(configuration)

    if additional_arguments:
        command.extend(additional_arguments.split())

    echo_handler(command)
    # Spawn the client with the pristine environment captured at startup
    # (before h5py had a chance to modify it).
    subprocess.run(command,
                   env=environment,
                   stdout=log,
                   stderr=log)
def wait_for_storage_server(port, proc, retries=20):
    """Poll the storage server's health endpoint until it answers.

    Gives up immediately when the server process *proc* has exited, or after
    *retries* failed attempts, raising RuntimeError in either case.
    """
    url = f"http://localhost:{port}/healthcheck"
    attempt = 0
    while True:
        try:
            urllib.request.urlopen(url)
            return
        except (urllib.error.URLError, urllib.error.HTTPError) as error:
            attempt += 1
            if attempt >= retries or proc.poll() is not None:
                raise RuntimeError("Unable to get a successful response from storage server.") from error
            time.sleep(0.2)
def start_storage_server(*, log, port, storage_folder):
    """Start an MRD Storage Server subprocess and wait until it is healthy.

    Configuration is passed through environment variables; both blob storage
    and the metadata database live under *storage_folder*. Returns the Popen
    handle on success. The caller is responsible for killing the process.
    """
    storage_server_environment = environment.copy()
    storage_server_environment["MRD_STORAGE_SERVER_PORT"] = port
    storage_server_environment["MRD_STORAGE_SERVER_STORAGE_CONNECTION_STRING"] = storage_folder
    storage_server_environment["MRD_STORAGE_SERVER_DATABASE_CONNECTION_STRING"] = storage_folder + "/metadata.db"
    retries = 5
    for i in range(retries):
        print("Starting MRD Storage Server on port", port)
        # --require-parent-pid makes the server exit if this test runner dies.
        proc = subprocess.Popen(["mrd-storage-server", "--require-parent-pid", str(os.getpid())],
                                stdout=log,
                                stderr=log,
                                env=storage_server_environment)
        try:
            wait_for_storage_server(port, proc)
            return proc
        except:
            # NOTE(review): bare `except:` also swallows KeyboardInterrupt here,
            # though it is re-raised below unless we retry.
            # If the process has exited, it might be because the
            # port was in use. This can be because the previous storage server
            # instance was just killed. So we try again.
            if proc.poll() is not None and i < retries:
                # NOTE(review): `i < retries` is always true for i in
                # range(retries); presumably `i < retries - 1` was intended so
                # the last attempt re-raises instead of sleeping — confirm.
                time.sleep(1)
            else:
                proc.kill()
                raise
def start_gadgetron_instance(*, log, port, storage_address, env=environment):
    """Launch a Gadgetron server process listening on *port*.

    Returns the Popen handle; the caller owns the process and must kill it.
    """
    print("Starting Gadgetron instance on port", port)
    command = ["gadgetron", "-p", port, "-E", storage_address]
    return subprocess.Popen(command,
                            stdout=log,
                            stderr=log,
                            env=env)
def validate_dataset(*, dataset_file, reference_file, dataset_group, reference_group):
    """Compare an output ISMRMRD dataset against a reference, element by element.

    Checks the XML header, then the acquisition, waveform, and image streams.
    Returns (None, message) on success or (Failure, reason) on mismatch or
    on failure to open either file.

    Fixes over the previous version: both files are now closed via context
    managers, and the header is read once instead of twice.
    """
    try:
        dataset = ismrmrd.File(dataset_file, 'r')
    except OSError:
        return Failure, "Failed to read dataset file '{}'".format(dataset_file)

    try:
        reference = ismrmrd.File(reference_file, 'r')
    except OSError:
        dataset.close()
        return Failure, "Failed to read reference file '{}'".format(reference_file)

    with dataset, reference:
        header = dataset[dataset_group].header
        ref_header = reference[reference_group].header
        if header != ref_header:
            # deepdiff is only needed on mismatch; import lazily to keep the
            # dependency optional for passing runs.
            import deepdiff
            diff = deepdiff.diff.DeepDiff(header, ref_header)
            print(diff.pretty())
            return Failure, "Dataset header did not match reference header"

        for attribute in ['acquisitions', 'waveforms', 'images']:
            dataset_items = getattr(dataset[dataset_group], attribute) or []
            reference_items = getattr(reference[reference_group], attribute) or []
            if list(dataset_items) != list(reference_items):
                return Failure, "Dataset {attr} did not match reference {attr}".format(attr=attribute)

    return None, "Dataset matched reference"
def validate_output(*, output_file, reference_file, output_group, reference_group, value_threshold, scale_threshold):
    """Compare reconstructed image data in *output_file* against a reference.

    Two checks are made on the flattened float32 data:
      * relative norm of the difference must not exceed *value_threshold*;
      * the least-squares scale factor between output and reference must be
        within *scale_threshold* of 1.

    Returns (None, message) on success or (Failure, reason) otherwise.
    """
    try:
        # The errors produced by h5py are not entirely excellent. We spend some code here to clear them up a bit.
        def get_group_data(file, group):
            with h5py.File(file, mode='r') as f:
                try:
                    group = group + '/data'
                    return numpy.squeeze(f[group])
                except KeyError:
                    raise RuntimeError("Did not find group '{}' in file {}".format(group, file))

        output_data = get_group_data(output_file, output_group)
        reference_data = get_group_data(reference_file, reference_group)
    except OSError as e:
        return Failure, str(e)
    except RuntimeError as e:
        return Failure, str(e)

    output = output_data[...].flatten().astype('float32')
    reference = reference_data[...].flatten().astype('float32')

    norm_diff = numpy.linalg.norm(output - reference) / numpy.linalg.norm(reference)
    scale = numpy.dot(output, output) / numpy.dot(output, reference)

    if value_threshold < norm_diff:
        return Failure, "Comparing values, norm diff: {} (threshold: {})".format(norm_diff, value_threshold)

    # Bug fix: the scale deviation was previously compared against
    # value_threshold, even though the message reports scale_threshold.
    if scale_threshold < abs(1 - scale):
        return Failure, "Comparing image scales, ratio: {} ({}) (threshold: {})".format(scale, abs(1 - scale),
                                                                                       scale_threshold)

    return None, "Norm: {:.1e} [{}] Scale: {:.1e} [{}]".format(norm_diff, value_threshold, abs(1 - scale),
                                                               scale_threshold)
def validate_image_header(*, output_file, reference_file, output_group, reference_group):
    """Compare ISMRMRD image headers in the output against the reference.

    Each header field is checked with its own rule: exact equality,
    approximate equality (for floating-point fields), or ignored entirely
    (timestamps and series index, which legitimately vary between runs).
    Returns (None, message) when all headers match, (Failure, reason) otherwise.
    """
    # Rule factories; each returns a predicate taking (output, reference) values.
    def equals():
        return lambda out, ref: out == ref
    def approx(threshold=1e-6):
        return lambda out, ref: abs(out - ref) <= threshold
    def ignore():
        return lambda out, ref: True
    def each(rule):
        # Apply `rule` element-wise; zip_longest pads the shorter sequence
        # with None, so a length mismatch fails unless the rule is ignore().
        return lambda out, ref: all(rule(out, ref) for out, ref in itertools.zip_longest(out, ref))
    # One rule per ImageHeader field.
    header_rules = {
        'version': equals(),
        'data_type': equals(),
        'flags': equals(),
        'measurement_uid': equals(),
        'matrix_size': each(equals()),
        'field_of_view': each(approx()),
        'channels': equals(),
        'position': each(approx()),
        'read_dir': each(approx()),
        'phase_dir': each(approx()),
        'slice_dir': each(approx()),
        'patient_table_position': each(approx()),
        'average': equals(),
        'slice': equals(),
        'contrast': equals(),
        'phase': equals(),
        'repetition': equals(),
        'set': equals(),
        'acquisition_time_stamp': ignore(),
        'physiology_time_stamp': each(ignore()),
        'image_type': equals(),
        'image_index': equals(),
        'image_series_index': ignore(),
        'user_int': each(equals()),
        'user_float': each(approx()),
        'attribute_string_len': ignore()
    }
    def check_image_header(output, reference):
        # A None on either side means the two files have a different number
        # of images (zip_longest padded the shorter stream).
        if not output:
            raise RuntimeError("Missing output")
        if not reference:
            raise RuntimeError("Missing reference")
        output = output.getHead()
        reference = reference.getHead()
        for attribute, rule in header_rules.items():
            if not rule(getattr(output, attribute), getattr(reference, attribute)):
                # Dump both headers to aid debugging before failing.
                print(output)
                print(reference)
                raise RuntimeError(
                    "Image header '{}' does not match reference. [index {}, series {}]".format(
                        attribute,
                        output.image_index,
                        output.image_series_index
                    )
                )
    try:
        with ismrmrd.File(output_file, 'r') as output_file:
            with ismrmrd.File(reference_file, 'r') as reference_file:
                output_images = output_file[output_group].images or []
                reference_images = reference_file[reference_group].images or []
                for output_image, reference_image in itertools.zip_longest(output_images, reference_images):
                    check_image_header(output_image, reference_image)
    except OSError as e:
        return Failure, str(e)
    except RuntimeError as e:
        return Failure, str(e)
    return None, "Output headers matched reference"
def error_handlers(args, config):
    """Yield action wrappers that translate subprocess failures into Failure.

    Part of the runner's chain-of-actions pattern: each yielded callable takes
    the continuation plus accumulated state, and either forwards the result or
    reports the failed command and returns Failure.
    """
    def handle_subprocess_errors(cont, **state):
        try:
            return cont(**state)
        except subprocess.CalledProcessError as error:
            print("An error occurred in a subprocess with the following command:")
            print(' '.join(error.cmd))
            return Failure

    yield handle_subprocess_errors
def clear_test_folder(args, config):
    """Yield an action that wipes and recreates the test output folder."""
    def clear_test_folder_action(cont, **state):
        folder = args.test_folder
        if os.path.exists(folder):
            shutil.rmtree(folder)
        os.makedirs(folder, exist_ok=True)
        return cont(**state)

    yield clear_test_folder_action
def ensure_storage_server(args, config):
    """Yield an action that provides a running MRD storage server.

    When --external is given, an existing deployment is assumed and nothing
    is yielded at all. Otherwise the action starts a server backed by a
    temporary folder, passes its address down the chain as `storage`, and
    tears everything down afterwards.
    """
    # Small handle passed down the action chain; only the address is needed.
    class Storage:
        def __init__(self, address):
            self.address = address
    if args.external:
        return
    def start_storage_server_action(cont, **state):
        with open(os.path.join(args.test_folder, 'storage.log'), 'w') as log:
            with tempfile.TemporaryDirectory() as storage_folder:
                with start_storage_server(
                        log=log,
                        port=str(args.storage_port),
                        storage_folder=storage_folder
                ) as proc:
                    try:
                        return cont(storage=Storage("http://localhost:" + str(args.storage_port)), **state)
                    finally:
                        # Always stop the server, even if the chain failed.
                        proc.kill()
    yield start_storage_server_action
def start_additional_nodes(args, config):
    """Yield actions starting extra Gadgetron worker instances for distributed
    tests, then advertise them through GADGETRON_REMOTE_WORKER_COMMAND.

    Yields nothing when running against an external Gadgetron or when the
    test configuration has no [distributed] section.
    """
    if args.external:
        return

    if not config.has_section('distributed'):
        return

    def set_distributed_environment_action(cont, *, worker_list=None, env=None, **state):
        # Fix: avoid mutable/shared default arguments (worker_list=[] and
        # env=dict(environment) were previously evaluated once at def time).
        worker_list = [] if worker_list is None else worker_list
        env = dict(environment) if env is None else env
        if sys.platform.startswith('win32'):
            env['GADGETRON_REMOTE_WORKER_COMMAND'] = 'cmd /k echo ' + json.dumps(worker_list) + ' & exit'
        else:
            env["GADGETRON_REMOTE_WORKER_COMMAND"] = "echo " + json.dumps(worker_list)

        print("Setting env to", env["GADGETRON_REMOTE_WORKER_COMMAND"])
        return cont(env=env, **state)

    base_port = int(config['distributed']['node_port_base'])
    number_of_nodes = int(config['distributed']['nodes'])

    def create_worker_ports_action(ids, cont, **state):
        print("Will start additional Gadgetron workers on ports:", *map(lambda idx: base_port + idx, ids))
        return cont(**state)

    def start_additional_worker_action(port, cont, *, storage, worker_list=None, **state):
        worker_list = [] if worker_list is None else worker_list
        with open(os.path.join(args.test_folder, 'gadgetron_worker' + port + '.log'), 'w') as log:
            with start_gadgetron_instance(log=log, port=port, storage_address=storage.address) as instance:
                try:
                    # Extend the worker list non-destructively and pass it on.
                    return cont(worker_list=worker_list + ['localhost:' + port], storage=storage, **state)
                finally:
                    instance.kill()

    yield functools.partial(create_worker_ports_action, range(number_of_nodes))
    yield from (functools.partial(start_additional_worker_action, str(base_port + idx))
                for idx in range(number_of_nodes))
    yield set_distributed_environment_action
def ensure_gadgetron_instance(args, config):
    """Yield an action that provides a Gadgetron instance to the chain.

    With --external the instance at args.host:args.port is assumed to be
    running already; otherwise one is started (and killed afterwards).
    Either way the chain receives a `gadgetron` handle with host and port.
    """
    class Gadgetron:
        def __init__(self, *, host, port):
            self.host = host
            self.port = port

    gadgetron = Gadgetron(host=str(args.host), port=str(args.port))

    def use_external_gadgetron_action(cont, **state):
        # Nothing to start or stop; just hand the connection details on.
        return cont(gadgetron=gadgetron, **state)

    def start_gadgetron_action(cont, *, storage, env=environment, **state):
        log_path = os.path.join(args.test_folder, 'gadgetron.log')
        with open(log_path, 'w') as log:
            with start_gadgetron_instance(log=log, port=gadgetron.port,
                                          storage_address=storage.address, env=env) as instance:
                try:
                    return cont(gadgetron=gadgetron, storage=storage, **state)
                finally:
                    instance.kill()

    yield use_external_gadgetron_action if args.external else start_gadgetron_action
def copy_input_data(args, config, section):
destination_file = os.path.join(args.test_folder, section + '.copied.mrd')
def copy_input_action(cont, **state):
source_file = os.path.join(args.data_folder, config[section]['source'])
print("Copying prepared ISMRMRD data: {} -> {}".format(source_file, destination_file))
shutil.copyfile(source_file, destination_file)
state.update(client_input=destination_file)
return cont(**state)
yield copy_input_action
def convert_siemens_data(args, config, section):
destination_file = os.path.join(args.test_folder, section + '.converted.mrd')
def convert_siemens_data_action(cont, **state):
source_file = os.path.join(args.data_folder, config[section]['data_file'])
print("Converting Siemens data: {} (measurement {}) -> {}".format(source_file, config[section]['measurement'],
destination_file))
siemens_to_ismrmrd(args.echo_handler,
input=source_file,
output=destination_file,
parameters=config[section]['parameter_xml'],
schema=config[section]['parameter_xsl'],
measurement=config[section]['measurement'],
flag=config[section].get('data_conversion_flag', None))
state.update(client_input=destination_file)
return cont(**state)
yield convert_siemens_data_action
def run_gadgetron_client(args, config, section):
output_file = os.path.join(args.test_folder, section + '.output.mrd')
def prepare_config_action(cont, **state):
state.update(
group=config[section]['configuration'],
configuration=['-c', config[section]['configuration']],
)
return cont(**state)
def prepare_template_action(cont, **state):
template_file = os.path.join(args.template_folder, config[section]['template'])
configuration_file = os.path.join(args.test_folder, section + '.config.xml')
with open(template_file, 'r') as input:
with open(configuration_file, 'w') as output:
output.write(
string.Template(input.read()).substitute(
test_folder=os.path.abspath(args.test_folder),
# Expand substitution list as needed.
)
)
state.update(
group=section,
configuration=['-C', configuration_file],
)
return cont(**state)
def send_data_action(cont, *, gadgetron, client_input, configuration, group, processing_time=0, **state):
with open(os.path.join(args.test_folder, section + '.client.log'), 'w') as log:
start_time = time.time()
try:
additional_args = config[section]['additional_arguments']
except KeyError:
additional_args = None
send_data_to_gadgetron(args.echo_handler,
gadgetron,
input=client_input,
output=output_file,
configuration=configuration,
group=group,
log=log,
additional_arguments=additional_args)
end_time = time.time()
duration = end_time - start_time
print("Gadgetron processing time: {:.2f} s".format(duration))
state.update(
gadgetron=gadgetron,
client_input=client_input,
client_output=output_file,
configuration=configuration,
group=group,
processing_time=processing_time + duration
)
return cont(**state)
yield from (action for key, action in [('configuration', prepare_config_action),
('template', prepare_template_action)]
if key in config[section])
yield send_data_action
def validate_client_output(args, config, section):
reference_file = os.path.join(args.data_folder, config[section]['reference_file'])
def validate_output_action(cont, *, client_output, status=Passed, **state):
result, reason = validate_output(output_file=client_output,
reference_file=reference_file,
output_group=config[section]['output_images'],
reference_group=config[section]['reference_images'],
value_threshold=float(config[section]['value_comparison_threshold']),
scale_threshold=float(config[section]['scale_comparison_threshold']))
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
client_output=client_output,
status=status if result is None else Failure,
**state
)
def validate_meta(validator, cont, *, client_output, status=Passed, **state):
result, reason = validator(output_file=client_output,
reference_file=reference_file,
output_group=config[section]['output_images'],
reference_group=config[section]['reference_images'])
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
client_output=client_output,
status=status if result is None else Failure,
**state
)
yield validate_output_action
if not enabled(config[section]['disable_image_header_test']):
yield functools.partial(validate_meta, validate_image_header)
def validate_dataset_output(args, config, section):
def find_dataset_action(cont, status=Passed, **state):
dataset_prefix = os.path.join(args.test_folder, config[section]['dataset_prefix'])
dataset_files = glob.glob(dataset_prefix + "*")
rules = [(lambda files: len(files) == 0, "Found no dataset with prefix: {}".format(dataset_prefix)),
(lambda files: len(files) > 1, "Too many datasets with prefix: {}".format(dataset_prefix))]
def check_rules():
for test, reason in rules:
if test(dataset_files):
return Failure, reason, None
return None, "Found appropriate dataset", dataset_files[0]
result, reason, dataset_file = check_rules()
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
dataset_file=dataset_file if dataset_files else None,
status=status if result is None else Failure,
**state
)
def validate_dataset_action(cont, *, dataset_file, status=Passed, **state):
if not dataset_file:
return cont(status=status, **state)
reference_file = os.path.join(args.data_folder, config[section]['reference_file'])
result, reason = validate_dataset(dataset_file=dataset_file,
dataset_group=config[section]['dataset_group'],
reference_file=reference_file,
reference_group=config[section]['reference_group'])
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
status=status if result is None else Failure,
**state
)
yield find_dataset_action
yield validate_dataset_action
def prepare_sequence_actions(args, config):
action_factories = {
'copy': lambda section: copy_input_data(args, config, section),
'siemens': lambda section: convert_siemens_data(args, config, section),
'client': lambda section: run_gadgetron_client(args, config, section),
'equals': lambda section: validate_dataset_output(args, config, section),
'test': lambda section: validate_client_output(args, config, section),
}
pattern = re.compile(r"(?P<sequence_key>\w+)\.(?P<action_key>(copy)|(siemens)|(client)|(equals)|(test))(\.\w+)*")
def prepare_sequence_action(section):
m = re.match(pattern, section)
return action_factories.get(m['action_key'])(section)
for section in config.sections():
if re.match(pattern, section):
yield from prepare_sequence_action(section)
def output_stats(args, config):
def output_stats_action(cont, **state):
stats = {
'test': state.get('name'),
'processing_time': state.get('processing_time'),
'status': state.get('status')[0]
}
with open(os.path.join(args.test_folder, 'stats.json'), 'w') as f:
json.dump(stats, f)
return cont(**state)
yield output_stats_action
def build_actions(args, config):
yield from error_handlers(args, config)
yield from clear_test_folder(args, config)
yield from ensure_storage_server(args, config)
yield from start_additional_nodes(args, config)
yield from ensure_gadgetron_instance(args, config)
yield from prepare_sequence_actions(args, config)
yield from output_stats(args, config)
def chain_actions(actions):
try:
action = next(actions)
return lambda **state: action(chain_actions(actions), **state)
except StopIteration:
return lambda **state: state.get('status')
def main():
parser = argparse.ArgumentParser(description="Gadgetron Integration Test",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-G', '--gadgetron-home',
default=os.environ.get('GADGETRON_HOME'),
help="Gadgetron installation home")
parser.add_argument('-I', '--ismrmrd-home',
default=os.environ.get('ISMRMRD_HOME'),
help="ISMRMRD installation home")
parser.add_argument('-p', '--port', type=int, default=9003, help="Port used by Gadgetron")
parser.add_argument('-a', '--host', type=str, default="localhost", help="Address of (external) Gadgetron host")
parser.add_argument('-s', '--storage_port', type=int, default=9113, help="Port used by Gadgetron Storage Server")
parser.add_argument('-e', '--external', action='store_true', default=False,
help="External, do not start Gadgetron")
parser.add_argument('-c', '--template-folder',
type=str, default='config',
help="Look for test configuration templates in the specified folder")
parser.add_argument('-d', '--data-folder',
type=str, default='data',
help="Look for test data in the specified folder")
parser.add_argument('-t', '--test-folder',
type=str, default='test',
help="Save Gadgetron output and client logs to specified folder")
parser.add_argument('--force', action='store_true', default=False,
help="Do not query Gadgetron capabilities; just run the test.")
parser.add_argument('--disable-color', dest='color_handler', action='store_const',
const=_colors_disabled, default=_colors_enabled,
help="Disable colors in the test script output.")
parser.add_argument('--echo-commands', dest='echo_handler', action='store_const',
const=lambda cmd: print(' '.join(cmd)), default=lambda *_: None,
help="Echo the commands issued while running the test.")
parser.add_argument('test', help="Test case file", type=pathlib.Path)
args = parser.parse_args()
print("Running Gadgetron test {} with:".format(args.test))
print(" -- ISMRMRD_HOME : {}".format(args.ismrmrd_home))
print(" -- GADGETRON_HOME : {}".format(args.gadgetron_home))
print(" -- TEST CASE : {}".format(args.test))
config_parser = configparser.ConfigParser()
config_parser.read_dict(default_config_values)
config_parser.read(args.test)
action_chain = chain_actions(build_actions(args, config_parser))
result, return_code = action_chain(test=args.test, name=args.test.stem)
print("Test status: {}".format(args.color_handler(result, 'red' if return_code else 'green')))
return return_code
if __name__ == "__main__":
sys.exit(main())
| 37.418478 | 119 | 0.607444 |
import os
environment = dict(os.environ)
import sys
import glob
import shutil
import argparse
import configparser
import re
import time
import functools
import json
import h5py
import numpy
import string
import ismrmrd
import pathlib
import tempfile
import itertools
import subprocess
import urllib.request
import urllib.error
default_config_values = {
"DEFAULT": {
'parameter_xml': 'IsmrmrdParameterMap_Siemens.xml',
'parameter_xsl': 'IsmrmrdParameterMap_Siemens.xsl',
'value_comparison_threshold': '0.01',
'scale_comparison_threshold': '0.01',
'node_port_base': '9050',
'dataset_group': 'dataset',
'reference_group': 'dataset',
'disable_image_header_test': 'false',
'disable_image_meta_test': 'false',
}
}
Passed = "Passed", 0
Failure = "Failure", 1
_codes = {
'red': '\033[91m',
'green': '\033[92m',
'cyan': '\033[96m',
'end': '\033[0m',
}
def _colors_disabled(text, color):
return text
def _colors_enabled(text, color):
return "{begin}{text}{end}".format(
begin=_codes.get(color),
text=text,
end=_codes.get('end'),
)
def enabled(option):
return option.lower() in ['true', 'yes', '1', 'enabled']
def report_test(*, color_handler, section, result, reason):
print("{section:<26} [{status}] ({reason})".format(
section=section,
status=color_handler("FAILURE", 'red') if result else color_handler("OK", 'green'),
reason=reason,
))
def siemens_to_ismrmrd(echo_handler, *, input, output, parameters, schema, measurement, flag=None):
command = ["siemens_to_ismrmrd", "-X",
"-f", input,
"-m", parameters,
"-x", schema,
"-o", output,
"-z", measurement] + ([flag] if flag else [])
echo_handler(command)
subprocess.run(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
def send_data_to_gadgetron(echo_handler, gadgetron, *, input, output, configuration, group, log, additional_arguments):
print("Passing data to Gadgetron: {} -> {}".format(input, output))
command = ["gadgetron_ismrmrd_client",
"-a", gadgetron.host,
"-p", gadgetron.port,
"-f", input,
"-o", output,
"-G", group] + configuration
if additional_arguments:
command = command + additional_arguments.split()
echo_handler(command)
subprocess.run(command,
env=environment,
stdout=log,
stderr=log)
def wait_for_storage_server(port, proc, retries=20):
for i in range(retries):
try:
urllib.request.urlopen(f"http://localhost:{port}/healthcheck")
return
except (urllib.error.URLError, urllib.error.HTTPError) as e:
if i == retries - 1 or proc.poll() is not None:
raise RuntimeError("Unable to get a successful response from storage server.") from e
time.sleep(0.2)
def start_storage_server(*, log, port, storage_folder):
storage_server_environment = environment.copy()
storage_server_environment["MRD_STORAGE_SERVER_PORT"] = port
storage_server_environment["MRD_STORAGE_SERVER_STORAGE_CONNECTION_STRING"] = storage_folder
storage_server_environment["MRD_STORAGE_SERVER_DATABASE_CONNECTION_STRING"] = storage_folder + "/metadata.db"
retries = 5
for i in range(retries):
print("Starting MRD Storage Server on port", port)
proc = subprocess.Popen(["mrd-storage-server", "--require-parent-pid", str(os.getpid())],
stdout=log,
stderr=log,
env=storage_server_environment)
try:
wait_for_storage_server(port, proc)
return proc
except:
if proc.poll() is not None and i < retries:
time.sleep(1)
else:
proc.kill()
raise
def start_gadgetron_instance(*, log, port, storage_address, env=environment):
print("Starting Gadgetron instance on port", port)
proc = subprocess.Popen(["gadgetron", "-p", port, "-E", storage_address],
stdout=log,
stderr=log,
env=env)
return proc
def validate_dataset(*, dataset_file, reference_file, dataset_group, reference_group):
try:
dataset_file = ismrmrd.File(dataset_file, 'r')
except OSError as e:
return Failure, "Failed to read dataset file '{}'".format(dataset_file)
try:
reference_file = ismrmrd.File(reference_file, 'r')
except OSError as e:
return Failure, "Failed to read reference file '{}'".format(reference_file)
header = dataset_file[dataset_group].header
ref_header = reference_file[reference_group].header
if not dataset_file[dataset_group].header == reference_file[reference_group].header:
import deepdiff
diff = deepdiff.diff.DeepDiff(header, ref_header)
print(diff.pretty())
return Failure, "Dataset header did not match reference header"
for attribute in ['acquisitions', 'waveforms', 'images']:
dataset = getattr(dataset_file[dataset_group], attribute) or []
reference = getattr(reference_file[reference_group], attribute) or []
if not list(dataset) == list(reference):
return Failure, "Dataset {attr} did not match reference {attr}".format(attr=attribute)
return None, "Dataset matched reference"
def validate_output(*, output_file, reference_file, output_group, reference_group, value_threshold, scale_threshold):
try:
def get_group_data(file, group):
with h5py.File(file, mode='r') as f:
try:
group = group + '/data'
return numpy.squeeze(f[group])
except KeyError:
raise RuntimeError("Did not find group '{}' in file {}".format(group, file))
output_data = get_group_data(output_file, output_group)
reference_data = get_group_data(reference_file, reference_group)
except OSError as e:
return Failure, str(e)
except RuntimeError as e:
return Failure, str(e)
output = output_data[...].flatten().astype('float32')
reference = reference_data[...].flatten().astype('float32')
norm_diff = numpy.linalg.norm(output - reference) / numpy.linalg.norm(reference)
scale = numpy.dot(output, output) / numpy.dot(output, reference)
if value_threshold < norm_diff:
return Failure, "Comparing values, norm diff: {} (threshold: {})".format(norm_diff, value_threshold)
if value_threshold < abs(1 - scale):
return Failure, "Comparing image scales, ratio: {} ({}) (threshold: {})".format(scale, abs(1 - scale),
scale_threshold)
return None, "Norm: {:.1e} [{}] Scale: {:.1e} [{}]".format(norm_diff, value_threshold, abs(1 - scale),
scale_threshold)
def validate_image_header(*, output_file, reference_file, output_group, reference_group):
def equals():
return lambda out, ref: out == ref
def approx(threshold=1e-6):
return lambda out, ref: abs(out - ref) <= threshold
def ignore():
return lambda out, ref: True
def each(rule):
return lambda out, ref: all(rule(out, ref) for out, ref in itertools.zip_longest(out, ref))
header_rules = {
'version': equals(),
'data_type': equals(),
'flags': equals(),
'measurement_uid': equals(),
'matrix_size': each(equals()),
'field_of_view': each(approx()),
'channels': equals(),
'position': each(approx()),
'read_dir': each(approx()),
'phase_dir': each(approx()),
'slice_dir': each(approx()),
'patient_table_position': each(approx()),
'average': equals(),
'slice': equals(),
'contrast': equals(),
'phase': equals(),
'repetition': equals(),
'set': equals(),
'acquisition_time_stamp': ignore(),
'physiology_time_stamp': each(ignore()),
'image_type': equals(),
'image_index': equals(),
'image_series_index': ignore(),
'user_int': each(equals()),
'user_float': each(approx()),
'attribute_string_len': ignore()
}
def check_image_header(output, reference):
if not output:
raise RuntimeError("Missing output")
if not reference:
raise RuntimeError("Missing reference")
output = output.getHead()
reference = reference.getHead()
for attribute, rule in header_rules.items():
if not rule(getattr(output, attribute), getattr(reference, attribute)):
print(output)
print(reference)
raise RuntimeError(
"Image header '{}' does not match reference. [index {}, series {}]".format(
attribute,
output.image_index,
output.image_series_index
)
)
try:
with ismrmrd.File(output_file, 'r') as output_file:
with ismrmrd.File(reference_file, 'r') as reference_file:
output_images = output_file[output_group].images or []
reference_images = reference_file[reference_group].images or []
for output_image, reference_image in itertools.zip_longest(output_images, reference_images):
check_image_header(output_image, reference_image)
except OSError as e:
return Failure, str(e)
except RuntimeError as e:
return Failure, str(e)
return None, "Output headers matched reference"
def error_handlers(args, config):
def handle_subprocess_errors(cont, **state):
try:
return cont(**state)
except subprocess.CalledProcessError as e:
print("An error occurred in a subprocess with the following command:")
print(' '.join(e.cmd))
return Failure
yield handle_subprocess_errors
def clear_test_folder(args, config):
def clear_test_folder_action(cont, **state):
if os.path.exists(args.test_folder):
shutil.rmtree(args.test_folder)
os.makedirs(args.test_folder, exist_ok=True)
return cont(**state)
yield clear_test_folder_action
def ensure_storage_server(args, config):
class Storage:
def __init__(self, address):
self.address = address
if args.external:
return
def start_storage_server_action(cont, **state):
with open(os.path.join(args.test_folder, 'storage.log'), 'w') as log:
with tempfile.TemporaryDirectory() as storage_folder:
with start_storage_server(
log=log,
port=str(args.storage_port),
storage_folder=storage_folder
) as proc:
try:
return cont(storage=Storage("http://localhost:" + str(args.storage_port)), **state)
finally:
proc.kill()
yield start_storage_server_action
def start_additional_nodes(args, config):
if args.external:
return
if not config.has_section('distributed'):
return
def set_distributed_environment_action(cont, *, worker_list=[], env=dict(environment), **state):
if sys.platform.startswith('win32'):
env['GADGETRON_REMOTE_WORKER_COMMAND'] = 'cmd /k echo ' + json.dumps(worker_list) + ' & exit'
else:
env["GADGETRON_REMOTE_WORKER_COMMAND"] = "echo " + json.dumps(worker_list)
print("Setting env to", env["GADGETRON_REMOTE_WORKER_COMMAND"])
return cont(env=env, **state)
base_port = int(config['distributed']['node_port_base'])
number_of_nodes = int(config['distributed']['nodes'])
def create_worker_ports_action(ids, cont, **state):
print("Will start additional Gadgetron workers on ports:", *map(lambda idx: base_port + idx, ids))
return cont(**state)
def start_additional_worker_action(port, cont, *, storage, worker_list=[], **state):
with open(os.path.join(args.test_folder, 'gadgetron_worker' + port + '.log'), 'w') as log:
with start_gadgetron_instance(log=log, port=port, storage_address=storage.address) as instance:
try:
return cont(worker_list=worker_list + ['localhost:' + port], storage=storage, **state)
finally:
instance.kill()
yield functools.partial(create_worker_ports_action, range(number_of_nodes))
yield from (functools.partial(start_additional_worker_action, str(base_port + idx))
for idx in range(number_of_nodes))
yield set_distributed_environment_action
def ensure_gadgetron_instance(args, config):
class Gadgetron:
def __init__(self, *, host, port):
self.host = host
self.port = port
gadgetron = Gadgetron(host=str(args.host), port=str(args.port))
def start_gadgetron_action(cont, *, storage, env=environment, **state):
with open(os.path.join(args.test_folder, 'gadgetron.log'), 'w') as log:
with start_gadgetron_instance(log=log, port=gadgetron.port, storage_address=storage.address,
env=env) as instance:
try:
return cont(gadgetron=gadgetron, storage=storage, **state)
finally:
instance.kill()
def use_external_gadgetron_action(cont, **state):
return cont(gadgetron=gadgetron, **state)
if args.external:
yield use_external_gadgetron_action
else:
yield start_gadgetron_action
def copy_input_data(args, config, section):
destination_file = os.path.join(args.test_folder, section + '.copied.mrd')
def copy_input_action(cont, **state):
source_file = os.path.join(args.data_folder, config[section]['source'])
print("Copying prepared ISMRMRD data: {} -> {}".format(source_file, destination_file))
shutil.copyfile(source_file, destination_file)
state.update(client_input=destination_file)
return cont(**state)
yield copy_input_action
def convert_siemens_data(args, config, section):
destination_file = os.path.join(args.test_folder, section + '.converted.mrd')
def convert_siemens_data_action(cont, **state):
source_file = os.path.join(args.data_folder, config[section]['data_file'])
print("Converting Siemens data: {} (measurement {}) -> {}".format(source_file, config[section]['measurement'],
destination_file))
siemens_to_ismrmrd(args.echo_handler,
input=source_file,
output=destination_file,
parameters=config[section]['parameter_xml'],
schema=config[section]['parameter_xsl'],
measurement=config[section]['measurement'],
flag=config[section].get('data_conversion_flag', None))
state.update(client_input=destination_file)
return cont(**state)
yield convert_siemens_data_action
def run_gadgetron_client(args, config, section):
output_file = os.path.join(args.test_folder, section + '.output.mrd')
def prepare_config_action(cont, **state):
state.update(
group=config[section]['configuration'],
configuration=['-c', config[section]['configuration']],
)
return cont(**state)
def prepare_template_action(cont, **state):
template_file = os.path.join(args.template_folder, config[section]['template'])
configuration_file = os.path.join(args.test_folder, section + '.config.xml')
with open(template_file, 'r') as input:
with open(configuration_file, 'w') as output:
output.write(
string.Template(input.read()).substitute(
test_folder=os.path.abspath(args.test_folder),
)
)
state.update(
group=section,
configuration=['-C', configuration_file],
)
return cont(**state)
def send_data_action(cont, *, gadgetron, client_input, configuration, group, processing_time=0, **state):
with open(os.path.join(args.test_folder, section + '.client.log'), 'w') as log:
start_time = time.time()
try:
additional_args = config[section]['additional_arguments']
except KeyError:
additional_args = None
send_data_to_gadgetron(args.echo_handler,
gadgetron,
input=client_input,
output=output_file,
configuration=configuration,
group=group,
log=log,
additional_arguments=additional_args)
end_time = time.time()
duration = end_time - start_time
print("Gadgetron processing time: {:.2f} s".format(duration))
state.update(
gadgetron=gadgetron,
client_input=client_input,
client_output=output_file,
configuration=configuration,
group=group,
processing_time=processing_time + duration
)
return cont(**state)
yield from (action for key, action in [('configuration', prepare_config_action),
('template', prepare_template_action)]
if key in config[section])
yield send_data_action
def validate_client_output(args, config, section):
reference_file = os.path.join(args.data_folder, config[section]['reference_file'])
def validate_output_action(cont, *, client_output, status=Passed, **state):
result, reason = validate_output(output_file=client_output,
reference_file=reference_file,
output_group=config[section]['output_images'],
reference_group=config[section]['reference_images'],
value_threshold=float(config[section]['value_comparison_threshold']),
scale_threshold=float(config[section]['scale_comparison_threshold']))
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
client_output=client_output,
status=status if result is None else Failure,
**state
)
def validate_meta(validator, cont, *, client_output, status=Passed, **state):
result, reason = validator(output_file=client_output,
reference_file=reference_file,
output_group=config[section]['output_images'],
reference_group=config[section]['reference_images'])
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
client_output=client_output,
status=status if result is None else Failure,
**state
)
yield validate_output_action
if not enabled(config[section]['disable_image_header_test']):
yield functools.partial(validate_meta, validate_image_header)
def validate_dataset_output(args, config, section):
def find_dataset_action(cont, status=Passed, **state):
dataset_prefix = os.path.join(args.test_folder, config[section]['dataset_prefix'])
dataset_files = glob.glob(dataset_prefix + "*")
rules = [(lambda files: len(files) == 0, "Found no dataset with prefix: {}".format(dataset_prefix)),
(lambda files: len(files) > 1, "Too many datasets with prefix: {}".format(dataset_prefix))]
def check_rules():
for test, reason in rules:
if test(dataset_files):
return Failure, reason, None
return None, "Found appropriate dataset", dataset_files[0]
result, reason, dataset_file = check_rules()
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
dataset_file=dataset_file if dataset_files else None,
status=status if result is None else Failure,
**state
)
def validate_dataset_action(cont, *, dataset_file, status=Passed, **state):
if not dataset_file:
return cont(status=status, **state)
reference_file = os.path.join(args.data_folder, config[section]['reference_file'])
result, reason = validate_dataset(dataset_file=dataset_file,
dataset_group=config[section]['dataset_group'],
reference_file=reference_file,
reference_group=config[section]['reference_group'])
report_test(color_handler=args.color_handler, section=section, result=result, reason=reason)
return cont(
status=status if result is None else Failure,
**state
)
yield find_dataset_action
yield validate_dataset_action
def prepare_sequence_actions(args, config):
action_factories = {
'copy': lambda section: copy_input_data(args, config, section),
'siemens': lambda section: convert_siemens_data(args, config, section),
'client': lambda section: run_gadgetron_client(args, config, section),
'equals': lambda section: validate_dataset_output(args, config, section),
'test': lambda section: validate_client_output(args, config, section),
}
pattern = re.compile(r"(?P<sequence_key>\w+)\.(?P<action_key>(copy)|(siemens)|(client)|(equals)|(test))(\.\w+)*")
def prepare_sequence_action(section):
m = re.match(pattern, section)
return action_factories.get(m['action_key'])(section)
for section in config.sections():
if re.match(pattern, section):
yield from prepare_sequence_action(section)
def output_stats(args, config):
def output_stats_action(cont, **state):
stats = {
'test': state.get('name'),
'processing_time': state.get('processing_time'),
'status': state.get('status')[0]
}
with open(os.path.join(args.test_folder, 'stats.json'), 'w') as f:
json.dump(stats, f)
return cont(**state)
yield output_stats_action
def build_actions(args, config):
yield from error_handlers(args, config)
yield from clear_test_folder(args, config)
yield from ensure_storage_server(args, config)
yield from start_additional_nodes(args, config)
yield from ensure_gadgetron_instance(args, config)
yield from prepare_sequence_actions(args, config)
yield from output_stats(args, config)
def chain_actions(actions):
try:
action = next(actions)
return lambda **state: action(chain_actions(actions), **state)
except StopIteration:
return lambda **state: state.get('status')
def main():
parser = argparse.ArgumentParser(description="Gadgetron Integration Test",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-G', '--gadgetron-home',
default=os.environ.get('GADGETRON_HOME'),
help="Gadgetron installation home")
parser.add_argument('-I', '--ismrmrd-home',
default=os.environ.get('ISMRMRD_HOME'),
help="ISMRMRD installation home")
parser.add_argument('-p', '--port', type=int, default=9003, help="Port used by Gadgetron")
parser.add_argument('-a', '--host', type=str, default="localhost", help="Address of (external) Gadgetron host")
parser.add_argument('-s', '--storage_port', type=int, default=9113, help="Port used by Gadgetron Storage Server")
parser.add_argument('-e', '--external', action='store_true', default=False,
help="External, do not start Gadgetron")
parser.add_argument('-c', '--template-folder',
type=str, default='config',
help="Look for test configuration templates in the specified folder")
parser.add_argument('-d', '--data-folder',
type=str, default='data',
help="Look for test data in the specified folder")
parser.add_argument('-t', '--test-folder',
type=str, default='test',
help="Save Gadgetron output and client logs to specified folder")
parser.add_argument('--force', action='store_true', default=False,
help="Do not query Gadgetron capabilities; just run the test.")
parser.add_argument('--disable-color', dest='color_handler', action='store_const',
const=_colors_disabled, default=_colors_enabled,
help="Disable colors in the test script output.")
parser.add_argument('--echo-commands', dest='echo_handler', action='store_const',
const=lambda cmd: print(' '.join(cmd)), default=lambda *_: None,
help="Echo the commands issued while running the test.")
parser.add_argument('test', help="Test case file", type=pathlib.Path)
args = parser.parse_args()
print("Running Gadgetron test {} with:".format(args.test))
print(" -- ISMRMRD_HOME : {}".format(args.ismrmrd_home))
print(" -- GADGETRON_HOME : {}".format(args.gadgetron_home))
print(" -- TEST CASE : {}".format(args.test))
config_parser = configparser.ConfigParser()
config_parser.read_dict(default_config_values)
config_parser.read(args.test)
action_chain = chain_actions(build_actions(args, config_parser))
result, return_code = action_chain(test=args.test, name=args.test.stem)
print("Test status: {}".format(args.color_handler(result, 'red' if return_code else 'green')))
return return_code
if __name__ == "__main__":
sys.exit(main())
| true | true |
1c3d5ce8337d153b0bd294bc07abaa6df5186e88 | 9,570 | py | Python | braintree/customer.py | whitemike889/braintree_python | fc98c738f9e74736a7d1e82cfb4e37f6e493c3c4 | [
"MIT"
] | 1 | 2020-06-14T20:47:02.000Z | 2020-06-14T20:47:02.000Z | braintree/customer.py | whitemike889/braintree_python | fc98c738f9e74736a7d1e82cfb4e37f6e493c3c4 | [
"MIT"
] | null | null | null | braintree/customer.py | whitemike889/braintree_python | fc98c738f9e74736a7d1e82cfb4e37f6e493c3c4 | [
"MIT"
] | null | null | null | import warnings
from braintree.util.http import Http
from braintree.successful_result import SuccessfulResult
from braintree.error_result import ErrorResult
from braintree.resource import Resource
from braintree.apple_pay_card import ApplePayCard
from braintree.android_pay_card import AndroidPayCard
from braintree.amex_express_checkout_card import AmexExpressCheckoutCard
from braintree.credit_card import CreditCard
from braintree.paypal_account import PayPalAccount
from braintree.europe_bank_account import EuropeBankAccount
from braintree.us_bank_account import UsBankAccount
from braintree.venmo_account import VenmoAccount
from braintree.visa_checkout_card import VisaCheckoutCard
from braintree.masterpass_card import MasterpassCard
from braintree.address import Address
from braintree.configuration import Configuration
from braintree.ids_search import IdsSearch
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource_collection import ResourceCollection
from braintree.samsung_pay_card import SamsungPayCard
class Customer(Resource):
    """
    A class representing a customer.

    An example of creating a customer with all available fields::

        result = braintree.Customer.create({
            "id": "my_customer_id",
            "company": "Some company",
            "email": "john.doe@example.com",
            "fax": "123-555-1212",
            "first_name": "John",
            "last_name": "Doe",
            "phone": "123-555-1221",
            "website": "http://www.example.com",
            "credit_card": {
                "cardholder_name": "John Doe",
                "cvv": "123",
                "expiration_date": "12/2012",
                "number": "4111111111111111",
                "token": "my_token",
                "billing_address": {
                    "first_name": "John",
                    "last_name": "Doe",
                    "company": "Braintree",
                    "street_address": "111 First Street",
                    "extended_address": "Unit 1",
                    "locality": "Chicago",
                    "postal_code": "60606",
                    "region": "IL",
                    "country_name": "United States of America"
                },
                "options": {
                    "verify_card": True,
                    "verification_amount": "2.00"
                }
            },
            "custom_fields": {
                "my_key": "some value"
            }
        })

        print(result.customer.id)
        print(result.customer.first_name)

    For more information on Customers, see
    https://developers.braintreepayments.com/reference/request/customer/create/python
    """
    def __repr__(self):
        # Attributes shown in the debug representation (rendered by Resource.__repr__).
        detail_list = [
            "id",
            "graphql_id",
            "company",
            "created_at",
            "email",
            "fax",
            "first_name",
            "last_name",
            "merchant_id",
            "phone",
            "updated_at",
            "website",
        ]
        return super(Customer, self).__repr__(detail_list)
    @staticmethod
    def all():
        """ Return a collection of all customers. """
        return Configuration.gateway().customer.all()
    @staticmethod
    def create(params=None):
        """
        Create a Customer.  No field is required::

            result = braintree.Customer.create({
                "company": "Some company",
                "first_name": "John"
            })
        """
        if params is None:
            params = {}
        return Configuration.gateway().customer.create(params)
    @staticmethod
    def delete(customer_id):
        """
        Delete a customer, given a customer_id::

            result = braintree.Customer.delete("my_customer_id")
        """
        return Configuration.gateway().customer.delete(customer_id)
    @staticmethod
    def find(customer_id, association_filter_id=None):
        """
        Find a customer, given a customer_id.  This does not return a result
        object.  This will raise a
        :class:`NotFoundError <braintree.exceptions.not_found_error.NotFoundError>`
        if the provided customer_id is not found::

            customer = braintree.Customer.find("my_customer_id")
        """
        return Configuration.gateway().customer.find(customer_id, association_filter_id)
    @staticmethod
    def search(*query):
        """Search for customers matching the given query criteria."""
        return Configuration.gateway().customer.search(*query)
    @staticmethod
    def update(customer_id, params=None):
        """
        Update an existing Customer by customer_id.  The params are similar
        to create::

            result = braintree.Customer.update("my_customer_id", {
                "last_name": "Smith"
            })
        """
        if params is None:
            params = {}
        return Configuration.gateway().customer.update(customer_id, params)
    @staticmethod
    def create_signature():
        # Whitelist of request fields accepted by Customer.create.
        return [
            "company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce",
            {"risk_data": ["customer_browser", "customer_ip"]},
            {"credit_card": CreditCard.create_signature()},
            {"custom_fields": ["__any_key__"]},
            {"three_d_secure_pass_thru": [
                "cavv",
                "ds_transaction_id",
                "eci_flag",
                "three_d_secure_version",
                "xid",
            ]},
            {"options": [{"paypal": [
                "payee_email",
                "order_id",
                "custom_field",
                "description",
                "amount",
                { "shipping": Address.create_signature() }
            ]}]},
        ]
    @staticmethod
    def update_signature():
        # Whitelist of request fields accepted by Customer.update.
        return [
            "company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce", "default_payment_method_token",
            {"credit_card": CreditCard.signature("update_via_customer")},
            {"three_d_secure_pass_thru": [
                "cavv",
                "ds_transaction_id",
                "eci_flag",
                "three_d_secure_version",
                "xid",
            ]},
            {"custom_fields": ["__any_key__"]},
            {"options": [{"paypal": [
                "payee_email",
                "order_id",
                "custom_field",
                "description",
                "amount",
                { "shipping": Address.create_signature() }
            ]}]},
        ]
    def __init__(self, gateway, attributes):
        Resource.__init__(self, gateway, attributes)
        # Wrap each raw attribute group in its resource class; every payment
        # instrument (everything except addresses) is also collected into
        # self.payment_methods for convenient iteration.
        self.payment_methods = []
        if "credit_cards" in attributes:
            self.credit_cards = [CreditCard(gateway, credit_card) for credit_card in self.credit_cards]
            self.payment_methods += self.credit_cards
        if "addresses" in attributes:
            self.addresses = [Address(gateway, address) for address in self.addresses]
        if "paypal_accounts" in attributes:
            self.paypal_accounts = [PayPalAccount(gateway, paypal_account) for paypal_account in self.paypal_accounts]
            self.payment_methods += self.paypal_accounts
        if "apple_pay_cards" in attributes:
            self.apple_pay_cards = [ApplePayCard(gateway, apple_pay_card) for apple_pay_card in self.apple_pay_cards]
            self.payment_methods += self.apple_pay_cards
        if "android_pay_cards" in attributes:
            self.android_pay_cards = [AndroidPayCard(gateway, android_pay_card) for android_pay_card in self.android_pay_cards]
            self.payment_methods += self.android_pay_cards
        if "amex_express_checkout_cards" in attributes:
            self.amex_express_checkout_cards = [AmexExpressCheckoutCard(gateway, amex_express_checkout_card) for amex_express_checkout_card in self.amex_express_checkout_cards]
            self.payment_methods += self.amex_express_checkout_cards
        if "europe_bank_accounts" in attributes:
            self.europe_bank_accounts = [EuropeBankAccount(gateway, europe_bank_account) for europe_bank_account in self.europe_bank_accounts]
            self.payment_methods += self.europe_bank_accounts
        if "venmo_accounts" in attributes:
            self.venmo_accounts = [VenmoAccount(gateway, venmo_account) for venmo_account in self.venmo_accounts]
            self.payment_methods += self.venmo_accounts
        if "us_bank_accounts" in attributes:
            self.us_bank_accounts = [UsBankAccount(gateway, us_bank_account) for us_bank_account in self.us_bank_accounts]
            self.payment_methods += self.us_bank_accounts
        if "visa_checkout_cards" in attributes:
            self.visa_checkout_cards = [VisaCheckoutCard(gateway, visa_checkout_card) for visa_checkout_card in self.visa_checkout_cards]
            self.payment_methods += self.visa_checkout_cards
        if "masterpass_cards" in attributes:
            self.masterpass_cards = [MasterpassCard(gateway, masterpass_card) for masterpass_card in self.masterpass_cards]
            self.payment_methods += self.masterpass_cards
        if "samsung_pay_cards" in attributes:
            self.samsung_pay_cards = [SamsungPayCard(gateway, samsung_pay_card) for samsung_pay_card in self.samsung_pay_cards]
            self.payment_methods += self.samsung_pay_cards
| 37.677165 | 204 | 0.611285 | import warnings
from braintree.util.http import Http
from braintree.successful_result import SuccessfulResult
from braintree.error_result import ErrorResult
from braintree.resource import Resource
from braintree.apple_pay_card import ApplePayCard
from braintree.android_pay_card import AndroidPayCard
from braintree.amex_express_checkout_card import AmexExpressCheckoutCard
from braintree.credit_card import CreditCard
from braintree.paypal_account import PayPalAccount
from braintree.europe_bank_account import EuropeBankAccount
from braintree.us_bank_account import UsBankAccount
from braintree.venmo_account import VenmoAccount
from braintree.visa_checkout_card import VisaCheckoutCard
from braintree.masterpass_card import MasterpassCard
from braintree.address import Address
from braintree.configuration import Configuration
from braintree.ids_search import IdsSearch
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource_collection import ResourceCollection
from braintree.samsung_pay_card import SamsungPayCard
class Customer(Resource):
    """A class representing a Braintree customer and its payment methods."""
    def __repr__(self):
        # Attributes shown in the debug representation (rendered by Resource.__repr__).
        detail_list = [
            "id",
            "graphql_id",
            "company",
            "created_at",
            "email",
            "fax",
            "first_name",
            "last_name",
            "merchant_id",
            "phone",
            "updated_at",
            "website",
        ]
        return super(Customer, self).__repr__(detail_list)
    @staticmethod
    def all():
        """Return a collection of all customers."""
        return Configuration.gateway().customer.all()
    @staticmethod
    def create(params=None):
        """Create a Customer from the given params dict (no field is required)."""
        if params is None:
            params = {}
        return Configuration.gateway().customer.create(params)
    @staticmethod
    def delete(customer_id):
        """Delete the customer identified by customer_id."""
        return Configuration.gateway().customer.delete(customer_id)
    @staticmethod
    def find(customer_id, association_filter_id=None):
        """Find a customer by customer_id; raises NotFoundError if absent."""
        return Configuration.gateway().customer.find(customer_id, association_filter_id)
    @staticmethod
    def search(*query):
        """Search for customers matching the given query criteria."""
        return Configuration.gateway().customer.search(*query)
    @staticmethod
    def update(customer_id, params=None):
        """Update an existing Customer by customer_id with the given params."""
        if params is None:
            params = {}
        return Configuration.gateway().customer.update(customer_id, params)
    @staticmethod
    def create_signature():
        """Return the whitelist of request fields accepted by create."""
        return [
            "company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce",
            {"risk_data": ["customer_browser", "customer_ip"]},
            {"credit_card": CreditCard.create_signature()},
            {"custom_fields": ["__any_key__"]},
            {"three_d_secure_pass_thru": [
                "cavv",
                "ds_transaction_id",
                "eci_flag",
                "three_d_secure_version",
                "xid",
            ]},
            {"options": [{"paypal": [
                "payee_email",
                "order_id",
                "custom_field",
                "description",
                "amount",
                { "shipping": Address.create_signature() }
            ]}]},
        ]
    @staticmethod
    def update_signature():
        """Return the whitelist of request fields accepted by update."""
        return [
            "company", "email", "fax", "first_name", "id", "last_name", "phone", "website", "device_data", "device_session_id", "fraud_merchant_id", "payment_method_nonce", "default_payment_method_token",
            {"credit_card": CreditCard.signature("update_via_customer")},
            {"three_d_secure_pass_thru": [
                "cavv",
                "ds_transaction_id",
                "eci_flag",
                "three_d_secure_version",
                "xid",
            ]},
            {"custom_fields": ["__any_key__"]},
            {"options": [{"paypal": [
                "payee_email",
                "order_id",
                "custom_field",
                "description",
                "amount",
                { "shipping": Address.create_signature() }
            ]}]},
        ]
    def __init__(self, gateway, attributes):
        """Wrap raw gateway attributes in resource classes; collect all
        payment instruments (everything except addresses) into
        self.payment_methods."""
        Resource.__init__(self, gateway, attributes)
        self.payment_methods = []
        if "credit_cards" in attributes:
            self.credit_cards = [CreditCard(gateway, credit_card) for credit_card in self.credit_cards]
            self.payment_methods += self.credit_cards
        if "addresses" in attributes:
            self.addresses = [Address(gateway, address) for address in self.addresses]
        if "paypal_accounts" in attributes:
            self.paypal_accounts = [PayPalAccount(gateway, paypal_account) for paypal_account in self.paypal_accounts]
            self.payment_methods += self.paypal_accounts
        if "apple_pay_cards" in attributes:
            self.apple_pay_cards = [ApplePayCard(gateway, apple_pay_card) for apple_pay_card in self.apple_pay_cards]
            self.payment_methods += self.apple_pay_cards
        if "android_pay_cards" in attributes:
            self.android_pay_cards = [AndroidPayCard(gateway, android_pay_card) for android_pay_card in self.android_pay_cards]
            self.payment_methods += self.android_pay_cards
        if "amex_express_checkout_cards" in attributes:
            self.amex_express_checkout_cards = [AmexExpressCheckoutCard(gateway, amex_express_checkout_card) for amex_express_checkout_card in self.amex_express_checkout_cards]
            self.payment_methods += self.amex_express_checkout_cards
        if "europe_bank_accounts" in attributes:
            self.europe_bank_accounts = [EuropeBankAccount(gateway, europe_bank_account) for europe_bank_account in self.europe_bank_accounts]
            self.payment_methods += self.europe_bank_accounts
        if "venmo_accounts" in attributes:
            self.venmo_accounts = [VenmoAccount(gateway, venmo_account) for venmo_account in self.venmo_accounts]
            self.payment_methods += self.venmo_accounts
        if "us_bank_accounts" in attributes:
            self.us_bank_accounts = [UsBankAccount(gateway, us_bank_account) for us_bank_account in self.us_bank_accounts]
            self.payment_methods += self.us_bank_accounts
        if "visa_checkout_cards" in attributes:
            self.visa_checkout_cards = [VisaCheckoutCard(gateway, visa_checkout_card) for visa_checkout_card in self.visa_checkout_cards]
            self.payment_methods += self.visa_checkout_cards
        if "masterpass_cards" in attributes:
            self.masterpass_cards = [MasterpassCard(gateway, masterpass_card) for masterpass_card in self.masterpass_cards]
            self.payment_methods += self.masterpass_cards
        if "samsung_pay_cards" in attributes:
            self.samsung_pay_cards = [SamsungPayCard(gateway, samsung_pay_card) for samsung_pay_card in self.samsung_pay_cards]
            self.payment_methods += self.samsung_pay_cards
| true | true |
1c3d5d91e7c478f3d324f0825e4b7d9274a80813 | 6,582 | py | Python | tests/attacks/test_pixel_attack.py | monshri/adversarial-robustness-toolbox | 6465240cb6a71bc376dae52459a7133e403df8d2 | [
"MIT"
] | 1,350 | 2020-07-14T08:06:55.000Z | 2022-03-31T19:22:25.000Z | tests/attacks/test_pixel_attack.py | monshri/adversarial-robustness-toolbox | 6465240cb6a71bc376dae52459a7133e403df8d2 | [
"MIT"
] | 936 | 2020-07-14T03:33:00.000Z | 2022-03-31T23:05:29.000Z | tests/attacks/test_pixel_attack.py | monshri/adversarial-robustness-toolbox | 6465240cb6a71bc376dae52459a7133e403df8d2 | [
"MIT"
] | 413 | 2020-07-16T16:00:16.000Z | 2022-03-29T10:31:12.000Z | # MIT License
#
# Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2020
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This module tests the Pixel Attack.
The Pixel Attack is a generalisation of One Pixel Attack.
| One Pixel Attack Paper link:
https://ieeexplore.ieee.org/abstract/document/8601309/citations#citations
(arXiv link: https://arxiv.org/pdf/1710.08864.pdf)
| Pixel Attack Paper link:
https://arxiv.org/abs/1906.06026
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import unittest
import numpy as np
from art.attacks.evasion.pixel_threshold import PixelAttack
from art.estimators.estimator import BaseEstimator, NeuralNetworkMixin
from art.estimators.classification.classifier import ClassifierMixin
from tests.utils import TestBase
from tests.utils import get_image_classifier_tf, get_image_classifier_pt # , get_image_classifier_kr
from tests.attacks.utils import backend_test_classifier_type_check_fail
logger = logging.getLogger(__name__)
class TestPixelAttack(TestBase):
    """
    A unittest class for testing the Pixel Attack.
    The Pixel Attack is a generalisation of One Pixel Attack.
    | One Pixel Attack Paper link:
        https://ieeexplore.ieee.org/abstract/document/8601309/citations#citations
        (arXiv link: https://arxiv.org/pdf/1710.08864.pdf)
    | Pixel Attack Paper link:
        https://arxiv.org/abs/1906.06026
    """
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Keep the test fast: attack only the first two MNIST samples.
        cls.n_test = 2
        cls.x_test_mnist = cls.x_test_mnist[0 : cls.n_test]
        cls.y_test_mnist = cls.y_test_mnist[0 : cls.n_test]
    # def test_6_keras_mnist(self):
    #     """
    #     Test with the KerasClassifier. (Untargeted Attack)
    #     :return:
    #     """
    #
    #     classifier = get_image_classifier_kr()
    #     self._test_attack(classifier, self.x_test_mnist, self.y_test_mnist, False)
    # def test_2_tensorflow_mnist(self):
    #     """
    #     Test with the TensorFlowClassifier. (Untargeted Attack)
    #     :return:
    #     """
    #     classifier, sess = get_image_classifier_tf()
    #     self._test_attack(classifier, self.x_test_mnist, self.y_test_mnist, False)
    def test_4_pytorch_mnist(self):
        """
        Test with the PyTorchClassifier. (Untargeted Attack)
        :return:
        """
        # PyTorch expects NCHW layout, so reshape from NHWC.
        x_test = np.reshape(self.x_test_mnist, (self.x_test_mnist.shape[0], 1, 28, 28)).astype(np.float32)
        classifier = get_image_classifier_pt()
        self._test_attack(classifier, x_test, self.y_test_mnist, False)
    # def test_7_keras_mnist_targeted(self):
    #     """
    #     Test with the KerasClassifier. (Targeted Attack)
    #     :return:
    #     """
    #     classifier = get_image_classifier_kr()
    #     self._test_attack(classifier, self.x_test_mnist, self.y_test_mnist, True)
    def test_3_tensorflow_mnist_targeted(self):
        """
        Test with the TensorFlowClassifier. (Targeted Attack)
        :return:
        """
        classifier, sess = get_image_classifier_tf()
        self._test_attack(classifier, self.x_test_mnist, self.y_test_mnist, True)
    # def test_5_pytorch_mnist_targeted(self):
    #     """
    #     Test with the PyTorchClassifier. (Targeted Attack)
    #     :return:
    #     """
    #     x_test = np.reshape(self.x_test_mnist, (self.x_test_mnist.shape[0], 1, 28, 28)).astype(np.float32)
    #     classifier = get_image_classifier_pt()
    #     self._test_attack(classifier, x_test, self.y_test_mnist, True)
    def _test_attack(self, classifier, x_test, y_test, targeted):
        """
        Run the Pixel Attack against *classifier* and check that the
        adversarial examples differ from the originals while the inputs
        themselves are left unmodified.
        :return:
        """
        x_test_original = x_test.copy()
        if targeted:
            # Generate random target classes (never equal to the true class).
            class_y_test = np.argmax(y_test, axis=1)
            nb_classes = np.unique(class_y_test).shape[0]
            targets = np.random.randint(nb_classes, size=self.n_test)
            for i in range(self.n_test):
                if class_y_test[i] == targets[i]:
                    targets[i] -= 1
        else:
            targets = y_test
        # Exercise both thresholded/unthresholded and both evolution strategies.
        for th in [None, 128]:
            for es in [0, 1]:
                df = PixelAttack(classifier, th=th, es=es, max_iter=20, targeted=targeted, verbose=False)
                x_test_adv = df.generate(x_test_original, targets)
                np.testing.assert_raises(AssertionError, np.testing.assert_array_equal, x_test, x_test_adv)
                self.assertFalse((0.0 == x_test_adv).all())
        # Check that x_test has not been modified by attack and classifier
        self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)
    def test_check_params(self):
        """Invalid constructor parameters must raise ValueError."""
        ptc = get_image_classifier_pt(from_logits=True)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, th=-1)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, es=1.0)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, targeted="true")
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, verbose="true")
        with self.assertRaises(ValueError):
            ptc._clip_values = None
            _ = PixelAttack(ptc)
    def test_1_classifier_type_check_fail(self):
        """The attack must reject estimators lacking the required mixins."""
        backend_test_classifier_type_check_fail(PixelAttack, [BaseEstimator, NeuralNetworkMixin, ClassifierMixin])
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 37.827586 | 120 | 0.679277 |
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import unittest
import numpy as np
from art.attacks.evasion.pixel_threshold import PixelAttack
from art.estimators.estimator import BaseEstimator, NeuralNetworkMixin
from art.estimators.classification.classifier import ClassifierMixin
from tests.utils import TestBase
from tests.utils import get_image_classifier_tf, get_image_classifier_pt
from tests.attacks.utils import backend_test_classifier_type_check_fail
logger = logging.getLogger(__name__)
class TestPixelAttack(TestBase):
    """Unit tests for the Pixel Attack (a generalisation of One Pixel Attack,
    https://arxiv.org/abs/1906.06026)."""
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Keep the test fast: attack only the first two MNIST samples.
        cls.n_test = 2
        cls.x_test_mnist = cls.x_test_mnist[0 : cls.n_test]
        cls.y_test_mnist = cls.y_test_mnist[0 : cls.n_test]
    # (Keras and TensorFlow untargeted variants are disabled upstream.)
    def test_4_pytorch_mnist(self):
        """Untargeted attack against the PyTorch classifier."""
        # PyTorch expects NCHW layout, so reshape from NHWC.
        x_test = np.reshape(self.x_test_mnist, (self.x_test_mnist.shape[0], 1, 28, 28)).astype(np.float32)
        classifier = get_image_classifier_pt()
        self._test_attack(classifier, x_test, self.y_test_mnist, False)
    def test_3_tensorflow_mnist_targeted(self):
        """Targeted attack against the TensorFlow classifier."""
        classifier, sess = get_image_classifier_tf()
        self._test_attack(classifier, self.x_test_mnist, self.y_test_mnist, True)
    def _test_attack(self, classifier, x_test, y_test, targeted):
        """Run the attack and verify adversarial samples differ from the
        originals while the inputs themselves are left untouched."""
        x_test_original = x_test.copy()
        if targeted:
            # Pick random target classes that never equal the true class.
            class_y_test = np.argmax(y_test, axis=1)
            nb_classes = np.unique(class_y_test).shape[0]
            targets = np.random.randint(nb_classes, size=self.n_test)
            for i in range(self.n_test):
                if class_y_test[i] == targets[i]:
                    targets[i] -= 1
        else:
            targets = y_test
        # Exercise both thresholded/unthresholded and both evolution strategies.
        for th in [None, 128]:
            for es in [0, 1]:
                df = PixelAttack(classifier, th=th, es=es, max_iter=20, targeted=targeted, verbose=False)
                x_test_adv = df.generate(x_test_original, targets)
                np.testing.assert_raises(AssertionError, np.testing.assert_array_equal, x_test, x_test_adv)
                self.assertFalse((0.0 == x_test_adv).all())
        # x_test must not have been modified by the attack or classifier.
        self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)
    def test_check_params(self):
        """Invalid constructor parameters must raise ValueError."""
        ptc = get_image_classifier_pt(from_logits=True)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, th=-1)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, es=1.0)
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, targeted="true")
        with self.assertRaises(ValueError):
            _ = PixelAttack(ptc, verbose="true")
        with self.assertRaises(ValueError):
            ptc._clip_values = None
            _ = PixelAttack(ptc)
    def test_1_classifier_type_check_fail(self):
        """The attack must reject estimators lacking the required mixins."""
        backend_test_classifier_type_check_fail(PixelAttack, [BaseEstimator, NeuralNetworkMixin, ClassifierMixin])
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| true | true |
1c3d5eb7c80c1912eeacea21525eb51e77e0be39 | 643 | py | Python | aether-kernel/aether/kernel/api/migrations/0018_schema_family.py | lordmallam/aether | 7ceb71d2ef8b09d704d94dfcb243dbbdf8356135 | [
"Apache-2.0"
] | 14 | 2018-08-09T20:57:16.000Z | 2020-10-11T12:22:18.000Z | aether-kernel/aether/kernel/api/migrations/0018_schema_family.py | lordmallam/aether | 7ceb71d2ef8b09d704d94dfcb243dbbdf8356135 | [
"Apache-2.0"
] | 148 | 2018-07-24T10:52:29.000Z | 2022-02-10T09:06:44.000Z | aether-kernel/aether/kernel/api/migrations/0018_schema_family.py | lordmallam/aether | 7ceb71d2ef8b09d704d94dfcb243dbbdf8356135 | [
"Apache-2.0"
] | 6 | 2018-07-25T13:33:10.000Z | 2019-09-23T03:02:09.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-12 08:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable Schema.family text field and set a default for Schema.type."""
    dependencies = [
        ('kernel', '0017_mappingset_schema'),
    ]
    operations = [
        migrations.AddField(
            model_name='schema',
            name='family',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='schema',
            name='type',
            field=models.CharField(max_length=50, default='org.ehealthafrica.aether'),
        ),
    ]
| 24.730769 | 86 | 0.598756 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable Schema.family text field and set a default for Schema.type."""
    dependencies = [
        ('kernel', '0017_mappingset_schema'),
    ]
    operations = [
        migrations.AddField(
            model_name='schema',
            name='family',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='schema',
            name='type',
            field=models.CharField(max_length=50, default='org.ehealthafrica.aether'),
        ),
    ]
| true | true |
1c3d603eafc033d2dc37bac7a9d0c37fbf367d54 | 38,041 | py | Python | test/functional/test_framework/messages.py | bitcoinpostquantum/bitcoinpq | 28a1f3ce998e5b37b52e0505e1f7ab18a4b785a7 | [
"MIT"
] | 1 | 2020-09-29T20:01:39.000Z | 2020-09-29T20:01:39.000Z | test/functional/test_framework/messages.py | bitcoinpostquantum/bitcoinpq | 28a1f3ce998e5b37b52e0505e1f7ab18a4b785a7 | [
"MIT"
] | null | null | null | test/functional/test_framework/messages.py | bitcoinpostquantum/bitcoinpq | 28a1f3ce998e5b37b52e0505e1f7ab18a4b785a7 | [
"MIT"
] | 3 | 2022-01-09T03:01:47.000Z | 2022-02-18T08:20:08.000Z | #!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""BPQ test framework primitive and message strcutures
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
data structures that should map to corresponding structures in
bpq/primitives
msg_block, msg_tx, msg_headers, etc.:
data structures that represent network messages
ser_*, deser_*: functions that handle serialization/deserialization."""
from codecs import encode
import copy
import hashlib
from io import BytesIO
import random
import socket
import struct
import time
from test_framework.siphash import siphash256
from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
# Protocol-level constants used throughout the test framework.
MIN_VERSION_SUPPORTED = 60001
MY_VERSION = 70014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_INV_SZ = 50000  # maximum number of entries in an inv message
MAX_BLOCK_BASE_SIZE = 1000000  # base block size limit (name suggests witness data excluded)
COIN = 100000000 # 1 BPQ in satoshis
# Service flag bits advertised in version/addr messages.
NODE_NETWORK = (1 << 0)
# NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5)
NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7)
NODE_NETWORK_LIMITED = (1 << 10)
# Serialization/deserialization tools
def sha256(s):
    """Return the single SHA-256 digest of byte string *s*."""
    # Use the direct hashlib.sha256 constructor instead of hashlib.new():
    # same output, but avoids the by-name algorithm lookup on every call.
    return hashlib.sha256(s).digest()
def ripemd160(s):
    """Return the RIPEMD-160 digest of *s*.

    Uses hashlib.new() because RIPEMD-160 is only available via the
    OpenSSL-backed algorithm registry, not as a named constructor.
    """
    return hashlib.new('ripemd160', s).digest()
def hash256(s):
    """Double SHA-256 of *s*, the standard Bitcoin hash for txids/blocks."""
    inner = hashlib.new('sha256', s).digest()
    return hashlib.new('sha256', inner).digest()
def ser_compact_size(l):
    """Serialize integer *l* in Bitcoin's CompactSize (varint) encoding."""
    # <253 is stored inline; larger values get a 253/254/255 marker byte
    # followed by a 2/4/8-byte little-endian payload.
    if l < 253:
        return struct.pack("B", l)
    if l < 0x10000:
        return struct.pack("<BH", 253, l)
    if l < 0x100000000:
        return struct.pack("<BI", 254, l)
    return struct.pack("<BQ", 255, l)
def deser_compact_size(f):
    """Read a CompactSize-encoded integer from file-like object *f*."""
    marker = struct.unpack("<B", f.read(1))[0]
    if marker < 253:
        return marker
    # 253/254/255 markers prefix a 2/4/8-byte little-endian payload.
    width, fmt = {253: (2, "<H"), 254: (4, "<I"), 255: (8, "<Q")}[marker]
    return struct.unpack(fmt, f.read(width))[0]
def deser_string(f):
    """Read a CompactSize-length-prefixed byte string from *f*."""
    length = deser_compact_size(f)
    return f.read(length)
def ser_string(s):
    """Serialize byte string *s* with a CompactSize length prefix."""
    prefix = ser_compact_size(len(s))
    return prefix + s
def deser_uint256(f):
    """Read a 256-bit little-endian integer (eight 32-bit words) from *f*."""
    value = 0
    for word_index in range(8):
        word = struct.unpack("<I", f.read(4))[0]
        # Words do not overlap, so OR-ing is equivalent to summing.
        value |= word << (32 * word_index)
    return value
def ser_uint256(u):
    """Serialize integer *u* as 32 little-endian bytes (eight 32-bit words)."""
    words = []
    for _ in range(8):
        words.append(struct.pack("<I", u & 0xFFFFFFFF))
        u >>= 32
    return b"".join(words)
def uint256_from_str(s):
    """Convert the first 32 bytes of *s* (little-endian words) to an int."""
    words = struct.unpack("<IIIIIIII", s[:32])
    return sum(word << (32 * i) for i, word in enumerate(words))
def uint256_from_compact(c):
    """Expand a compact-encoded ("nBits") target into a full integer."""
    exponent = (c >> 24) & 0xFF   # number of significant bytes
    mantissa = c & 0xFFFFFF       # top three bytes of the target
    return mantissa << (8 * (exponent - 3))
def deser_vector(f, c):
    """Deserialize a CompactSize-prefixed vector of objects of class *c*.

    Each element is constructed with c() and filled via its deserialize().
    """
    count = deser_compact_size(f)
    items = []
    for _ in range(count):
        obj = c()
        obj.deserialize(f)
        items.append(obj)
    return items
# ser_function_name: Allow for an alternate serialization function on the
# entries in the vector (we use this for serializing the vector of transactions
# for a witness block).
def ser_vector(l, ser_function_name=None):
    """Serialize list *l* with a CompactSize count prefix.

    If ser_function_name is given, that method is invoked on each entry
    instead of serialize().
    """
    parts = [ser_compact_size(len(l))]
    for item in l:
        serializer = getattr(item, ser_function_name) if ser_function_name else item.serialize
        parts.append(serializer())
    return b"".join(parts)
def deser_uint256_vector(f):
    """Deserialize a CompactSize-prefixed vector of uint256 values."""
    count = deser_compact_size(f)
    return [deser_uint256(f) for _ in range(count)]
def ser_uint256_vector(l):
    """Serialize a list of uint256 values with a CompactSize count prefix."""
    return ser_compact_size(len(l)) + b"".join(ser_uint256(v) for v in l)
def deser_string_vector(f):
    """Deserialize a CompactSize-prefixed vector of byte strings."""
    count = deser_compact_size(f)
    return [deser_string(f) for _ in range(count)]
def ser_string_vector(l):
    """Serialize a list of byte strings with a CompactSize count prefix."""
    return ser_compact_size(len(l)) + b"".join(ser_string(sv) for sv in l)
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Deserialize *hex_string* into *obj* (via obj.deserialize) and return it."""
    obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Return the hex encoding of obj.serialize()."""
    return bytes_to_hex_str(obj.serialize())
# Objects that map to bpqd objects, which can be serialized/deserialized
class CAddress():
    """Network address record (services, IPv4 address, port) as carried in
    version/addr payloads, without the timestamp field."""
    def __init__(self):
        self.nServices = 1
        # 12-byte reserved area: the ::ffff:0:0 IPv4-mapped IPv6 prefix.
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0
    def deserialize(self, f):
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        # Port is serialized big-endian (network byte order).
        self.port = struct.unpack(">H", f.read(2))[0]
    def serialize(self):
        r = b""
        r += struct.pack("<Q", self.nServices)
        r += self.pchReserved
        r += socket.inet_aton(self.ip)
        r += struct.pack(">H", self.port)
        return r
    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
MSG_WITNESS_FLAG = 1<<30
class CInv():
    """Inventory vector entry: a (type, hash) pair used in inv/getdata."""
    # Maps inventory type codes to human-readable names for __repr__.
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block",
        1|MSG_WITNESS_FLAG: "WitnessTx",
        2|MSG_WITNESS_FLAG : "WitnessBlock",
        4: "CompactBlock"
    }
    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h
    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r
    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator():
    """Block locator: a protocol version plus a vector of known block hashes
    (vHave), sent in getblocks/getheaders requests."""
    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256_vector(self.vHave)
        return r
    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint():
    """A (transaction hash, output index) pair identifying a specific txout."""
    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n
    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        r = b""
        r += ser_uint256(self.hash)
        r += struct.pack("<I", self.n)
        return r
    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn():
    """Transaction input: previous outpoint, scriptSig and nSequence."""
    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        if outpoint is None:
            self.prevout = COutPoint()
        else:
            self.prevout = outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence
    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        r = b""
        r += self.prevout.serialize()
        r += ser_string(self.scriptSig)
        r += struct.pack("<I", self.nSequence)
        return r
    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
               self.nSequence)
class CTxOut():
    """Transaction output: amount (nValue, in satoshis) and scriptPubKey."""
    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey
    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)
    def serialize(self):
        r = b""
        r += struct.pack("<q", self.nValue)
        r += ser_string(self.scriptPubKey)
        return r
    def __repr__(self):
        # Render the amount as whole coins with 8 decimal places.
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               bytes_to_hex_str(self.scriptPubKey))
class CScriptWitness():
    """Witness data for one input: an ordered stack of byte strings."""
    def __init__(self):
        # stack is a vector of byte strings.
        self.stack = []
    def __repr__(self):
        hex_items = [bytes_to_hex_str(item) for item in self.stack]
        return "CScriptWitness(%s)" % ",".join(hex_items)
    def is_null(self):
        """Return True when the witness stack is empty."""
        return not self.stack
class CTxInWitness():
    """Witness data for a single transaction input."""
    def __init__(self):
        self.scriptWitness = CScriptWitness()
    def deserialize(self, f):
        self.scriptWitness.stack = deser_string_vector(f)
    def serialize(self):
        return ser_string_vector(self.scriptWitness.stack)
    def __repr__(self):
        return repr(self.scriptWitness)
    def is_null(self):
        """Return True when this input carries no witness data."""
        return self.scriptWitness.is_null()
class CTxWitness():
    """Per-input witness list for a transaction."""

    def __init__(self):
        self.vtxinwit = []

    def deserialize(self, f):
        # The wire format carries no count of its own; the caller must have
        # pre-sized vtxinwit to match the transaction's vin length.
        for witness in self.vtxinwit:
            witness.deserialize(f)

    def serialize(self):
        # Deliberately no length prefix -- the vector is required to be the
        # same length as the transaction's vin vector.
        return b"".join(w.serialize() for w in self.vtxinwit)

    def __repr__(self):
        return "CTxWitness(%s)" % (';'.join(repr(w) for w in self.vtxinwit))

    def is_null(self):
        return all(w.is_null() for w in self.vtxinwit)
class CTransaction():
    """A bitcoin transaction, with optional segwit witness data.

    `sha256` (int) and `hash` (hex string) cache the txid, which is defined
    over the serialization WITHOUT witness data; both are invalidated to
    None on deserialize()/rehash().
    """
    def __init__(self, tx=None):
        # Copy-construct from `tx` when given, otherwise start empty.
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.wit = CTxWitness()
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = tx.sha256
            self.hash = tx.hash
            self.wit = copy.deepcopy(tx.wit)
    def deserialize(self, f):
        """Parse a transaction (segwit/BIP 144 aware) from file-like f."""
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        flags = 0
        if len(self.vin) == 0:
            # An empty vin is the segwit marker; the next byte is the flags
            # byte, after which the real vin/vout follow.
            flags = struct.unpack("<B", f.read(1))[0]
            # Not sure why flags can't be zero, but this
            # matches the implementation in bpqd
            if (flags != 0):
                self.vin = deser_vector(f, CTxIn)
                self.vout = deser_vector(f, CTxOut)
        else:
            self.vout = deser_vector(f, CTxOut)
        if flags != 0:
            # Witness data follows: exactly one CTxInWitness per input.
            self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
            self.wit.deserialize(f)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cached txid.
        self.sha256 = None
        self.hash = None
    def serialize_without_witness(self):
        """Legacy (pre-segwit) serialization; this is what defines the txid."""
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r
    # Only serialize with witness when explicitly called for
    def serialize_with_witness(self):
        """BIP 144 serialization (marker + flags + witness) when witness present."""
        flags = 0
        if not self.wit.is_null():
            flags |= 1
        r = b""
        r += struct.pack("<i", self.nVersion)
        if flags:
            # Marker: an empty vin vector, then the flags byte.
            dummy = []
            r += ser_vector(dummy)
            r += struct.pack("<B", flags)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        if flags & 1:
            if (len(self.wit.vtxinwit) != len(self.vin)):
                # vtxinwit must have the same length as vin
                self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
                for i in range(len(self.wit.vtxinwit), len(self.vin)):
                    self.wit.vtxinwit.append(CTxInWitness())
            r += self.wit.serialize()
        r += struct.pack("<I", self.nLockTime)
        return r
    # Regular serialization is with witness -- must explicitly
    # call serialize_without_witness to exclude witness data.
    def serialize(self):
        return self.serialize_with_witness()
    # Recalculate the txid (transaction hash without witness)
    def rehash(self):
        self.sha256 = None
        self.calc_sha256()
    # We will only cache the serialization without witness in
    # self.sha256 and self.hash -- those are expected to be the txid.
    def calc_sha256(self, with_witness=False):
        if with_witness:
            # Don't cache the result, just return it
            return uint256_from_str(hash256(self.serialize_with_witness()))
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
        self.hash = encode(hash256(self.serialize_without_witness())[::-1], 'hex_codec').decode('ascii')
    def is_valid(self):
        """Cheap sanity check: every output value must be in [0, 21M BTC]."""
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
                return False
        return True
    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
class CBlockHeader():
    """An 80-byte block header.

    `sha256` (int) and `hash` (hex string, byte-reversed) cache the header
    hash; they are reset to None by set_null()/deserialize() and populated
    lazily by calc_sha256().
    """
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy-construct from another header, then make sure the hash
            # cache is populated.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()
    def set_null(self):
        """Reset every field to its empty/zero state."""
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cached hash.
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        """Populate the sha256/hash cache from the serialized header (lazy)."""
        if self.sha256 is None:
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(hash256(r))
            self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
    def rehash(self):
        """Force recomputation of the header hash; returns the new sha256."""
        self.sha256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
    """A full block: header fields (inherited) plus the transaction list vtx."""
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []
    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)
    def serialize(self, with_witness=False):
        r = b""
        r += super(CBlock, self).serialize()
        if with_witness:
            r += ser_vector(self.vtx, "serialize_with_witness")
        else:
            r += ser_vector(self.vtx, "serialize_without_witness")
        return r
    # Calculate the merkle root given a vector of transaction hashes
    @classmethod
    def get_merkle_root(cls, hashes):
        while len(hashes) > 1:
            newhashes = []
            for i in range(0, len(hashes), 2):
                # On an odd-length level the last hash is paired with itself
                # (min clamps i+1 back to the last index).
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])
    def calc_merkle_root(self):
        """Merkle root over txids (hashes computed without witness data)."""
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        return self.get_merkle_root(hashes)
    def calc_witness_merkle_root(self):
        # For witness root purposes, the hash of the
        # coinbase, with witness, is defined to be 0...0
        hashes = [ser_uint256(0)]
        for tx in self.vtx[1:]:
            # Calculate the hashes with witness data
            hashes.append(ser_uint256(tx.calc_sha256(True)))
        return self.get_merkle_root(hashes)
    def is_valid(self):
        """Check proof of work, each tx's value range, and the merkle root."""
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True
    def solve(self):
        """Grind nNonce until the header hash meets the nBits target."""
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()
    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class PrefilledTransaction():
    """A (differentially encoded index, transaction) pair for compact blocks."""

    def __init__(self, index=0, tx=None):
        self.index = index
        self.tx = tx

    def deserialize(self, f):
        self.index = deser_compact_size(f)
        tx = CTransaction()
        tx.deserialize(f)
        self.tx = tx

    def serialize(self, with_witness=True):
        if with_witness:
            body = self.tx.serialize_with_witness()
        else:
            body = self.tx.serialize_without_witness()
        return ser_compact_size(self.index) + body

    def serialize_without_witness(self):
        return self.serialize(with_witness=False)

    def serialize_with_witness(self):
        return self.serialize(with_witness=True)

    def __repr__(self):
        return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
# This is what we send on the wire, in a cmpctblock message.
class P2PHeaderAndShortIDs():
    """Wire format of a BIP 152 cmpctblock message.

    Carries the block header, the siphash-key nonce, the 6-byte shortids and
    the differentially indexed prefilled transactions.
    """
    def __init__(self):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids_length = 0
        self.shortids = []
        self.prefilled_txn_length = 0
        self.prefilled_txn = []

    def deserialize(self, f):
        self.header.deserialize(f)
        self.nonce = struct.unpack("<Q", f.read(8))[0]
        self.shortids_length = deser_compact_size(f)
        # Rebuild the list instead of appending: the old code accumulated
        # stale shortids when deserialize() was called on a reused object.
        self.shortids = []
        for i in range(self.shortids_length):
            # shortids are defined to be 6 bytes in the spec, so append
            # two zero bytes and read it in as an 8-byte number
            self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
        self.prefilled_txn = deser_vector(f, PrefilledTransaction)
        self.prefilled_txn_length = len(self.prefilled_txn)

    # When using version 2 compact blocks, we must serialize with_witness.
    def serialize(self, with_witness=False):
        r = b""
        r += self.header.serialize()
        r += struct.pack("<Q", self.nonce)
        r += ser_compact_size(self.shortids_length)
        for x in self.shortids:
            # We only want the first 6 bytes
            r += struct.pack("<Q", x)[0:6]
        if with_witness:
            r += ser_vector(self.prefilled_txn, "serialize_with_witness")
        else:
            r += ser_vector(self.prefilled_txn, "serialize_without_witness")
        return r

    def __repr__(self):
        return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
# P2P version of the above that will use witness serialization (for compact
# block version 2)
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
    """Version 2 (witness) compact block wire format."""

    def serialize(self):
        # Version 2 always serializes prefilled txs with witness data.
        return P2PHeaderAndShortIDs.serialize(self, with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
    """Return the BIP 152 shortid: low 6 bytes of siphash-2-4 of tx_hash."""
    return siphash256(k0, k1, tx_hash) & 0x0000ffffffffffff
# This version gets rid of the array lengths, and reinterprets the differential
# encoding into indices that can be used for lookup.
class HeaderAndShortIDs():
    """In-memory form of a compact block (BIP 152).

    Unlike the P2P wire form, vector lengths are implicit and the prefilled
    transaction indices are absolute rather than differentially encoded.
    """
    def __init__(self, p2pheaders_and_shortids = None):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids = []
        self.prefilled_txn = []
        self.use_witness = False

        if p2pheaders_and_shortids != None:
            self.header = p2pheaders_and_shortids.header
            self.nonce = p2pheaders_and_shortids.nonce
            self.shortids = p2pheaders_and_shortids.shortids
            last_index = -1
            for x in p2pheaders_and_shortids.prefilled_txn:
                # Undo the differential encoding into absolute indices.
                self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
                last_index = self.prefilled_txn[-1].index

    def to_p2p(self):
        """Convert back to the differential wire representation."""
        if self.use_witness:
            ret = P2PHeaderAndShortWitnessIDs()
        else:
            ret = P2PHeaderAndShortIDs()
        ret.header = self.header
        ret.nonce = self.nonce
        ret.shortids_length = len(self.shortids)
        ret.shortids = self.shortids
        ret.prefilled_txn_length = len(self.prefilled_txn)
        ret.prefilled_txn = []
        last_index = -1
        for x in self.prefilled_txn:
            ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
            last_index = x.index
        return ret

    def get_siphash_keys(self):
        """Derive the two siphash keys from sha256(header || nonce)."""
        header_nonce = self.header.serialize()
        header_nonce += struct.pack("<Q", self.nonce)
        hash_header_nonce_as_str = sha256(header_nonce)
        key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
        key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
        return [ key0, key1 ]

    # Version 2 compact blocks use wtxid in shortids (rather than txid)
    def initialize_from_block(self, block, nonce=0, prefill_list=None, use_witness=False):
        """Build shortids/prefilled txs from a full block.

        `prefill_list=None` (meaning [0], i.e. prefill the coinbase) replaces
        the former shared mutable default `[0]`; callers see no change.
        """
        if prefill_list is None:
            prefill_list = [0]
        self.header = CBlockHeader(block)
        self.nonce = nonce
        self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
        self.shortids = []
        self.use_witness = use_witness
        [k0, k1] = self.get_siphash_keys()
        for i in range(len(block.vtx)):
            if i not in prefill_list:
                tx_hash = block.vtx[i].sha256
                if use_witness:
                    tx_hash = block.vtx[i].calc_sha256(with_witness=True)
                self.shortids.append(calculate_shortid(k0, k1, tx_hash))

    def __repr__(self):
        return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
class BlockTransactionsRequest():
    """BIP 152 getblocktxn payload: block hash + differentially encoded indexes."""

    def __init__(self, blockhash=0, indexes = None):
        self.blockhash = blockhash
        self.indexes = indexes if indexes != None else []

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        indexes_length = deser_compact_size(f)
        # Rebuild the list instead of appending: the old code accumulated
        # indexes across repeated deserialize() calls on a reused object.
        self.indexes = [deser_compact_size(f) for _ in range(indexes_length)]

    def serialize(self):
        r = b""
        r += ser_uint256(self.blockhash)
        r += ser_compact_size(len(self.indexes))
        for x in self.indexes:
            r += ser_compact_size(x)
        return r

    # helper to set the differentially encoded indexes from absolute ones
    def from_absolute(self, absolute_indexes):
        self.indexes = []
        last_index = -1
        for x in absolute_indexes:
            self.indexes.append(x-last_index-1)
            last_index = x

    def to_absolute(self):
        """Decode the differential indexes back into absolute positions."""
        absolute_indexes = []
        last_index = -1
        for x in self.indexes:
            absolute_indexes.append(x+last_index+1)
            last_index = absolute_indexes[-1]
        return absolute_indexes

    def __repr__(self):
        return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
class BlockTransactions():
    """BIP 152 blocktxn payload: block hash plus the requested transactions."""

    def __init__(self, blockhash=0, transactions = None):
        self.blockhash = blockhash
        self.transactions = [] if transactions is None else transactions

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        self.transactions = deser_vector(f, CTransaction)

    def serialize(self, with_witness=True):
        if with_witness:
            method = "serialize_with_witness"
        else:
            method = "serialize_without_witness"
        return ser_uint256(self.blockhash) + ser_vector(self.transactions, method)

    def __repr__(self):
        return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
# Objects that correspond to messages on the wire
class msg_version():
    """version message: announces our protocol version and services to a peer."""
    command = b"version"

    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = NODE_NETWORK | NODE_WITNESS
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1
        self.nRelay = MY_RELAY

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        if self.nVersion == 10300:
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)

        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None

        if self.nVersion >= 209:
            self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
        else:
            self.nStartingHeight = None

        if self.nVersion >= 70001:
            # Relay field is optional for version 70001 onwards; a short read
            # is the only expected failure here, so catch struct.error only
            # instead of the former bare `except:` which also hid real bugs.
            try:
                self.nRelay = struct.unpack("<b", f.read(1))[0]
            except struct.error:
                self.nRelay = 0
        else:
            self.nRelay = 0

    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        r += struct.pack("<b", self.nRelay)
        return r

    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight, self.nRelay)
class msg_verack():
    """verack message: acknowledges a peer's version message (empty payload)."""
    command = b"verack"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_verack()"
class msg_addr():
    """addr message: advertises known peer addresses."""
    command = b"addr"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)

    def serialize(self):
        return ser_vector(self.addrs)

    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_inv():
    """inv message: announces transaction/block inventory to a peer."""
    command = b"inv"

    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata():
    """getdata message: requests the objects named in an inventory list."""
    command = b"getdata"

    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks():
    """getblocks message: requests block inventory after a locator point."""
    command = b"getblocks"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        locator = CBlockLocator()
        locator.deserialize(f)
        self.locator = locator
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx():
    """tx message: relays one transaction (txid serialization, no witness)."""
    command = b"tx"

    def __init__(self, tx=None):
        # The old default `tx=CTransaction()` was a shared mutable default:
        # every msg_tx() built without an argument aliased the SAME
        # transaction object, which deserialize() then mutated. Create a
        # fresh instance per message instead; callers passing a tx see no
        # change.
        self.tx = CTransaction() if tx is None else tx

    def deserialize(self, f):
        self.tx.deserialize(f)

    def serialize(self):
        return self.tx.serialize_without_witness()

    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_witness_tx(msg_tx):
    """tx message variant that serializes with witness data (BIP 144)."""

    def serialize(self):
        return self.tx.serialize_with_witness()
class msg_block():
    """block message: relays a full block (serialized without witness here)."""
    command = b"block"

    def __init__(self, block=None):
        self.block = CBlock() if block is None else block

    def deserialize(self, f):
        self.block.deserialize(f)

    def serialize(self):
        return self.block.serialize(with_witness=False)

    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
# for cases where a user needs tighter control over what is sent over the wire
# note that the user must supply the name of the command, and the data
class msg_generic():
    """Escape hatch: send an arbitrary command with caller-supplied raw bytes."""

    def __init__(self, command, data=None):
        # Unlike other messages, the command is per-instance, not per-class.
        self.command = command
        self.data = data

    def serialize(self):
        return self.data

    def __repr__(self):
        return "msg_generic()"
class msg_witness_block(msg_block):
    """block message variant that includes witness data (BIP 144)."""

    def serialize(self):
        return self.block.serialize(with_witness=True)
class msg_getaddr():
    """getaddr message: asks a peer for known addresses (empty payload)."""
    command = b"getaddr"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_getaddr()"
class msg_ping():
    """ping message: carries a nonce the peer must echo back in a pong."""
    command = b"ping"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong():
    """pong message: echoes the nonce from a received ping."""
    command = b"pong"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool():
    """mempool message: asks a peer for its mempool contents (empty payload)."""
    command = b"mempool"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders():
    """sendheaders message: requests header announcements (empty payload)."""
    command = b"sendheaders"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders():
    """getheaders message: locator + stop-hash (0 means "as many as possible")."""
    command = b"getheaders"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        locator = CBlockLocator()
        locator.deserialize(f)
        self.locator = locator
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers():
    """headers message: a list of block headers."""
    command = b"headers"

    def __init__(self, headers=None):
        self.headers = [] if headers is None else headers

    def deserialize(self, f):
        # comment in bpqd indicates these should be deserialized as blocks
        for blk in deser_vector(f, CBlock):
            self.headers.append(CBlockHeader(blk))

    def serialize(self):
        return ser_vector([CBlock(h) for h in self.headers])

    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject():
    """reject message: reports why a previously received message was rejected."""
    command = b"reject"
    REJECT_MALFORMED = 1

    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        self.data = 0

    def _has_data(self):
        # The trailing 32-byte data field is only present for non-malformed
        # rejections of block/tx messages.
        return (self.code != self.REJECT_MALFORMED
                and self.message in (b"block", b"tx"))

    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        if self._has_data():
            self.data = deser_uint256(f)

    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if self._has_data():
            r += ser_uint256(self.data)
        return r

    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
class msg_feefilter():
    """feefilter message: minimum feerate we want relayed to us (BIP 133)."""
    command = b"feefilter"

    def __init__(self, feerate=0):
        self.feerate = feerate

    def deserialize(self, f):
        self.feerate = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        return struct.pack("<Q", self.feerate)

    def __repr__(self):
        return "msg_feefilter(feerate=%08x)" % self.feerate
class msg_sendcmpct():
    """sendcmpct message: negotiates compact-block relay (BIP 152)."""
    command = b"sendcmpct"

    def __init__(self):
        self.announce = False
        self.version = 1

    def deserialize(self, f):
        self.announce = struct.unpack("<?", f.read(1))[0]
        self.version = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        return struct.pack("<?", self.announce) + struct.pack("<Q", self.version)

    def __repr__(self):
        return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
class msg_cmpctblock():
    """cmpctblock message: header plus shortids/prefilled txs (BIP 152)."""
    command = b"cmpctblock"

    def __init__(self, header_and_shortids = None):
        self.header_and_shortids = header_and_shortids

    def deserialize(self, f):
        payload = P2PHeaderAndShortIDs()
        payload.deserialize(f)
        self.header_and_shortids = payload

    def serialize(self):
        return self.header_and_shortids.serialize()

    def __repr__(self):
        return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
class msg_getblocktxn():
    """getblocktxn message: requests missing transactions of a compact block."""
    command = b"getblocktxn"

    def __init__(self):
        self.block_txn_request = None

    def deserialize(self, f):
        request = BlockTransactionsRequest()
        request.deserialize(f)
        self.block_txn_request = request

    def serialize(self):
        return self.block_txn_request.serialize()

    def __repr__(self):
        return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
class msg_blocktxn():
    """blocktxn message: supplies transactions requested via getblocktxn."""
    command = b"blocktxn"

    def __init__(self):
        self.block_transactions = BlockTransactions()

    def deserialize(self, f):
        self.block_transactions.deserialize(f)

    def serialize(self):
        return self.block_transactions.serialize(with_witness=False)

    def __repr__(self):
        return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
class msg_witness_blocktxn(msg_blocktxn):
    """blocktxn variant that serializes transactions with witness data."""

    def serialize(self):
        return self.block_transactions.serialize(with_witness=True)
| 29.061115 | 262 | 0.598144 |
from codecs import encode
import copy
import hashlib
from io import BytesIO
import random
import socket
import struct
import time
from test_framework.siphash import siphash256
from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
MIN_VERSION_SUPPORTED = 60001
MY_VERSION = 70014
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
COIN = 100000000
NODE_NETWORK = (1 << 0)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5)
NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7)
NODE_NETWORK_LIMITED = (1 << 10)
def sha256(s):
    """Single SHA-256 digest of bytes s."""
    return hashlib.sha256(s).digest()
def ripemd160(s):
    # RIPEMD-160 digest via OpenSSL's provider. NOTE(review): hashlib.new
    # raises ValueError when the backend lacks ripemd160 (common with
    # OpenSSL 3's default configuration) -- confirm availability in the
    # target environment.
    return hashlib.new('ripemd160', s).digest()
def hash256(s):
    """Double SHA-256, bitcoin's standard hash for txids and block hashes."""
    return hashlib.sha256(hashlib.sha256(s).digest()).digest()
def ser_compact_size(l):
    """Encode integer l in bitcoin's variable-length CompactSize format."""
    if l < 253:
        return struct.pack("B", l)
    if l < 0x10000:
        return struct.pack("<BH", 253, l)
    if l < 0x100000000:
        return struct.pack("<BI", 254, l)
    return struct.pack("<BQ", 255, l)
def deser_compact_size(f):
    """Decode a CompactSize integer from file-like object f."""
    marker = struct.unpack("<B", f.read(1))[0]
    if marker < 253:
        return marker
    # 253/254/255 select a 2/4/8-byte little-endian payload.
    fmt = {253: "<H", 254: "<I", 255: "<Q"}[marker]
    return struct.unpack(fmt, f.read(struct.calcsize(fmt)))[0]
def deser_string(f):
    """Read a CompactSize-prefixed byte string from f."""
    length = deser_compact_size(f)
    return f.read(length)
def ser_string(s):
    """Serialize bytes s with a CompactSize length prefix."""
    prefix = ser_compact_size(len(s))
    return prefix + s
def deser_uint256(f):
    """Read a 256-bit little-endian integer (eight 32-bit words) from f."""
    value = 0
    for shift in range(0, 256, 32):
        value |= struct.unpack("<I", f.read(4))[0] << shift
    return value
def ser_uint256(u):
    """Serialize integer u as 32 little-endian bytes (eight 32-bit words)."""
    words = []
    for _ in range(8):
        words.append(struct.pack("<I", u & 0xFFFFFFFF))
        u >>= 32
    return b"".join(words)
def uint256_from_str(s):
    """Interpret the first 32 bytes of s as a little-endian 256-bit integer."""
    total = 0
    for shift, word in enumerate(struct.unpack("<IIIIIIII", s[:32])):
        total += word << (shift * 32)
    return total
def uint256_from_compact(c):
    """Expand a 32-bit 'compact' target encoding (nBits) to a full integer.

    Layout matches bitcoin's arith_uint256::SetCompact: the top byte is a
    base-256 exponent, the low 3 bytes the mantissa. The original code did
    `mantissa << 8*(nbytes-3)`, which raised ValueError (negative shift) for
    exponents < 3 even though those encodings are valid; small exponents
    shift the mantissa right instead.
    """
    nbytes = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    if nbytes <= 3:
        # Small exponents drop low-order mantissa bytes.
        return mantissa >> (8 * (3 - nbytes))
    return mantissa << (8 * (nbytes - 3))
def deser_vector(f, c):
    """Read a CompactSize count, then that many objects of type c, from f."""
    count = deser_compact_size(f)
    items = []
    for _ in range(count):
        obj = c()
        obj.deserialize(f)
        items.append(obj)
    return items
def ser_vector(l, ser_function_name=None):
    """Serialize a list of objects with a CompactSize prefix.

    ser_function_name selects an alternative serialize method by name
    (e.g. "serialize_with_witness"); default is each object's serialize().
    """
    out = ser_compact_size(len(l))
    for obj in l:
        if ser_function_name:
            out += getattr(obj, ser_function_name)()
        else:
            out += obj.serialize()
    return out
def deser_uint256_vector(f):
    """Read a CompactSize-prefixed list of 256-bit little-endian integers."""
    return [deser_uint256(f) for _ in range(deser_compact_size(f))]
def ser_uint256_vector(l):
    """Serialize a list of 256-bit integers with a CompactSize prefix."""
    return ser_compact_size(len(l)) + b"".join(ser_uint256(u) for u in l)
def deser_string_vector(f):
    """Read a CompactSize-prefixed list of variable-length byte strings."""
    return [deser_string(f) for _ in range(deser_compact_size(f))]
def ser_string_vector(l):
    """Serialize a list of byte strings with a CompactSize prefix."""
    return ser_compact_size(len(l)) + b"".join(ser_string(sv) for sv in l)
def FromHex(obj, hex_string):
    """Populate obj by deserializing the given hex string; returns obj."""
    raw = hex_str_to_bytes(hex_string)
    obj.deserialize(BytesIO(raw))
    return obj
def ToHex(obj):
    """Return obj's serialization as a hex string."""
    raw = obj.serialize()
    return bytes_to_hex_str(raw)
class CAddress():
    """Network address record: services + IPv4-mapped-IPv6 address + port."""

    def __init__(self):
        self.nServices = 1
        # 12-byte prefix that maps an IPv4 address into IPv6 space.
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0

    def deserialize(self, f):
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize(self):
        parts = [
            struct.pack("<Q", self.nServices),
            self.pchReserved,
            socket.inet_aton(self.ip),
            struct.pack(">H", self.port),  # port is big-endian on the wire
        ]
        return b"".join(parts)

    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
MSG_WITNESS_FLAG = 1<<30
class CInv():
    """Inventory entry: object type plus its 256-bit hash."""

    # Maps the wire type code to a human-readable name for __repr__.
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block",
        1|MSG_WITNESS_FLAG: "WitnessTx",
        2|MSG_WITNESS_FLAG : "WitnessBlock",
        4: "CompactBlock"
    }

    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        return struct.pack("<i", self.type) + ser_uint256(self.hash)

    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator():
    """Block locator: protocol version plus a list of block hashes we have."""

    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)

    def serialize(self):
        return struct.pack("<i", self.nVersion) + ser_uint256_vector(self.vHave)

    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint():
def __init__(self, hash=0, n=0):
self.hash = hash
self.n = n
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn():
def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
if outpoint is None:
self.prevout = COutPoint()
else:
self.prevout = outpoint
self.scriptSig = scriptSig
self.nSequence = nSequence
def deserialize(self, f):
self.prevout = COutPoint()
self.prevout.deserialize(f)
self.scriptSig = deser_string(f)
self.nSequence = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += self.prevout.serialize()
r += ser_string(self.scriptSig)
r += struct.pack("<I", self.nSequence)
return r
def __repr__(self):
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
% (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
self.nSequence)
class CTxOut():
def __init__(self, nValue=0, scriptPubKey=b""):
self.nValue = nValue
self.scriptPubKey = scriptPubKey
def deserialize(self, f):
self.nValue = struct.unpack("<q", f.read(8))[0]
self.scriptPubKey = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<q", self.nValue)
r += ser_string(self.scriptPubKey)
return r
def __repr__(self):
return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
% (self.nValue // COIN, self.nValue % COIN,
bytes_to_hex_str(self.scriptPubKey))
class CScriptWitness():
def __init__(self):
self.stack = []
def __repr__(self):
return "CScriptWitness(%s)" % \
(",".join([bytes_to_hex_str(x) for x in self.stack]))
def is_null(self):
if self.stack:
return False
return True
class CTxInWitness():
def __init__(self):
self.scriptWitness = CScriptWitness()
def deserialize(self, f):
self.scriptWitness.stack = deser_string_vector(f)
def serialize(self):
return ser_string_vector(self.scriptWitness.stack)
def __repr__(self):
return repr(self.scriptWitness)
def is_null(self):
return self.scriptWitness.is_null()
class CTxWitness():
def __init__(self):
self.vtxinwit = []
def deserialize(self, f):
for i in range(len(self.vtxinwit)):
self.vtxinwit[i].deserialize(f)
def serialize(self):
r = b""
for x in self.vtxinwit:
r += x.serialize()
return r
def __repr__(self):
return "CTxWitness(%s)" % \
(';'.join([repr(x) for x in self.vtxinwit]))
def is_null(self):
for x in self.vtxinwit:
if not x.is_null():
return False
return True
class CTransaction():
def __init__(self, tx=None):
if tx is None:
self.nVersion = 1
self.vin = []
self.vout = []
self.wit = CTxWitness()
self.nLockTime = 0
self.sha256 = None
self.hash = None
else:
self.nVersion = tx.nVersion
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.nLockTime = tx.nLockTime
self.sha256 = tx.sha256
self.hash = tx.hash
self.wit = copy.deepcopy(tx.wit)
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
flags = 0
if len(self.vin) == 0:
flags = struct.unpack("<B", f.read(1))[0]
# Not sure why flags can't be zero, but this
if (flags != 0):
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
else:
self.vout = deser_vector(f, CTxOut)
if flags != 0:
self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
self.wit.deserialize(f)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
def serialize_without_witness(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
return r
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
if (len(self.wit.vtxinwit) != len(self.vin)):
self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
for i in range(len(self.wit.vtxinwit), len(self.vin)):
self.wit.vtxinwit.append(CTxInWitness())
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
def serialize(self):
return self.serialize_with_witness()
def rehash(self):
self.sha256 = None
self.calc_sha256()
def calc_sha256(self, with_witness=False):
if with_witness:
return uint256_from_str(hash256(self.serialize_with_witness()))
if self.sha256 is None:
self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
self.hash = encode(hash256(self.serialize_without_witness())[::-1], 'hex_codec').decode('ascii')
def is_valid(self):
self.calc_sha256()
for tout in self.vout:
if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
return False
return True
    def __repr__(self):
        # Debug representation covering every serialized field.
        return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
class CBlockHeader():
    """The 80-byte block header: version, previous-block hash, merkle root,
    time, difficulty bits and nonce, plus a cached sha256/hash pair."""
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy-construct from another header, preserving the cached hash.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()
    def set_null(self):
        # Reset to a default (version-1, all-zero) header with no cached hash.
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cache; recomputed lazily by calc_sha256().
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        # NOTE: this deliberately re-builds the 80-byte header serialization
        # instead of calling self.serialize(): CBlock overrides serialize()
        # to append the transaction vector, and the block hash must cover
        # only the header.
        if self.sha256 is None:
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(hash256(r))
            self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
    def rehash(self):
        # Drop the cached hash, recompute, and return the new value.
        self.sha256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
    """A full block: a CBlockHeader plus the transaction vector vtx."""
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []
    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)
    def serialize(self, with_witness=False):
        r = b""
        r += super(CBlock, self).serialize()
        if with_witness:
            r += ser_vector(self.vtx, "serialize_with_witness")
        else:
            r += ser_vector(self.vtx, "serialize_without_witness")
        return r
    # Calculate the merkle root given a vector of transaction hashes
    @classmethod
    def get_merkle_root(cls, hashes):
        # Pair up hashes level by level; an odd element is paired with itself
        # (i2 clamps to the last index), per Bitcoin's merkle construction.
        while len(hashes) > 1:
            newhashes = []
            for i in range(0, len(hashes), 2):
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])
    def calc_merkle_root(self):
        # Merkle root over the txids (non-witness hashes).
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        return self.get_merkle_root(hashes)
    def calc_witness_merkle_root(self):
        # For witness root purposes, the hash of the
        # coinbase, with witness, is defined to be 0...0
        hashes = [ser_uint256(0)]
        for tx in self.vtx[1:]:
            # Calculate the hashes with witness data
            hashes.append(ser_uint256(tx.calc_sha256(True)))
        return self.get_merkle_root(hashes)
    def is_valid(self):
        # Check proof of work, every transaction, and the merkle commitment.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True
    def solve(self):
        # Grind the nonce until the header hash meets the nBits target.
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()
    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class PrefilledTransaction():
    """A (differential index, transaction) pair used in BIP 152 compact blocks."""

    def __init__(self, index=0, tx=None):
        self.index = index
        self.tx = tx

    def deserialize(self, f):
        self.index = deser_compact_size(f)
        self.tx = CTransaction()
        self.tx.deserialize(f)

    def serialize(self, with_witness=True):
        """Serialize as compact-size index followed by the transaction
        (witness serialization by default)."""
        if with_witness:
            tx_bytes = self.tx.serialize_with_witness()
        else:
            tx_bytes = self.tx.serialize_without_witness()
        return ser_compact_size(self.index) + tx_bytes

    def serialize_without_witness(self):
        return self.serialize(with_witness=False)

    def serialize_with_witness(self):
        return self.serialize(with_witness=True)

    def __repr__(self):
        return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
# This is what we send on the wire, in a cmpctblock message.
class P2PHeaderAndShortIDs():
    """Wire-format body of a BIP 152 "cmpctblock" message: header, nonce,
    6-byte short ids and prefilled transactions (differentially indexed)."""
    def __init__(self):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids_length = 0
        self.shortids = []
        self.prefilled_txn_length = 0
        self.prefilled_txn = []
    def deserialize(self, f):
        self.header.deserialize(f)
        self.nonce = struct.unpack("<Q", f.read(8))[0]
        self.shortids_length = deser_compact_size(f)
        for i in range(self.shortids_length):
            # shortids are defined to be 6 bytes in the spec, so append
            # two zero bytes and read it in as an 8-byte number
            self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
        self.prefilled_txn = deser_vector(f, PrefilledTransaction)
        self.prefilled_txn_length = len(self.prefilled_txn)
    # When using version 2 compact blocks, we must serialize with_witness.
    def serialize(self, with_witness=False):
        r = b""
        r += self.header.serialize()
        r += struct.pack("<Q", self.nonce)
        r += ser_compact_size(self.shortids_length)
        for x in self.shortids:
            # We only want the first 6 bytes
            r += struct.pack("<Q", x)[0:6]
        if with_witness:
            r += ser_vector(self.prefilled_txn, "serialize_with_witness")
        else:
            r += ser_vector(self.prefilled_txn, "serialize_without_witness")
        return r
    def __repr__(self):
        return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
# P2P version of the above that will use witness serialization (for compact
# block version 2)
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
    # Compact-block v2 always serializes prefilled txs with witness data.
    def serialize(self):
        return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
    """Return the BIP 152 short transaction id: SipHash-2-4 of tx_hash
    keyed with (k0, k1), truncated to the low 48 bits."""
    return siphash256(k0, k1, tx_hash) & 0x0000ffffffffffff
# This version gets rid of the array lengths, and reinterprets the differential
# encoding into indices that can be used for lookup.
class HeaderAndShortIDs():
    """In-memory view of a compact block (BIP 152).

    Unlike P2PHeaderAndShortIDs, the array lengths are dropped and prefilled
    transaction indexes are stored as absolute positions rather than the
    differential encoding used on the wire.
    """
    def __init__(self, p2pheaders_and_shortids = None):
        self.header = CBlockHeader()
        self.nonce = 0
        self.shortids = []
        self.prefilled_txn = []
        self.use_witness = False

        if p2pheaders_and_shortids is not None:
            self.header = p2pheaders_and_shortids.header
            self.nonce = p2pheaders_and_shortids.nonce
            self.shortids = p2pheaders_and_shortids.shortids
            # Undo the differential index encoding.
            last_index = -1
            for x in p2pheaders_and_shortids.prefilled_txn:
                self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
                last_index = self.prefilled_txn[-1].index

    def to_p2p(self):
        """Convert back to the wire representation, re-applying the
        differential index encoding."""
        if self.use_witness:
            ret = P2PHeaderAndShortWitnessIDs()
        else:
            ret = P2PHeaderAndShortIDs()
        ret.header = self.header
        ret.nonce = self.nonce
        ret.shortids_length = len(self.shortids)
        ret.shortids = self.shortids
        ret.prefilled_txn_length = len(self.prefilled_txn)
        ret.prefilled_txn = []
        last_index = -1
        for x in self.prefilled_txn:
            ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
            last_index = x.index
        return ret

    def get_siphash_keys(self):
        # Keys are the first two 64-bit words of sha256(header || nonce).
        header_nonce = self.header.serialize()
        header_nonce += struct.pack("<Q", self.nonce)
        hash_header_nonce_as_str = sha256(header_nonce)
        key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
        key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
        return [ key0, key1 ]

    # Version 2 compact blocks use wtxid in shortids (rather than txid)
    def initialize_from_block(self, block, nonce=0, prefill_list=None, use_witness=False):
        """Populate from a full block, prefilled with the transactions at the
        positions in prefill_list (default: just the coinbase).

        Fixed: prefill_list previously used a mutable default ([0]) shared
        across calls; the default behavior is unchanged.
        """
        if prefill_list is None:
            prefill_list = [0]
        self.header = CBlockHeader(block)
        self.nonce = nonce
        self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
        self.shortids = []
        self.use_witness = use_witness
        [k0, k1] = self.get_siphash_keys()
        for i in range(len(block.vtx)):
            if i not in prefill_list:
                tx_hash = block.vtx[i].sha256
                if use_witness:
                    tx_hash = block.vtx[i].calc_sha256(with_witness=True)
                self.shortids.append(calculate_shortid(k0, k1, tx_hash))

    def __repr__(self):
        return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
class BlockTransactionsRequest():
    """BIP 152 "getblocktxn" payload: a block hash plus differentially
    encoded transaction indexes."""

    def __init__(self, blockhash=0, indexes=None):
        self.blockhash = blockhash
        self.indexes = indexes if indexes is not None else []

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        # Reset first: the previous implementation appended to any existing
        # list, so deserializing twice accumulated stale indexes.
        self.indexes = []
        indexes_length = deser_compact_size(f)
        for i in range(indexes_length):
            self.indexes.append(deser_compact_size(f))

    def serialize(self):
        r = b""
        r += ser_uint256(self.blockhash)
        r += ser_compact_size(len(self.indexes))
        for x in self.indexes:
            r += ser_compact_size(x)
        return r

    # helper to set the differentially encoded indexes from absolute ones
    def from_absolute(self, absolute_indexes):
        self.indexes = []
        last_index = -1
        for x in absolute_indexes:
            self.indexes.append(x-last_index-1)
            last_index = x

    def to_absolute(self):
        """Inverse of from_absolute(): expand the differential encoding back
        into absolute transaction positions."""
        absolute_indexes = []
        last_index = -1
        for x in self.indexes:
            absolute_indexes.append(x+last_index+1)
            last_index = absolute_indexes[-1]
        return absolute_indexes

    def __repr__(self):
        return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
class BlockTransactions():
    """BIP 152 "blocktxn" payload: a block hash plus the requested
    transactions."""

    def __init__(self, blockhash=0, transactions=None):
        self.blockhash = blockhash
        # Idiom fix: identity comparison with None (was `!= None`).
        self.transactions = transactions if transactions is not None else []

    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        self.transactions = deser_vector(f, CTransaction)

    def serialize(self, with_witness=True):
        r = b""
        r += ser_uint256(self.blockhash)
        if with_witness:
            r += ser_vector(self.transactions, "serialize_with_witness")
        else:
            r += ser_vector(self.transactions, "serialize_without_witness")
        return r

    def __repr__(self):
        return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
# Objects that correspond to messages on the wire
class msg_version():
    """P2P "version" handshake message.

    Deserialization understands the historical format variations: pre-106
    messages lack addrFrom/nonce/subver, pre-209 lack nStartingHeight, and
    the relay flag is optional from protocol 70001 on.
    """
    command = b"version"
    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = NODE_NETWORK | NODE_WITNESS
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1
        self.nRelay = MY_RELAY
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        if self.nVersion == 10300:
            # Historical quirk: protocol 10300 is treated as 300.
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)

        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None

        # (the redundant second nStartingHeight=None assignment from the
        # original pre-106 branch has been folded into this one check)
        if self.nVersion >= 209:
            self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
        else:
            self.nStartingHeight = None

        if self.nVersion >= 70001:
            # Relay field is optional for version 70001 onwards
            try:
                self.nRelay = struct.unpack("<b", f.read(1))[0]
            except struct.error:
                # struct.error is what unpack raises when the stream is
                # exhausted (f.read returns too few bytes); the previous bare
                # `except:` also swallowed KeyboardInterrupt/SystemExit.
                self.nRelay = 0
        else:
            self.nRelay = 0
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        r += struct.pack("<b", self.nRelay)
        return r
    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight, self.nRelay)
class msg_verack():
    """Empty "verack" message acknowledging a peer's version."""
    command = b"verack"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_verack()"
class msg_addr():
    """P2P "addr" message: a vector of known peer addresses."""
    command = b"addr"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)

    def serialize(self):
        return ser_vector(self.addrs)

    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_inv():
    """P2P "inv" message announcing inventory items to a peer."""
    command = b"inv"

    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata():
    """P2P "getdata" message requesting the inventory items listed in inv."""
    command = b"getdata"

    def __init__(self, inv=None):
        # Idiom fix: identity comparison with None (was `inv != None`).
        self.inv = inv if inv is not None else []

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks():
    """P2P "getblocks" request: a block locator plus a hash to stop at
    (0 means "as many as possible")."""
    command = b"getblocks"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx():
    """P2P "tx" message carrying one transaction (non-witness serialization).

    Fixed: the original signature was `def __init__(self, tx=CTransaction())`,
    a mutable default evaluated once at class-definition time. Every msg_tx()
    created without an argument shared that single CTransaction, so
    deserializing one message silently mutated all the others. A fresh
    transaction is now built per instance; callers passing tx are unaffected.
    """
    command = b"tx"

    def __init__(self, tx=None):
        self.tx = CTransaction() if tx is None else tx

    def deserialize(self, f):
        self.tx.deserialize(f)

    def serialize(self):
        return self.tx.serialize_without_witness()

    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_witness_tx(msg_tx):
    # Same as msg_tx, but serialized with witness data (BIP144 format).
    def serialize(self):
        return self.tx.serialize_with_witness()
class msg_block():
    """P2P "block" message, serialized without witness data."""
    command = b"block"

    def __init__(self, block=None):
        self.block = CBlock() if block is None else block

    def deserialize(self, f):
        self.block.deserialize(f)

    def serialize(self):
        return self.block.serialize(with_witness=False)

    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
# for cases where a user needs tighter control over what is sent over the wire
# note that the user must supply the name of the command, and the data
class msg_generic():
    """Escape hatch for tests that need full control of the wire bytes:
    the caller supplies the command name and the raw payload."""

    def __init__(self, command, data=None):
        self.command = command
        self.data = data

    def serialize(self):
        return self.data

    def __repr__(self):
        return "msg_generic()"
class msg_witness_block(msg_block):
    # Same as msg_block, but the block is serialized with witness data.
    def serialize(self):
        r = self.block.serialize(with_witness=True)
        return r
class msg_getaddr():
    """Empty "getaddr" message requesting the peer's address list."""
    command = b"getaddr"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_getaddr()"
class msg_ping():
    """P2P "ping" message carrying a 64-bit nonce the peer must echo back."""
    command = b"ping"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong():
    """P2P "pong" reply echoing the nonce of the ping it answers."""
    command = b"pong"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<Q", self.nonce)

    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool():
    """Empty "mempool" message asking the peer to announce its mempool."""
    command = b"mempool"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders():
    """Empty BIP 130 "sendheaders" message: ask the peer to announce new
    blocks via headers instead of inv."""
    command = b"sendheaders"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass  # no payload

    def serialize(self):
        return b""

    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders():
    """P2P "getheaders" request: a block locator plus a hash to stop at
    (0 means "as many as possible")."""
    command = b"getheaders"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)

    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers():
    """P2P "headers" message: a vector of block headers.

    On the wire each entry is serialized like a block with an empty tx vector,
    so (de)serialization goes through CBlock.
    """
    command = b"headers"

    def __init__(self, headers=None):
        self.headers = headers if headers is not None else []

    def deserialize(self, f):
        # comment in bpqd indicates these should be deserialized as blocks
        # Fixed: reset the list instead of appending, so deserializing the
        # same object twice no longer accumulates stale headers.
        self.headers = [CBlockHeader(b) for b in deser_vector(f, CBlock)]

    def serialize(self):
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)

    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject():
    """BIP 61 "reject" message: rejected command name, a code, a human
    readable reason, and (for block/tx rejections) the offending hash."""
    command = b"reject"
    REJECT_MALFORMED = 1
    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        self.data = 0
    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # The extra hash field is only present for block/tx rejections that
        # are not malformed-message rejections.
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            self.data = deser_uint256(f)
    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        # Mirror of deserialize(): append the hash only when applicable.
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            r += ser_uint256(self.data)
        return r
    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
class msg_feefilter():
    """BIP 133 "feefilter" message: minimum fee rate (sat/kB) below which the
    peer should not announce transactions."""
    command = b"feefilter"

    def __init__(self, feerate=0):
        self.feerate = feerate

    def deserialize(self, f):
        (self.feerate,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<Q", self.feerate)

    def __repr__(self):
        return "msg_feefilter(feerate=%08x)" % self.feerate
class msg_sendcmpct():
    """BIP 152 "sendcmpct": announce-via-compact-blocks flag plus the
    compact-block protocol version."""
    command = b"sendcmpct"

    def __init__(self):
        self.announce = False
        self.version = 1

    def deserialize(self, f):
        (self.announce,) = struct.unpack("<?", f.read(1))
        (self.version,) = struct.unpack("<Q", f.read(8))

    def serialize(self):
        return struct.pack("<?", self.announce) + struct.pack("<Q", self.version)

    def __repr__(self):
        return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
class msg_cmpctblock():
    """BIP 152 "cmpctblock" message wrapping a P2PHeaderAndShortIDs body."""
    command = b"cmpctblock"

    def __init__(self, header_and_shortids=None):
        self.header_and_shortids = header_and_shortids

    def deserialize(self, f):
        self.header_and_shortids = P2PHeaderAndShortIDs()
        self.header_and_shortids.deserialize(f)

    def serialize(self):
        return self.header_and_shortids.serialize()

    def __repr__(self):
        return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
class msg_getblocktxn():
    """BIP 152 "getblocktxn" message wrapping a BlockTransactionsRequest."""
    command = b"getblocktxn"

    def __init__(self):
        self.block_txn_request = None

    def deserialize(self, f):
        self.block_txn_request = BlockTransactionsRequest()
        self.block_txn_request.deserialize(f)

    def serialize(self):
        return self.block_txn_request.serialize()

    def __repr__(self):
        return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
class msg_blocktxn():
    """BIP 152 "blocktxn" message; transactions are sent without witness
    data (see msg_witness_blocktxn for the witness variant)."""
    command = b"blocktxn"

    def __init__(self):
        self.block_transactions = BlockTransactions()

    def deserialize(self, f):
        self.block_transactions.deserialize(f)

    def serialize(self):
        return self.block_transactions.serialize(with_witness=False)

    def __repr__(self):
        return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
class msg_witness_blocktxn(msg_blocktxn):
    # Same as msg_blocktxn, but transactions carry their witness data.
    def serialize(self):
        r = b""
        r += self.block_transactions.serialize(with_witness=True)
        return r
| true | true |
1c3d60eea03b50d337e15984b0d2dd59edf67ff1 | 46,275 | py | Python | src/auto-posture-evaluator/testers/elb_tester.py | harikarthickm/coralogix-aws-serverless | c49dddacf1fcf0806c519246b6cd8d3bbc65e347 | [
"Apache-2.0"
] | null | null | null | src/auto-posture-evaluator/testers/elb_tester.py | harikarthickm/coralogix-aws-serverless | c49dddacf1fcf0806c519246b6cd8d3bbc65e347 | [
"Apache-2.0"
] | null | null | null | src/auto-posture-evaluator/testers/elb_tester.py | harikarthickm/coralogix-aws-serverless | c49dddacf1fcf0806c519246b6cd8d3bbc65e347 | [
"Apache-2.0"
] | null | null | null | import os
from datetime import datetime
import time
from typing import Dict, List
import interfaces
import boto3
import jmespath
class Tester(interfaces.TesterInterface):
    def __init__(self) -> None:
        # Caller identity, stamped on every finding this tester emits.
        # NOTE(review): three separate sts get_caller_identity() calls return
        # the same payload; one call could be reused (left as-is here).
        self.user_id = boto3.client('sts').get_caller_identity().get('UserId')
        self.account_arn = boto3.client('sts').get_caller_identity().get('Arn')
        self.account_id = boto3.client('sts').get_caller_identity().get('Account')
        # Classic ELB and ALB/NLB use different APIs.
        self.aws_elbs_client = boto3.client('elb')
        self.aws_elbsv2_client = boto3.client('elbv2')
        # Load balancer inventories are fetched once and reused by each check.
        self.elbs = self._get_all_elb()
        self.elbsv2 = self._get_all_elbv2()
        # Cipher-name -> strength rating lookup table.
        self.cipher_suites = self._get_cipher_suite_details()
        # Predefined policy names considered current by this tester.
        self.latest_security_policies = self._get_aws_latest_security_policies()
        self.aws_acm_client = boto3.client('acm')
        self.aws_iam_client = boto3.client('iam')
        # Max allowed certificate age from the environment (string or None;
        # consumed by the certificate-renewal check).
        self.ssl_certificate_age = os.environ.get('AUTOPOSTURE_ALB_SSL_CERTIFICATE_AGE')
def declare_tested_service(self) -> str:
return "elb"
def declare_tested_provider(self) -> str:
return "aws"
    def run_tests(self) -> list:
        # Run every individual check and concatenate their finding lists.
        return \
            self.get_elbv2_internet_facing() + \
            self.get_elbv2_generating_access_log() + \
            self.get_alb_using_secure_listener() + \
            self.get_elb_generating_access_log() + \
            self.get_elb_listeners_using_tls() + \
            self.get_elb_listeners_securely_configured() + \
            self.get_elb_has_secure_ssl_protocol() + \
            self.get_elb_security_policy_secure_ciphers() + \
            self.get_elbv2_using_latest_security_policy() + \
            self.get_elbv2_has_deletion_protection() + \
            self.get_elbv2_allows_https_traffic_only() + \
            self.get_alb_using_tls12_or_higher() + \
            self.get_nlb_using_tls12_or_higher() + \
            self.get_elb_internet_facing() + \
            self.get_nlb_support_insecure_negotiation_policy() + \
            self.get_alb_certificate_should_be_renewed()
def _get_all_elbv2(self) -> List:
elbs = self.aws_elbsv2_client.describe_load_balancers()
return elbs['LoadBalancers']
def _get_all_elb(self) -> List:
elbs = self.aws_elbs_client.describe_load_balancers()
return elbs['LoadBalancerDescriptions']
def _get_aws_latest_security_policies(self) -> List:
policies = ['ELBSecurityPolicy-2016-08', 'ELBSecurityPolicy-FS-2018-06']
return policies
def _get_cipher_suite_details(self) -> Dict:
cipher_suites = {
'AES128-GCM-SHA256' : 'weak', 'ECDHE-ECDSA-AES256-SHA': 'weak', 'ECDHE-ECDSA-AES256-GCM-SHA384': 'recommended', 'AES128-SHA': 'weak',
'ECDHE-RSA-AES128-SHA': 'weak', 'ECDHE-ECDSA-AES128-SHA256': 'weak', 'ECDHE-RSA-AES128-GCM-SHA256': 'secure', 'ECDHE-RSA-AES256-SHA384': 'weak',
'AES256-GCM-SHA384': 'weak', 'ECDHE-RSA-AES128-SHA256': 'weak', 'AES256-SHA256' : 'weak', 'ECDHE-ECDSA-AES256-SHA384': 'weak',
'AES128-SHA256' : 'weak', 'ECDHE-RSA-AES256-GCM-SHA384': 'secure', 'ECDHE-ECDSA-AES128-SHA': 'weak', 'AES256-SHA': 'weak', ''
'ECDHE-ECDSA-AES128-GCM-SHA256': 'recommended', 'ECDHE-RSA-AES256-SHA': 'weak'
}
return cipher_suites
def get_elbv2_internet_facing(self) -> List:
elbs = self.elbsv2
test_name = "elbv2_is_not_internet_facing"
result = []
for elb in elbs:
load_balancer_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'application' or elb_type == 'network':
if elb['Scheme'] == 'internet-facing':
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elb_generating_access_log(self) -> List:
elbs = self.elbs
test_name = "elb_is_generating_access_log"
result = []
for elb in elbs:
load_balancer_name = elb['LoadBalancerName']
response = self.aws_elbs_client.describe_load_balancer_attributes(LoadBalancerName=load_balancer_name)
if response['LoadBalancerAttributes']['AccessLog']['Enabled']:
# no issue
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
# issue
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
return result
def get_alb_using_secure_listener(self) -> List:
test_name = "alb_is_using_secure_listeners"
elbs = self.elbsv2
result = []
for elb in elbs:
# check elbv2 type and only let ALB pass
if elb['Type'] == "application":
load_balancer_arn = elb['LoadBalancerArn']
response = self.aws_elbsv2_client.describe_listeners(LoadBalancerArn=load_balancer_arn)
listeners = response['Listeners']
secure_listener_count = 0
for listener in listeners:
if listener['Protocol'] == "HTTPS":
secure_listener_count += 1
if secure_listener_count == len(listeners):
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
continue
return result
    def get_elbv2_generating_access_log(self) -> List:
        """Check that v2 load balancers produce traffic logs.

        ALBs/NLBs are checked for the access_logs.s3.enabled attribute; any
        other type (e.g. gateway) is checked indirectly by requiring VPC flow
        logs on every network interface belonging to the load balancer.
        """
        test_name = "elbv2_is_generating_access_logs"
        result = []
        elbs = self.elbsv2
        for elb in elbs:
            elb_arn = elb['LoadBalancerArn']
            elb_type = elb['Type']
            if elb_type == 'application' or elb_type == 'network':
                elb_attributes = self.aws_elbsv2_client.describe_load_balancer_attributes(LoadBalancerArn=elb_arn)
                attributes = elb_attributes['Attributes']
                # Scan the attribute list for the S3 access-log flag; stop at
                # the first match. NOTE(review): a load balancer whose
                # attribute list lacks the key produces no finding at all.
                for i in attributes:
                    if i['Key'] == 'access_logs.s3.enabled':
                        if i['Value'] == 'false':
                            result.append({
                                "user": self.user_id,
                                "account_arn": self.account_arn,
                                "account": self.account_id,
                                "timestamp": time.time(),
                                "item": elb_arn,
                                "item_type": "aws_elbv2",
                                "test_name": test_name,
                                "test_result": "issue_found"
                            })
                        else:
                            result.append({
                                "user": self.user_id,
                                "account_arn": self.account_arn,
                                "account": self.account_id,
                                "timestamp": time.time(),
                                "item": elb_arn,
                                "item_type": "aws_elbv2",
                                "test_name": test_name,
                                "test_result": "no_issue_found"
                            })
                        break
                    else: pass
            else:
                # access log / vpc flow logs
                # Derive the ENI description AWS assigns to the load
                # balancer's interfaces ("ELB <name-suffix-of-arn>") and look
                # those interfaces up.
                arn_split = elb_arn.split(':')
                temp = arn_split[-1]
                description_temp = temp.split('loadbalancer/')
                network_interface_description = 'ELB' + ' ' + description_temp[-1]
                ec2_client = boto3.client('ec2')
                response = ec2_client.describe_network_interfaces(Filters=[{'Name' : 'description', 'Values' : [network_interface_description]}])
                network_interfaces = response['NetworkInterfaces']
                interface_ids = []
                for interface in network_interfaces:
                    interface_ids.append(interface['NetworkInterfaceId'])
                # Count interfaces that have at least one flow log attached.
                has_flow_logs = 0
                for id in interface_ids:
                    response = ec2_client.describe_flow_logs(Filters=[{'Name': 'resource-id', 'Values' : [id]}])
                    flow_logs = response['FlowLogs']
                    if len(flow_logs) > 0:
                        has_flow_logs += 1
                # Compliant only if every interface is covered (vacuously
                # true when no interfaces were found).
                if len(interface_ids) == has_flow_logs:
                    # no issue
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "no_issue_found"
                    })
                else:
                    # issue
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "issue_found"
                    })
        return result
    def get_elb_listeners_using_tls(self) -> List:
        """Check that every listener policy on each classic ELB enables
        TLSv1.2.

        A listener counts as secure only if every one of its policies has
        the attribute Protocol-TLSv1.2 set to true; the ELB passes only if
        every listener is secure. NOTE(review): a listener with an empty
        policy list is counted as insecure (it never increments
        secure_listeners_count).
        """
        test_name = "elb_listeners_using_tls_v1.2"
        result = []
        elbs = self.elbs
        for elb in elbs:
            elb_name = elb['LoadBalancerName']
            listeners = elb['ListenerDescriptions']
            secure_listeners_count = 0
            for listener in listeners:
                policy_names = listener['PolicyNames']
                if len(policy_names) > 0:
                    response = self.aws_elbs_client.describe_load_balancer_policies(PolicyNames=policy_names, LoadBalancerName=elb_name)
                    policy_descriptions = response['PolicyDescriptions']
                    found_tls_v12_count = 0
                    # look into policy attrs
                    for policy_description in policy_descriptions:
                        policy_attrs = policy_description['PolicyAttributeDescriptions']
                        for attr in policy_attrs:
                            if attr['AttributeName'] == 'Protocol-TLSv1.2' and attr['AttributeValue'] == 'true':
                                found_tls_v12_count += 1
                                break
                    # Listener is secure only when every policy enables TLSv1.2.
                    if found_tls_v12_count == len(policy_descriptions):
                        secure_listeners_count += 1
                    else: pass
            if secure_listeners_count == len(listeners):
                # secure
                result.append({
                    "user": self.user_id,
                    "account_arn": self.account_arn,
                    "account": self.account_id,
                    "timestamp": time.time(),
                    "item": elb_name,
                    "item_type": "aws_elb",
                    "test_name": test_name,
                    "test_result": "no_issue_found"
                })
            else:
                # issue found
                result.append({
                    "user": self.user_id,
                    "account_arn": self.account_arn,
                    "account": self.account_id,
                    "timestamp": time.time(),
                    "item": elb_name,
                    "item_type": "aws_elb",
                    "test_name": test_name,
                    "test_result": "issue_found"
                })
        return result
def get_elb_listeners_securely_configured(self) -> List:
test_name = "elb_listeners_securely_configurd"
result = []
elbs = self.elbs
for elb in elbs:
listeners = elb['ListenerDescriptions']
loab_balancer_name = elb['LoadBalancerName']
secure_listeners = 0
for i in listeners:
listener = i['Listener']
if listener['InstanceProtocol'] == 'HTTPS' and listener['Protocol'] == 'HTTPS':
# secure
secure_listeners += 1
elif listener['InstanceProtocol'] == 'SSL' and listener['Protocol'] == 'SSL':
# secure
secure_listeners += 1
elif listener['InstanceProtocol'] == 'HTTPS' and listener['Protocol'] == 'SSL':
# secure
secure_listeners += 1
elif listener['InstanceProtocol'] == 'SSL' and listener['Protocol'] == 'HTTPS':
# secure
secure_listeners += 1
else: pass
if len(listeners) == secure_listeners:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": loab_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": loab_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
return result
def get_elb_security_policy_secure_ciphers(self) -> List:
elbs = self.elbs
test_name = "elb_security_policy_does_not_contain_any_insecure_ciphers"
result = []
elb_with_issue = []
all_elbs = []
for elb in elbs:
# get policies
load_balancer_name = elb['LoadBalancerName']
all_elbs.append(load_balancer_name)
listeners = elb['ListenerDescriptions']
listener_policies = []
for listener in listeners:
listener_policies.extend(listener['PolicyNames'])
if len(listener_policies) > 0:
response = self.aws_elbs_client.describe_load_balancer_policies(PolicyNames=listener_policies)
query_result = jmespath.search("PolicyDescriptions[].PolicyAttributeDescriptions[?AttributeValue=='true'].AttributeName", response)
all_attrs = []
for i in query_result:
all_attrs.extend(i)
unique_set = list(set(all_attrs))
cipher_suites = self.cipher_suites
for i in unique_set:
if i.startswith('Protocol') or i.startswith('protocol'): pass
elif i == 'Server-Defined-Cipher-Order': pass
elif cipher_suites[i] == 'insecure':
elb_with_issue.append(load_balancer_name)
break
else: pass
else:
elb_with_issue.append(load_balancer_name)
all_elbs_set = set(all_elbs)
elb_with_issue_set = set(elb_with_issue)
elb_with_no_issue_set = all_elbs_set.difference(elb_with_issue)
for i in elb_with_issue_set:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": i,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
for i in elb_with_no_issue_set:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": i,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elb_has_secure_ssl_protocol(self) -> List:
test_name = "elb_has_secure_ssl_protocol"
elbs = self.elbs
result = []
for elb in elbs:
load_balancer_name = elb['LoadBalancerName']
ssl_policies_count = len(elb['Policies']['OtherPolicies'])
response = self.aws_elbs_client.describe_load_balancer_policies(LoadBalancerName=load_balancer_name)
query_result = jmespath.search("PolicyDescriptions[].PolicyAttributeDescriptions[?AttributeValue=='true'].AttributeName", response)
ssl_with_issue = 0
for attrs in query_result:
for attr in attrs:
if attr.startswith('Protocol'): pass
elif attr == 'Server-Defined-Cipher-Order': pass
else:
if self.cipher_suites[attr] == 'insecure':
ssl_with_issue += 1
break
if ssl_policies_count == ssl_with_issue:
# insecure
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elbv2_using_latest_security_policy(self) -> List:
test_name = "elbv2_using_latest_security_policy"
elbv2 = self.elbsv2
latest_security_policies = self.latest_security_policies
result = []
for elb in elbv2:
response = self.aws_elbsv2_client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn'])
listeners = response['Listeners']
elb_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'application' or elb_type == 'network':
secure_listeners = 0
for listener in listeners:
ssl_policy = listener.get('SslPolicy')
if ssl_policy in latest_security_policies:
secure_listeners += 1
if secure_listeners == len(listeners):
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
# GWLB
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elbv2_has_deletion_protection(self) -> List:
result = []
test_name = "elbv2_has_deletion_protection_enabled"
elbs = self.elbsv2
for elb in elbs:
elb_arn = elb['LoadBalancerArn']
response = self.aws_elbsv2_client.describe_load_balancer_attributes(LoadBalancerArn=elb_arn)
attrs = response['Attributes']
for attr in attrs:
if attr['Key'] == 'deletion_protection.enabled':
if attr['Value'] == 'true':
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
break
else: pass
return result
def get_elbv2_allows_https_traffic_only(self) -> List:
result = []
test_name = "elbv2_should_allow_https_traffic_only"
elbs = self.elbsv2
for elb in elbs:
elb_arn = elb['LoadBalancerArn']
paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
listerners = []
for page in response_iterator:
listerners.extend(page['Listeners'])
for listerner in listerners:
protocol = listerner['Protocol']
listener_wo_https = False
if protocol == 'HTTPS' or protocol == "TLS" or protocol == "GENEVE":
pass
else:
listener_wo_https = True
break
if listener_wo_https:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_alb_using_tls12_or_higher(self) -> List:
result = []
test_name = "application_load_balancer_should_allow_TLSv1.2_or_higher"
hash_map = {}
elbs = self.elbsv2
elb_count = len(elbs)
if elb_count > 0:
for elb in elbs:
elb_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'application':
paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
listerners = []
for page in response_iterator:
listerners.extend(page['Listeners'])
for listener in listerners:
ssl_policy = listener['SslPolicy'] if listener.get('SslPolicy') else 'no_ssl_policy'
ssl_version_12 = hash_map.get(ssl_policy, None)
listener_with_issue = False
if ssl_policy != 'no_ssl_policy':
if ssl_version_12 is None:
response = self.aws_elbsv2_client.describe_ssl_policies(
Names=[ssl_policy]
)
policy_details = response['SslPolicies'][0]
ssl_protocols = policy_details['SslProtocols']
ssl_versions = list(map(lambda x: float(x), list(map(lambda x: x.split('v')[-1], ssl_protocols))))
required_versions = list(filter(lambda x: x >= 1.2, ssl_versions))
if len(required_versions) == 0:
hash_map[ssl_policy] = False
listener_with_issue = True
break
else: hash_map[ssl_policy] = True
elif ssl_version_12: listener_with_issue = False
else:
listener_with_issue = True
break
else:
listener_with_issue = True
break
if listener_with_issue:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
if len(result) == 0:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_alb_found@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_elb_found@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_nlb_using_tls12_or_higher(self) -> List:
result = []
test_name = "network_load_balancer_should_allow_TLSv1.2_or_higher"
hash_map = {}
elbs = self.elbsv2
elb_count = len(elbs)
if elb_count > 0:
for elb in elbs:
elb_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'network':
paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
listerners = []
for page in response_iterator:
listerners.extend(page['Listeners'])
for listener in listerners:
ssl_policy = listener['SslPolicy'] if listener.get('SslPolicy') else 'no_ssl_policy'
ssl_version_12 = hash_map.get(ssl_policy, None)
listener_with_issue = False
if ssl_policy != 'no_ssl_policy':
if ssl_version_12 is None:
response = self.aws_elbsv2_client.describe_ssl_policies(
Names=[ssl_policy]
)
policy_details = response['SslPolicies'][0]
ssl_protocols = policy_details['SslProtocols']
ssl_versions = list(map(lambda x: float(x), list(map(lambda x: x.split('v')[-1], ssl_protocols))))
required_versions = list(filter(lambda x: x >= 1.2, ssl_versions))
if len(required_versions) == 0:
hash_map[ssl_policy] = False
listener_with_issue = True
break
else: hash_map[ssl_policy] = True
elif ssl_version_12: listener_with_issue = False
else:
listener_with_issue = True
break
else:
listener_with_issue = True
break
if listener_with_issue:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
if len(result) == 0:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_nlb_found@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_nlb_found@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elb_internet_facing(self) -> List:
elbs = self.elbs
test_name = "internet_facing_elbv1"
result = []
if len(elbs) > 0:
for elb in elbs:
load_balancere_name = elb['LoadBalancerName']
if elb['Scheme'] == 'internet-facing':
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancere_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancere_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_elb@@" + self.account_id,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_nlb_support_insecure_negotiation_policy(self) -> List:
test_name = "network_load_balancer_should_not_support_insecure_negotiation_policy"
result = []
elbs = self.elbsv2
hash_map = {}
elb_count = len(elbs)
if elb_count > 0:
for elb in elbs:
elb_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'network':
paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
listerners = []
for page in response_iterator:
listerners.extend(page['Listeners'])
for listener in listerners:
ssl_policy = listener['SslPolicy'] if listener.get('SslPolicy') else 'no_ssl_policy'
if ssl_policy != 'no_ssl_policy':
ssl_version_11 = hash_map.get(ssl_policy, None)
listener_with_issue = False
if ssl_version_11 is None:
response = self.aws_elbsv2_client.describe_ssl_policies(
Names=[ssl_policy]
)
policy_details = response['SslPolicies'][0]
ssl_protocols = policy_details['SslProtocols']
ssl_versions = list(map(lambda x: float(x), list(map(lambda x: x.split('v')[-1], ssl_protocols))))
required_versions = list(filter(lambda x: x == 1.0 or x == 1.1, ssl_versions))
if len(required_versions) == 0:
hash_map[ssl_policy] = False
else:
listener_with_issue = True
hash_map[ssl_policy] = True
break
elif ssl_version_11:
listener_with_issue = True
break
else: listener_with_issue = False
else:
listener_with_issue = True
break
if listener_with_issue:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
if len(result) == 0:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_nlb@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_elb@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_alb_certificate_should_be_renewed(self):
test_name = "application_load_balancer_ssl_certificate_should_be_renewed_30_days_in_advance"
result = []
elbs = self.elbsv2
ssl_certificate_age = int(self.ssl_certificate_age) if self.ssl_certificate_age else 30
if len(elbs) > 0:
for elb in elbs:
elb_type = elb['Type']
elb_arn = elb['LoadBalancerArn']
if elb_type == 'application':
paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
listerners = []
for page in response_iterator:
listerners.extend(page['Listeners'])
elb_certificates = []
for listener in listerners:
certificates = listener.get('Certificates')
if certificates is not None:
elb_certificates.extend(certificates)
else:
elb_certificates.append(certificates)
elb_with_issue = False
for cert in elb_certificates:
if cert is not None:
cert_arn = cert['CertificateArn']
filtered_result = list(filter(lambda x: x == 'acm', cert_arn.split(':')))
if len(filtered_result) > 0:
response = self.aws_acm_client.describe_certificate(CertificateArn=cert_arn)
expire_date = datetime.date(response['Certificate']['NotAfter'])
current_date = datetime.date(datetime.now())
time_diff = (expire_date - current_date).days
if time_diff > ssl_certificate_age:
elb_with_issue = False
else:
elb_with_issue = True
break
else:
pass
else:
elb_with_issue = True
break
if elb_with_issue:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
if len(result) == 0:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_alb@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else: pass
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": "no_elb@@" + self.account_id,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
import os
from datetime import datetime
import time
from typing import Dict, List
import interfaces
import boto3
import jmespath
class Tester(interfaces.TesterInterface):
def __init__(self) -> None:
self.user_id = boto3.client('sts').get_caller_identity().get('UserId')
self.account_arn = boto3.client('sts').get_caller_identity().get('Arn')
self.account_id = boto3.client('sts').get_caller_identity().get('Account')
self.aws_elbs_client = boto3.client('elb')
self.aws_elbsv2_client = boto3.client('elbv2')
self.elbs = self._get_all_elb()
self.elbsv2 = self._get_all_elbv2()
self.cipher_suites = self._get_cipher_suite_details()
self.latest_security_policies = self._get_aws_latest_security_policies()
self.aws_acm_client = boto3.client('acm')
self.aws_iam_client = boto3.client('iam')
self.ssl_certificate_age = os.environ.get('AUTOPOSTURE_ALB_SSL_CERTIFICATE_AGE')
def declare_tested_service(self) -> str:
return "elb"
def declare_tested_provider(self) -> str:
return "aws"
def run_tests(self) -> list:
return \
self.get_elbv2_internet_facing() + \
self.get_elbv2_generating_access_log() + \
self.get_alb_using_secure_listener() + \
self.get_elb_generating_access_log() + \
self.get_elb_listeners_using_tls() + \
self.get_elb_listeners_securely_configured() + \
self.get_elb_has_secure_ssl_protocol() + \
self.get_elb_security_policy_secure_ciphers() + \
self.get_elbv2_using_latest_security_policy() + \
self.get_elbv2_has_deletion_protection() + \
self.get_elbv2_allows_https_traffic_only() + \
self.get_alb_using_tls12_or_higher() + \
self.get_nlb_using_tls12_or_higher() + \
self.get_elb_internet_facing() + \
self.get_nlb_support_insecure_negotiation_policy() + \
self.get_alb_certificate_should_be_renewed()
def _get_all_elbv2(self) -> List:
elbs = self.aws_elbsv2_client.describe_load_balancers()
return elbs['LoadBalancers']
def _get_all_elb(self) -> List:
elbs = self.aws_elbs_client.describe_load_balancers()
return elbs['LoadBalancerDescriptions']
def _get_aws_latest_security_policies(self) -> List:
policies = ['ELBSecurityPolicy-2016-08', 'ELBSecurityPolicy-FS-2018-06']
return policies
def _get_cipher_suite_details(self) -> Dict:
cipher_suites = {
'AES128-GCM-SHA256' : 'weak', 'ECDHE-ECDSA-AES256-SHA': 'weak', 'ECDHE-ECDSA-AES256-GCM-SHA384': 'recommended', 'AES128-SHA': 'weak',
'ECDHE-RSA-AES128-SHA': 'weak', 'ECDHE-ECDSA-AES128-SHA256': 'weak', 'ECDHE-RSA-AES128-GCM-SHA256': 'secure', 'ECDHE-RSA-AES256-SHA384': 'weak',
'AES256-GCM-SHA384': 'weak', 'ECDHE-RSA-AES128-SHA256': 'weak', 'AES256-SHA256' : 'weak', 'ECDHE-ECDSA-AES256-SHA384': 'weak',
'AES128-SHA256' : 'weak', 'ECDHE-RSA-AES256-GCM-SHA384': 'secure', 'ECDHE-ECDSA-AES128-SHA': 'weak', 'AES256-SHA': 'weak', ''
'ECDHE-ECDSA-AES128-GCM-SHA256': 'recommended', 'ECDHE-RSA-AES256-SHA': 'weak'
}
return cipher_suites
def get_elbv2_internet_facing(self) -> List:
elbs = self.elbsv2
test_name = "elbv2_is_not_internet_facing"
result = []
for elb in elbs:
load_balancer_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'application' or elb_type == 'network':
if elb['Scheme'] == 'internet-facing':
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elb_generating_access_log(self) -> List:
elbs = self.elbs
test_name = "elb_is_generating_access_log"
result = []
for elb in elbs:
load_balancer_name = elb['LoadBalancerName']
response = self.aws_elbs_client.describe_load_balancer_attributes(LoadBalancerName=load_balancer_name)
if response['LoadBalancerAttributes']['AccessLog']['Enabled']:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
return result
def get_alb_using_secure_listener(self) -> List:
test_name = "alb_is_using_secure_listeners"
elbs = self.elbsv2
result = []
for elb in elbs:
if elb['Type'] == "application":
load_balancer_arn = elb['LoadBalancerArn']
response = self.aws_elbsv2_client.describe_listeners(LoadBalancerArn=load_balancer_arn)
listeners = response['Listeners']
secure_listener_count = 0
for listener in listeners:
if listener['Protocol'] == "HTTPS":
secure_listener_count += 1
if secure_listener_count == len(listeners):
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
continue
return result
    def get_elbv2_generating_access_log(self) -> List:
        """Report whether each ELBv2 is producing traffic logs.

        ALBs/NLBs are checked for the 'access_logs.s3.enabled' attribute;
        gateway load balancers fall back to checking that every ENI backing
        the GWLB has a VPC flow log configured.
        """
        test_name = "elbv2_is_generating_access_logs"
        result = []
        elbs = self.elbsv2
        for elb in elbs:
            elb_arn = elb['LoadBalancerArn']
            elb_type = elb['Type']
            if elb_type == 'application' or elb_type == 'network':
                elb_attributes = self.aws_elbsv2_client.describe_load_balancer_attributes(LoadBalancerArn=elb_arn)
                attributes = elb_attributes['Attributes']
                for i in attributes:
                    if i['Key'] == 'access_logs.s3.enabled':
                        if i['Value'] == 'false':
                            result.append({
                                "user": self.user_id,
                                "account_arn": self.account_arn,
                                "account": self.account_id,
                                "timestamp": time.time(),
                                "item": elb_arn,
                                "item_type": "aws_elbv2",
                                "test_name": test_name,
                                "test_result": "issue_found"
                            })
                        else:
                            result.append({
                                "user": self.user_id,
                                "account_arn": self.account_arn,
                                "account": self.account_id,
                                "timestamp": time.time(),
                                "item": elb_arn,
                                "item_type": "aws_elbv2",
                                "test_name": test_name,
                                "test_result": "no_issue_found"
                            })
                        # Only the access-log attribute matters; stop scanning.
                        break
                    else: pass
            else:
                # GWLB path: derive the ENI description from the ARN tail
                # ("ELB <type>/<name>/<id>") to find its network interfaces.
                arn_split = elb_arn.split(':')
                temp = arn_split[-1]
                description_temp = temp.split('loadbalancer/')
                network_interface_description = 'ELB' + ' ' + description_temp[-1]
                ec2_client = boto3.client('ec2')
                response = ec2_client.describe_network_interfaces(Filters=[{'Name' : 'description', 'Values' : [network_interface_description]}])
                network_interfaces = response['NetworkInterfaces']
                interface_ids = []
                for interface in network_interfaces:
                    interface_ids.append(interface['NetworkInterfaceId'])
                # Count interfaces that have at least one flow log attached.
                has_flow_logs = 0
                for id in interface_ids:
                    response = ec2_client.describe_flow_logs(Filters=[{'Name': 'resource-id', 'Values' : [id]}])
                    flow_logs = response['FlowLogs']
                    if len(flow_logs) > 0:
                        has_flow_logs += 1
                # Pass only when every backing ENI is covered by a flow log.
                if len(interface_ids) == has_flow_logs:
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "no_issue_found"
                    })
                else:
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "issue_found"
                    })
        return result
def get_elb_listeners_using_tls(self) -> List:
test_name = "elb_listeners_using_tls_v1.2"
result = []
elbs = self.elbs
for elb in elbs:
elb_name = elb['LoadBalancerName']
listeners = elb['ListenerDescriptions']
secure_listeners_count = 0
for listener in listeners:
policy_names = listener['PolicyNames']
if len(policy_names) > 0:
response = self.aws_elbs_client.describe_load_balancer_policies(PolicyNames=policy_names, LoadBalancerName=elb_name)
policy_descriptions = response['PolicyDescriptions']
found_tls_v12_count = 0
for policy_description in policy_descriptions:
policy_attrs = policy_description['PolicyAttributeDescriptions']
for attr in policy_attrs:
if attr['AttributeName'] == 'Protocol-TLSv1.2' and attr['AttributeValue'] == 'true':
found_tls_v12_count += 1
break
if found_tls_v12_count == len(policy_descriptions):
secure_listeners_count += 1
else: pass
if secure_listeners_count == len(listeners):
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
return result
def get_elb_listeners_securely_configured(self) -> List:
test_name = "elb_listeners_securely_configurd"
result = []
elbs = self.elbs
for elb in elbs:
listeners = elb['ListenerDescriptions']
loab_balancer_name = elb['LoadBalancerName']
secure_listeners = 0
for i in listeners:
listener = i['Listener']
if listener['InstanceProtocol'] == 'HTTPS' and listener['Protocol'] == 'HTTPS':
secure_listeners += 1
elif listener['InstanceProtocol'] == 'SSL' and listener['Protocol'] == 'SSL':
secure_listeners += 1
elif listener['InstanceProtocol'] == 'HTTPS' and listener['Protocol'] == 'SSL':
secure_listeners += 1
elif listener['InstanceProtocol'] == 'SSL' and listener['Protocol'] == 'HTTPS':
secure_listeners += 1
else: pass
if len(listeners) == secure_listeners:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": loab_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": loab_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
return result
def get_elb_security_policy_secure_ciphers(self) -> List:
elbs = self.elbs
test_name = "elb_security_policy_does_not_contain_any_insecure_ciphers"
result = []
elb_with_issue = []
all_elbs = []
for elb in elbs:
load_balancer_name = elb['LoadBalancerName']
all_elbs.append(load_balancer_name)
listeners = elb['ListenerDescriptions']
listener_policies = []
for listener in listeners:
listener_policies.extend(listener['PolicyNames'])
if len(listener_policies) > 0:
response = self.aws_elbs_client.describe_load_balancer_policies(PolicyNames=listener_policies)
query_result = jmespath.search("PolicyDescriptions[].PolicyAttributeDescriptions[?AttributeValue=='true'].AttributeName", response)
all_attrs = []
for i in query_result:
all_attrs.extend(i)
unique_set = list(set(all_attrs))
cipher_suites = self.cipher_suites
for i in unique_set:
if i.startswith('Protocol') or i.startswith('protocol'): pass
elif i == 'Server-Defined-Cipher-Order': pass
elif cipher_suites[i] == 'insecure':
elb_with_issue.append(load_balancer_name)
break
else: pass
else:
elb_with_issue.append(load_balancer_name)
all_elbs_set = set(all_elbs)
elb_with_issue_set = set(elb_with_issue)
elb_with_no_issue_set = all_elbs_set.difference(elb_with_issue)
for i in elb_with_issue_set:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": i,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
for i in elb_with_no_issue_set:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": i,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elb_has_secure_ssl_protocol(self) -> List:
test_name = "elb_has_secure_ssl_protocol"
elbs = self.elbs
result = []
for elb in elbs:
load_balancer_name = elb['LoadBalancerName']
ssl_policies_count = len(elb['Policies']['OtherPolicies'])
response = self.aws_elbs_client.describe_load_balancer_policies(LoadBalancerName=load_balancer_name)
query_result = jmespath.search("PolicyDescriptions[].PolicyAttributeDescriptions[?AttributeValue=='true'].AttributeName", response)
ssl_with_issue = 0
for attrs in query_result:
for attr in attrs:
if attr.startswith('Protocol'): pass
elif attr == 'Server-Defined-Cipher-Order': pass
else:
if self.cipher_suites[attr] == 'insecure':
ssl_with_issue += 1
break
if ssl_policies_count == ssl_with_issue:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": load_balancer_name,
"item_type": "aws_elb",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elbv2_using_latest_security_policy(self) -> List:
test_name = "elbv2_using_latest_security_policy"
elbv2 = self.elbsv2
latest_security_policies = self.latest_security_policies
result = []
for elb in elbv2:
response = self.aws_elbsv2_client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn'])
listeners = response['Listeners']
elb_arn = elb['LoadBalancerArn']
elb_type = elb['Type']
if elb_type == 'application' or elb_type == 'network':
secure_listeners = 0
for listener in listeners:
ssl_policy = listener.get('SslPolicy')
if ssl_policy in latest_security_policies:
secure_listeners += 1
if secure_listeners == len(listeners):
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "issue_found"
})
else:
result.append({
"user": self.user_id,
"account_arn": self.account_arn,
"account": self.account_id,
"timestamp": time.time(),
"item": elb_arn,
"item_type": "aws_elbv2",
"test_name": test_name,
"test_result": "no_issue_found"
})
return result
def get_elbv2_has_deletion_protection(self) -> List:
    """Check each v2 load balancer's ``deletion_protection.enabled`` attribute.

    Balancers that do not expose the attribute at all produce no finding,
    matching the original behavior.

    Returns:
        List of finding dicts for balancers carrying the attribute.
    """
    test_name = "elbv2_has_deletion_protection_enabled"
    findings = []
    for balancer in self.elbsv2:
        arn = balancer['LoadBalancerArn']
        attributes = self.aws_elbsv2_client.describe_load_balancer_attributes(
            LoadBalancerArn=arn)['Attributes']
        for attribute in attributes:
            if attribute['Key'] != 'deletion_protection.enabled':
                continue
            verdict = "no_issue_found" if attribute['Value'] == 'true' else "issue_found"
            findings.append({
                "user": self.user_id,
                "account_arn": self.account_arn,
                "account": self.account_id,
                "timestamp": time.time(),
                "item": arn,
                "item_type": "aws_elbv2",
                "test_name": test_name,
                "test_result": verdict
            })
            break  # only the first matching attribute counts
    return findings
def get_elbv2_allows_https_traffic_only(self) -> List:
    """Flag v2 load balancers that expose any listener over a protocol other
    than HTTPS, TLS or GENEVE.

    Bug fix: ``listener_wo_https`` is now initialized before the listener
    loop. Previously a load balancer with zero listeners left the name
    unbound and raised a NameError.

    Returns:
        List of finding dicts, one per load balancer in ``self.elbsv2``.
    """
    result = []
    test_name = "elbv2_should_allow_https_traffic_only"
    for elb in self.elbsv2:
        elb_arn = elb['LoadBalancerArn']
        paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
        listeners = []
        for page in paginator.paginate(LoadBalancerArn=elb_arn):
            listeners.extend(page['Listeners'])
        # Defined up front so a balancer with no listeners is treated as
        # having no insecure listener instead of crashing.
        listener_wo_https = False
        for listener in listeners:
            if listener['Protocol'] not in ('HTTPS', 'TLS', 'GENEVE'):
                listener_wo_https = True
                break
        result.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": elb_arn,
            "item_type": "aws_elbv2",
            "test_name": test_name,
            "test_result": "issue_found" if listener_wo_https else "no_issue_found"
        })
    return result
def get_alb_using_tls12_or_higher(self) -> List:
    """Check that every application load balancer uses only SSL policies
    that support TLSv1.2 or higher.

    Policy lookups are memoized in ``hash_map`` (policy name -> bool) so each
    SSL policy is described at most once per call.

    Bug fix: ``listener_with_issue`` is now initialized before the listener
    loop; previously an ALB with zero listeners raised a NameError.

    Returns:
        List of finding dicts (one per ALB), or a single synthetic
        "no_alb_found"/"no_elb_found" entry when nothing applies.
    """
    result = []
    test_name = "application_load_balancer_should_allow_TLSv1.2_or_higher"
    hash_map = {}  # SSL policy name -> True if the policy offers TLSv1.2+
    elbs = self.elbsv2

    def record(item, verdict):
        # Every finding shares the same envelope; only item/verdict vary.
        result.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": item,
            "item_type": "aws_elbv2",
            "test_name": test_name,
            "test_result": verdict
        })

    if len(elbs) == 0:
        record("no_elb_found@@" + self.account_id, "no_issue_found")
        return result

    for elb in elbs:
        elb_arn = elb['LoadBalancerArn']
        if elb['Type'] != 'application':
            continue
        paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
        listeners = []
        for page in paginator.paginate(LoadBalancerArn=elb_arn):
            listeners.extend(page['Listeners'])
        listener_with_issue = False  # fixed: defined even with no listeners
        for listener in listeners:
            ssl_policy = listener.get('SslPolicy') or 'no_ssl_policy'
            if ssl_policy == 'no_ssl_policy':
                # No SSL policy at all cannot guarantee TLSv1.2+.
                listener_with_issue = True
                break
            supports_tls12 = hash_map.get(ssl_policy)
            if supports_tls12 is None:
                response = self.aws_elbsv2_client.describe_ssl_policies(
                    Names=[ssl_policy]
                )
                ssl_protocols = response['SslPolicies'][0]['SslProtocols']
                # Protocol names look like "TLSv1.2"; keep the numeric suffix.
                versions = [float(p.split('v')[-1]) for p in ssl_protocols]
                supports_tls12 = any(v >= 1.2 for v in versions)
                hash_map[ssl_policy] = supports_tls12
            if not supports_tls12:
                listener_with_issue = True
                break
        record(elb_arn, "issue_found" if listener_with_issue else "no_issue_found")

    if len(result) == 0:
        # The account has v2 ELBs but none of type 'application'.
        record("no_alb_found@@" + self.account_id, "no_issue_found")
    return result
def get_nlb_using_tls12_or_higher(self) -> List:
    """Check that every network load balancer uses only SSL policies that
    support TLSv1.2 or higher.

    Policy lookups are memoized in ``hash_map`` (policy name -> bool) so each
    SSL policy is described at most once per call.

    Bug fixes:
      * ``listener_with_issue`` is initialized before the listener loop;
        an NLB with zero listeners previously raised a NameError.
      * When real findings exist, the method no longer appends a spurious
        extra "no_nlb_found ... no_issue_found" entry (the original ``else``
        branch did; the ALB sibling correctly does nothing there).

    Returns:
        List of finding dicts (one per NLB), or a single synthetic
        "no_nlb_found"/"no_elb_found" entry when nothing applies.
    """
    result = []
    test_name = "network_load_balancer_should_allow_TLSv1.2_or_higher"
    hash_map = {}  # SSL policy name -> True if the policy offers TLSv1.2+
    elbs = self.elbsv2

    def record(item, verdict):
        # Every finding shares the same envelope; only item/verdict vary.
        result.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": item,
            "item_type": "aws_elbv2",
            "test_name": test_name,
            "test_result": verdict
        })

    if len(elbs) == 0:
        record("no_elb_found@@" + self.account_id, "no_issue_found")
        return result

    for elb in elbs:
        elb_arn = elb['LoadBalancerArn']
        if elb['Type'] != 'network':
            continue
        paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
        listeners = []
        for page in paginator.paginate(LoadBalancerArn=elb_arn):
            listeners.extend(page['Listeners'])
        listener_with_issue = False  # fixed: defined even with no listeners
        for listener in listeners:
            ssl_policy = listener.get('SslPolicy') or 'no_ssl_policy'
            if ssl_policy == 'no_ssl_policy':
                # No SSL policy at all cannot guarantee TLSv1.2+.
                listener_with_issue = True
                break
            supports_tls12 = hash_map.get(ssl_policy)
            if supports_tls12 is None:
                response = self.aws_elbsv2_client.describe_ssl_policies(
                    Names=[ssl_policy]
                )
                ssl_protocols = response['SslPolicies'][0]['SslProtocols']
                # Protocol names look like "TLSv1.2"; keep the numeric suffix.
                versions = [float(p.split('v')[-1]) for p in ssl_protocols]
                supports_tls12 = any(v >= 1.2 for v in versions)
                hash_map[ssl_policy] = supports_tls12
            if not supports_tls12:
                listener_with_issue = True
                break
        record(elb_arn, "issue_found" if listener_with_issue else "no_issue_found")

    if len(result) == 0:
        # The account has v2 ELBs but none of type 'network'.
        record("no_nlb_found@@" + self.account_id, "no_issue_found")
    return result
def get_elb_internet_facing(self) -> List:
    """Flag classic (v1) ELBs whose scheme is 'internet-facing'.

    Returns:
        One finding dict per classic ELB, or a single synthetic "no_elb"
        entry when the account has none.
    """
    test_name = "internet_facing_elbv1"
    findings = []

    def record(item, verdict):
        findings.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": item,
            "item_type": "aws_elb",
            "test_name": test_name,
            "test_result": verdict
        })

    if self.elbs:
        for balancer in self.elbs:
            exposed = balancer['Scheme'] == 'internet-facing'
            record(balancer['LoadBalancerName'],
                   "issue_found" if exposed else "no_issue_found")
    else:
        record("no_elb@@" + self.account_id, "no_issue_found")
    return findings
def get_nlb_support_insecure_negotiation_policy(self) -> List:
    """Flag network load balancers whose listeners use an SSL policy that
    still offers insecure negotiation (TLSv1.0 or TLSv1.1), or no SSL
    policy at all.

    Policy lookups are memoized in ``hash_map`` (policy name -> True when
    the policy offers TLSv1.0/1.1) so each policy is described at most once.

    Bug fix: ``listener_with_issue`` is now initialized before the listener
    loop; previously an NLB with zero listeners raised a NameError.

    Returns:
        List of finding dicts (one per NLB), or a single synthetic
        "no_nlb"/"no_elb" entry when nothing applies.
    """
    test_name = "network_load_balancer_should_not_support_insecure_negotiation_policy"
    result = []
    hash_map = {}  # SSL policy name -> True if the policy offers TLSv1.0/1.1
    elbs = self.elbsv2

    def record(item, verdict):
        # Every finding shares the same envelope; only item/verdict vary.
        result.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": item,
            "item_type": "aws_elbv2",
            "test_name": test_name,
            "test_result": verdict
        })

    if len(elbs) == 0:
        record("no_elb@@" + self.account_id, "no_issue_found")
        return result

    for elb in elbs:
        elb_arn = elb['LoadBalancerArn']
        if elb['Type'] != 'network':
            continue
        paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
        listeners = []
        for page in paginator.paginate(LoadBalancerArn=elb_arn):
            listeners.extend(page['Listeners'])
        listener_with_issue = False  # fixed: defined even with no listeners
        for listener in listeners:
            ssl_policy = listener.get('SslPolicy') or 'no_ssl_policy'
            if ssl_policy == 'no_ssl_policy':
                # A listener without any SSL policy is treated as insecure.
                listener_with_issue = True
                break
            insecure = hash_map.get(ssl_policy)
            if insecure is None:
                response = self.aws_elbsv2_client.describe_ssl_policies(
                    Names=[ssl_policy]
                )
                ssl_protocols = response['SslPolicies'][0]['SslProtocols']
                # Protocol names look like "TLSv1.1"; keep the numeric suffix.
                versions = [float(p.split('v')[-1]) for p in ssl_protocols]
                insecure = any(v == 1.0 or v == 1.1 for v in versions)
                hash_map[ssl_policy] = insecure
            if insecure:
                listener_with_issue = True
                break
        record(elb_arn, "issue_found" if listener_with_issue else "no_issue_found")

    if len(result) == 0:
        # The account has v2 ELBs but none of type 'network'.
        record("no_nlb@@" + self.account_id, "no_issue_found")
    return result
def get_alb_certificate_should_be_renewed(self):
    """Flag application load balancers whose ACM certificates expire within
    the renewal window.

    The window defaults to 30 days and can be overridden via
    ``self.ssl_certificate_age``. A listener certificate entry of ``None``,
    or a certificate expiring within the window, marks the ALB as an issue.
    Certificates whose ARN is not an ACM ARN are skipped.
    """
    test_name = "application_load_balancer_ssl_certificate_should_be_renewed_30_days_in_advance"
    result = []
    elbs = self.elbsv2
    # Renewal window in days; falls back to 30 when unset/empty.
    ssl_certificate_age = int(self.ssl_certificate_age) if self.ssl_certificate_age else 30
    if len(elbs) > 0:
        for elb in elbs:
            elb_type = elb['Type']
            elb_arn = elb['LoadBalancerArn']
            if elb_type == 'application':
                # Gather every listener (paginated) for this ALB.
                paginator = self.aws_elbsv2_client.get_paginator('describe_listeners')
                response_iterator = paginator.paginate(LoadBalancerArn=elb_arn)
                listerners = []
                for page in response_iterator:
                    listerners.extend(page['Listeners'])
                # Collect all certificates; a listener with no 'Certificates'
                # key contributes a None placeholder (treated as an issue below).
                elb_certificates = []
                for listener in listerners:
                    certificates = listener.get('Certificates')
                    if certificates is not None:
                        elb_certificates.extend(certificates)
                    else:
                        elb_certificates.append(certificates)
                elb_with_issue = False
                for cert in elb_certificates:
                    if cert is not None:
                        cert_arn = cert['CertificateArn']
                        # Only ACM-managed certificates can be described via ACM.
                        filtered_result = list(filter(lambda x: x == 'acm', cert_arn.split(':')))
                        if len(filtered_result) > 0:
                            response = self.aws_acm_client.describe_certificate(CertificateArn=cert_arn)
                            expire_date = datetime.date(response['Certificate']['NotAfter'])
                            current_date = datetime.date(datetime.now())
                            time_diff = (expire_date - current_date).days
                            if time_diff > ssl_certificate_age:
                                elb_with_issue = False
                            else:
                                # Expires within the window (or already expired).
                                elb_with_issue = True
                                break
                        else:
                            pass
                    else:
                        # Listener with no certificate at all.
                        elb_with_issue = True
                        break
                if elb_with_issue:
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "issue_found"
                    })
                else:
                    result.append({
                        "user": self.user_id,
                        "account_arn": self.account_arn,
                        "account": self.account_id,
                        "timestamp": time.time(),
                        "item": elb_arn,
                        "item_type": "aws_elbv2",
                        "test_name": test_name,
                        "test_result": "no_issue_found"
                    })
            else: pass
        if len(result) == 0:
            # v2 ELBs exist but none are application load balancers.
            result.append({
                "user": self.user_id,
                "account_arn": self.account_arn,
                "account": self.account_id,
                "timestamp": time.time(),
                "item": "no_alb@@" + self.account_id,
                "item_type": "aws_elbv2",
                "test_name": test_name,
                "test_result": "no_issue_found"
            })
        else: pass
    else:
        # Account has no v2 load balancers at all.
        result.append({
            "user": self.user_id,
            "account_arn": self.account_arn,
            "account": self.account_id,
            "timestamp": time.time(),
            "item": "no_elb@@" + self.account_id,
            "item_type": "aws_elbv2",
            "test_name": test_name,
            "test_result": "no_issue_found"
        })
    return result
| true | true |
1c3d616d9b31e5f507f82458b800ff768536db33 | 2,044 | py | Python | chatserver/views.py | unix2315/redis_test | ebb63c6a91b50e7c86671722bcea6ea317f834fb | [
"MIT"
] | 815 | 2015-01-04T03:02:03.000Z | 2022-03-18T21:48:48.000Z | chatserver/views.py | unix2315/redis_test | ebb63c6a91b50e7c86671722bcea6ea317f834fb | [
"MIT"
] | 213 | 2015-01-06T14:15:32.000Z | 2022-01-28T17:35:45.000Z | chatserver/views.py | unix2315/redis_test | ebb63c6a91b50e7c86671722bcea6ea317f834fb | [
"MIT"
] | 253 | 2015-01-02T16:38:15.000Z | 2022-03-30T20:43:58.000Z | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User, Group
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
from ws4redis.redis_store import RedisMessage
from ws4redis.publisher import RedisPublisher
class BroadcastChatView(TemplateView):
    """Render the broadcast chat page, greeting every connected client."""
    template_name = 'broadcast_chat.html'

    def get(self, request, *args, **kwargs):
        """Publish a broadcast welcome message over Redis, then render."""
        welcome = RedisMessage('Hello everybody') # create a welcome message to be sent to everybody
        RedisPublisher(facility='foobar', broadcast=True).publish_message(welcome)
        return super(BroadcastChatView, self).get(request, *args, **kwargs)
class UserChatView(TemplateView):
    """Render the user-to-user chat page and relay posted messages to one user."""
    template_name = 'user_chat.html'

    def get_context_data(self, **kwargs):
        # Expose the full user list so the template can offer a recipient picker.
        context = super(UserChatView, self).get_context_data(**kwargs)
        context.update(users=User.objects.all())
        return context

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        # CSRF checks disabled so the demo form can POST without a token.
        # NOTE(review): acceptable for a demo, not for production endpoints.
        return super(UserChatView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Publish the posted message to the Redis channel of the chosen user."""
        redis_publisher = RedisPublisher(facility='foobar', users=[request.POST.get('user')])
        message = RedisMessage(request.POST.get('message'))
        redis_publisher.publish_message(message)
        return HttpResponse('OK')
class GroupChatView(TemplateView):
    """Render the group chat page and relay posted messages to a group."""
    template_name = 'group_chat.html'

    def get_context_data(self, **kwargs):
        # Expose all groups so the template can offer a group picker.
        context = super(GroupChatView, self).get_context_data(**kwargs)
        context.update(groups=Group.objects.all())
        return context

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        # CSRF checks disabled so the demo form can POST without a token.
        # NOTE(review): acceptable for a demo, not for production endpoints.
        return super(GroupChatView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Publish the posted message to the Redis channel of the chosen group."""
        redis_publisher = RedisPublisher(facility='foobar', groups=[request.POST.get('group')])
        message = RedisMessage(request.POST.get('message'))
        redis_publisher.publish_message(message)
        return HttpResponse('OK')
from django.contrib.auth.models import User, Group
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
from ws4redis.redis_store import RedisMessage
from ws4redis.publisher import RedisPublisher
class BroadcastChatView(TemplateView):
    """Render the broadcast chat page, greeting every connected client."""
    template_name = 'broadcast_chat.html'

    def get(self, request, *args, **kwargs):
        """Publish a broadcast welcome message over Redis, then render."""
        welcome = RedisMessage('Hello everybody')
        RedisPublisher(facility='foobar', broadcast=True).publish_message(welcome)
        return super(BroadcastChatView, self).get(request, *args, **kwargs)
class UserChatView(TemplateView):
    """Render the user-to-user chat page and relay posted messages to one user."""
    template_name = 'user_chat.html'

    def get_context_data(self, **kwargs):
        # Expose the full user list so the template can offer a recipient picker.
        context = super(UserChatView, self).get_context_data(**kwargs)
        context.update(users=User.objects.all())
        return context

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        # CSRF checks disabled so the demo form can POST without a token.
        return super(UserChatView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Publish the posted message to the Redis channel of the chosen user."""
        redis_publisher = RedisPublisher(facility='foobar', users=[request.POST.get('user')])
        message = RedisMessage(request.POST.get('message'))
        redis_publisher.publish_message(message)
        return HttpResponse('OK')
class GroupChatView(TemplateView):
    """Render the group chat page and relay posted messages to a group."""
    template_name = 'group_chat.html'

    def get_context_data(self, **kwargs):
        # Expose all groups so the template can offer a group picker.
        context = super(GroupChatView, self).get_context_data(**kwargs)
        context.update(groups=Group.objects.all())
        return context

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        # CSRF checks disabled so the demo form can POST without a token.
        return super(GroupChatView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Publish the posted message to the Redis channel of the chosen group."""
        redis_publisher = RedisPublisher(facility='foobar', groups=[request.POST.get('group')])
        message = RedisMessage(request.POST.get('message'))
        redis_publisher.publish_message(message)
        return HttpResponse('OK')
| true | true |
1c3d63160ebc0604481f017ea29772c45ece71be | 19,104 | py | Python | tests/test_tamper.py | tgragnato/geneva | 2fc5b2f2f4766278902cff25af50b753d1d26a76 | [
"BSD-3-Clause"
] | 1,182 | 2019-11-15T02:56:47.000Z | 2022-03-30T16:09:04.000Z | tests/test_tamper.py | Nekotekina/geneva | 3eb6b7342f9afd7add1f4aba9e2aadf0b9a5f196 | [
"BSD-3-Clause"
] | 21 | 2019-11-15T15:08:02.000Z | 2022-01-03T16:22:45.000Z | tests/test_tamper.py | Nekotekina/geneva | 3eb6b7342f9afd7add1f4aba9e2aadf0b9a5f196 | [
"BSD-3-Clause"
] | 102 | 2019-11-15T15:01:07.000Z | 2022-03-30T13:52:47.000Z | import copy
import sys
import pytest
import random
# Include the root of the project
sys.path.append("..")
import evolve
import evaluator
import actions.strategy
import layers.packet
import actions.utils
import actions.tamper
import layers.layer
import layers.ip_layer
from scapy.all import IP, TCP, UDP, DNS, DNSQR, sr1
def test_tamper(logger):
    """
    Tests tampering with 'replace': the targeted TCP field is overwritten in
    place and no other TCP/IP field is disturbed.
    """
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R")

    # run() returns (left_child_packet, right_child_packet); tamper is unary.
    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"

    # Confirm tamper replaced the field it was supposed to
    assert packet[TCP].flags == "R", "Tamper did not replace flags."
    new_value = packet[TCP].flags

    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].flags == new_value, "Replaced value is not stable"

    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["flags"])

    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_tamper_ip(logger):
    """
    Tests tampering a field in the IP layer (source address replacement)
    without disturbing any other IP or TCP field.
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="src", tamper_type="replace", tamper_value="192.168.1.1", tamper_proto="IP")

    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"

    # Confirm tamper replaced the field it was supposed to
    assert packet[IP].src == "192.168.1.1", "Tamper did not replace flags."

    # Confirm tamper didn't corrupt anything in the TCP header
    assert confirm_unchanged(packet, original, TCP, [])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, ["src"])
def test_tamper_udp(logger):
    """
    Tests tampering a field in the UDP layer (checksum replacement)
    without disturbing any other UDP or IP field.
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/UDP(sport=2222, dport=53))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="chksum", tamper_type="replace", tamper_value=4444, tamper_proto="UDP")

    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"

    # Confirm tamper replaced the field it was supposed to
    assert packet[UDP].chksum == 4444, "Tamper did not replace flags."

    # Confirm tamper didn't corrupt anything in the TCP header
    assert confirm_unchanged(packet, original, UDP, ["chksum"])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_tamper_ip_ident(logger):
    """
    Tests replacing the IP identification field and that no other field
    (including the recomputed checksum) is otherwise corrupted.
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field='id', tamper_type='replace', tamper_value=3333, tamper_proto="IP")

    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"

    # Confirm tamper replaced the field it was supposed to
    assert packet[IP].id == 3333, "Tamper did not replace flags."

    # Confirm tamper didn't corrupt anything in the TCP header
    assert confirm_unchanged(packet, original, TCP, [])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, ["id"])
def confirm_unchanged(packet, original, protocol, changed):
    """
    Checks that no other field besides the given array of changed fields
    are different between these two packets.

    Args:
        packet: the tampered layers.packet.Packet
        original: a pre-tamper deep copy of the same packet
        protocol: scapy protocol class (IP, TCP, UDP) whose fields to compare
        changed: field names that are allowed (expected) to differ

    Returns:
        True when all non-exempt fields match; raises AssertionError otherwise.
    """
    for header in packet.layers:
        if packet.layers[header].protocol != protocol:
            continue
        for field in packet.layers[header].fields:
            # Skip checking the field we just changed (and payload data)
            if field in changed or field == "load":
                continue
            assert packet.get(protocol.__name__, field) == original.get(protocol.__name__, field), "Tamper changed %s field %s." % (str(protocol), field)
    return True
@pytest.mark.parametrize("use_canary", [False, True], ids=["without_canary", "with_canary"])
def test_mutate(logger, use_canary):
    """
    Stress-tests TamperAction mutation: repeatedly mutates an action and
    checks the mutated value is stable, the action still runs safely on a
    fresh packet, and its string form round-trips through parse().
    """
    logger.setLevel("ERROR")
    canary_id = None
    # Create an evaluator (only for the with_canary variant, which first
    # runs a collection phase to obtain a canary id)
    if use_canary:
        cmd = [
            "--test-type", "echo",
            "--censor", "censor2",
            "--log", actions.utils.CONSOLE_LOG_LEVEL,
            "--no-skip-empty",
            "--bad-word", "facebook",
            "--output-directory", actions.utils.RUN_DIRECTORY
        ]
        tester = evaluator.Evaluator(cmd, logger)
        canary_id = evolve.run_collection_phase(logger, tester)

    for _ in range(0, 25):
        tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R", tamper_proto="TCP")
        # Test mutation 200 times to ensure it remains stable
        for _ in range(0, 200):
            tamper._mutate(canary_id)
            tamper2 = actions.tamper.TamperAction(None)
            # Confirm tamper value was properly ._fix()-ed
            val = tamper.tamper_value
            for _ in range(0, 5):
                assert tamper.tamper_value == val, "Tamper value is not stable."
            # Create a test packet to ensure the field/proto choice was safe
            if random.random() < 0.5:
                test_packet = layers.packet.Packet(IP()/TCP())
            else:
                test_packet = layers.packet.Packet(IP()/UDP())
            # Check that tamper can run safely after mutation
            try:
                tamper.run(test_packet, logger)
            except:
                print(str(tamper))
                raise
            tamper._mutate_tamper_type()
            # Test that parsing tamper works - note we have to remove the tamper{} to make a call directly using tamper's parse.
            tamper2.parse(str(tamper)[7:-1], logger)
            assert str(tamper2) == str(tamper)
def test_parse_parameters(logger):
    """
    Tests that tamper properly rejects malformed tamper action strings,
    whether they carry too many or too few colon-separated parameters.
    """
    for bad_spec in ("this:has:too:many:parameters", "not:enough"):
        with pytest.raises(Exception):
            actions.tamper.TamperAction().parse(bad_spec, logger)
def test_corrupt(logger):
    """
    Tests the tamper 'corrupt' primitive: the corrupted value is stable
    across reads and no other field is disturbed.
    """
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="corrupt", tamper_value="R")
    assert tamper.field == "flags", "Tamper action changed fields."
    assert tamper.tamper_type == "corrupt", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:flags:corrupt}", "Tamper returned incorrect string representation: %s" % str(tamper)

    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)

    new_value = packet[TCP].flags
    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].flags == new_value, "Corrupted value is not stable"

    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["flags"])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_add(logger):
    """
    Tests the tamper 'add' primitive: the tamper value is added to the
    existing field value and no other field is disturbed.
    """
    tamper = actions.tamper.TamperAction(None, field="seq", tamper_type="add", tamper_value=10)
    assert tamper.field == "seq", "Tamper action changed fields."
    assert tamper.tamper_type == "add", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:seq:add:10}", "Tamper returned incorrect string representation: %s" % str(tamper)

    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)

    new_value = packet[TCP].seq
    # seq started at 100; adding 10 must give 110
    assert new_value == 110, "Tamper did not add"
    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].seq == new_value, "Corrupted value is not stable"

    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["seq"])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_decompress(logger):
    """
    Tests the tamper 'compress' primitive: DNS questions are rewritten with
    DNS name-compression pointers, the resulting query is still answered by
    a real resolver, and the action is a no-op on non-DNS packets.

    NOTE(review): this test sends live DNS queries to 8.8.8.8 via scapy's
    sr1() and therefore requires network access (and typically raw-socket
    privileges) to pass.
    """
    tamper = actions.tamper.TamperAction(None, field="qd", tamper_type="compress", tamper_value=10, tamper_proto="DNS")
    assert tamper.field == "qd", "Tamper action changed fields."
    assert tamper.tamper_type == "compress", "Tamper action changed types."
    assert str(tamper) == "tamper{DNS:qd:compress}", "Tamper returned incorrect string representation: %s" % str(tamper)

    packet = layers.packet.Packet(IP(dst="8.8.8.8")/UDP(dport=53)/DNS(qd=DNSQR(qname="minghui.ca.")))
    original = packet.copy()
    tamper.tamper(packet, logger)
    # Compressed wire format: the label "minghui" followed by a pointer (0xc0 0x1a)
    assert bytes(packet["DNS"]) == b'\x00\x00\x01\x00\x00\x02\x00\x00\x00\x00\x00\x00\x07minghui\xc0\x1a\x00\x01\x00\x01\x02ca\x00\x00\x01\x00\x01'
    resp = sr1(packet.packet)
    assert resp["DNS"]
    assert resp["DNS"].rcode != 1
    assert resp["DNSQR"]
    assert resp["DNSRR"].rdata
    assert confirm_unchanged(packet, original, IP, ["len"])
    print(resp.summary())

    packet = layers.packet.Packet(IP(dst="8.8.8.8")/UDP(dport=53)/DNS(qd=DNSQR(qname="maps.google.com")))
    original = packet.copy()
    tamper.tamper(packet, logger)
    assert bytes(packet["DNS"]) == b'\x00\x00\x01\x00\x00\x02\x00\x00\x00\x00\x00\x00\x04maps\xc0\x17\x00\x01\x00\x01\x06google\x03com\x00\x00\x01\x00\x01'
    resp = sr1(packet.packet)
    assert resp["DNS"]
    assert resp["DNS"].rcode != 1
    assert resp["DNSQR"]
    assert resp["DNSRR"].rdata
    assert confirm_unchanged(packet, original, IP, ["len"])
    print(resp.summary())

    # Confirm this is a NOP on normal packets
    packet = layers.packet.Packet(IP()/UDP())
    original = packet.copy()
    tamper.tamper(packet, logger)
    assert packet.packet.summary() == original.packet.summary()
    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, UDP, [])
    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_corrupt_chksum(logger):
    """
    Tests the tamper 'corrupt' primitive on the TCP checksum field, which
    requires special handling so scapy does not recompute it.
    """
    tamper = actions.tamper.TamperAction(None, field="chksum", tamper_type="corrupt", tamper_value="R")
    assert tamper.field == "chksum", "Tamper action changed checksum."
    assert tamper.tamper_type == "corrupt", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:chksum:corrupt}", "Tamper returned incorrect string representation: %s" % str(tamper)

    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)

    # Confirm tamper actually corrupted the checksum
    assert packet[TCP].chksum != 0
    new_value = packet[TCP].chksum

    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].chksum == new_value, "Corrupted value is not stable"

    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["chksum"])

    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_corrupt_dataofs(logger):
    """
    Tests the tamper 'corrupt' primitive on the TCP data offset field.
    """
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S", dataofs="6L"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="dataofs", tamper_type="corrupt")
    tamper.tamper(packet, logger)

    # Confirm tamper actually corrupted the checksum
    assert packet[TCP].dataofs != "0"
    new_value = packet[TCP].dataofs

    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].dataofs == new_value, "Corrupted value is not stable"

    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["dataofs"])

    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_replace(logger):
    """
    Tests the tamper 'replace' primitive on both a normal field (flags) and
    the specially-handled checksum field.
    """
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R")
    assert tamper.field == "flags", "Tamper action changed fields."
    assert tamper.tamper_type == "replace", "Tamper action changed types."

    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)

    # Confirm tamper replaced the field it was supposed to
    assert packet[TCP].flags == "R", "Tamper did not replace flags."

    # Confirm tamper didn't replace anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["flags"])

    # Confirm tamper didn't replace anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])

    # chksums must be handled specially by tamper, so run a second check on this value
    tamper.field = "chksum"
    tamper.tamper_value = 0x4444
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)
    assert packet[TCP].chksum == 0x4444, "Tamper failed to change chksum."

    # Confirm tamper didn't replace anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["chksum"])

    # Confirm tamper didn't replace anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_init():
    """
    A TamperAction built with no explicit configuration must pick sane
    defaults for every knob.
    """
    action = actions.tamper.TamperAction(None)
    assert action.field, "default field must be set"
    assert action.tamper_proto, "default protocol must be set"
    assert action.tamper_value is not None, "default tamper value must be set"
def test_parse_flags(logger):
    """
    Tests replacing the entire TCP flags field ("FRAPUN") and the string
    representation of the resulting tamper action.

    (Docstring previously copy-pasted from test_replace; the first assert's
    failure message wrongly said "checksum" while guarding the flags field.)
    """
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="FRAPUN")
    assert tamper.field == "flags", "Tamper action changed fields."
    assert tamper.tamper_type == "replace", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:flags:replace:FRAPUN}", "Tamper returned incorrect string representation: %s" % str(tamper)

    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    tamper.tamper(packet, logger)
    # All six flag bits should now be set on the packet.
    assert packet[TCP].flags == "FRAPUN", "Tamper failed to change flags."
@pytest.mark.parametrize("test_type", ["parsed", "direct"])
@pytest.mark.parametrize("value", ["EOL", "NOP", "Timestamp", "MSS", "WScale", "SAckOK", "SAck", "Timestamp", "AltChkSum", "AltChkSumOpt", "UTO"])
def test_options(logger, value, test_type):
    """
    Tests corrupting each supported TCP option, constructing the action
    either directly or via string parsing, and verifies the tampered option
    survives a serialize/re-parse round trip.
    """
    if test_type == "direct":
        tamper = actions.tamper.TamperAction(None, field="options-%s" % value.lower(), tamper_type="corrupt", tamper_value=bytes([12]))
    else:
        tamper = actions.tamper.TamperAction(None)
        assert tamper.parse("TCP:options-%s:corrupt" % value.lower(), logger)
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    tamper.run(packet, logger)
    # The option may be spelled as the parametrized name or as the project's
    # canonical name from TCPLayer.options_names, so accept either.
    opts_dict_lookup = value.lower().replace(" ", "_")
    for optname, optval in packet["TCP"].options:
        if optname == value:
            break
        elif optname == layers.ip_layer.TCPLayer.options_names[opts_dict_lookup]:
            break
    else:
        # for/else: only reached when no option matched above.
        pytest.fail("Failed to find %s in options" % value)
    assert len(packet["TCP"].options) == 1
    raw_p = bytes(packet)
    assert raw_p, "options broke scapy bytes"
    # Round-trip through raw bytes to prove the tampered option still parses.
    p2 = layers.packet.Packet(IP(bytes(raw_p)))
    assert p2.haslayer("IP")
    assert p2.haslayer("TCP")
    # EOLs might be added for padding, so just check >= 1
    assert len(p2["TCP"].options) >= 1
    for optname, optval in p2["TCP"].options:
        if optname == value:
            break
        elif optname == layers.ip_layer.TCPLayer.options_names[opts_dict_lookup]:
            break
    else:
        pytest.fail("Failed to find %s in options" % value)
def test_tamper_mutate_compress(logger):
    """
    Tests that compress is handled right if its enabled
    """
    # Swap in a primitive list containing only "compress" so the mutation is
    # forced to pick it; the module-level global is restored in the finally.
    backup = copy.deepcopy(actions.tamper.ACTIVATED_PRIMITIVES)
    actions.tamper.ACTIVATED_PRIMITIVES = ["compress"]
    try:
        tamper = actions.tamper.TamperAction(None)
        assert tamper.parse("TCP:flags:corrupt", logger)
        tamper._mutate_tamper_type()
        # Mutating to compress retargets the action at DNS's "qd" field.
        assert tamper.tamper_type == "compress"
        assert tamper.tamper_proto_str == "DNS"
        assert tamper.field == "qd"
        packet = layers.packet.Packet(IP()/TCP()/DNS()/DNSQR())
        # tamper() returns the same (in-place edited) packet object.
        packet2 = tamper.tamper(packet, logger)
        assert packet2 == packet
    finally:
        actions.tamper.ACTIVATED_PRIMITIVES = backup
| 40.907923 | 155 | 0.674361 | import copy
import sys
import pytest
import random
sys.path.append("..")
import evolve
import evaluator
import actions.strategy
import layers.packet
import actions.utils
import actions.tamper
import layers.layer
import layers.ip_layer
from scapy.all import IP, TCP, UDP, DNS, DNSQR, sr1
def test_tamper(logger):
    """
    Tests the basic tamper 'replace' action through TamperAction.run: the
    packet must be edited in place, only a left child returned, and only the
    targeted TCP flags field changed.
    """
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R")
    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"
    assert packet[TCP].flags == "R", "Tamper did not replace flags."
    # Read the value repeatedly - a scapy fuzz-ed value that was not properly
    # ._fix()-ed returns a different value each time it's requested.
    new_value = packet[TCP].flags
    for _ in range(0, 5):
        assert packet[TCP].flags == new_value, "Replaced value is not stable"
    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["flags"])
    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_tamper_ip(logger):
    """
    Tests the tamper 'replace' action targeting an IP-layer field (src).
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="src", tamper_type="replace", tamper_value="192.168.1.1", tamper_proto="IP")
    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"
    # Confirm tamper replaced the field it was supposed to.
    # (Failure message fixed: it previously said "flags", copy-pasted from
    # the flags test, which would mislead on a failure here.)
    assert packet[IP].src == "192.168.1.1", "Tamper did not replace src."
    # Confirm tamper didn't corrupt anything in the TCP header
    assert confirm_unchanged(packet, original, TCP, [])
    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, ["src"])
def test_tamper_udp(logger):
    """
    Tests the tamper 'replace' action targeting a UDP-layer field (chksum).
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/UDP(sport=2222, dport=53))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="chksum", tamper_type="replace", tamper_value=4444, tamper_proto="UDP")
    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"
    # Confirm tamper replaced the field it was supposed to.
    # (Failure message fixed: it previously said "flags", copy-pasted from
    # the flags test; this guards the UDP checksum.)
    assert packet[UDP].chksum == 4444, "Tamper did not replace chksum."
    # Confirm tamper didn't corrupt anything else in the UDP header
    assert confirm_unchanged(packet, original, UDP, ["chksum"])
    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_tamper_ip_ident(logger):
    """
    Tests the tamper 'replace' action targeting the IP identification field.
    """
    packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field='id', tamper_type='replace', tamper_value=3333, tamper_proto="IP")
    lpacket, rpacket = tamper.run(packet, logger)
    assert not rpacket, "Tamper must not return right child"
    assert lpacket, "Tamper must give a left child"
    assert id(lpacket) == id(packet), "Tamper must edit in place"
    # Confirm tamper replaced the field it was supposed to.
    # (Failure message fixed: it previously said "flags", copy-pasted from
    # the flags test; this guards the IP id field.)
    assert packet[IP].id == 3333, "Tamper did not replace id."
    # Confirm tamper didn't corrupt anything in the TCP header
    assert confirm_unchanged(packet, original, TCP, [])
    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, ["id"])
def confirm_unchanged(packet, original, protocol, changed):
    """
    Asserts that, within headers of the given protocol, every field except
    those listed in `changed` (and the payload field "load") is identical
    between `packet` and `original`. Returns True so callers can wrap it in
    an assert for a readable failure location.
    """
    for header in packet.layers:
        # Only inspect headers belonging to the protocol under test.
        if packet.layers[header].protocol != protocol:
            continue
        for field in packet.layers[header].fields:
            # Skip checking the field we just changed
            if field in changed or field == "load":
                continue
            assert packet.get(protocol.__name__, field) == original.get(protocol.__name__, field), "Tamper changed %s field %s." % (str(protocol), field)
    return True
@pytest.mark.parametrize("use_canary", [False, True], ids=["without_canary", "with_canary"])
def test_mutate(logger, use_canary):
    """
    Stress-tests TamperAction mutation: repeatedly mutates an action (with
    and without a canary environment id), checking the mutated value is
    stable, the action can run on a fresh packet, and the action's string
    form round-trips through parse().
    """
    logger.setLevel("ERROR")
    canary_id = None
    # Create an evaluator
    if use_canary:
        cmd = [
            "--test-type", "echo",
            "--censor", "censor2",
            "--log", actions.utils.CONSOLE_LOG_LEVEL,
            "--no-skip-empty",
            "--bad-word", "facebook",
            "--output-directory", actions.utils.RUN_DIRECTORY
        ]
        tester = evaluator.Evaluator(cmd, logger)
        canary_id = evolve.run_collection_phase(logger, tester)
    for _ in range(0, 25):
        tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R", tamper_proto="TCP")
        # Test mutation 200 times to ensure it remains stable
        for _ in range(0, 200):
            tamper._mutate(canary_id)
            tamper2 = actions.tamper.TamperAction(None)
            # Confirm tamper value was properly ._fix()-ed
            val = tamper.tamper_value
            for _ in range(0, 5):
                assert tamper.tamper_value == val, "Tamper value is not stable."
            # Create a test packet to ensure the field/proto choice was safe
            if random.random() < 0.5:
                test_packet = layers.packet.Packet(IP()/TCP())
            else:
                test_packet = layers.packet.Packet(IP()/UDP())
            # Check that tamper can run safely after mutation
            try:
                tamper.run(test_packet, logger)
            except:
                # Dump the offending action before re-raising for debuggability.
                print(str(tamper))
                raise
            tamper._mutate_tamper_type()
            # Test that parsing tamper works - note we have to remove the tamper{} to make a call directly using tamper's parse.
            tamper2.parse(str(tamper)[7:-1], logger)
            assert str(tamper2) == str(tamper)
def test_parse_parameters(logger):
    """
    Tests that parse() raises when given too many or too few
    colon-separated parameters.
    """
    # NOTE(review): pytest.raises(Exception) is broad - it would also pass if
    # TamperAction() itself raised (every other construction in this module
    # passes an explicit environment id, TamperAction(None)). Confirm the
    # constructor accepts zero arguments, or tighten the expected exception.
    with pytest.raises(Exception):
        actions.tamper.TamperAction().parse("this:has:too:many:parameters", logger)
    with pytest.raises(Exception):
        actions.tamper.TamperAction().parse("not:enough", logger)
def test_corrupt(logger):
    """
    Tests the tamper 'corrupt' primitive on the TCP flags field and the
    action's string representation.
    """
    tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="corrupt", tamper_value="R")
    assert tamper.field == "flags", "Tamper action changed fields."
    assert tamper.tamper_type == "corrupt", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:flags:corrupt}", "Tamper returned incorrect string representation: %s" % str(tamper)
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)
    # Read the corrupted value repeatedly - a scapy fuzz-ed value that was
    # not ._fix()-ed would return a different value on each access.
    new_value = packet[TCP].flags
    for _ in range(0, 5):
        assert packet[TCP].flags == new_value, "Corrupted value is not stable"
    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["flags"])
    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_add(logger):
    """
    Tests the tamper 'add' primitive: adds 10 to the TCP sequence number
    (100 -> 110) and checks nothing else changed.
    """
    tamper = actions.tamper.TamperAction(None, field="seq", tamper_type="add", tamper_value=10)
    assert tamper.field == "seq", "Tamper action changed fields."
    assert tamper.tamper_type == "add", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:seq:add:10}", "Tamper returned incorrect string representation: %s" % str(tamper)
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)
    new_value = packet[TCP].seq
    assert new_value == 110, "Tamper did not add"
    # Must run this check repeatedly - if a scapy fuzz-ed value is not properly
    # ._fix()-ed, it will return different values each time it's requested
    for _ in range(0, 5):
        assert packet[TCP].seq == new_value, "Corrupted value is not stable"
    # Confirm tamper didn't touch anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["seq"])
    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_decompress(logger):
    """
    Tests the tamper 'compress' primitive on the DNS question section,
    checking the exact compressed wire bytes and that a real resolver still
    answers the query.

    NOTE(review): this test sends live DNS queries to 8.8.8.8 via sr1(), so
    it requires network access (and typically raw-socket privileges) - it
    will fail offline.
    """
    tamper = actions.tamper.TamperAction(None, field="qd", tamper_type="compress", tamper_value=10, tamper_proto="DNS")
    assert tamper.field == "qd", "Tamper action changed fields."
    assert tamper.tamper_type == "compress", "Tamper action changed types."
    assert str(tamper) == "tamper{DNS:qd:compress}", "Tamper returned incorrect string representation: %s" % str(tamper)
    packet = layers.packet.Packet(IP(dst="8.8.8.8")/UDP(dport=53)/DNS(qd=DNSQR(qname="minghui.ca.")))
    original = packet.copy()
    tamper.tamper(packet, logger)
    # Exact expected bytes of the compressed DNS question section.
    assert bytes(packet["DNS"]) == b'\x00\x00\x01\x00\x00\x02\x00\x00\x00\x00\x00\x00\x07minghui\xc0\x1a\x00\x01\x00\x01\x02ca\x00\x00\x01\x00\x01'
    # The compressed query must still be answerable by a real resolver.
    resp = sr1(packet.packet)
    assert resp["DNS"]
    assert resp["DNS"].rcode != 1
    assert resp["DNSQR"]
    assert resp["DNSRR"].rdata
    assert confirm_unchanged(packet, original, IP, ["len"])
    print(resp.summary())
    packet = layers.packet.Packet(IP(dst="8.8.8.8")/UDP(dport=53)/DNS(qd=DNSQR(qname="maps.google.com")))
    original = packet.copy()
    tamper.tamper(packet, logger)
    assert bytes(packet["DNS"]) == b'\x00\x00\x01\x00\x00\x02\x00\x00\x00\x00\x00\x00\x04maps\xc0\x17\x00\x01\x00\x01\x06google\x03com\x00\x00\x01\x00\x01'
    resp = sr1(packet.packet)
    assert resp["DNS"]
    assert resp["DNS"].rcode != 1
    assert resp["DNSQR"]
    assert resp["DNSRR"].rdata
    assert confirm_unchanged(packet, original, IP, ["len"])
    print(resp.summary())
    # A packet with no DNS layer must pass through compress untouched.
    packet = layers.packet.Packet(IP()/UDP())
    original = packet.copy()
    tamper.tamper(packet, logger)
    assert packet.packet.summary() == original.packet.summary()
    assert confirm_unchanged(packet, original, UDP, [])
    # Confirm tamper didn't corrupt anything else in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_corrupt_chksum(logger):
    """
    Tests the tamper 'corrupt' primitive on the TCP checksum field.
    """
    tamper = actions.tamper.TamperAction(None, field="chksum", tamper_type="corrupt", tamper_value="R")
    assert tamper.field == "chksum", "Tamper action changed checksum."
    assert tamper.tamper_type == "corrupt", "Tamper action changed types."
    assert str(tamper) == "tamper{TCP:chksum:corrupt}", "Tamper returned incorrect string representation: %s" % str(tamper)
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
    original = copy.deepcopy(packet)
    tamper.tamper(packet, logger)
    # A corrupted checksum should never be left at 0 (which scapy treats
    # as "recompute for me").
    assert packet[TCP].chksum != 0
    # Read repeatedly - a fuzz-ed value that was not ._fix()-ed would
    # return a different value on each access.
    new_value = packet[TCP].chksum
    for _ in range(0, 5):
        assert packet[TCP].chksum == new_value, "Corrupted value is not stable"
    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["chksum"])
    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_corrupt_dataofs(logger):
    """
    Tests the tamper 'corrupt' primitive on the TCP data offset field.
    """
    # NOTE(review): dataofs is given as the string "6L" here - confirm this
    # is intentional (scapy appears to accept it) rather than a leftover
    # Python 2 long literal.
    packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S", dataofs="6L"))
    original = copy.deepcopy(packet)
    tamper = actions.tamper.TamperAction(None, field="dataofs", tamper_type="corrupt")
    tamper.tamper(packet, logger)
    # Confirm tamper actually corrupted the data offset.
    assert packet[TCP].dataofs != "0"
    new_value = packet[TCP].dataofs
    # Read repeatedly - a fuzz-ed value that was not ._fix()-ed would
    # return a different value on each access.
    for _ in range(0, 5):
        assert packet[TCP].dataofs == new_value, "Corrupted value is not stable"
    # Confirm tamper didn't corrupt anything else in the TCP header
    assert confirm_unchanged(packet, original, TCP, ["dataofs"])
    # Confirm tamper didn't corrupt anything in the IP header
    assert confirm_unchanged(packet, original, IP, [])
def test_replace(logger):
tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="R")
assert tamper.field == "flags", "Tamper action changed fields."
assert tamper.tamper_type == "replace", "Tamper action changed types."
packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
original = copy.deepcopy(packet)
tamper.tamper(packet, logger)
assert packet[TCP].flags == "R", "Tamper did not replace flags."
assert confirm_unchanged(packet, original, TCP, ["flags"])
# Confirm tamper didn't replace anything else in the IP header
assert confirm_unchanged(packet, original, IP, [])
tamper.field = "chksum"
tamper.tamper_value = 0x4444
original = copy.deepcopy(packet)
tamper.tamper(packet, logger)
assert packet[TCP].chksum == 0x4444, "Tamper failed to change chksum."
assert confirm_unchanged(packet, original, TCP, ["chksum"])
# Confirm tamper didn't replace anything else in the IP header
assert confirm_unchanged(packet, original, IP, [])
def test_init():
tamper = actions.tamper.TamperAction(None)
assert tamper.field
assert tamper.tamper_proto
assert tamper.tamper_value is not None
def test_parse_flags(logger):
tamper = actions.tamper.TamperAction(None, field="flags", tamper_type="replace", tamper_value="FRAPUN")
assert tamper.field == "flags", "Tamper action changed checksum."
assert tamper.tamper_type == "replace", "Tamper action changed types."
assert str(tamper) == "tamper{TCP:flags:replace:FRAPUN}", "Tamper returned incorrect string representation: %s" % str(tamper)
packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
tamper.tamper(packet, logger)
assert packet[TCP].flags == "FRAPUN", "Tamper failed to change flags."
@pytest.mark.parametrize("test_type", ["parsed", "direct"])
@pytest.mark.parametrize("value", ["EOL", "NOP", "Timestamp", "MSS", "WScale", "SAckOK", "SAck", "Timestamp", "AltChkSum", "AltChkSumOpt", "UTO"])
def test_options(logger, value, test_type):
if test_type == "direct":
tamper = actions.tamper.TamperAction(None, field="options-%s" % value.lower(), tamper_type="corrupt", tamper_value=bytes([12]))
else:
tamper = actions.tamper.TamperAction(None)
assert tamper.parse("TCP:options-%s:corrupt" % value.lower(), logger)
packet = layers.packet.Packet(IP(src="127.0.0.1", dst="127.0.0.1")/TCP(sport=2222, dport=3333, seq=100, ack=100, flags="S"))
tamper.run(packet, logger)
opts_dict_lookup = value.lower().replace(" ", "_")
for optname, optval in packet["TCP"].options:
if optname == value:
break
elif optname == layers.ip_layer.TCPLayer.options_names[opts_dict_lookup]:
break
else:
pytest.fail("Failed to find %s in options" % value)
assert len(packet["TCP"].options) == 1
raw_p = bytes(packet)
assert raw_p, "options broke scapy bytes"
p2 = layers.packet.Packet(IP(bytes(raw_p)))
assert p2.haslayer("IP")
assert p2.haslayer("TCP")
assert len(p2["TCP"].options) >= 1
for optname, optval in p2["TCP"].options:
if optname == value:
break
elif optname == layers.ip_layer.TCPLayer.options_names[opts_dict_lookup]:
break
else:
pytest.fail("Failed to find %s in options" % value)
def test_tamper_mutate_compress(logger):
backup = copy.deepcopy(actions.tamper.ACTIVATED_PRIMITIVES)
actions.tamper.ACTIVATED_PRIMITIVES = ["compress"]
try:
tamper = actions.tamper.TamperAction(None)
assert tamper.parse("TCP:flags:corrupt", logger)
tamper._mutate_tamper_type()
assert tamper.tamper_type == "compress"
assert tamper.tamper_proto_str == "DNS"
assert tamper.field == "qd"
packet = layers.packet.Packet(IP()/TCP()/DNS()/DNSQR())
packet2 = tamper.tamper(packet, logger)
assert packet2 == packet
finally:
actions.tamper.ACTIVATED_PRIMITIVES = backup
| true | true |
1c3d634009757d91c5cb50e92fdaecc80830d012 | 9,568 | py | Python | predict/predict_3D.py | vcarlosrb/STRAPS-3DHumanShapePose | a62853a7c0831d5a54c56e707d231f5300d20fda | [
"MIT"
] | 1 | 2021-02-02T16:17:52.000Z | 2021-02-02T16:17:52.000Z | predict/predict_3D.py | vcarlosrb/STRAPS-3DHumanShapePose | a62853a7c0831d5a54c56e707d231f5300d20fda | [
"MIT"
] | null | null | null | predict/predict_3D.py | vcarlosrb/STRAPS-3DHumanShapePose | a62853a7c0831d5a54c56e707d231f5300d20fda | [
"MIT"
] | null | null | null | import os
import cv2
import numpy as np
import torch
from smplx.lbs import batch_rodrigues
from detectron2.config import get_cfg
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from PointRend.point_rend import add_pointrend_config
from DensePose.densepose import add_densepose_config
from bodyMeasurement.body_measurement_from_smpl import getBodyMeasurement
import config
from predict.predict_joints2D import predict_joints2D
from predict.predict_silhouette_pointrend import predict_silhouette_pointrend
from predict.predict_densepose import predict_densepose
from models.smpl_official import SMPL
from renderers.weak_perspective_pyrender_renderer import Renderer
from utils.image_utils import pad_to_square
from utils.cam_utils import orthographic_project_torch
from utils.reposed_utils import getReposedRotmats
from utils.joints2d_utils import undo_keypoint_normalisation
from utils.label_conversions import convert_multiclass_to_binary_labels, \
convert_2Djoints_to_gaussian_heatmaps
from utils.rigid_transform_utils import rot6d_to_rotmat
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
def setup_detectron2_predictors(silhouettes_from='densepose'):
    """
    Build the Detectron2 predictors used to create proxy representations.

    :param silhouettes_from: which model produces silhouettes, either
        'pointrend' or 'densepose' (default).
    :returns: (joints2D_predictor, silhouette_predictor) tuple of
        DefaultPredictor instances.
    :raises ValueError: for an unknown silhouettes_from value. Previously an
        unknown value fell through both branches and the function crashed
        later with an UnboundLocalError on silhouette_predictor.
    """
    # Keypoint-RCNN for 2D joint detection
    kprcnn_config_file = "COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml"
    kprcnn_cfg = get_cfg()
    kprcnn_cfg.merge_from_file(model_zoo.get_config_file(kprcnn_config_file))
    kprcnn_cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7  # set threshold for this model
    kprcnn_cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(kprcnn_config_file)
    kprcnn_cfg.freeze()
    joints2D_predictor = DefaultPredictor(kprcnn_cfg)

    if silhouettes_from == 'pointrend':
        # PointRend-RCNN-R50-FPN
        pointrend_config_file = "PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_coco.yaml"
        pointrend_cfg = get_cfg()
        add_pointrend_config(pointrend_cfg)
        pointrend_cfg.merge_from_file(pointrend_config_file)
        pointrend_cfg.MODEL.WEIGHTS = "checkpoints/pointrend_rcnn_R_50_fpn.pkl"
        pointrend_cfg.freeze()
        silhouette_predictor = DefaultPredictor(pointrend_cfg)
    elif silhouettes_from == 'densepose':
        # DensePose-RCNN-R101-FPN
        densepose_config_file = "DensePose/configs/densepose_rcnn_R_101_FPN_s1x.yaml"
        densepose_cfg = get_cfg()
        add_densepose_config(densepose_cfg)
        densepose_cfg.merge_from_file(densepose_config_file)
        densepose_cfg.MODEL.WEIGHTS = "checkpoints/densepose_rcnn_R_101_fpn_s1x.pkl"
        densepose_cfg.freeze()
        silhouette_predictor = DefaultPredictor(densepose_cfg)
    else:
        raise ValueError(
            "Unknown silhouettes_from option: {!r} "
            "(expected 'pointrend' or 'densepose')".format(silhouettes_from))

    return joints2D_predictor, silhouette_predictor
def create_proxy_representation(silhouette,
                                joints2D,
                                in_wh,
                                out_wh):
    """
    Build the channels-first proxy representation the regressor consumes:
    a silhouette channel stacked with one Gaussian heatmap per 2D joint.

    :param silhouette: 2D silhouette map in the in_wh frame; resized here
        with nearest-neighbour so label values are not interpolated.
    :param joints2D: per-joint rows whose first two columns are (x, y)
        pixel coordinates in the in_wh frame; extra columns (presumably
        confidences - confirm against callers) are dropped.
    :param in_wh: side length of the square frame the joints were detected in.
    :param out_wh: side length of the square output representation.
    :returns: numpy array of shape (1 + num_joints, out_wh, out_wh).
    """
    silhouette = cv2.resize(silhouette, (out_wh, out_wh),
                            interpolation=cv2.INTER_NEAREST)
    # Keep only (x, y) and rescale coordinates from in_wh to out_wh.
    joints2D = joints2D[:, :2]
    joints2D = joints2D * np.array([out_wh / float(in_wh),
                                    out_wh / float(in_wh)])
    heatmaps = convert_2Djoints_to_gaussian_heatmaps(joints2D.astype(np.int16),
                                                     out_wh)
    # Stack silhouette + heatmaps, then move channels to the front.
    proxy_rep = np.concatenate([silhouette[:, :, None], heatmaps], axis=-1)
    proxy_rep = np.transpose(proxy_rep, [2, 0, 1])  # (C, out_wh, out_WH)
    return proxy_rep
def predict_3D(input,
               regressor,
               device,
               silhouettes_from='densepose',
               proxy_rep_input_wh=512,
               save_proxy_vis=True,
               render_vis=True):
    """
    Run the full 2D-detection -> proxy-representation -> SMPL-regression
    pipeline on every .png/.jpg image in a directory, printing body
    measurements and saving visualisations next to the inputs.

    :param input: directory of input images (note: parameter name shadows
        the `input` builtin - kept for interface compatibility).
    :param regressor: trained network mapping a proxy representation to
        (weak-perspective camera, SMPL pose, SMPL shape).
    :param device: torch device for the SMPL model and regressor inputs.
    :param silhouettes_from: 'pointrend' or 'densepose' silhouette source.
        NOTE(review): any other value leaves `silhouette` unbound and
        crashes at create_proxy_representation.
    :param proxy_rep_input_wh: side length images are padded/resized to
        before the 2D detectors run.
    :param save_proxy_vis: save silhouette/joint visualisations.
    :param render_vis: render the predicted and reposed meshes.
    """
    # Set-up proxy representation predictors.
    joints2D_predictor, silhouette_predictor = setup_detectron2_predictors(silhouettes_from=silhouettes_from)
    # Set-up SMPL model.
    smpl = SMPL(config.SMPL_MODEL_DIR, batch_size=1).to(device)
    if render_vis:
        # Set-up renderer for visualisation.
        wp_renderer = Renderer(resolution=(proxy_rep_input_wh, proxy_rep_input_wh))
    if os.path.isdir(input):
        image_fnames = [f for f in sorted(os.listdir(input)) if f.endswith('.png') or
                        f.endswith('.jpg')]
        for fname in image_fnames:
            print("Predicting on:", fname)
            image = cv2.imread(os.path.join(input, fname))
            # Pre-process for 2D detectors
            image = pad_to_square(image)
            image = cv2.resize(image, (proxy_rep_input_wh, proxy_rep_input_wh),
                               interpolation=cv2.INTER_LINEAR)
            # Predict 2D
            joints2D, joints2D_vis = predict_joints2D(image, joints2D_predictor)
            if silhouettes_from == 'pointrend':
                silhouette, silhouette_vis = predict_silhouette_pointrend(image,
                                                                          silhouette_predictor)
            elif silhouettes_from == 'densepose':
                silhouette, silhouette_vis = predict_densepose(image, silhouette_predictor)
                silhouette = convert_multiclass_to_binary_labels(silhouette)
            # Create proxy representation
            proxy_rep = create_proxy_representation(silhouette, joints2D,
                                                    in_wh=proxy_rep_input_wh,
                                                    out_wh=config.REGRESSOR_IMG_WH)
            proxy_rep = proxy_rep[None, :, :, :]  # add batch dimension
            proxy_rep = torch.from_numpy(proxy_rep).float().to(device)
            # Predict 3D
            regressor.eval()
            with torch.no_grad():
                pred_cam_wp, pred_pose, pred_shape = regressor(proxy_rep)
                # Convert pred pose to rotation matrices
                # (axis-angle if 24*3 values, 6D rotation if 24*6 values;
                # NOTE(review): any other width leaves pred_pose_rotmats unbound)
                if pred_pose.shape[-1] == 24 * 3:
                    pred_pose_rotmats = batch_rodrigues(pred_pose.contiguous().view(-1, 3))
                    pred_pose_rotmats = pred_pose_rotmats.view(-1, 24, 3, 3)
                elif pred_pose.shape[-1] == 24 * 6:
                    pred_pose_rotmats = rot6d_to_rotmat(pred_pose.contiguous()).view(-1, 24, 3, 3)
                pred_smpl_output = smpl(body_pose=pred_pose_rotmats[:, 1:],
                                        global_orient=pred_pose_rotmats[:, 0].unsqueeze(1),
                                        betas=pred_shape,
                                        pose2rot=False)
                pred_vertices = pred_smpl_output.vertices
                # Project predicted vertices to image coordinates with the
                # weak-perspective camera.
                pred_vertices2d = orthographic_project_torch(pred_vertices, pred_cam_wp)
                pred_vertices2d = undo_keypoint_normalisation(pred_vertices2d,
                                                              proxy_rep_input_wh)
                # Re-pose the predicted shape into a canonical pose for
                # body measurement.
                reposed_pose_rotmats, reposed_glob_rotmats = getReposedRotmats(1, device)
                pred_reposed_smpl_output = smpl(
                    betas=pred_shape,
                    body_pose=reposed_pose_rotmats,
                    global_orient=reposed_glob_rotmats,
                    pose2rot=False
                )
                pred_reposed_vertices = pred_reposed_smpl_output.vertices
                weight, height, chest_length, hip_length = getBodyMeasurement(pred_reposed_vertices, smpl.faces)
                print("WEIGHT=>", weight)
                print("HEIGHT=>", height)
                print("CHEST=>", chest_length)
                print("HIP=>", hip_length)
                print("-------------------------------------------")
            # Numpy-fying
            pred_vertices = pred_vertices.cpu().detach().numpy()[0]
            pred_vertices2d = pred_vertices2d.cpu().detach().numpy()[0]
            pred_reposed_vertices = pred_reposed_vertices.cpu().detach().numpy()[0]
            pred_cam_wp = pred_cam_wp.cpu().detach().numpy()[0]
            if not os.path.isdir(os.path.join(input, 'verts_vis')):
                os.makedirs(os.path.join(input, 'verts_vis'))
            # Scatter projected vertices over the (BGR->RGB flipped) image
            # with all axes/margins stripped, then save.
            plt.figure()
            plt.imshow(image[:,:,::-1])
            plt.scatter(pred_vertices2d[:, 0], pred_vertices2d[:, 1], s=0.3)
            plt.gca().set_axis_off()
            plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
            plt.margins(0, 0)
            plt.gca().xaxis.set_major_locator(plt.NullLocator())
            plt.gca().yaxis.set_major_locator(plt.NullLocator())
            plt.savefig(os.path.join(input, 'verts_vis', 'verts_'+fname))
            if render_vis:
                rend_img = wp_renderer.render(verts=pred_vertices, cam=pred_cam_wp, img=image)
                rend_reposed_img = wp_renderer.render(verts=pred_reposed_vertices,
                                                      cam=np.array([0.8, 0., -0.2]),
                                                      angle=180,
                                                      axis=[1, 0, 0])
                if not os.path.isdir(os.path.join(input, 'rend_vis')):
                    os.makedirs(os.path.join(input, 'rend_vis'))
                cv2.imwrite(os.path.join(input, 'rend_vis', 'rend_'+fname), rend_img)
                cv2.imwrite(os.path.join(input, 'rend_vis', 'reposed_'+fname), rend_reposed_img)
            if save_proxy_vis:
                if not os.path.isdir(os.path.join(input, 'proxy_vis')):
                    os.makedirs(os.path.join(input, 'proxy_vis'))
                cv2.imwrite(os.path.join(input, 'proxy_vis', 'silhouette_'+fname), silhouette_vis)
                cv2.imwrite(os.path.join(input, 'proxy_vis', 'joints2D_'+fname), joints2D_vis)
| 48.080402 | 112 | 0.621342 | import os
import cv2
import numpy as np
import torch
from smplx.lbs import batch_rodrigues
from detectron2.config import get_cfg
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from PointRend.point_rend import add_pointrend_config
from DensePose.densepose import add_densepose_config
from bodyMeasurement.body_measurement_from_smpl import getBodyMeasurement
import config
from predict.predict_joints2D import predict_joints2D
from predict.predict_silhouette_pointrend import predict_silhouette_pointrend
from predict.predict_densepose import predict_densepose
from models.smpl_official import SMPL
from renderers.weak_perspective_pyrender_renderer import Renderer
from utils.image_utils import pad_to_square
from utils.cam_utils import orthographic_project_torch
from utils.reposed_utils import getReposedRotmats
from utils.joints2d_utils import undo_keypoint_normalisation
from utils.label_conversions import convert_multiclass_to_binary_labels, \
convert_2Djoints_to_gaussian_heatmaps
from utils.rigid_transform_utils import rot6d_to_rotmat
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
def setup_detectron2_predictors(silhouettes_from='densepose'):
kprcnn_config_file = "COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml"
kprcnn_cfg = get_cfg()
kprcnn_cfg.merge_from_file(model_zoo.get_config_file(kprcnn_config_file))
kprcnn_cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7
kprcnn_cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(kprcnn_config_file)
kprcnn_cfg.freeze()
joints2D_predictor = DefaultPredictor(kprcnn_cfg)
if silhouettes_from == 'pointrend':
pointrend_config_file = "PointRend/configs/InstanceSegmentation/pointrend_rcnn_R_50_FPN_3x_coco.yaml"
pointrend_cfg = get_cfg()
add_pointrend_config(pointrend_cfg)
pointrend_cfg.merge_from_file(pointrend_config_file)
pointrend_cfg.MODEL.WEIGHTS = "checkpoints/pointrend_rcnn_R_50_fpn.pkl"
pointrend_cfg.freeze()
silhouette_predictor = DefaultPredictor(pointrend_cfg)
elif silhouettes_from == 'densepose':
densepose_config_file = "DensePose/configs/densepose_rcnn_R_101_FPN_s1x.yaml"
densepose_cfg = get_cfg()
add_densepose_config(densepose_cfg)
densepose_cfg.merge_from_file(densepose_config_file)
densepose_cfg.MODEL.WEIGHTS = "checkpoints/densepose_rcnn_R_101_fpn_s1x.pkl"
densepose_cfg.freeze()
silhouette_predictor = DefaultPredictor(densepose_cfg)
return joints2D_predictor, silhouette_predictor
def create_proxy_representation(silhouette,
joints2D,
in_wh,
out_wh):
silhouette = cv2.resize(silhouette, (out_wh, out_wh),
interpolation=cv2.INTER_NEAREST)
joints2D = joints2D[:, :2]
joints2D = joints2D * np.array([out_wh / float(in_wh),
out_wh / float(in_wh)])
heatmaps = convert_2Djoints_to_gaussian_heatmaps(joints2D.astype(np.int16),
out_wh)
proxy_rep = np.concatenate([silhouette[:, :, None], heatmaps], axis=-1)
proxy_rep = np.transpose(proxy_rep, [2, 0, 1])
return proxy_rep
def predict_3D(input,
regressor,
device,
silhouettes_from='densepose',
proxy_rep_input_wh=512,
save_proxy_vis=True,
render_vis=True):
joints2D_predictor, silhouette_predictor = setup_detectron2_predictors(silhouettes_from=silhouettes_from)
smpl = SMPL(config.SMPL_MODEL_DIR, batch_size=1).to(device)
if render_vis:
wp_renderer = Renderer(resolution=(proxy_rep_input_wh, proxy_rep_input_wh))
if os.path.isdir(input):
image_fnames = [f for f in sorted(os.listdir(input)) if f.endswith('.png') or
f.endswith('.jpg')]
for fname in image_fnames:
print("Predicting on:", fname)
image = cv2.imread(os.path.join(input, fname))
image = pad_to_square(image)
image = cv2.resize(image, (proxy_rep_input_wh, proxy_rep_input_wh),
interpolation=cv2.INTER_LINEAR)
joints2D, joints2D_vis = predict_joints2D(image, joints2D_predictor)
if silhouettes_from == 'pointrend':
silhouette, silhouette_vis = predict_silhouette_pointrend(image,
silhouette_predictor)
elif silhouettes_from == 'densepose':
silhouette, silhouette_vis = predict_densepose(image, silhouette_predictor)
silhouette = convert_multiclass_to_binary_labels(silhouette)
proxy_rep = create_proxy_representation(silhouette, joints2D,
in_wh=proxy_rep_input_wh,
out_wh=config.REGRESSOR_IMG_WH)
proxy_rep = proxy_rep[None, :, :, :]
proxy_rep = torch.from_numpy(proxy_rep).float().to(device)
regressor.eval()
with torch.no_grad():
pred_cam_wp, pred_pose, pred_shape = regressor(proxy_rep)
if pred_pose.shape[-1] == 24 * 3:
pred_pose_rotmats = batch_rodrigues(pred_pose.contiguous().view(-1, 3))
pred_pose_rotmats = pred_pose_rotmats.view(-1, 24, 3, 3)
elif pred_pose.shape[-1] == 24 * 6:
pred_pose_rotmats = rot6d_to_rotmat(pred_pose.contiguous()).view(-1, 24, 3, 3)
pred_smpl_output = smpl(body_pose=pred_pose_rotmats[:, 1:],
global_orient=pred_pose_rotmats[:, 0].unsqueeze(1),
betas=pred_shape,
pose2rot=False)
pred_vertices = pred_smpl_output.vertices
pred_vertices2d = orthographic_project_torch(pred_vertices, pred_cam_wp)
pred_vertices2d = undo_keypoint_normalisation(pred_vertices2d,
proxy_rep_input_wh)
reposed_pose_rotmats, reposed_glob_rotmats = getReposedRotmats(1, device)
pred_reposed_smpl_output = smpl(
betas=pred_shape,
body_pose=reposed_pose_rotmats,
global_orient=reposed_glob_rotmats,
pose2rot=False
)
pred_reposed_vertices = pred_reposed_smpl_output.vertices
weight, height, chest_length, hip_length = getBodyMeasurement(pred_reposed_vertices, smpl.faces)
print("WEIGHT=>", weight)
print("HEIGHT=>", height)
print("CHEST=>", chest_length)
print("HIP=>", hip_length)
print("-------------------------------------------")
pred_vertices = pred_vertices.cpu().detach().numpy()[0]
pred_vertices2d = pred_vertices2d.cpu().detach().numpy()[0]
pred_reposed_vertices = pred_reposed_vertices.cpu().detach().numpy()[0]
pred_cam_wp = pred_cam_wp.cpu().detach().numpy()[0]
if not os.path.isdir(os.path.join(input, 'verts_vis')):
os.makedirs(os.path.join(input, 'verts_vis'))
plt.figure()
plt.imshow(image[:,:,::-1])
plt.scatter(pred_vertices2d[:, 0], pred_vertices2d[:, 1], s=0.3)
plt.gca().set_axis_off()
plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)
plt.margins(0, 0)
plt.gca().xaxis.set_major_locator(plt.NullLocator())
plt.gca().yaxis.set_major_locator(plt.NullLocator())
plt.savefig(os.path.join(input, 'verts_vis', 'verts_'+fname))
if render_vis:
rend_img = wp_renderer.render(verts=pred_vertices, cam=pred_cam_wp, img=image)
rend_reposed_img = wp_renderer.render(verts=pred_reposed_vertices,
cam=np.array([0.8, 0., -0.2]),
angle=180,
axis=[1, 0, 0])
if not os.path.isdir(os.path.join(input, 'rend_vis')):
os.makedirs(os.path.join(input, 'rend_vis'))
cv2.imwrite(os.path.join(input, 'rend_vis', 'rend_'+fname), rend_img)
cv2.imwrite(os.path.join(input, 'rend_vis', 'reposed_'+fname), rend_reposed_img)
if save_proxy_vis:
if not os.path.isdir(os.path.join(input, 'proxy_vis')):
os.makedirs(os.path.join(input, 'proxy_vis'))
cv2.imwrite(os.path.join(input, 'proxy_vis', 'silhouette_'+fname), silhouette_vis)
cv2.imwrite(os.path.join(input, 'proxy_vis', 'joints2D_'+fname), joints2D_vis)
| true | true |
1c3d634f09c3b24055fd877e50bdd500aed84c8a | 7,102 | py | Python | denonavr/appcommand.py | prcutler/denonavr | f786dedf7a1ba3c4c2374a65737cf18721593c0c | [
"MIT"
] | 121 | 2016-11-26T14:08:08.000Z | 2021-07-14T21:14:55.000Z | denonavr/appcommand.py | prcutler/denonavr | f786dedf7a1ba3c4c2374a65737cf18721593c0c | [
"MIT"
] | 211 | 2016-12-20T17:30:32.000Z | 2021-09-21T13:36:34.000Z | denonavr/appcommand.py | prcutler/denonavr | f786dedf7a1ba3c4c2374a65737cf18721593c0c | [
"MIT"
] | 74 | 2016-12-05T23:47:49.000Z | 2021-08-10T10:21:56.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module implements foundation for AppCommand(0300) of Denon AVR receivers.
:copyright: (c) 2021 by Oliver Goetz.
:license: MIT, see LICENSE for more details.
"""
from typing import Optional, Tuple
import attr
@attr.s(auto_attribs=True, frozen=True)
class AppCommandCmdParam:
    """Immutable name/text pair used as a ``<param>`` element of an AppCommand.

    ``name`` is the parameter's name attribute and ``text`` its optional text
    content (e.g. a value to set); both are coerced to ``str`` on creation.
    """
    # Parameter name attribute (e.g. "dynamiceq"); always stored as str.
    name: str = attr.ib(converter=str)
    # Optional element text; defaults to the empty string.
    text: str = attr.ib(converter=str, default="")
@attr.s(auto_attribs=True, frozen=True)
class AppCommandResponsePattern:
    """
    AppCommand response pattern data type.

    Use it to configure the search pattern in an AppCommand response:
    which element of the XML reply to look up and which attribute of the
    consuming object should receive the parsed value.
    """
    # Name of the attribute on the consuming object that gets the value.
    update_attribute: str = attr.ib(converter=str)
    # Whether the pattern is resolved per zone (assumed: the consumer
    # inserts the zone into the search path — confirm against the caller).
    add_zone: bool = attr.ib(converter=bool, default=True)
    # Path suffix appended to the search pattern (e.g. "/volume").
    suffix: str = attr.ib(converter=str, default="")
    # If set, read this XML attribute from the matched element instead of
    # the element's text content.
    get_xml_attribute: Optional[str] = attr.ib(
        converter=attr.converters.optional(str), default=None)
@attr.s(auto_attribs=True, frozen=True)
class AppCommandCmd:
    """
    AppCommand data type.

    Immutable description of a single AppCommand request: the command id,
    its text (cmd_id 1 commands) or name (cmd_id 3 commands), optional
    request parameters, and the response patterns used to interpret the
    reply.
    """
    # AppCommand protocol id; coerced to str (callers pass ints, e.g. 1, 3).
    cmd_id: str = attr.ib(converter=str)
    # Command text for cmd_id 1 commands (e.g. "GetAllZoneVolume").
    cmd_text: Optional[str] = attr.ib(
        converter=attr.converters.optional(str), default=None)
    # Command name for cmd_id 3 commands (e.g. "GetAudyssey").
    name: Optional[str] = attr.ib(
        converter=attr.converters.optional(str), default=None)
    # Optional tuple of <param> elements to send with the command.
    param_list: Optional[Tuple[AppCommandCmdParam, ...]] = attr.ib(
        validator=attr.validators.optional(
            attr.validators.deep_iterable(
                attr.validators.instance_of(AppCommandCmdParam),
                attr.validators.instance_of(tuple))),
        default=None)
    # Placeholder param for "set" style commands; replaced with real values
    # via attr.evolve before sending.
    set_command: Optional[AppCommandCmdParam] = attr.ib(
        validator=attr.validators.optional(
            attr.validators.instance_of(AppCommandCmdParam)),
        default=None)
    # Patterns describing where in the response each value of interest lives.
    response_pattern: Tuple[AppCommandResponsePattern, ...] = attr.ib(
        validator=attr.validators.deep_iterable(
            attr.validators.instance_of(AppCommandResponsePattern),
            attr.validators.instance_of(tuple)),
        default=attr.Factory(tuple))
class AppCommands:
    """Collect known AppCommand.xml tags.

    Each attribute is an immutable :class:`AppCommandCmd` describing one
    command, together with the response patterns used to map the XML reply
    onto receiver attributes.
    """

    GetAllZoneMuteStatus = AppCommandCmd(
        cmd_id=1, cmd_text="GetAllZoneMuteStatus",
        response_pattern=(AppCommandResponsePattern(
            update_attribute="_muted", add_zone=True, suffix=""),))

    GetAllZonePowerStatus = AppCommandCmd(
        cmd_id=1, cmd_text="GetAllZonePowerStatus",
        response_pattern=(AppCommandResponsePattern(
            update_attribute="_power", add_zone=True, suffix=""),))

    GetAllZoneSource = AppCommandCmd(
        cmd_id=1, cmd_text="GetAllZoneSource",
        response_pattern=(AppCommandResponsePattern(
            update_attribute="_input_func", add_zone=True, suffix="/source"),))

    GetAllZoneVolume = AppCommandCmd(
        cmd_id=1, cmd_text="GetAllZoneVolume",
        response_pattern=(AppCommandResponsePattern(
            update_attribute="_volume", add_zone=True, suffix="/volume"),))

    GetSurroundModeStatus = AppCommandCmd(
        cmd_id=1, cmd_text="GetSurroundModeStatus",
        response_pattern=(AppCommandResponsePattern(
            update_attribute="_sound_mode_raw", add_zone=False,
            suffix="/surround"),))

    GetToneControl = AppCommandCmd(
        cmd_id=1, cmd_text="GetToneControl",
        response_pattern=(
            AppCommandResponsePattern(
                update_attribute="_tone_control_status", add_zone=False,
                suffix="/status"),
            AppCommandResponsePattern(
                update_attribute="_tone_control_adjust", add_zone=False,
                suffix="/adjust"),
            AppCommandResponsePattern(
                update_attribute="_bass_level", add_zone=False,
                suffix="/basslevel"),
            AppCommandResponsePattern(
                update_attribute="_bass", add_zone=False,
                suffix="/bassvalue"),
            AppCommandResponsePattern(
                update_attribute="_treble_level", add_zone=False,
                suffix="/treblelevel"),
            AppCommandResponsePattern(
                update_attribute="_treble", add_zone=False,
                suffix="/treblevalue")))

    # Replace set command with a real command using attr.evolve
    SetToneControl = AppCommandCmd(
        cmd_id=1, cmd_text="SetToneControl",
        set_command=AppCommandCmdParam(name="REPLACE", text="REPLACE"))

    GetRenameSource = AppCommandCmd(cmd_id=1, cmd_text="GetRenameSource")
    GetDeletedSource = AppCommandCmd(cmd_id=1, cmd_text="GetDeletedSource")
    GetFriendlyName = AppCommandCmd(cmd_id=1, cmd_text="GetFriendlyName")

    GetAudyssey = AppCommandCmd(
        cmd_id=3,
        name="GetAudyssey",
        param_list=(
            AppCommandCmdParam(name="dynamiceq"),
            AppCommandCmdParam(name="reflevoffset"),
            AppCommandCmdParam(name="dynamicvol"),
            AppCommandCmdParam(name="multeq")),
        response_pattern=(
            AppCommandResponsePattern(
                update_attribute="_multeq", add_zone=False,
                suffix="/list/param[@name='multeq']"),
            AppCommandResponsePattern(
                update_attribute="_multeq_control", add_zone=False,
                suffix="/list/param[@name='multeq']",
                get_xml_attribute="control"),
            AppCommandResponsePattern(
                update_attribute="_dynamiceq", add_zone=False,
                suffix="/list/param[@name='dynamiceq']"),
            AppCommandResponsePattern(
                update_attribute="_dynamiceq_control", add_zone=False,
                suffix="/list/param[@name='dynamiceq']",
                get_xml_attribute="control"),
            AppCommandResponsePattern(
                update_attribute="_reflevoffset", add_zone=False,
                suffix="/list/param[@name='reflevoffset']"),
            AppCommandResponsePattern(
                update_attribute="_reflevoffset_control", add_zone=False,
                suffix="/list/param[@name='reflevoffset']",
                get_xml_attribute="control"),
            AppCommandResponsePattern(
                update_attribute="_dynamicvol", add_zone=False,
                suffix="/list/param[@name='dynamicvol']"),
            AppCommandResponsePattern(
                update_attribute="_dynamicvol_control", add_zone=False,
                suffix="/list/param[@name='dynamicvol']",
                get_xml_attribute="control")))

    SetAudysseyDynamicEQ = AppCommandCmd(
        cmd_id=3,
        name="SetAudyssey",
        param_list=(AppCommandCmdParam(name="dynamiceq", text="REPLACE"),))

    SetAudysseyMultiEQ = AppCommandCmd(
        cmd_id=3,
        name="SetAudyssey",
        # FIX: the parameter name is "multeq" everywhere else in this file
        # (see GetAudyssey above); "multieq" appears to have been a typo.
        param_list=(AppCommandCmdParam(name="multeq", text="REPLACE"),))

    SetAudysseyReflevoffset = AppCommandCmd(
        cmd_id=3,
        name="SetAudyssey",
        param_list=(AppCommandCmdParam(name="reflevoffset", text="REPLACE"),))

    SetAudysseyDynamicvol = AppCommandCmd(
        cmd_id=3,
        name="SetAudyssey",
        param_list=(AppCommandCmdParam(name="dynamicvol", text="REPLACE"),))
| 40.816092 | 79 | 0.648268 |
from typing import Optional, Tuple
import attr
@attr.s(auto_attribs=True, frozen=True)
class AppCommandCmdParam:
name: str = attr.ib(converter=str)
text: str = attr.ib(converter=str, default="")
@attr.s(auto_attribs=True, frozen=True)
class AppCommandResponsePattern:
update_attribute: str = attr.ib(converter=str)
add_zone: bool = attr.ib(converter=bool, default=True)
suffix: str = attr.ib(converter=str, default="")
get_xml_attribute: Optional[str] = attr.ib(
converter=attr.converters.optional(str), default=None)
@attr.s(auto_attribs=True, frozen=True)
class AppCommandCmd:
cmd_id: str = attr.ib(converter=str)
cmd_text: Optional[str] = attr.ib(
converter=attr.converters.optional(str), default=None)
name: Optional[str] = attr.ib(
converter=attr.converters.optional(str), default=None)
param_list: Optional[Tuple[AppCommandCmdParam]] = attr.ib(
validator=attr.validators.optional(
attr.validators.deep_iterable(
attr.validators.instance_of(AppCommandCmdParam),
attr.validators.instance_of(tuple))),
default=None)
set_command: Optional[AppCommandCmdParam] = attr.ib(
validator=attr.validators.optional(
attr.validators.instance_of(AppCommandCmdParam)),
default=None)
response_pattern: Tuple[AppCommandResponsePattern] = attr.ib(
validator=attr.validators.deep_iterable(
attr.validators.instance_of(AppCommandResponsePattern),
attr.validators.instance_of(tuple)),
default=attr.Factory(tuple))
class AppCommands:
GetAllZoneMuteStatus = AppCommandCmd(
cmd_id=1, cmd_text="GetAllZoneMuteStatus",
response_pattern=(AppCommandResponsePattern(
update_attribute="_muted", add_zone=True, suffix=""),))
GetAllZonePowerStatus = AppCommandCmd(
cmd_id=1, cmd_text="GetAllZonePowerStatus",
response_pattern=(AppCommandResponsePattern(
update_attribute="_power", add_zone=True, suffix=""),))
GetAllZoneSource = AppCommandCmd(
cmd_id=1, cmd_text="GetAllZoneSource",
response_pattern=(AppCommandResponsePattern(
update_attribute="_input_func", add_zone=True, suffix="/source"),))
GetAllZoneVolume = AppCommandCmd(
cmd_id=1, cmd_text="GetAllZoneVolume",
response_pattern=(AppCommandResponsePattern(
update_attribute="_volume", add_zone=True, suffix="/volume"),))
GetSurroundModeStatus = AppCommandCmd(
cmd_id=1, cmd_text="GetSurroundModeStatus",
response_pattern=(AppCommandResponsePattern(
update_attribute="_sound_mode_raw", add_zone=False,
suffix="/surround"),))
GetToneControl = AppCommandCmd(
cmd_id=1, cmd_text="GetToneControl",
response_pattern=(
AppCommandResponsePattern(
update_attribute="_tone_control_status", add_zone=False,
suffix="/status"),
AppCommandResponsePattern(
update_attribute="_tone_control_adjust", add_zone=False,
suffix="/adjust"),
AppCommandResponsePattern(
update_attribute="_bass_level", add_zone=False,
suffix="/basslevel"),
AppCommandResponsePattern(
update_attribute="_bass", add_zone=False,
suffix="/bassvalue"),
AppCommandResponsePattern(
update_attribute="_treble_level", add_zone=False,
suffix="/treblelevel"),
AppCommandResponsePattern(
update_attribute="_treble", add_zone=False,
suffix="/treblevalue")))
SetToneControl = AppCommandCmd(
cmd_id=1, cmd_text="SetToneControl",
set_command=AppCommandCmdParam(name="REPLACE", text="REPLACE"))
GetRenameSource = AppCommandCmd(cmd_id=1, cmd_text="GetRenameSource")
GetDeletedSource = AppCommandCmd(cmd_id=1, cmd_text="GetDeletedSource")
GetFriendlyName = AppCommandCmd(cmd_id=1, cmd_text="GetFriendlyName")
GetAudyssey = AppCommandCmd(
cmd_id=3,
name="GetAudyssey",
param_list=(
AppCommandCmdParam(name="dynamiceq"),
AppCommandCmdParam(name="reflevoffset"),
AppCommandCmdParam(name="dynamicvol"),
AppCommandCmdParam(name="multeq")),
response_pattern=(
AppCommandResponsePattern(
update_attribute="_multeq", add_zone=False,
suffix="/list/param[@name='multeq']"),
AppCommandResponsePattern(
update_attribute="_multeq_control", add_zone=False,
suffix="/list/param[@name='multeq']",
get_xml_attribute="control"),
AppCommandResponsePattern(
update_attribute="_dynamiceq", add_zone=False,
suffix="/list/param[@name='dynamiceq']"),
AppCommandResponsePattern(
update_attribute="_dynamiceq_control", add_zone=False,
suffix="/list/param[@name='dynamiceq']",
get_xml_attribute="control"),
AppCommandResponsePattern(
update_attribute="_reflevoffset", add_zone=False,
suffix="/list/param[@name='reflevoffset']"),
AppCommandResponsePattern(
update_attribute="_reflevoffset_control", add_zone=False,
suffix="/list/param[@name='reflevoffset']",
get_xml_attribute="control"),
AppCommandResponsePattern(
update_attribute="_dynamicvol", add_zone=False,
suffix="/list/param[@name='dynamicvol']"),
AppCommandResponsePattern(
update_attribute="_dynamicvol_control", add_zone=False,
suffix="/list/param[@name='dynamicvol']",
get_xml_attribute="control")))
SetAudysseyDynamicEQ = AppCommandCmd(
cmd_id=3,
name="SetAudyssey",
param_list=(AppCommandCmdParam(name="dynamiceq", text="REPLACE"),))
SetAudysseyMultiEQ = AppCommandCmd(
cmd_id=3,
name="SetAudyssey",
param_list=(AppCommandCmdParam(name="multieq", text="REPLACE"),))
SetAudysseyReflevoffset = AppCommandCmd(
cmd_id=3,
name="SetAudyssey",
param_list=(AppCommandCmdParam(name="reflevoffset", text="REPLACE"),))
SetAudysseyDynamicvol = AppCommandCmd(
cmd_id=3,
name="SetAudyssey",
param_list=(AppCommandCmdParam(name="dynamicvol", text="REPLACE"),))
| true | true |
1c3d63a7d6ce9138a7acce6acf352db5b04edab3 | 873 | py | Python | deployment/generate_parameters.py | adelmer/hipay-fullservice-sdk-ios | 026ed4080b28ed4369735ef778e200f4f6579bf0 | [
"Apache-2.0"
] | null | null | null | deployment/generate_parameters.py | adelmer/hipay-fullservice-sdk-ios | 026ed4080b28ed4369735ef778e200f4f6579bf0 | [
"Apache-2.0"
] | null | null | null | deployment/generate_parameters.py | adelmer/hipay-fullservice-sdk-ios | 026ed4080b28ed4369735ef778e200f4f6579bf0 | [
"Apache-2.0"
] | null | null | null | import plistlib, sys, os
parameters = dict(
hipay=dict(
username = os.environ.get('HIPAY_FULLSERVICE_API_USERNAME', 'xxxxxx'),
password = os.environ.get('HIPAY_FULLSERVICE_API_PASSWORD', 'xxxxxx')
),
hockeyapp=dict(
app_identifier = os.environ.get('HOCKEY_APP_IDENTIFIER', 'xxxxxx'),
)
)
filename = "Example/HiPayFullservice/Resources/Parameters/parameters.plist"
path = "../" + filename
# Merge with current parameters
if os.path.isfile(path):
currentParameters = plistlib.readPlist(path)
parameters["hipay"].update(currentParameters["hipay"])
parameters["hockeyapp"].update(currentParameters["hockeyapp"])
# Save file
plistlib.writePlist(parameters, path)
sys.stdout.write("\n\nA new parameters file was created at:\n")
sys.stdout.write(filename + "\n")
sys.stdout.write("You need add your HiPay parameters in it.\n\n\n")
| 31.178571 | 78 | 0.725086 | import plistlib, sys, os
parameters = dict(
hipay=dict(
username = os.environ.get('HIPAY_FULLSERVICE_API_USERNAME', 'xxxxxx'),
password = os.environ.get('HIPAY_FULLSERVICE_API_PASSWORD', 'xxxxxx')
),
hockeyapp=dict(
app_identifier = os.environ.get('HOCKEY_APP_IDENTIFIER', 'xxxxxx'),
)
)
filename = "Example/HiPayFullservice/Resources/Parameters/parameters.plist"
path = "../" + filename
if os.path.isfile(path):
currentParameters = plistlib.readPlist(path)
parameters["hipay"].update(currentParameters["hipay"])
parameters["hockeyapp"].update(currentParameters["hockeyapp"])
plistlib.writePlist(parameters, path)
sys.stdout.write("\n\nA new parameters file was created at:\n")
sys.stdout.write(filename + "\n")
sys.stdout.write("You need add your HiPay parameters in it.\n\n\n")
| true | true |
1c3d63a82fc8b07265f2f99e702fa2ee5fb103b8 | 33,223 | py | Python | tenable/sc/asset_lists.py | mzpqnxow/pyTenable | 3e38e3fb9ecd24b888defd95784b05ad4c5474ec | [
"MIT"
] | null | null | null | tenable/sc/asset_lists.py | mzpqnxow/pyTenable | 3e38e3fb9ecd24b888defd95784b05ad4c5474ec | [
"MIT"
] | 1 | 2021-08-18T17:26:30.000Z | 2021-08-18T17:26:30.000Z | tenable/sc/asset_lists.py | bsinglet/pyTenable | b0cc6919bdc0d5864e8c6cc62ae276a5a54f31fb | [
"MIT"
] | null | null | null | '''
asset_lists
===========
The following methods allow for interaction into the Tenable.sc
:sc-api:`Assets <Asset.html>` API. These items are typically seen
under the **Assets** section of Tenable.sc.
Methods available on ``sc.asset_lists``:
.. rst-class:: hide-signature
.. autoclass:: AssetListAPI
.. automethod:: create
.. automethod:: delete
.. automethod:: details
.. automethod:: edit
.. automethod:: list
'''
from .base import SCEndpoint
from tenable.errors import UnexpectedValueError
from io import BytesIO
class AssetListAPI(SCEndpoint):
    def _dynamic_rules_constructor(self, rule, sub=False):
        '''
        Recursively expands the tuple rule shorthand into the dictionary
        document that the SC API expects for dynamic asset lists.

        Args:
            rule (tuple or dict):
                Either an already-expanded rule dictionary (passed through
                untouched) or a tuple.  A tuple whose first element is
                ``'any'``/``'all'`` is a rule group whose remaining elements
                are sub-rules; any other tuple is a single rule clause of the
                form ``(filterName, operator, value[, plugin_constraint])``.
            sub (bool, optional):
                ``True`` on recursive calls for nested groups, which adds the
                ``type: group`` attribute that sub-groups require.

        Returns:
            dict: The expanded rule document.

        Raises:
            TypeError: if a rule is neither a tuple nor a dict, or the
                optional plugin constraint is not an int or a list.
        '''
        if isinstance(rule, dict):
            # if the rule is a dictionary, then simply pass it through as-is.
            return rule
        elif isinstance(rule, tuple):
            # if the rule is a tuple, then we will want to convert it into the
            # expected dictionary format.
            if rule[0] in ['all', 'any']:
                # if the first parameter in the tuple is either "any" or "all",
                # we will then assume that this is a group of rules, and call
                # the rule constructor for every subsequent parameter in the
                # tuple.
                resp = {
                    'operator': rule[0],
                    'children': [self._dynamic_rules_constructor(r, sub=True)
                        for r in rule[1:]]
                }
                if sub:
                    resp['type'] = 'group'
            else:
                # as the first item was _not_ "all" or "any", we're safe to
                # assume that the rule is actually a rule clause. In this case
                # we will want to validate the fields based on the potential
                # known values that each attribute could have. The rule should
                # generally be constructed in the following format:
                #
                # ('filterName', 'operator', 'value')
                #
                # or in the case of a plugin constraint, then there will be a
                # fourth parameter like so:
                #
                # ('filterName', 'operator', 'value', int(pluginID))
                # or
                # ('filterName', 'operator', 'value', list(id1, id2, id3, etc.))
                resp = {
                    'type': 'clause',
                    'filterName': self._check('rule:name', rule[0], str,
                        choices=['dns', 'exploitAvailable', 'exploitFrameworks',
                            'firstseen', 'mac', 'os', 'ip', 'lastseen',
                            'netbioshost', 'netbiosworkgroup', 'pluginid',
                            'plugintext', 'port', 'severity', 'sshv1', 'sshv2',
                            'tcpport', 'udpport', 'xref']),
                    'operator': self._check('rule:operator', rule[1], str,
                        choices=['contains', 'eq', 'lt', 'lte', 'ne', 'gt',
                            'gte', 'regex', 'pcre'])
                }

                # if the value is an integer, then we will want to ensure that
                # we wrap the value within an id dictionary. This is necessary
                # for pluginid and severity filter names. In all other cases
                # the value should be a string.
                if rule[0] in ['pluginid', 'severity']:
                    resp['value'] = {
                        'id': self._check('rule:value', rule[2], int)}
                else:
                    resp['value'] = self._check('rule:value', rule[2], str)

                # if there is a plugin constraint, then we will want to convert
                # the plugin constraint into a string value. If it's a single
                # plugin id, then we will simply convert from int to str. If
                # a list of values is provided, then we will build a comma-delim
                # string with the values that were passed.
                if len(rule) == 4:
                    if isinstance(rule[3], int):
                        resp['pluginIDConstraint'] = str(rule[3])
                    elif isinstance(rule[3], list):
                        resp['pluginIDConstraint'] = ','.join(
                            [str(r) for r in rule[3]])
                    else:
                        raise TypeError(
                            'rule {} has an invalid plugin constraint.'.format(rule))
        else:
            raise TypeError('rules {} not a tuple or dict'.format(rule))
        return resp
    def _constructor(self, **kw):
        '''
        Parses the supplied keywords into an asset-list definition document
        suitable for sending to the SC ``asset`` endpoint.

        Validates each recognized keyword, converts snake_case names into the
        camelCase names the API expects, and expands tuple shorthand (rules,
        combinations, filters) into dictionary equivalents.  ``kw`` is
        modified in place and returned.
        '''
        if 'type' in kw:
            # ensure that they type is a string and is one of the valid values.
            self._check('type', kw['type'], str, choices=[
                'combination', 'dnsname', 'dnsnameupload', 'dynamic',
                'ldapquery', 'static', 'staticeventfilter', 'staticvulnfilter',
                'templates', 'upload', 'watchlist', 'watchlisteventfilter',
                'watchlistupload'])

        if 'prep' in kw:
            # ensure that prep is a boolean value and store the string equiv in
            # the prepare parameter.
            kw['prepare'] = str(self._check('prep', kw['prep'], bool)).lower()
            del(kw['prep'])

        if 'name' in kw:
            # validate that the name param is a string
            self._check('name', kw['name'], str)

        if 'description' in kw:
            # validate that the description param is a string
            self._check('description', kw['description'], str)

        if 'context' in kw:
            # validate that the context param is a string
            self._check('context', kw['context'], str)

        if 'tags' in kw:
            # validate that the tags param is a string
            self._check('tags', kw['tags'], str)

        if 'template' in kw:
            # if the template param is an integer then convert it into a dict
            # with the integer value stored in the id attribute. If the
            # template attribute is a dictionary, then we will simply assume
            # that the information is what we want to pass and allow through.
            if isinstance(kw['template'], int):
                kw['template'] = {'id': self._check('template', kw['template'], int)}
            else:
                self._check('template', kw['template'], dict)

        if 'filename' in kw:
            # Validate that the filename is a string value
            self._check('filename', kw['filename'], str)

        if 'fobj' in kw:
            # Uploads the file object and stores the returned name in filename.
            kw['filename'] = self._api.files.upload(kw['fobj'])
            del(kw['fobj'])

        if 'data_fields' in kw:
            # validate that the data_fields parameter is a list and store it
            # within the assetDataFields attribute.
            kw['assetDataFields'] = self._check(
                'data_fields', kw['data_fields'], list)
            del(kw['data_fields'])

        if 'combinations' in kw:
            # if the combinations parameter is a tuple, then send the value to
            # the combo_expansion method to convert the tuple to the dictionary
            # equivalent. If the value is a dictionary, then simply pass the
            # value as-is.
            if isinstance(kw['combinations'], tuple):
                kw['combinations'] = self._combo_expansion(kw['combinations'])
            else:
                self._check('combinations', kw['combinations'], dict)

        if 'rules' in kw:
            # pass the rules parameter to the dynamic rules constructor to
            # convert the rules from a tuple to an expanded dictionary or just
            # pass through the dictionary value if presented with a dict.
            kw['rules'] = self._dynamic_rules_constructor(kw['rules'])

        if 'dns_names' in kw:
            # validate the dns_names parameter is a list or str value and store
            # it within the definedDNSNames attribute.
            if isinstance(kw['dns_names'], list):
                kw['definedDNSNames'] = ','.join([
                    self._check('dns:item', i, str) for i in kw['dns_names']])
            else:
                kw['definedDNSNames'] = self._check(
                    'dns_names', kw['dns_names'], str)
            del(kw['dns_names'])

        if 'dn' in kw and 'search_string' in kw and 'ldap_id' in kw:
            # if the dn, search_string, and ldap_id attributes are all defined,
            # then construct the definedLDAPQuery sub-document with these fields
            # and validate that they are the appropriate types.
            kw['definedLDAPQuery'] = {
                'searchBase': self._check('dn', kw['dn'], str),
                'searchString': self._check(
                    'search_string', kw['search_string'], str),
                'ldap': {'id': self._check('ldap_id', kw['ldap_id'], int)}
            }
            del(kw['dn'])
            del(kw['search_string'])
            del(kw['ldap_id'])
        elif (('dn' in kw and ('search_string' not in kw or 'ldap_id' not in kw))
            or ('search_string' in kw and ('dn' not in kw or 'ldap_id' not in kw))
            or ('ldap_id' in kw and ('search_string' not in kw or 'dn' not in kw))):
            # any partial combination of the three LDAP keywords is an error.
            raise UnexpectedValueError(
                'dn, search_string, and ldap_id must all be present')

        if 'ips' in kw:
            # validate that ips is either a list or a string value and store the
            # value as a comma-seperated string in definedIPs
            if isinstance(kw['ips'], list):
                kw['definedIPs'] = ','.join([self._check('ips:item', i, str)
                    for i in kw['ips']])
            else:
                kw['definedIPs'] = self._check('ips', kw['ips'], str)
            del(kw['ips'])

        if 'exclude_managed_ips' in kw:
            # validate that exclude managed ips is a boolean value and store the
            # value as a string in excludeManagedIPs
            kw['excludeManagedIPs'] = str(self._check('exclude_managed_ips',
                kw['exclude_managed_ips'], bool)).lower()
            del(kw['exclude_managed_ips'])

        if 'filters' in kw:
            # validate the filters attribute is a list. For each item, we will
            # want to convert any tuples to the expanded dictionaries and simply
            # pass through any dictionaries.
            flist = list()
            for f in self._check('filters', kw['filters'], list):
                if isinstance(f, tuple):
                    flist.append({
                        'filterName': self._check('filter:name', f[0], str),
                        'operator': self._check('filter:operator', f[1], str),
                        'value': self._check('filter:value', f[2], str)
                    })
                else:
                    flist.append(self._check('filter', f, dict))
            kw['filters'] = flist

        if 'tool' in kw:
            # Validate that the tools attribute is a string,
            self._check('tool', kw['tool'], str)

        if 'source_type' in kw:
            # validate that the source_type parameter is a string and store it
            # within the camelCase equiv.
            kw['sourceType'] = self._check(
                'source_type', kw['source_type'], str)
            del(kw['source_type'])

        if 'start_offset' in kw:
            # validate the start offset is an integer value and store it within
            # the camelCase equiv.
            kw['startOffset'] = self._check(
                'start_offset', kw['start_offset'], int)
            del(kw['start_offset'])

        if 'end_offset' in kw:
            # validate that the end offset is an integer value and store it
            # the camelCase equiv.
            kw['endOffset'] = self._check(
                'end_offset', kw['end_offset'], int)
            del(kw['end_offset'])

        if 'view' in kw:
            # validate that the view is a string value.
            self._check('view', kw['view'], str)

        if 'lce_id' in kw:
            # validate that the lce_id is an integer value and store it as a
            # dictionary within the lce attribute.
            kw['lce'] = {'id': self._check('lce_id', kw['lce_id'], int)}
            del(kw['lce_id'])

        if 'sort_field' in kw:
            # validate that sort_field is a string value and store within the
            # camelCase equiv.
            kw['sortField'] = self._check('sort_field', kw['sort_field'], str)
            del(kw['sort_field'])

        if 'sort_dir' in kw:
            # validate that sort_dir is a string value of either ASC or DESC and
            # store it within the camelCase equiv.
            kw['sortDir'] = self._check('sort_dir', kw['sort_dir'], str,
                case='upper', choices=['ASC', 'DESC'])
            del(kw['sort_dir'])

        if 'scan_id' in kw:
            # validate that the scan_id value is an integer and store it within
            # the camelCase equiv.
            kw['scanID'] = self._check('scan_id', kw['scan_id'], int)
            del(kw['scan_id'])

        return kw
    def create(self, name, list_type, **kw):
        '''
        Creates an asset-list.

        :sc-api:`asset-list: create <Asset.html#asset_POST>`

        Args:
            name (str):
                The name for the asset list to create.
            list_type (str):
                The type of list to create. Supported values are
                ``combination``, ``dnsname``, ``dnsnameupload``, ``dynamic``,
                ``ldapquery``, ``static``, ``staticeventfilter``,
                ``staticvulnfilter``, ``templates``, ``upload``, ``watchlist``,
                ``watchlisteventfilter``, and ``watchlistupload``.
            combinations (tuple, optional):
                An asset combination tuple. For further information refer to
                the asset combination logic described at
                :mod:`tenable.sc.analysis`.
            data_fields (list, optional):
                A list of data fields as required for a given asset list type.
                Each item within the list should be formatted in the following
                way: ``{'fieldName': 'name', 'fieldValue': 'value'}``
            description (str, optional):
                The description for the asset list being created.
            dn (str, optional):
                The base DN to use for an LDAP query. Must also provide a
                ``search_string`` and an ``ldap_id``.
            dns_names (list, optional):
                When defining a DNS asset list, use this attribute to provide
                the list of DNS addresses.
            exclude_managed_ips (bool, optional):
                Determines whether or not managed IPs should be excluded from
                the asset list.
            filters (list, optional):
                A list of filter tuples to use when defining filtered asset
                list types. Follows the same format as filters within the rest
                of pyTenable.
            fobj (FileObject, optional):
                A file-like object to use when uploading an asset list.
            ips (list, optional):
                A list of IP Addresses, CIDRs, and/or IP Address ranges to use
                for the purposes of a static asset list.
            lce_id (int, optional):
                When defining a event-based asset list, which LCE should be used
                to generate the asset list query.
            ldap_id (int, optional):
                The numeric identifier pertaining to the LDAP server to use for
                an LDAP query. must also provide a ``dn`` and a
                ``search_string``.
            prep (bool, optional):
                Should asset preparation be run after the list is created? If
                unspecified, the default action is ``True``.
            rules (tuple, optional):
                For a dynamic asset list, the tuple definition of the rules to
                determine what Ips are associated to this asset list. Rules
                follow a similar pattern to the asset combination logic and
                are written in a way to follow the same visual methodology as
                the UI.

                For example, a simple dynamic ruleset may look like:

                .. code-block:: python

                    ('any', ('dns', 'contains', 'svc.company.tld'),
                        ('dns', 'contains', 'prod.company.tld'))

                Which would match all assets with either svc.company.tld or
                prod.company.tld in their DNS names. Rule groups can be nested
                as well, by supplying a new group tuple instead of a rule:

                .. code-block:: python

                    ('any', ('dns', 'contains', 'svc.company.tld'),
                        ('dns', 'contains', 'prod.company.tld'),
                        ('any', ('ip', 'contains', '192.168.140'),
                            ('ip', 'contains', '192.168.141')))

                In this example we have nested another group requiring that the
                ip may contain either of the values in addition to any of the
                DNS rules.

                It's also possible to constrain the rule to a specific plugin or
                plugins as well by adding a 4th element in a rule tuple.
                Defining them would look like so:

                .. code-block:: python

                    # Singular Plugin ID
                    ('plugintext', 'contains', 'credentialed', 19506)

                    # Multiple Plugin IDs
                    ('plugintext', 'contains', 'stuff', [19506, 10180])

                * Available rules are ``dns``, ``exploitAvailable``,
                  ``exploitFrameworks``, ``firstseen``, ``mac``, ``os``, ``ip``,
                  ``lastseen``, ``netbioshost``, ``netbiosworkgroup``,
                  ``pluginid``, ``plugintext``, ``port``, ``severity``, ``sshv1``,
                  ``sshv2``, ``tcpport``, ``udpport``, and ``xref``.
                * Available operators are ``contains``, ``eq``, ``lt``, ``lte``,
                  ``ne``, ``gt``, ``gte``, ``regex``, ``pcre``.
                * Group clauses are either ``any`` or ``all``. Any is a logical
                  or. All is a logical and.
            scan_id (int, optional):
                When defining an "individual" source_type, the numeric id of the
                scan instance to base the query upon.
            search_string (str, optional):
                The search string to use as part of an LDAP Query. Must also
                provide a ``dn`` and an ``ldap_id``.
            sort_dir (str, optional):
                When defining a filtered asset list type, determines the
                direction of the sort to use. This field must be passed when
                defining a sort_field.
            sort_field (str, optional):
                When defining a filtered asset list type, determines what field
                to sort the resulting query on.
            source_type (str, optional):
                The source of the data to query from when defining a filtered
                asset list type.
            start_offset (int, optional):
                The start offset of the filter to use when defining a filtered
                asset list type.
            tags (str, optional):
                A tag to associate to the asset list.
            template (int, optional):
                The numeric id of the template to use.
            tool (str, optional):
                When specifying filtered asset list types, the analysis tool to
                use for determining what IPs should be included within the
                asset list.
            view (str, optional):
                When the source_type is "individual", the view defined what
                subset of the data to use.

        Returns:
            dict: The newly created asset-list.

        Examples:
            >>> asset_list = sc.asset_lists.create('Example', 'static',
            ...     ips=['192.168.0.0/24'])
        '''
        kw['name'] = name
        kw['type'] = list_type
        payload = self._constructor(**kw)
        return self._api.post('asset', json=payload).json()['response']
def details(self, id, orgID, fields=None):
'''
Returns the details for a specific asset-list.
:sc-api:`asset-list: details<Asset.html#AssetRESTReference-/asset/{id}?orgID={orgID}>`
Args:
id (int): The identifier for the asset-list.
orgID (int): The organizationID for the asset-list.
fields (list, optional): A list of attributes to return.
Returns:
dict: The details of asset id.
Examples:
>>> asset_id_details = sc.asset_lists.details(1,1)
>>> pprint(asset_id_details)
'''
params = dict()
if fields:
params['fields'] = ','.join([self._check('field', f, str) for f in fields])
params['orgID'] = orgID
return self._api.get('asset/{}'.format(self._check('id', id,int)),params=params).json()['response']
    def edit(self, id, **kw):
        '''
        Edits an asset-list.

        :sc-api:`asset-list: edit <Asset.html#asset_id_PATCH>`

        Args:
            id (int):
                The numeric id of the asset list to edit.
            combinations (tuple, optional):
                An asset combination tuple. For further information refer to
                the asset combination logic described at
                :mod:`tenable.sc.analysis`.
            data_fields (list, optional):
                A list of data fields as required for a given asset list type.
                Each item within the list should be formatted in the following
                way: ``{'fieldName': 'name', 'fieldValue': 'value'}``
            description (str, optional):
                The description for the asset list being created.
            dn (str, optional):
                The base DN to use for an LDAP query. Must also provide a
                ``search_string`` and an ``ldap_id``.
            dns_names (list, optional):
                When defining a DNS asset list, use this attribute to provide
                the list of DNS addresses.
            exclude_managed_ips (bool, optional):
                Determines whether or not managed IPs should be excluded from
                the asset list.
            filters (list, optional):
                A list of filter tuples to use when defining filtered asset
                list types. Follows the same format as filters within the rest
                of pyTenable.
            fobj (FileObject, optional):
                A file-like object to use when uploading an asset list.
            ips (list, optional):
                A list of IP Addresses, CIDRs, and/or IP Address ranges to use
                for the purposes of a static asset list.
            lce_id (int, optional):
                When defining a event-based asset list, which LCE should be used
                to generate the asset list query.
            ldap_id (int, optional):
                The numeric identifier pertaining to the LDAP server to use for
                an LDAP query. must also provide a ``dn`` and a
                ``search_string``.
            name (str, optional):
                The name for the asset list to create.
            prep (bool, optional):
                Should asset preparation be run after the list is created? If
                unspecified, the default action is ``True``.
            rules (tuple, optional):
                For a dynamic asset list, the tuple definition of the rules to
                determine what Ips are associated to this asset list. Rules
                follow a similar pattern to the asset combination logic and
                are written in a way to follow the same visual methodology as
                the UI.
            scan_id (int, optional):
                When defining an "individual" source_type, the numeric id of the
                scan instance to base the query upon.
            search_string (str, optional):
                The search string to use as part of an LDAP Query. Must also
                provide a ``dn`` and an ``ldap_id``.
            sort_dir (str, optional):
                When defining a filtered asset list type, determines the
                direction of the sort to use. This field must be passed when
                defining a sort_field.
            sort_field (str, optional):
                When defining a filtered asset list type, determines what field
                to sort the resulting query on.
            source_type (str, optional):
                The source of the data to query from when defining a filtered
                asset list type.
            start_offset (int, optional):
                The start offset of the filter to use when defining a filtered
                asset list type.
            tags (str, optional):
                A tag to associate to the asset list.
            template (int, optional):
                The numeric id of the template to use.
            tool (str, optional):
                When specifying filtered asset list types, the analysis tool to
                use for determining what IPs should be included within the
                asset list.
            type (str, optional):
                The type of list to create. Supported values are
                ``combination``, ``dnsname``, ``dnsnameupload``, ``dynamic``,
                ``ldapquery``, ``static``, ``staticeventfilter``,
                ``staticvulnfilter``, ``templates``, ``upload``, ``watchlist``,
                ``watchlisteventfilter``, and ``watchlistupload``.
            view (str, optional):
                When the source_type is "individual", the view defined what
                subset of the data to use.

        Returns:
            dict: The newly updated asset-list.

        Examples:
            >>> asset_list = sc.asset_lists.edit(1, name='Updated Name')
        '''
        payload = self._constructor(**kw)
        return self._api.patch('asset/{}'.format(
            self._check('id', id, int)), json=payload).json()['response']
def delete(self, id):
'''
Removes a asset-list.
:sc-api:`asset-list: delete <Asset.html#asset_id_DELETE>`
Args:
id (int): The numeric identifier for the asset-list to remove.
Returns:
dict: The deletion response dict
Examples:
>>> sc.asset_lists.delete(1)
'''
return self._api.delete('asset/{}'.format(
self._check('id', id, int))).json()['response']
def list(self, fields=None):
'''
Retrieves the list of asset list definitions.
:sc-api:`asset-list: list <Asset.html#AssetRESTReference-/asset>`
Args:
fields (list, optional):
A list of attributes to return for each asset-list.
Returns:
list: A list of asset-list resources.
Examples:
>>> for asset-list in sc.asset_lists.list():
... pprint(asset-list)
'''
params = dict()
if fields:
params['fields'] = ','.join([self._check('field', f, str)
for f in fields])
return self._api.get('asset', params=params).json()['response']
def import_definition(self, fobj, name=None):
'''
Imports an asset list definition from an asset list definition XML file.
:sc-api:`asset-list: import <Asset.html#asset_import_POST>`
Args:
name (str): The name of the asset definition to create.
fobj (FileObject):
The file-like object containing the XML definition.
Returns:
:obj:`dict`:
The created asset list from the import.
Examples:
>>> with open('example.xml', 'rb') as fobj:
... sc.asset_lists.import_definition('Example', fobj)
'''
payload = {'filename': self._api.files.upload(fobj)}
if name:
payload['name'] = self._check('name', name, str)
return self._api.post('asset/import', json=payload).json()['response']
def export_definition(self, id, fobj=None):
'''
Exports an asset list definition and stored the data in the file-like
object that was passed.
:sc-api:`asset-list: export <Asset.html#AssetRESTReference-/asset/{id}/export>`
Args:
id (int): The numeric identifier for the asset list to export.
fobj (FileObject):
The file-like object to store the asset list XML definition.
Returns:
:obj:`FileObject`:
The file-like object containing the XML definition.
Examples:
>>> with open('example.xml', 'wb') as fobj:
... sc.asset_lists.export_definition(1, fobj)
'''
resp = self._api.get('asset/{}/export'.format(
self._check('id', id, int)), stream=True)
# if no file-like object was passed, then we will instantiate a BytesIO
# object to push the file into.
if not fobj:
fobj = BytesIO()
# Lets stream the file into the file-like object...
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
fobj.write(chunk)
fobj.seek(0)
resp.close()
return fobj
def refresh(self, id, org_id, *repos):
'''
Initiates an on-demand recalculation of the asset list. Note this
endpoint requires being logged in as an admin user.
:sc-api:`asset-list: refresh <Asset.html#AssetRESTReference-/asset/{id}/refresh>`
Args:
id (int): The numeric identifier of the asset list to refresh.
org_id (int): The organization associated to the asset list.
*repos (int): Repository ids to perform the recalculation on.
Returns:
:obj:`dict`:
Response of the items that the asset list is associated to.
Examples:
Perform the refresh against a single repo:
>>> sc.asset_lists.refresh(1, 1, 1)
Perform the refresh against many repos:
>>> sc.asset_lists.refresh(1, 1, 1, 2, 3)
'''
return self._api.post('asset/{}/refresh'.format(
self._check('id', id, int)), json={
'orgID': self._check('org_id', org_id, int),
'repIDs': [{'id': self._check('repo:id', i, int)} for i in repos]
}).json()['response']
def ldap_query(self, ldap_id, dn, search_string):
'''
Performs a LDAP test query on the specified LDAP service configured.
:sc-api:`asset-list: test-ldap-query <Asset.html#AssetRESTReference-/asset/testLDAPQuery>`
Args:
ldap_id (int):
The numeric identifier for the configured LDAP service.
dn (str): The valid search base to use.
search_string(str):
The search string to query the LDAP service with.
Returns:
:obj:`dict`:
The LDAP response.
Examples:
>>> resp = sc.asset_lists.ldap_query(1, 'domain.com', '*')
'''
return self._api.post('asset/testLDAPQuery', json={
'definedLDAPQuery': {
'searchBase': self._check('dn', dn, str),
'searchString': self._check('search_string', search_string, str),
'ldap': {'id': str(self._check('ldap_id', ldap_id, int))}
}}).json()['response']
def tags(self):
'''
Retrieves the list of unique tags associated to asset lists.
:sc-api:`asset-lists: tags <Asset.html#AssetRESTReference-/asset/tag>`
Returns:
:obj:`list`:
List of tags
Examples:
>>> tags = sc.asset_lists.tags()
'''
return self._api.get('asset/tag').json()['response']
def share(self, id, *groups):
'''
Shares the specified asset list to another user group.
:sc-api:`asset-lists: share <Asset.html#AssetRESTReference-/asset/{id}/share>`
Args:
id (int): The numeric id for the credential.
*groups (int): The numeric id of the group(s) to share to.
Returns:
:obj:`dict`:
The updated asset-list resource.
Examples:
>>> sc.asset_lists.share(1, group_1, group_2)
'''
return self._api.post('asset/{}/share'.format(
self._check('id', id, int)), json={
'groups': [{'id': self._check('group:id', i, int)}
for i in groups]}).json()['response']
| 43.428758 | 107 | 0.54086 | from .base import SCEndpoint
from tenable.errors import UnexpectedValueError
from io import BytesIO
class AssetListAPI(SCEndpoint):
    '''
    Tenable.sc asset-list ("asset") API endpoints.
    The private ``_constructor`` and ``_dynamic_rules_constructor`` helpers
    translate pythonic keyword arguments into the document layout the asset
    REST API expects; the public methods map one-to-one onto REST calls made
    through ``self._api`` (provided by :class:`SCEndpoint`).
    '''
    def _dynamic_rules_constructor(self, rule, sub=False):
        '''
        Converts a tuple-based dynamic-rule definition into the dictionary
        document the API expects.  Dicts pass through untouched;
        ``('all'|'any', rule, ...)`` tuples become operator groups and are
        recursed into; any other tuple is treated as a single filter clause
        of the form ``(filter_name, operator, value[, plugin_constraint])``.
        '''
        if isinstance(rule, dict):
            return rule
        elif isinstance(rule, tuple):
            if rule[0] in ['all', 'any']:
                resp = {
                    'operator': rule[0],
                    'children': [self._dynamic_rules_constructor(r, sub=True)
                        for r in rule[1:]]
                }
                # Only nested groups (anything below the root) carry the
                # explicit 'group' type marker.
                if sub:
                    resp['type'] = 'group'
            else:
                # assume that the rule is actually a rule clause. In this case
                # we will want to validate the fields based on the potential
                # known values that each attribute could have. The rule should
                # generally be constructed in the following format:
                #
                # ('filterName', 'operator', 'value')
                #
                # or in the case of a plugin constraint, then there will be a
                # fourth parameter like so:
                #
                # ('filterName', 'operator', 'value', int(pluginID))
                # or
                # ('filterName', 'operator', 'value', list(id1, id2, id3, etc.))
                resp = {
                    'type': 'clause',
                    'filterName': self._check('rule:name', rule[0], str,
                        choices=['dns', 'exploitAvailable', 'exploitFrameworks',
                            'firstseen', 'mac', 'os', 'ip', 'lastseen',
                            'netbioshost', 'netbiosworkgroup', 'pluginid',
                            'plugintext', 'port', 'severity', 'sshv1', 'sshv2',
                            'tcpport', 'udpport', 'xref']),
                    'operator': self._check('rule:operator', rule[1], str,
                        choices=['contains', 'eq', 'lt', 'lte', 'ne', 'gt',
                            'gte', 'regex', 'pcre'])
                }
                # if the value is an integer, then we will want to ensure that
                # we wrap the value within an id dictionary. This is necessary
                # for pluginid and severity filter names. In all other cases
                # the value should be a string.
                if rule[0] in ['pluginid', 'severity']:
                    resp['value'] = {
                        'id': self._check('rule:value', rule[2], int)}
                else:
                    resp['value'] = self._check('rule:value', rule[2], str)
                # if there is a plugin constraint, then we will want to convert
                # the plugin constraint into a string value: a single id is
                # simply stringified, a list becomes a CSV string.
                if len(rule) == 4:
                    if isinstance(rule[3], int):
                        resp['pluginIDConstraint'] = str(rule[3])
                    elif isinstance(rule[3], list):
                        resp['pluginIDConstraint'] = ','.join(
                            [str(r) for r in rule[3]])
                    else:
                        raise TypeError(
                            'rule {} has an invalid plugin constraint.'.format(rule))
        else:
            raise TypeError('rules {} not a tuple or dict'.format(rule))
        return resp
    def _constructor(self, **kw):
        '''
        Normalizes and validates the keyword arguments used by ``create``
        and ``edit``, converting snake_case parameters into the camelCase
        attributes the asset API expects.  Mutates and returns ``kw``.
        '''
        if 'type' in kw:
            self._check('type', kw['type'], str, choices=[
                'combination', 'dnsname', 'dnsnameupload', 'dynamic',
                'ldapquery', 'static', 'staticeventfilter', 'staticvulnfilter',
                'templates', 'upload', 'watchlist', 'watchlisteventfilter',
                'watchlistupload'])
        if 'prep' in kw:
            # The API expects the lower-cased strings 'true'/'false'.
            kw['prepare'] = str(self._check('prep', kw['prep'], bool)).lower()
            del(kw['prep'])
        if 'name' in kw:
            self._check('name', kw['name'], str)
        if 'description' in kw:
            self._check('description', kw['description'], str)
        if 'context' in kw:
            self._check('context', kw['context'], str)
        if 'tags' in kw:
            self._check('tags', kw['tags'], str)
        if 'template' in kw:
            # Accept either a bare template id or a pre-built template doc.
            if isinstance(kw['template'], int):
                kw['template'] = {'id': self._check('template', kw['template'], int)}
            else:
                self._check('template', kw['template'], dict)
        if 'filename' in kw:
            self._check('filename', kw['filename'], str)
        if 'fobj' in kw:
            # Upload the file-like object and refer to it by filename.
            kw['filename'] = self._api.files.upload(kw['fobj'])
            del(kw['fobj'])
        if 'data_fields' in kw:
            kw['assetDataFields'] = self._check(
                'data_fields', kw['data_fields'], list)
            del(kw['data_fields'])
        if 'combinations' in kw:
            # Tuples run through the shared combination-logic expander;
            # dicts are assumed to already be API documents.
            if isinstance(kw['combinations'], tuple):
                kw['combinations'] = self._combo_expansion(kw['combinations'])
            else:
                self._check('combinations', kw['combinations'], dict)
        if 'rules' in kw:
            kw['rules'] = self._dynamic_rules_constructor(kw['rules'])
        if 'dns_names' in kw:
            if isinstance(kw['dns_names'], list):
                kw['definedDNSNames'] = ','.join([
                    self._check('dns:item', i, str) for i in kw['dns_names']])
            else:
                kw['definedDNSNames'] = self._check(
                    'dns_names', kw['dns_names'], str)
            del(kw['dns_names'])
        # dn, search_string, and ldap_id are all-or-nothing: together they
        # describe an LDAP query, and a partial specification is an error.
        if 'dn' in kw and 'search_string' in kw and 'ldap_id' in kw:
            kw['definedLDAPQuery'] = {
                'searchBase': self._check('dn', kw['dn'], str),
                'searchString': self._check(
                    'search_string', kw['search_string'], str),
                'ldap': {'id': self._check('ldap_id', kw['ldap_id'], int)}
            }
            del(kw['dn'])
            del(kw['search_string'])
            del(kw['ldap_id'])
        elif (('dn' in kw and ('search_string' not in kw or 'ldap_id' not in kw))
                or ('search_string' in kw and ('dn' not in kw or 'ldap_id' not in kw))
                or ('ldap_id' in kw and ('search_string' not in kw or 'dn' not in kw))):
            raise UnexpectedValueError(
                'dn, search_string, and ldap_id must all be present')
        if 'ips' in kw:
            if isinstance(kw['ips'], list):
                kw['definedIPs'] = ','.join([self._check('ips:item', i, str)
                    for i in kw['ips']])
            else:
                kw['definedIPs'] = self._check('ips', kw['ips'], str)
            del(kw['ips'])
        if 'exclude_managed_ips' in kw:
            kw['excludeManagedIPs'] = str(self._check('exclude_managed_ips',
                kw['exclude_managed_ips'], bool)).lower()
            del(kw['exclude_managed_ips'])
        if 'filters' in kw:
            # Filters may be (name, operator, value) tuples or raw dicts.
            flist = list()
            for f in self._check('filters', kw['filters'], list):
                if isinstance(f, tuple):
                    flist.append({
                        'filterName': self._check('filter:name', f[0], str),
                        'operator': self._check('filter:operator', f[1], str),
                        'value': self._check('filter:value', f[2], str)
                    })
                else:
                    flist.append(self._check('filter', f, dict))
            kw['filters'] = flist
        if 'tool' in kw:
            self._check('tool', kw['tool'], str)
        if 'source_type' in kw:
            kw['sourceType'] = self._check(
                'source_type', kw['source_type'], str)
            del(kw['source_type'])
        if 'start_offset' in kw:
            kw['startOffset'] = self._check(
                'start_offset', kw['start_offset'], int)
            del(kw['start_offset'])
        if 'end_offset' in kw:
            kw['endOffset'] = self._check(
                'end_offset', kw['end_offset'], int)
            del(kw['end_offset'])
        if 'view' in kw:
            self._check('view', kw['view'], str)
        if 'lce_id' in kw:
            kw['lce'] = {'id': self._check('lce_id', kw['lce_id'], int)}
            del(kw['lce_id'])
        if 'sort_field' in kw:
            kw['sortField'] = self._check('sort_field', kw['sort_field'], str)
            del(kw['sort_field'])
        if 'sort_dir' in kw:
            kw['sortDir'] = self._check('sort_dir', kw['sort_dir'], str,
                case='upper', choices=['ASC', 'DESC'])
            del(kw['sort_dir'])
        if 'scan_id' in kw:
            kw['scanID'] = self._check('scan_id', kw['scan_id'], int)
            del(kw['scan_id'])
        return kw
    def create(self, name, list_type, **kw):
        '''Creates a new asset list of the given name and type.'''
        kw['name'] = name
        kw['type'] = list_type
        payload = self._constructor(**kw)
        return self._api.post('asset', json=payload).json()['response']
    def details(self, id, orgID, fields=None):
        '''Returns the asset-list details, optionally limited to ``fields``.'''
        params = dict()
        if fields:
            params['fields'] = ','.join([self._check('field', f, str) for f in fields])
        # NOTE(review): unlike the other identifiers, orgID is passed through
        # without a type check -- confirm callers always supply an int.
        params['orgID'] = orgID
        return self._api.get('asset/{}'.format(self._check('id', id,int)),params=params).json()['response']
    def edit(self, id, **kw):
        '''Updates the asset list with the given keyword arguments.'''
        payload = self._constructor(**kw)
        return self._api.patch('asset/{}'.format(
            self._check('id', id, int)), json=payload).json()['response']
    def delete(self, id):
        '''Removes the specified asset list.'''
        return self._api.delete('asset/{}'.format(
            self._check('id', id, int))).json()['response']
    def list(self, fields=None):
        '''Retrieves the list of asset list definitions.'''
        params = dict()
        if fields:
            params['fields'] = ','.join([self._check('field', f, str)
                for f in fields])
        return self._api.get('asset', params=params).json()['response']
    def import_definition(self, fobj, name=None):
        '''Imports an asset-list definition from an XML file object.'''
        payload = {'filename': self._api.files.upload(fobj)}
        if name:
            payload['name'] = self._check('name', name, str)
        return self._api.post('asset/import', json=payload).json()['response']
    def export_definition(self, id, fobj=None):
        '''Exports the asset-list XML into ``fobj`` (or a new BytesIO).'''
        resp = self._api.get('asset/{}/export'.format(
            self._check('id', id, int)), stream=True)
        # Fall back to an in-memory buffer when no file object was given.
        if not fobj:
            fobj = BytesIO()
        # Stream the response body; empty keep-alive chunks are skipped.
        for chunk in resp.iter_content(chunk_size=1024):
            if chunk:
                fobj.write(chunk)
        fobj.seek(0)
        resp.close()
        return fobj
    def refresh(self, id, org_id, *repos):
        '''Triggers an on-demand recalculation of the asset list (admin only).'''
        return self._api.post('asset/{}/refresh'.format(
            self._check('id', id, int)), json={
                'orgID': self._check('org_id', org_id, int),
                'repIDs': [{'id': self._check('repo:id', i, int)} for i in repos]
            }).json()['response']
    def ldap_query(self, ldap_id, dn, search_string):
        '''Runs a test LDAP query against a configured LDAP service.'''
        return self._api.post('asset/testLDAPQuery', json={
            'definedLDAPQuery': {
                'searchBase': self._check('dn', dn, str),
                'searchString': self._check('search_string', search_string, str),
                'ldap': {'id': str(self._check('ldap_id', ldap_id, int))}
        }}).json()['response']
    def tags(self):
        '''Retrieves the list of unique tags associated to asset lists.'''
        return self._api.get('asset/tag').json()['response']
    def share(self, id, *groups):
        '''Shares the asset list with the given user group ids.'''
        return self._api.post('asset/{}/share'.format(
            self._check('id', id, int)), json={
                'groups': [{'id': self._check('group:id', i, int)}
                    for i in groups]}).json()['response']
| true | true |
1c3d63bf838c9704d007fe35733aa5c33a7f3ab7 | 501 | py | Python | rvpy/transformations.py | TimothyKBook/distributions | 301fd61df894d4b300176e287bf9e725378c38eb | [
"MIT"
] | 1 | 2018-06-27T17:22:56.000Z | 2018-06-27T17:22:56.000Z | rvpy/transformations.py | TimothyKBook/distributions | 301fd61df894d4b300176e287bf9e725378c38eb | [
"MIT"
] | null | null | null | rvpy/transformations.py | TimothyKBook/distributions | 301fd61df894d4b300176e287bf9e725378c38eb | [
"MIT"
] | 1 | 2018-06-12T13:38:25.000Z | 2018-06-12T13:38:25.000Z | def log(rv):
"""
Returns the natural logarithm of a random variable
"""
return rv.log()
def exp(rv):
    """
    Returns the exponentiation of a random variable.
    Delegates to ``rv.exp()``; the actual transform is defined by the
    random-variable type (presumably e**rv -- confirm in the rv classes).
    """
    return rv.exp()
def sqrt(rv):
    """
    Returns the square root of a random variable.
    Implemented as ``rv ** 0.5`` so it works for any object that
    implements ``__pow__`` (including plain numbers).
    """
    half = 0.5
    return rv ** half
def pow(rv, k):
    """
    Returns a random variable raised to the power ``k``.
    (The implementation is the general power ``rv ** k``, not merely the
    square as the old docstring claimed; ``k`` may be any exponent the
    rv type's ``__pow__`` accepts.)
    """
    return rv**k
def abs(rv):
    """
    Returns the absolute value of a random variable.
    Delegates to ``rv.abs()``; note this module-level name shadows the
    built-in ``abs`` for callers importing it with ``*``.
    """
    return rv.abs()
| 16.7 | 54 | 0.57485 | def log(rv):
return rv.log()
def exp(rv):
return rv.exp()
def sqrt(rv):
return rv**0.5
def pow(rv, k):
return rv**k
def abs(rv):
return rv.abs()
| true | true |
1c3d63f3ca3f863d5ff4c28c90d4cd50d9d151b5 | 277 | py | Python | examples/protocols/adherent/app/decorators.py | pauleveritt/viewdom_wired | 40d8f2f190a12bbd07ff957654626001a9f3a778 | [
"MIT"
] | null | null | null | examples/protocols/adherent/app/decorators.py | pauleveritt/viewdom_wired | 40d8f2f190a12bbd07ff957654626001a9f3a778 | [
"MIT"
] | null | null | null | examples/protocols/adherent/app/decorators.py | pauleveritt/viewdom_wired | 40d8f2f190a12bbd07ff957654626001a9f3a778 | [
"MIT"
] | null | null | null | from typing import Callable, Type, TypeVar
protocol = TypeVar("protocol")
def adherent(
    c: Callable[[], protocol]
) -> Callable[[Type[protocol]], Type[protocol]]:
    """Class-decorator factory tying the decorated class to the protocol
    produced by ``c``.
    At runtime this is a no-op: the returned decorator hands the class
    back unchanged.  The value is in the annotations, which let a static
    type checker verify the class against the protocol.
    """
    def _identity(input_value: Type[protocol]) -> Type[protocol]:
        return input_value
    return _identity
| 21.307692 | 61 | 0.68231 | from typing import Callable, Type, TypeVar
protocol = TypeVar("protocol")
def adherent(
c: Callable[[], protocol]
) -> Callable[[Type[protocol]], Type[protocol]]:
def decor(input_value: Type[protocol]) -> Type[protocol]:
return input_value
return decor
| true | true |
1c3d65c4ef650011a667d5d325efd03ccb49af27 | 206 | py | Python | app/api/__init__.py | zhoujiahua/renting | d03da54ffb0cd2da718b7bcd58f84c5b810b8b1c | [
"MIT"
] | 1 | 2021-06-17T15:48:49.000Z | 2021-06-17T15:48:49.000Z | app/api/__init__.py | zhoujiahua/renting | d03da54ffb0cd2da718b7bcd58f84c5b810b8b1c | [
"MIT"
] | null | null | null | app/api/__init__.py | zhoujiahua/renting | d03da54ffb0cd2da718b7bcd58f84c5b810b8b1c | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
from flask import Blueprint
# Three Flask blueprints, one per API area (the names suggest common/user/
# spider route groups -- confirm where the handlers attach and where the
# blueprints are registered on the app).  The first argument is the
# blueprint name; __name__ lets Flask resolve resources for this package.
api_comm = Blueprint("api_comm", __name__)
api_user = Blueprint("api_user", __name__)
api_spider = Blueprint("api_spider", __name__)
| 22.888889 | 46 | 0.728155 |
from flask import Blueprint
api_comm = Blueprint("api_comm", __name__)
api_user = Blueprint("api_user", __name__)
api_spider = Blueprint("api_spider", __name__)
| true | true |
1c3d65e23d66f7fb08357d67fde5433ad52fe72a | 4,870 | py | Python | simscale_sdk/models/dimensional_function_specific_turbulence_dissipation_rate.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | 8 | 2021-01-22T13:41:03.000Z | 2022-01-03T09:00:10.000Z | simscale_sdk/models/dimensional_function_specific_turbulence_dissipation_rate.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | null | null | null | simscale_sdk/models/dimensional_function_specific_turbulence_dissipation_rate.py | slainesimscale/simscale-python-sdk | db483eeabe558e55d020f5f829a3bf13c9c287a7 | [
"MIT"
] | 3 | 2021-03-18T15:52:52.000Z | 2022-01-03T08:59:30.000Z | # coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class DimensionalFunctionSpecificTurbulenceDissipationRate(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    # NOTE(review): the string literal below is a bare expression, NOT part
    # of the class docstring (only the first literal becomes __doc__).  It
    # is a harmless no-op left by the generator.
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> OpenAPI type string, consumed by to_dict().
    openapi_types = {
        'value': 'OneOfDimensionalFunctionSpecificTurbulenceDissipationRateValue',
        'unit': 'str'
    }
    # Attribute name -> JSON key (identical here).
    attribute_map = {
        'value': 'value',
        'unit': 'unit'
    }
    def __init__(self, value=None, unit=None, local_vars_configuration=None):  # noqa: E501
        """DimensionalFunctionSpecificTurbulenceDissipationRate - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._value = None
        self._unit = None
        self.discriminator = None
        # value is optional; unit is always routed through the validating
        # setter below, so a missing unit raises when client-side
        # validation is enabled.
        if value is not None:
            self.value = value
        self.unit = unit
    @property
    def value(self):
        """Gets the value of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :return: The value of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :rtype: OneOfDimensionalFunctionSpecificTurbulenceDissipationRateValue
        """
        return self._value
    @value.setter
    def value(self, value):
        """Sets the value of this DimensionalFunctionSpecificTurbulenceDissipationRate.
        :param value: The value of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :type: OneOfDimensionalFunctionSpecificTurbulenceDissipationRateValue
        """
        self._value = value
    @property
    def unit(self):
        """Gets the unit of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :return: The unit of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :rtype: str
        """
        return self._unit
    @unit.setter
    def unit(self, unit):
        """Sets the unit of this DimensionalFunctionSpecificTurbulenceDissipationRate.
        :param unit: The unit of this DimensionalFunctionSpecificTurbulenceDissipationRate.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and unit is None:  # noqa: E501
            raise ValueError("Invalid value for `unit`, must not be `None`")  # noqa: E501
        # The API only accepts "1/s" for this quantity.
        allowed_values = ["1/s"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and unit not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `unit` ({0}), must be one of {1}"  # noqa: E501
                .format(unit, allowed_values)
            )
        self._unit = unit
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested models (anything with to_dict),
        # including models inside lists and dict values.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DimensionalFunctionSpecificTurbulenceDissipationRate):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, DimensionalFunctionSpecificTurbulenceDissipationRate):
            return True
        return self.to_dict() != other.to_dict()
| 32.039474 | 109 | 0.62115 |
import pprint
import re
import six
from simscale_sdk.configuration import Configuration
class DimensionalFunctionSpecificTurbulenceDissipationRate(object):
openapi_types = {
'value': 'OneOfDimensionalFunctionSpecificTurbulenceDissipationRateValue',
'unit': 'str'
}
attribute_map = {
'value': 'value',
'unit': 'unit'
}
def __init__(self, value=None, unit=None, local_vars_configuration=None):
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._value = None
self._unit = None
self.discriminator = None
if value is not None:
self.value = value
self.unit = unit
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
@property
def unit(self):
return self._unit
@unit.setter
def unit(self, unit):
if self.local_vars_configuration.client_side_validation and unit is None:
raise ValueError("Invalid value for `unit`, must not be `None`")
allowed_values = ["1/s"]
if self.local_vars_configuration.client_side_validation and unit not in allowed_values:
raise ValueError(
"Invalid value for `unit` ({0}), must be one of {1}"
.format(unit, allowed_values)
)
self._unit = unit
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, DimensionalFunctionSpecificTurbulenceDissipationRate):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, DimensionalFunctionSpecificTurbulenceDissipationRate):
return True
return self.to_dict() != other.to_dict()
| true | true |
1c3d65e50e92bf44e266edb19001fb7e5c810282 | 911 | py | Python | sdk/python/pulumi_azure_native/softwareplan/v20191201/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/softwareplan/v20191201/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/softwareplan/v20191201/outputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'SkuResponse',
]
@pulumi.output_type
class SkuResponse(dict):
"""
The SKU to be applied for this resource
"""
def __init__(__self__, *,
name: Optional[str] = None):
"""
The SKU to be applied for this resource
:param str name: Name of the SKU to be applied
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the SKU to be applied
"""
return pulumi.get(self, "name")
| 23.973684 | 80 | 0.609221 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'SkuResponse',
]
@pulumi.output_type
class SkuResponse(dict):
def __init__(__self__, *,
name: Optional[str] = None):
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
return pulumi.get(self, "name")
| true | true |
1c3d65e7288ffeee6db6f9d23112aa55dbf96daa | 4,230 | py | Python | start_alphagoose_trainer.py | IsaiahPressman/Kaggle_Hungry_Geese | f4d9fcb0811704bd339ad5c7ff937dd0d9e25763 | [
"MIT"
] | null | null | null | start_alphagoose_trainer.py | IsaiahPressman/Kaggle_Hungry_Geese | f4d9fcb0811704bd339ad5c7ff937dd0d9e25763 | [
"MIT"
] | null | null | null | start_alphagoose_trainer.py | IsaiahPressman/Kaggle_Hungry_Geese | f4d9fcb0811704bd339ad5c7ff937dd0d9e25763 | [
"MIT"
] | null | null | null | from pathlib import Path
import torch
from torch import nn
from torchvision import transforms
from hungry_geese.training.alphagoose.alphagoose_trainer import AlphaGooseTrainer
from hungry_geese.training.alphagoose.alphagoose_data import AlphaGooseRandomReflect, ChannelShuffle, ToTensor
from hungry_geese.env import goose_env as ge
from hungry_geese.nns import models, conv_blocks
from hungry_geese.utils import format_experiment_name
if __name__ == '__main__':
    # Training device and observation encoding for this run.
    DEVICE = torch.device('cuda:1')
    obs_type = ge.ObsType.COMBINED_GRADIENT_OBS_LARGE
    # Shared hyperparameters for the four convolutional blocks below.
    n_channels = 92
    activation = nn.ReLU
    normalize = False
    use_mhsa = False
    model_kwargs = dict(
        block_class=conv_blocks.BasicConvolutionalBlock,
        # First block maps the observation channels up to n_channels; the
        # rest are n_channels -> n_channels.  Only the last block can
        # enable multi-head self-attention (gated by use_mhsa above).
        block_kwargs=[
            dict(
                in_channels=obs_type.get_obs_spec()[-3],
                out_channels=n_channels,
                kernel_size=3,
                activation=activation,
                normalize=normalize,
                use_mhsa=False
            ),
            dict(
                in_channels=n_channels,
                out_channels=n_channels,
                kernel_size=3,
                activation=activation,
                normalize=normalize,
                use_mhsa=False
            ),
            dict(
                in_channels=n_channels,
                out_channels=n_channels,
                kernel_size=3,
                activation=activation,
                normalize=normalize,
                use_mhsa=False
            ),
            dict(
                in_channels=n_channels,
                out_channels=n_channels,
                kernel_size=3,
                activation=activation,
                normalize=normalize,
                use_mhsa=use_mhsa,
                mhsa_heads=4,
            ),
        ],
        squeeze_excitation=True,
        cross_normalize_value=True,
        use_separate_action_value_heads=True,
        # **ge.RewardType.RANK_ON_DEATH.get_recommended_value_activation_scale_shift_dict()
    )
    model = models.FullConvActorCriticNetwork(**model_kwargs)
    model.to(device=DEVICE)
    optimizer = torch.optim.RMSprop(
        model.parameters(),
        lr=0.002,
        #momentum=0.9,
        #weight_decay=1e-4
    )
    batch_size = 2048
    # NB: lr_scheduler counts steps in batches, not epochs
    lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(
        optimizer,
        #milestones=[int(100000 * 512 * i / batch_size) for i in [3]],
        milestones=[],
        gamma=0.1
    )
    # Self-play dataset with reflection/channel-shuffle augmentation.
    dataset_kwargs = dict(
        dataset_dir='/home/isaiah/data/alphagoose_data',
        obs_type=obs_type,
        transform=transforms.Compose([
            AlphaGooseRandomReflect(obs_type),
            ChannelShuffle(obs_type),
            ToTensor()
        ]),
    )
    dataloader_kwargs = dict(
        batch_size=batch_size,
        shuffle=True,
        num_workers=8,
        pin_memory=False
    )
    # Derive a descriptive run name from the configuration above so runs
    # are distinguishable under runs/alphagoose/active/.
    experiment_name = 'alphagoose_' + format_experiment_name(obs_type,
                                                             ge.RewardType.RANK_ON_DEATH,
                                                             ge.ActionMasking.OPPOSITE,
                                                             [n_channels],
                                                             model_kwargs['block_kwargs']) + '_v2'
    exp_folder = Path(f'runs/alphagoose/active/{experiment_name}')
    train_alg = AlphaGooseTrainer(
        model=model,
        optimizer=optimizer,
        lr_scheduler=lr_scheduler,
        dataset_kwargs=dataset_kwargs,
        dataloader_kwargs=dataloader_kwargs,
        n_iter_per_game=3,
        delete_game_after_use=False,
        device=DEVICE,
        use_mixed_precision=True,
        exp_folder=exp_folder,
        checkpoint_freq=1,
        checkpoint_render_n_games=2,
        # min_saved_steps=10,
        # min_saved_steps=int(5e5),
        start_from_scratch=False,
    )
    # Train "forever"; on Ctrl-C save a final checkpoint, but only if at
    # least one checkpoint interval has already elapsed.
    try:
        train_alg.train(n_epochs=int(1e7))
    except KeyboardInterrupt:
        if train_alg.epoch_counter > train_alg.checkpoint_freq:
            print('KeyboardInterrupt: saving model')
            train_alg.save(train_alg.exp_folder, finished=True)
| 33.84 | 110 | 0.585106 | from pathlib import Path
import torch
from torch import nn
from torchvision import transforms
from hungry_geese.training.alphagoose.alphagoose_trainer import AlphaGooseTrainer
from hungry_geese.training.alphagoose.alphagoose_data import AlphaGooseRandomReflect, ChannelShuffle, ToTensor
from hungry_geese.env import goose_env as ge
from hungry_geese.nns import models, conv_blocks
from hungry_geese.utils import format_experiment_name
if __name__ == '__main__':
DEVICE = torch.device('cuda:1')
obs_type = ge.ObsType.COMBINED_GRADIENT_OBS_LARGE
n_channels = 92
activation = nn.ReLU
normalize = False
use_mhsa = False
model_kwargs = dict(
block_class=conv_blocks.BasicConvolutionalBlock,
block_kwargs=[
dict(
in_channels=obs_type.get_obs_spec()[-3],
out_channels=n_channels,
kernel_size=3,
activation=activation,
normalize=normalize,
use_mhsa=False
),
dict(
in_channels=n_channels,
out_channels=n_channels,
kernel_size=3,
activation=activation,
normalize=normalize,
use_mhsa=False
),
dict(
in_channels=n_channels,
out_channels=n_channels,
kernel_size=3,
activation=activation,
normalize=normalize,
use_mhsa=False
),
dict(
in_channels=n_channels,
out_channels=n_channels,
kernel_size=3,
activation=activation,
normalize=normalize,
use_mhsa=use_mhsa,
mhsa_heads=4,
),
],
squeeze_excitation=True,
cross_normalize_value=True,
use_separate_action_value_heads=True,
)
model = models.FullConvActorCriticNetwork(**model_kwargs)
model.to(device=DEVICE)
optimizer = torch.optim.RMSprop(
model.parameters(),
lr=0.002,
)
batch_size = 2048
lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(
optimizer,
milestones=[],
gamma=0.1
)
dataset_kwargs = dict(
dataset_dir='/home/isaiah/data/alphagoose_data',
obs_type=obs_type,
transform=transforms.Compose([
AlphaGooseRandomReflect(obs_type),
ChannelShuffle(obs_type),
ToTensor()
]),
)
dataloader_kwargs = dict(
batch_size=batch_size,
shuffle=True,
num_workers=8,
pin_memory=False
)
experiment_name = 'alphagoose_' + format_experiment_name(obs_type,
ge.RewardType.RANK_ON_DEATH,
ge.ActionMasking.OPPOSITE,
[n_channels],
model_kwargs['block_kwargs']) + '_v2'
exp_folder = Path(f'runs/alphagoose/active/{experiment_name}')
train_alg = AlphaGooseTrainer(
model=model,
optimizer=optimizer,
lr_scheduler=lr_scheduler,
dataset_kwargs=dataset_kwargs,
dataloader_kwargs=dataloader_kwargs,
n_iter_per_game=3,
delete_game_after_use=False,
device=DEVICE,
use_mixed_precision=True,
exp_folder=exp_folder,
checkpoint_freq=1,
checkpoint_render_n_games=2,
start_from_scratch=False,
)
try:
train_alg.train(n_epochs=int(1e7))
except KeyboardInterrupt:
if train_alg.epoch_counter > train_alg.checkpoint_freq:
print('KeyboardInterrupt: saving model')
train_alg.save(train_alg.exp_folder, finished=True)
| true | true |
1c3d666c80b918f85ea80b58fefbf91004de3fcb | 32 | py | Python | src/some_package/module_two.py | tzaffi/pydistro | 34ae5d4aab84400b86fec66f0effc290dbafcf2c | [
"MIT"
] | null | null | null | src/some_package/module_two.py | tzaffi/pydistro | 34ae5d4aab84400b86fec66f0effc290dbafcf2c | [
"MIT"
] | null | null | null | src/some_package/module_two.py | tzaffi/pydistro | 34ae5d4aab84400b86fec66f0effc290dbafcf2c | [
"MIT"
] | null | null | null | def hello2():
print("hello2")
| 10.666667 | 17 | 0.625 | def hello2():
print("hello2")
| true | true |
1c3d66c2a452f2ca03d805588f00d80297b9521d | 3,183 | py | Python | jakarta_pic_and_text_config.py | urbanriskmap/timeseries-analysis | 6b9a8d1a916ff784cb0de93d6997cd072d1ca6ae | [
"MIT"
] | null | null | null | jakarta_pic_and_text_config.py | urbanriskmap/timeseries-analysis | 6b9a8d1a916ff784cb0de93d6997cd072d1ca6ae | [
"MIT"
] | null | null | null | jakarta_pic_and_text_config.py | urbanriskmap/timeseries-analysis | 6b9a8d1a916ff784cb0de93d6997cd072d1ca6ae | [
"MIT"
] | 1 | 2019-07-15T15:19:17.000Z | 2019-07-15T15:19:17.000Z | # Jakarta config: where only those reports with images included
# import this file and then overwrite whatever you need in
# the default_config object
import logging
import pandas as pd
from sqlalchemy import create_engine
DATABASE = "cognicity"
# NOTE(review): hard-coded local credentials; parameterize for non-dev use
engine = create_engine(
        "postgresql://postgres:postgres@localhost:5432/"
        + DATABASE)
# Log at DEBUG to both a hidden file and the console.
LOGGER = logging.getLogger()
LOGGER.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
LOG_FILENAME = ".default_jakarta.log"
fh = logging.FileHandler(LOG_FILENAME)
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
LOGGER.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
LOGGER.addHandler(ch)
# Overall analysis window and the known-flood sub-window, as SQL
# timestamptz literals (quotes included so they interpolate directly).
start_period = "'2017-01-01 00:00:35.630000-05:00'"
end_period = "'2017-03-10 00:00:35.630000-05:00'"
start_known_flood = "'2017-02-20 00:00:35.630000-05:00'"
end_known_flood = "'2017-02-23 00:00:35.630000-05:00'"
def __get_flood_pkeys(start_date, end_date, engine):
    """Return the set of pkeys for reports created during the flood
    window that carry both an image and non-empty text."""
    query_params = {"start_date": start_date, "end_date": end_date}
    frame = pd.read_sql_query(
        '''
    SELECT pkey, created_at FROM ''' + DATABASE + '''.all_reports WHERE
               created_at > %(start_date)s::timestamptz
               AND
               created_at < %(end_date)s::timestamptz
               AND
               image_url IS NOT NULL
               AND
               text IS NOT null
               AND
               LENGTH(text) > 0
    ''',
        params=query_params,
        con=engine, index_col="pkey")
    return set(frame.index)
def __get_no_flood_pkeys(start_period,
                         start_flood_date,
                         end_flood_date,
                         end_period,
                         engine):
    """Return the set of pkeys for reports created inside the analysis
    window but outside the known flood dates, restricted to reports with
    an image and non-empty text.

    Args:
        start_period: window start (SQL timestamptz literal)
        start_flood_date: flood window start
        end_flood_date: flood window end
        end_period: window end
        engine: SQLAlchemy engine for the cognicity database

    Returns:
        set of report pkeys
    """
    # BUGFIX: the original WHERE clause was
    #   (A) OR (B) AND image_url IS NOT NULL AND ...
    # and since AND binds tighter than OR in SQL, the image/text filters
    # only applied to the second OR branch. Parenthesize the OR so the
    # filters apply to both time branches.
    pkeys = pd.read_sql_query(
        '''
    SELECT pkey,
           created_at
    FROM ''' + DATABASE + '''.all_reports
    WHERE (
        (
        created_at > %(start_period)s::timestamptz
        AND created_at < %(start_flood_date)s::timestamptz)
        OR (
        created_at > %(end_flood_date)s::timestamptz
        AND created_at < %(end_period)s::timestamptz)
    )
    AND
    image_url IS NOT NULL
    AND
    text IS NOT null
    AND
    LENGTH(text) > 0
    ''',
        params={
            "start_period": start_period,
            "start_flood_date": start_flood_date,
            "end_flood_date": end_flood_date,
            "end_period": end_period
        },
        con=engine, index_col="pkey")
    return set(pkeys.index)
# Pre-compute the labeled pkey partitions once at import time so
# downstream feature extraction can address reports by label.
flood_pkeys = __get_flood_pkeys(
    start_known_flood,
    end_known_flood,
    engine)
no_flood_pkeys = __get_no_flood_pkeys(
    start_period,
    start_known_flood,
    end_known_flood,
    end_period,
    engine)
# Shared configuration dict consumed by the analysis pipeline; importers
# may copy and override individual keys.
config = {
    "flood_pkeys": flood_pkeys,
    "no_flood_pkeys": no_flood_pkeys,
    "all_pkeys": flood_pkeys.union(no_flood_pkeys),
    "database_engine": engine,
    "database_name": DATABASE,
    "location": "id",
    "data_folder_prefix": "default_jakarta_data",
    "logger": LOGGER
}
| 27.921053 | 75 | 0.617342 |
import logging
import pandas as pd
from sqlalchemy import create_engine
DATABASE = "cognicity"
engine = create_engine(
"postgresql://postgres:postgres@localhost:5432/"
+ DATABASE)
LOGGER = logging.getLogger()
LOGGER.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
LOG_FILENAME = ".default_jakarta.log"
fh = logging.FileHandler(LOG_FILENAME)
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
LOGGER.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
LOGGER.addHandler(ch)
start_period = "'2017-01-01 00:00:35.630000-05:00'"
end_period = "'2017-03-10 00:00:35.630000-05:00'"
start_known_flood = "'2017-02-20 00:00:35.630000-05:00'"
end_known_flood = "'2017-02-23 00:00:35.630000-05:00'"
def __get_flood_pkeys(start_date, end_date, engine):
pkeys = pd.read_sql_query(
'''
SELECT pkey, created_at FROM ''' + DATABASE + '''.all_reports WHERE
created_at > %(start_date)s::timestamptz
AND
created_at < %(end_date)s::timestamptz
AND
image_url IS NOT NULL
AND
text IS NOT null
AND
LENGTH(text) > 0
''',
params={"start_date": start_date, "end_date": end_date},
con=engine, index_col="pkey")
return set(pkeys.index)
def __get_no_flood_pkeys(start_period,
start_flood_date,
end_flood_date,
end_period,
engine):
pkeys = pd.read_sql_query(
'''
SELECT pkey,
created_at
FROM ''' + DATABASE + '''.all_reports
WHERE (
created_at > %(start_period)s::timestamptz
AND created_at < %(start_flood_date)s::timestamptz)
OR (
created_at > %(end_flood_date)s::timestamptz
AND created_at < %(end_period)s::timestamptz)
AND
image_url IS NOT NULL
AND
text IS NOT null
AND
LENGTH(text) > 0
''',
params={
"start_period": start_period,
"start_flood_date": start_flood_date,
"end_flood_date": end_flood_date,
"end_period": end_period
},
con=engine, index_col="pkey")
return set(pkeys.index)
flood_pkeys = __get_flood_pkeys(
start_known_flood,
end_known_flood,
engine)
no_flood_pkeys = __get_no_flood_pkeys(
start_period,
start_known_flood,
end_known_flood,
end_period,
engine)
config = {
"flood_pkeys": flood_pkeys,
"no_flood_pkeys": no_flood_pkeys,
"all_pkeys": flood_pkeys.union(no_flood_pkeys),
"database_engine": engine,
"database_name": DATABASE,
"location": "id",
"data_folder_prefix": "default_jakarta_data",
"logger": LOGGER
}
| true | true |
1c3d6988d63989037b6c0d9cfcd5187d62d1980a | 310 | py | Python | Restful-API/shariffood/apps/rates/serializer.py | F-Shahali/web_workshop | f48648ba82e325c612e7aa496f0e1f58a020af36 | [
"MIT"
] | 80 | 2020-02-08T19:51:12.000Z | 2022-03-05T12:11:53.000Z | Restful-API/shariffood/apps/rates/serializer.py | F-Shahali/web_workshop | f48648ba82e325c612e7aa496f0e1f58a020af36 | [
"MIT"
] | 36 | 2020-01-10T10:09:27.000Z | 2022-03-02T10:52:57.000Z | Restful-API/shariffood/apps/rates/serializer.py | F-Shahali/web_workshop | f48648ba82e325c612e7aa496f0e1f58a020af36 | [
"MIT"
] | 73 | 2020-01-09T10:19:19.000Z | 2022-02-08T16:43:28.000Z | from rest_framework import serializers
from .models import Ratings
class RatingSerializer(serializers.ModelSerializer):
    """Serializer for restaurant rating submissions."""

    class Meta:
        model = Ratings
        fields = ['restaurant', 'score']

    def validate(self, attrs):
        """Attach the authenticated request user before the rating is saved."""
        request = self.context['request']
        attrs['user'] = request.user
        return attrs
| 22.142857 | 52 | 0.674194 | from rest_framework import serializers
from .models import Ratings
class RatingSerializer(serializers.ModelSerializer):
class Meta:
model = Ratings
fields = ['restaurant', 'score']
def validate(self, attrs):
attrs['user'] = self.context['request'].user
return attrs
| true | true |
1c3d69ffd5a9f7bfea8a06342a917c263b0279d9 | 926 | py | Python | codereviewr/code/models.py | brad/codereviewr | a3bd7ae868d24c2f8cafca81107bb7a46bb93b93 | [
"MIT"
] | 1 | 2016-05-08T06:16:47.000Z | 2016-05-08T06:16:47.000Z | codereviewr/code/models.py | brad/codereviewr | a3bd7ae868d24c2f8cafca81107bb7a46bb93b93 | [
"MIT"
] | null | null | null | codereviewr/code/models.py | brad/codereviewr | a3bd7ae868d24c2f8cafca81107bb7a46bb93b93 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class Code(models.Model):
    """
    Core code model for code snippets
    """
    title = models.CharField(max_length=200)
    # the snippet body itself
    code = models.TextField(help_text="")
    author = models.ForeignKey(User)
    description = models.TextField(blank=True)
    # free-form, e.g. comma-separated package names
    dependencies = models.CharField(blank=True, max_length=255)
    # language ... get from Pygments
    version = models.CharField(blank=True, max_length=100)
    # private snippets are hidden from listings
    is_public = models.BooleanField(default=True)
    created = models.DateTimeField(default=datetime.now)
    # NOTE(review): `updated` defaults to creation time but nothing here
    # refreshes it on save — confirm callers update it.
    updated = models.DateTimeField(blank=True, default=datetime.now)
    def __unicode__(self):
        return "%s by %s" % (self.title, self.author.get_full_name())
    class Meta:
        verbose_name_plural = 'code'
    class Admin:
        list_display = ('title','author','is_public','created')
| 31.931034 | 69 | 0.700864 | from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class Code(models.Model):
title = models.CharField(max_length=200)
code = models.TextField(help_text="")
author = models.ForeignKey(User)
description = models.TextField(blank=True)
dependencies = models.CharField(blank=True, max_length=255)
version = models.CharField(blank=True, max_length=100)
is_public = models.BooleanField(default=True)
created = models.DateTimeField(default=datetime.now)
updated = models.DateTimeField(blank=True, default=datetime.now)
def __unicode__(self):
return "%s by %s" % (self.title, self.author.get_full_name())
class Meta:
verbose_name_plural = 'code'
class Admin:
list_display = ('title','author','is_public','created')
| true | true |
1c3d6a39899585a2b5b7db8c7cb264ba3a0d66ef | 8,064 | py | Python | hummingbot/market/openware/lib/client.py | peatio-net/hummingbot-tunex | a0c94c46af3d40c456ec0653dbe31ea56b3431ca | [
"Apache-2.0"
] | null | null | null | hummingbot/market/openware/lib/client.py | peatio-net/hummingbot-tunex | a0c94c46af3d40c456ec0653dbe31ea56b3431ca | [
"Apache-2.0"
] | null | null | null | hummingbot/market/openware/lib/client.py | peatio-net/hummingbot-tunex | a0c94c46af3d40c456ec0653dbe31ea56b3431ca | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
import hashlib
import hmac
import requests
import time
import urllib
from operator import itemgetter
from hummingbot.market.openware.lib.helpers import date_to_milliseconds, interval_to_milliseconds
from hummingbot.market.openware.lib.exceptions import OpenwareAPIException, OpenwareRequestException, OpenwareWithdrawException
class Client(object):
    """Minimal REST client for an Openware (Peatio) exchange API.

    Builds per-request HMAC-SHA256 authentication headers
    (X-Auth-Apikey / X-Auth-Nonce / X-Auth-Signature) and exposes thin
    wrappers over the public, account and market endpoints.
    """

    ORDER_STATUS_NEW = 'NEW'
    ORDER_STATUS_PARTIALLY_FILLED = 'PARTIALLY_FILLED'
    ORDER_STATUS_FILLED = 'FILLED'
    ORDER_STATUS_CANCELED = 'CANCELED'
    ORDER_STATUS_PENDING_CANCEL = 'PENDING_CANCEL'
    ORDER_STATUS_REJECTED = 'REJECTED'
    ORDER_STATUS_EXPIRED = 'EXPIRED'

    KLINE_INTERVAL_1MINUTE = '1m'
    KLINE_INTERVAL_3MINUTE = '3m'
    KLINE_INTERVAL_5MINUTE = '5m'
    KLINE_INTERVAL_15MINUTE = '15m'
    KLINE_INTERVAL_30MINUTE = '30m'
    KLINE_INTERVAL_1HOUR = '1h'
    KLINE_INTERVAL_2HOUR = '2h'
    KLINE_INTERVAL_4HOUR = '4h'
    KLINE_INTERVAL_6HOUR = '6h'
    KLINE_INTERVAL_8HOUR = '8h'
    KLINE_INTERVAL_12HOUR = '12h'
    KLINE_INTERVAL_1DAY = '1d'
    KLINE_INTERVAL_3DAY = '3d'
    KLINE_INTERVAL_1WEEK = '1w'
    KLINE_INTERVAL_1MONTH = '1M'

    SIDE_BUY = 'buy'
    SIDE_SELL = 'sell'

    ORDER_TYPE_LIMIT = 'limit'
    ORDER_TYPE_MARKET = 'market'

    TIME_IN_FORCE_GTC = 'GTC'  # Good till cancelled
    TIME_IN_FORCE_IOC = 'IOC'  # Immediate or cancel
    TIME_IN_FORCE_FOK = 'FOK'  # Fill or kill

    ORDER_RESP_TYPE_ACK = 'ACK'
    ORDER_RESP_TYPE_RESULT = 'RESULT'
    ORDER_RESP_TYPE_FULL = 'FULL'

    # For accessing the data returned by Client.aggregate_trades().
    AGG_ID = 'a'
    AGG_PRICE = 'p'
    AGG_QUANTITY = 'q'
    AGG_FIRST_TRADE_ID = 'f'
    AGG_LAST_TRADE_ID = 'l'
    AGG_TIME = 'T'
    AGG_BUYER_MAKES = 'm'
    AGG_BEST_MATCH = 'M'

    def __init__(self, api_key, api_secret, api_url):
        """Openware API Client constructor.

        Args:
            api_key: API key issued by the exchange
            api_secret: API secret used to sign the nonce
            api_url: base URL of the exchange API, e.g. "https://host/api/v2"
        """
        self.api_key = api_key
        self.api_secret = api_secret
        self.api_url = api_url
        self.session = self._init_session()
        # probe the API once up front: initializes DNS/SSL and fails fast
        # on bad credentials or an unreachable endpoint
        self.version()

    def _auth_headers(self):
        """Build the authentication headers for a fresh nonce."""
        timestamp = str(time.time() * 1000)
        return {
            'Accept': 'application/json',
            'User-Agent': 'openware/python',
            'X-Auth-Apikey': self.api_key,
            'X-Auth-Nonce': timestamp,
            'X-Auth-Signature': self._generate_signature(timestamp),
        }

    def _init_session(self):
        """Create a requests session pre-loaded with auth headers."""
        session = requests.session()
        session.headers.update(self._auth_headers())
        return session

    def update_headers(self):
        """Refresh the session's nonce/signature headers before a request."""
        self.session.headers.update(self._auth_headers())
        return self.session

    def _create_api_uri(self, path):
        """Join the base API url with an endpoint path."""
        return "%s%s" % (self.api_url, path)

    def _generate_signature(self, timestamp):
        """Return HMAC-SHA256(hex) of nonce+api_key, keyed by the secret."""
        query_string = "%s%s" % (timestamp, self.api_key)
        m = hmac.new(self.api_secret.encode('utf-8'), query_string.encode('utf-8'), hashlib.sha256)
        return m.hexdigest()

    def _request(self, method, uri, force_params=False, **kwargs):
        """Send a signed HTTP request and return the parsed JSON body.

        Args:
            method: lowercase HTTP verb matching a session method
            uri: absolute request URI
            force_params: send `data` as query params even for non-GET
        """
        data = kwargs.get('data', None)
        if data and isinstance(data, dict):
            kwargs['data'] = data

        # for GET requests (or when forced) pass data as query params
        if data and (method == 'get' or force_params):
            kwargs['params'] = kwargs['data']
            del kwargs['data']

        self.update_headers()
        response = getattr(self.session, method)(uri, **kwargs)
        return self._handle_response(response)

    def _request_api(self, method, path, **kwargs):
        uri = self._create_api_uri(path)
        return self._request(method, uri, **kwargs)

    def _handle_response(self, response):
        """Raise on non-2xx status or unparsable body, else return JSON."""
        if not str(response.status_code).startswith('2'):
            raise OpenwareAPIException(response)
        try:
            return response.json()
        except ValueError:
            raise OpenwareRequestException('Invalid Response: %s' % response.text)

    def _get(self, path, **kwargs):
        return self._request_api('get', path, **kwargs)

    def _post(self, path, **kwargs):
        return self._request_api('post', path, **kwargs)

    def _put(self, path, **kwargs):
        return self._request_api('put', path, **kwargs)

    def _delete(self, path, **kwargs):
        # BUGFIX: previously passed undefined names `signed, version`,
        # which raised NameError on any DELETE call.
        return self._request_api('delete', path, **kwargs)

    def version(self):
        """Return the platform version info."""
        return self._get("/public/version")

    def get_markets(self):
        return self._get('/public/markets')

    def get_currencies(self):
        return self._get('/public/currencies')

    def get_server_time(self):
        return self._get('/public/timestamp')

    def get_balances(self):
        return self._get('/account/balances')

    def get_trade_fee(self):
        return self._get('/public/trading_fees')

    async def get_my_trades(self, **params):
        return self._get("/market/trades", data=params)

    async def get_order_by_id(self, **params):
        id = params.get('id')
        return self._get("/market/orders/{}".format(id))

    async def get_order(self, **params):
        return self._get("/market/orders", data=params)

    def get_deposit_address(self, currency):
        return self._get("/account/deposit_address/%s" % currency)

    def withdraw(self, **params):
        return self._post("/account/withdraws", data=params)

    def create_order(self, **params):
        """
        Send in a new order
        """
        return self._post('/market/orders', data=params)

    def order_market(self, **params):
        """
        Send in a new market order
        """
        params.update({
            'ord_type': self.ORDER_TYPE_MARKET
        })
        return self.create_order(**params)

    def order_limit(self, **params):
        """
        Send in a new limit order
        """
        params.update({
            'ord_type': self.ORDER_TYPE_LIMIT
        })
        return self.create_order(**params)

    def order_market_buy(self, **params):
        """
        Send in a new market buy order
        """
        params.update({
            'side': self.SIDE_BUY
        })
        return self.order_market(**params)

    def order_limit_buy(self, **params):
        """
        Send in a new limit buy order
        """
        params.update({
            'side': self.SIDE_BUY
        })
        return self.order_limit(**params)

    def order_market_sell(self, **params):
        """
        Send in a new market sell order
        """
        params.update({
            'side': self.SIDE_SELL
        })
        return self.order_market(**params)

    def order_limit_sell(self, **params):
        """
        Send in a new limit sell order
        """
        params.update({
            'side': self.SIDE_SELL
        })
        return self.order_limit(**params)

    async def cancel_order(self, **params):
        """
        Cancel order by its pkey id.
        """
        id = params.get('id')
        return self._post('/market/orders/%s/cancel' % id)
| 31.377432 | 127 | 0.593998 |
import hashlib
import hmac
import requests
import time
import urllib
from operator import itemgetter
from hummingbot.market.openware.lib.helpers import date_to_milliseconds, interval_to_milliseconds
from hummingbot.market.openware.lib.exceptions import OpenwareAPIException, OpenwareRequestException, OpenwareWithdrawException
class Client(object):
ORDER_STATUS_NEW = 'NEW'
ORDER_STATUS_PARTIALLY_FILLED = 'PARTIALLY_FILLED'
ORDER_STATUS_FILLED = 'FILLED'
ORDER_STATUS_CANCELED = 'CANCELED'
ORDER_STATUS_PENDING_CANCEL = 'PENDING_CANCEL'
ORDER_STATUS_REJECTED = 'REJECTED'
ORDER_STATUS_EXPIRED = 'EXPIRED'
KLINE_INTERVAL_1MINUTE = '1m'
KLINE_INTERVAL_3MINUTE = '3m'
KLINE_INTERVAL_5MINUTE = '5m'
KLINE_INTERVAL_15MINUTE = '15m'
KLINE_INTERVAL_30MINUTE = '30m'
KLINE_INTERVAL_1HOUR = '1h'
KLINE_INTERVAL_2HOUR = '2h'
KLINE_INTERVAL_4HOUR = '4h'
KLINE_INTERVAL_6HOUR = '6h'
KLINE_INTERVAL_8HOUR = '8h'
KLINE_INTERVAL_12HOUR = '12h'
KLINE_INTERVAL_1DAY = '1d'
KLINE_INTERVAL_3DAY = '3d'
KLINE_INTERVAL_1WEEK = '1w'
KLINE_INTERVAL_1MONTH = '1M'
SIDE_BUY = 'buy'
SIDE_SELL = 'sell'
ORDER_TYPE_LIMIT = 'limit'
ORDER_TYPE_MARKET = 'market'
TIME_IN_FORCE_GTC = 'GTC'
TIME_IN_FORCE_IOC = 'IOC'
TIME_IN_FORCE_FOK = 'FOK'
ORDER_RESP_TYPE_ACK = 'ACK'
ORDER_RESP_TYPE_RESULT = 'RESULT'
ORDER_RESP_TYPE_FULL = 'FULL'
AGG_ID = 'a'
AGG_PRICE = 'p'
AGG_QUANTITY = 'q'
AGG_FIRST_TRADE_ID = 'f'
AGG_LAST_TRADE_ID = 'l'
AGG_TIME = 'T'
AGG_BUYER_MAKES = 'm'
AGG_BEST_MATCH = 'M'
def __init__(self, api_key, api_secret, api_url):
self.api_key = api_key
self.api_secret = api_secret
self.api_url = api_url
self.session = self._init_session()
self.version()
def _init_session(self):
timestamp = str(time.time() * 1000)
signature = self._generate_signature(timestamp)
session = requests.session()
session.headers.update({'Accept': 'application/json',
'User-Agent': 'openware/python',
'X-Auth-Apikey': self.api_key,
'X-Auth-Nonce': timestamp,
'X-Auth-Signature': signature})
return session
def update_headers(self):
timestamp = str(time.time() * 1000)
signature = self._generate_signature(timestamp)
self.session.headers.update({'Accept': 'application/json',
'User-Agent': 'openware/python',
'X-Auth-Apikey': self.api_key,
'X-Auth-Nonce': timestamp,
'X-Auth-Signature': signature})
return self.session
def _create_api_uri(self, path):
return "%s%s" % (self.api_url, path)
def _generate_signature(self, timestamp):
query_string = "%s%s" % (timestamp, self.api_key)
m = hmac.new(self.api_secret.encode('utf-8'), query_string.encode('utf-8'), hashlib.sha256)
return m.hexdigest()
def _request(self, method, uri, force_params=False, **kwargs):
data = kwargs.get('data', None)
if data and isinstance(data, dict):
kwargs['data'] = data
if data and (method == 'get' or force_params):
kwargs['params'] = kwargs['data']
del(kwargs['data'])
self.update_headers()
response = getattr(self.session, method)(uri, **kwargs)
print('!!!!!! ===== REQUEST %s %s' % (uri, method))
return self._handle_response(response)
def _request_api(self, method, path, **kwargs):
uri = self._create_api_uri(path)
return self._request(method, uri, **kwargs)
def _handle_response(self, response):
print(response.url)
print(response.status_code)
if not str(response.status_code).startswith('2'):
print(response.request.headers)
raise OpenwareAPIException(response)
try:
resp = response.json()
print('ALLLLLLLL OK')
print(response.url)
print(response.status_code)
return resp
except ValueError:
print(response.text)
raise OpenwareRequestException('Invalid Response: %s' % response.text)
def _get(self, path, **kwargs):
return self._request_api('get', path, **kwargs)
def _post(self, path, **kwargs):
return self._request_api('post', path, **kwargs)
def _put(self, path, **kwargs):
return self._request_api('put', path, **kwargs)
def _delete(self, path, **kwargs):
return self._request_api('delete', path, signed, version, **kwargs)
def version(self):
return self._get("/public/version")
def get_markets(self):
return self._get('/public/markets')
def get_currencies(self):
return self._get('/public/currencies')
def get_server_time(self):
return self._get('/public/timestamp')
def get_balances(self):
return self._get('/account/balances')
def get_trade_fee(self):
return self._get('/public/trading_fees')
async def get_my_trades(self, **params):
return self._get("/market/trades", data=params)
async def get_order_by_id(self, **params):
id = params.get('id')
result = self._get("/market/orders/{}".format(id))
return result
async def get_order(self, **params):
return self._get("/market/orders", data=params)
def get_deposit_address(self, currency):
return self._get("/account/deposit_address/%s" % currency)
def withdraw(self, **params):
return self._post("/account/withdraws", data=params)
def create_order(self, **params):
return self._post('/market/orders', data=params)
def order_market(self, **params):
params.update({
'ord_type': self.ORDER_TYPE_MARKET
})
return self.create_order(**params)
def order_limit(self, **params):
params.update({
'ord_type': self.ORDER_TYPE_LIMIT
})
return self.create_order(**params)
def order_market_buy(self, **params):
params.update({
'side': self.SIDE_BUY
})
return self.order_market(**params)
def order_limit_buy(self, **params):
params.update({
'side': self.SIDE_BUY
})
return self.order_limit(**params)
def order_market_sell(self, **params):
params.update({
'side': self.SIDE_SELL
})
return self.order_market(**params)
def order_limit_sell(self, **params):
params.update({
'side': self.SIDE_SELL
})
return self.order_limit(**params)
async def cancel_order(self, **params):
id = params.get('id')
print('!!!!!++++ canceling order')
print('/market/orders/%s/cancel' % id)
return self._post('/market/orders/%s/cancel' % id)
| true | true |
1c3d6a42fc6bf4312c5994225342346fe714440e | 21,139 | py | Python | httprunner/make.py | panyuan209/httprunner | d90f2b9ab06963e8efa1c327975fca5296d6bc39 | [
"Apache-2.0"
] | null | null | null | httprunner/make.py | panyuan209/httprunner | d90f2b9ab06963e8efa1c327975fca5296d6bc39 | [
"Apache-2.0"
] | null | null | null | httprunner/make.py | panyuan209/httprunner | d90f2b9ab06963e8efa1c327975fca5296d6bc39 | [
"Apache-2.0"
] | null | null | null | # 生成 pytest 测试用例文件
import os
import string
import subprocess
import sys
from typing import Text, List, Tuple, Dict, Set, NoReturn
import jinja2
from loguru import logger
from sentry_sdk import capture_exception
from httprunner import exceptions, __version__
from httprunner.compat import (
ensure_testcase_v3_api,
ensure_testcase_v3,
convert_variables,
ensure_path_sep,
)
from httprunner.loader import (
load_folder_files,
load_test_file,
load_testcase,
load_testsuite,
load_project_meta,
convert_relative_project_root_dir,
)
from httprunner.response import uniform_validator
from httprunner.utils import merge_variables, is_support_multiprocessing
""" cache converted pytest files, avoid duplicate making
"""
pytest_files_made_cache_mapping: Dict[Text, Text] = {}
""" save generated pytest files to run, except referenced testcase
"""
pytest_files_run_set: Set = set()
__TEMPLATE__ = jinja2.Template(
"""# NOTE: Generated By HttpRunner v{{ version }}
# FROM: {{ testcase_path }}
{% if imports_list and diff_levels > 0 %}
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__){% for _ in range(diff_levels) %}.parent{% endfor %}))
{% endif %}
{% if parameters %}
import pytest
from httprunner import Parameters
{% endif %}
from httprunner import HttpRunner, Config, Step, RunRequest, RunTestCase
{% for import_str in imports_list %}
{{ import_str }}
{% endfor %}
class {{ class_name }}(HttpRunner):
{% if parameters %}
@pytest.mark.parametrize("param", Parameters({{parameters}}))
def test_start(self, param):
super().test_start(param)
{% endif %}
config = {{ config_chain_style }}
teststeps = [
{% for step_chain_style in teststeps_chain_style %}
{{ step_chain_style }},
{% endfor %}
]
if __name__ == "__main__":
{{ class_name }}().test_start()
"""
)
def __ensure_absolute(path: Text) -> Text:
    """Resolve a testcase path to an absolute, OS-normalized path.

    Exits the process when the resolved file does not exist.
    """
    # strip a leading relative marker, e.g. "./test.yml" or ".\\test.yml"
    for relative_prefix in ("./", ".\\"):
        if path.startswith(relative_prefix):
            path = path[len(relative_prefix):]
            break
    path = ensure_path_sep(path)
    project_meta = load_project_meta(path)
    if os.path.isabs(path):
        absolute_path = path
    else:
        absolute_path = os.path.join(project_meta.RootDir, path)
    if not os.path.isfile(absolute_path):
        logger.error(f"Invalid testcase file path: {absolute_path}")
        sys.exit(1)
    return absolute_path
def ensure_file_abs_path_valid(file_abs_path: Text) -> Text:
    """ ensure file path valid for pytest, handle cases when directory name includes dot/hyphen/space

    Args:
        file_abs_path: absolute file path

    Returns:
        ensured valid absolute file path

    """
    project_meta = load_project_meta(file_abs_path)
    raw_abs_file_name, file_suffix = os.path.splitext(file_abs_path)
    file_suffix = file_suffix.lower()
    raw_file_relative_name = convert_relative_project_root_dir(raw_abs_file_name)
    if raw_file_relative_name == "":
        # path equals the project root itself; nothing to sanitize
        return file_abs_path
    path_names = []
    for name in raw_file_relative_name.rstrip(os.sep).split(os.sep):
        if name[0] in string.digits:
            # ensure file name not startswith digit
            # 19 => T19, 2C => T2C
            name = f"T{name}"
        if name.startswith("."):
            # avoid ".csv" been converted to "_csv"
            pass
        else:
            # handle cases when directory name includes dot/hyphen/space
            name = name.replace(" ", "_").replace(".", "_").replace("-", "_")
        path_names.append(name)
    # rebuild the path under the project root with the sanitized components
    new_file_path = os.path.join(
        project_meta.RootDir, f"{os.sep.join(path_names)}{file_suffix}"
    )
    return new_file_path
def __ensure_testcase_module(path: Text) -> NoReturn:
    """Make the generated file's directory a python package by creating
    an __init__.py beside it when one is missing."""
    package_dir = os.path.dirname(path)
    init_file = os.path.join(package_dir, "__init__.py")
    if os.path.isfile(init_file):
        return
    with open(init_file, "w", encoding="utf-8") as f:
        f.write("# NOTICE: Generated By HttpRunner. DO NOT EDIT!\n")
def convert_testcase_path(testcase_abs_path: Text) -> Tuple[Text, Text]:
    """convert single YAML/JSON testcase path to python file"""
    valid_path = ensure_file_abs_path_valid(testcase_abs_path)
    parent_dir, base_name = os.path.split(valid_path)
    stem, _ = os.path.splitext(base_name)
    testcase_python_abs_path = os.path.join(parent_dir, f"{stem}_test.py")
    # convert title case, e.g. request_with_variables => RequestWithVariables
    name_in_title_case = stem.title().replace("_", "")
    return testcase_python_abs_path, name_in_title_case
def format_pytest_with_black(*python_paths: Text) -> NoReturn:
    """Run the `black` formatter over the generated pytest files.

    Exits the process when black fails or is not installed.
    """
    logger.info("format pytest cases with black ...")
    try:
        if is_support_multiprocessing() or len(python_paths) <= 1:
            subprocess.run(["black", *python_paths])
        else:
            logger.warning(
                f"this system does not support multiprocessing well, format files one by one ..."
            )
            for python_path in python_paths:
                subprocess.run(["black", python_path])
    except subprocess.CalledProcessError as ex:
        capture_exception(ex)
        logger.error(ex)
        sys.exit(1)
    except FileNotFoundError:
        err_msg = """
missing dependency tool: black
install black manually and try again:
$ pip install black
"""
        logger.error(err_msg)
        sys.exit(1)
def make_config_chain_style(config: Dict) -> Text:
    """Convert a testcase "config" dict to chain-style Config source code.

    Args:
        config: parsed testcase config section (requires "name")

    Returns:
        a Python source snippet, e.g. 'Config("demo").base_url("...")'
    """
    config_chain_style = f'Config("{config["name"]}")'

    # BUGFIX: use .get() so a config without a "variables" key no longer
    # raises KeyError
    variables = config.get("variables")
    if variables:
        config_chain_style += f".variables(**{variables})"

    if "base_url" in config:
        config_chain_style += f'.base_url("{config["base_url"]}")'

    if "verify" in config:
        config_chain_style += f'.verify({config["verify"]})'

    if "export" in config:
        config_chain_style += f'.export(*{config["export"]})'

    if "weight" in config:
        config_chain_style += f'.locust_weight({config["weight"]})'

    return config_chain_style
def make_request_chain_style(request: Dict) -> Text:
    """Convert a teststep "request" dict to chain-style RunRequest source."""
    http_method = request["method"].lower()
    target_url = request["url"]
    parts = [f'.{http_method}("{target_url}")']

    if "params" in request:
        query = request["params"]
        parts.append(f".with_params(**{query})")

    if "headers" in request:
        headers = request["headers"]
        parts.append(f".with_headers(**{headers})")

    if "cookies" in request:
        cookies = request["cookies"]
        parts.append(f".with_cookies(**{cookies})")

    if "data" in request:
        payload = request["data"]
        # string payloads must be quoted in the emitted source
        if isinstance(payload, Text):
            payload = f'"{payload}"'
        parts.append(f".with_data({payload})")

    if "json" in request:
        json_payload = request["json"]
        if isinstance(json_payload, Text):
            json_payload = f'"{json_payload}"'
        parts.append(f".with_json({json_payload})")

    if "timeout" in request:
        timeout = request["timeout"]
        parts.append(f".set_timeout({timeout})")

    if "verify" in request:
        verify = request["verify"]
        parts.append(f".set_verify({verify})")

    if "allow_redirects" in request:
        allow_redirects = request["allow_redirects"]
        parts.append(f".set_allow_redirects({allow_redirects})")

    if "upload" in request:
        upload = request["upload"]
        parts.append(f".upload(**{upload})")

    return "".join(parts)
def make_teststep_chain_style(teststep: Dict) -> Text:
    """Convert one teststep dict to chain-style Step(...) source code.

    A teststep is either a request step (emits RunRequest) or a testcase
    reference step (emits RunTestCase). Emission order mirrors the
    HttpRunner chain API: variables, setup hooks, request/call, teardown
    hooks, extract/export, validators.

    Raises:
        exceptions.TestCaseFormatError: when the step is neither kind,
            or a hook entry has an unsupported shape.
    """
    if teststep.get("request"):
        step_info = f'RunRequest("{teststep["name"]}")'
    elif teststep.get("testcase"):
        step_info = f'RunTestCase("{teststep["name"]}")'
    else:
        raise exceptions.TestCaseFormatError(f"Invalid teststep: {teststep}")
    if "variables" in teststep:
        variables = teststep["variables"]
        step_info += f".with_variables(**{variables})"
    if "setup_hooks" in teststep:
        setup_hooks = teststep["setup_hooks"]
        for hook in setup_hooks:
            # a hook is either a plain expression string, or a one-item
            # dict {assign_var_name: expression}
            if isinstance(hook, Text):
                step_info += f'.setup_hook("{hook}")'
            elif isinstance(hook, Dict) and len(hook) == 1:
                assign_var_name, hook_content = list(hook.items())[0]
                step_info += f'.setup_hook("{hook_content}", "{assign_var_name}")'
            else:
                raise exceptions.TestCaseFormatError(f"Invalid setup hook: {hook}")
    if teststep.get("request"):
        step_info += make_request_chain_style(teststep["request"])
    elif teststep.get("testcase"):
        # "testcase" holds the referenced testcase class name by this point
        testcase = teststep["testcase"]
        call_ref_testcase = f".call({testcase})"
        step_info += call_ref_testcase
    if "teardown_hooks" in teststep:
        teardown_hooks = teststep["teardown_hooks"]
        for hook in teardown_hooks:
            if isinstance(hook, Text):
                step_info += f'.teardown_hook("{hook}")'
            elif isinstance(hook, Dict) and len(hook) == 1:
                assign_var_name, hook_content = list(hook.items())[0]
                step_info += f'.teardown_hook("{hook_content}", "{assign_var_name}")'
            else:
                raise exceptions.TestCaseFormatError(f"Invalid teardown hook: {hook}")
    if "extract" in teststep:
        # request step
        step_info += ".extract()"
        for extract_name, extract_path in teststep["extract"].items():
            step_info += f""".with_jmespath('{extract_path}', '{extract_name}')"""
    if "export" in teststep:
        # reference testcase step
        export: List[Text] = teststep["export"]
        step_info += f".export(*{export})"
    if "validate" in teststep:
        step_info += ".validate()"
        for v in teststep["validate"]:
            validator = uniform_validator(v)
            assert_method = validator["assert"]
            check = validator["check"]
            # choose quote style so the emitted literal stays valid
            if '"' in check:
                # e.g. body."user-agent" => 'body."user-agent"'
                check = f"'{check}'"
            else:
                check = f'"{check}"'
            expect = validator["expect"]
            if isinstance(expect, Text):
                expect = f'"{expect}"'
            message = validator["message"]
            if message:
                step_info += f".assert_{assert_method}({check}, {expect}, '{message}')"
            else:
                step_info += f".assert_{assert_method}({check}, {expect})"
    return f"Step({step_info})"
def make_testcase(testcase: Dict, dir_path: Text = None) -> Text:
    """convert valid testcase dict to pytest file path

    Recursively generates pytest files for referenced testcases first,
    then renders this testcase through __TEMPLATE__ and writes it beside
    the original (or under dir_path when given).

    Args:
        testcase: parsed testcase dict (v2 format accepted, upgraded to v3)
        dir_path: optional target directory overriding the default location

    Returns:
        absolute path of the generated pytest file
    """
    # ensure compatibility with testcase format v2
    testcase = ensure_testcase_v3(testcase)
    # validate testcase format
    load_testcase(testcase)
    testcase_abs_path = __ensure_absolute(testcase["config"]["path"])
    logger.info(f"start to make testcase: {testcase_abs_path}")
    testcase_python_abs_path, testcase_cls_name = convert_testcase_path(
        testcase_abs_path
    )
    if dir_path:
        testcase_python_abs_path = os.path.join(
            dir_path, os.path.basename(testcase_python_abs_path)
        )
    # skip files already generated in this run (also breaks reference cycles
    # for already-made testcases)
    global pytest_files_made_cache_mapping
    if testcase_python_abs_path in pytest_files_made_cache_mapping:
        return testcase_python_abs_path
    config = testcase["config"]
    config["path"] = convert_relative_project_root_dir(testcase_python_abs_path)
    config["variables"] = convert_variables(
        config.get("variables", {}), testcase_abs_path
    )
    # prepare reference testcase
    imports_list = []
    teststeps = testcase["teststeps"]
    for teststep in teststeps:
        if not teststep.get("testcase"):
            continue
        # make ref testcase pytest file
        ref_testcase_path = __ensure_absolute(teststep["testcase"])
        test_content = load_test_file(ref_testcase_path)
        if not isinstance(test_content, Dict):
            raise exceptions.TestCaseFormatError(f"Invalid teststep: {teststep}")
        # api in v2 format, convert to v3 testcase
        if "request" in test_content and "name" in test_content:
            test_content = ensure_testcase_v3_api(test_content)
        test_content.setdefault("config", {})["path"] = ref_testcase_path
        # recurse: the referenced testcase must exist before we can import it
        ref_testcase_python_abs_path = make_testcase(test_content)
        # override testcase export
        ref_testcase_export: List = test_content["config"].get("export", [])
        if ref_testcase_export:
            step_export: List = teststep.setdefault("export", [])
            step_export.extend(ref_testcase_export)
            teststep["export"] = list(set(step_export))
        # prepare ref testcase class name
        ref_testcase_cls_name = pytest_files_made_cache_mapping[
            ref_testcase_python_abs_path
        ]
        teststep["testcase"] = ref_testcase_cls_name
        # prepare import ref testcase
        ref_testcase_python_relative_path = convert_relative_project_root_dir(
            ref_testcase_python_abs_path
        )
        ref_module_name, _ = os.path.splitext(ref_testcase_python_relative_path)
        ref_module_name = ref_module_name.replace(os.sep, ".")
        import_expr = f"from {ref_module_name} import TestCase{ref_testcase_cls_name} as {ref_testcase_cls_name}"
        if import_expr not in imports_list:
            imports_list.append(import_expr)
    testcase_path = convert_relative_project_root_dir(testcase_abs_path)
    # current file compared to ProjectRootDir
    diff_levels = len(testcase_path.split(os.sep))
    data = {
        "version": __version__,
        "testcase_path": testcase_path,
        "diff_levels": diff_levels,
        "class_name": f"TestCase{testcase_cls_name}",
        "imports_list": imports_list,
        "config_chain_style": make_config_chain_style(config),
        "parameters": config.get("parameters"),
        "teststeps_chain_style": [
            make_teststep_chain_style(step) for step in teststeps
        ],
    }
    content = __TEMPLATE__.render(data)
    # ensure new file's directory exists
    dir_path = os.path.dirname(testcase_python_abs_path)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    with open(testcase_python_abs_path, "w", encoding="utf-8") as f:
        f.write(content)
    # record the generated file and make its directory an importable package
    pytest_files_made_cache_mapping[testcase_python_abs_path] = testcase_cls_name
    __ensure_testcase_module(testcase_python_abs_path)
    logger.info(f"generated testcase: {testcase_python_abs_path}")
    return testcase_python_abs_path
def make_testsuite(testsuite: Dict) -> NoReturn:
    """Convert a validated testsuite dict into a folder of pytest testcases."""
    # schema validation of the testsuite structure
    load_testsuite(testsuite)

    suite_config = testsuite["config"]
    suite_path = suite_config["path"]
    suite_variables = convert_variables(
        suite_config.get("variables", {}), suite_path
    )

    logger.info(f"start to make testsuite: {suite_path}")

    # generated testcases live in a directory named after the suite file,
    # e.g. demo_testsuite.yml => demo_testsuite_yml/
    suite_path = ensure_file_abs_path_valid(suite_path)
    base, suffix = os.path.splitext(suite_path)
    suite_dir = f"{base}_{suffix.lstrip('.')}"

    for testcase in testsuite["testcases"]:
        # load the referenced testcase content
        ref_path = __ensure_absolute(testcase["testcase"])
        testcase_dict = load_test_file(ref_path)
        testcase_dict.setdefault("config", {})
        testcase_dict["config"]["path"] = ref_path
        # the suite-level entry name wins over the testcase's own name
        testcase_dict["config"]["name"] = testcase["name"]

        # base_url: suite config first, then the suite's testcase entry
        base_url = suite_config.get("base_url") or testcase.get("base_url")
        if base_url:
            testcase_dict["config"]["base_url"] = base_url

        # verify flag comes from the suite config when present
        if "verify" in suite_config:
            testcase_dict["config"]["verify"] = suite_config["verify"]

        # variable precedence:
        #   suite testcase entry > suite config > testcase config
        entry_variables = convert_variables(
            testcase.get("variables", {}), ref_path
        )
        entry_variables = merge_variables(entry_variables, suite_variables)
        testcase_dict["config"]["variables"] = convert_variables(
            testcase_dict["config"].get("variables", {}), ref_path
        )
        testcase_dict["config"]["variables"].update(entry_variables)

        # locust weight override
        if "weight" in testcase:
            testcase_dict["config"]["weight"] = testcase["weight"]

        # generate the pytest file and register it for the run set
        pytest_files_run_set.add(make_testcase(testcase_dict, suite_dir))
def __make(tests_path: Text) -> NoReturn:
    """Make pytest file(s) from a testcase/testsuite file or a folder.

    Generated pytest file paths are cached in
    ``pytest_files_made_cache_mapping``; runnable files are collected in
    ``pytest_files_run_set``. Invalid files are skipped with a warning
    rather than aborting the whole run.

    Args:
        tests_path: absolute path of a YAML/JSON file or a folder.

    Raises:
        exceptions.TestcaseNotFound: if ``tests_path`` does not exist.
    """
    logger.info(f"make path: {tests_path}")
    test_files = []
    if os.path.isdir(tests_path):
        files_list = load_folder_files(tests_path)
        test_files.extend(files_list)
    elif os.path.isfile(tests_path):
        test_files.append(tests_path)
    else:
        raise exceptions.TestcaseNotFound(f"Invalid tests path: {tests_path}")
    for test_file in test_files:
        # already a pytest file: collect it for the run directly
        if test_file.lower().endswith("_test.py"):
            pytest_files_run_set.add(test_file)
            continue
        try:
            test_content = load_test_file(test_file)
        except (exceptions.FileNotFound, exceptions.FileFormatError) as ex:
            logger.warning(f"Invalid test file: {test_file}\n{type(ex).__name__}: {ex}")
            continue
        if not isinstance(test_content, Dict):
            logger.warning(
                f"Invalid test file: {test_file}\n"
                f"reason: test content not in dict format."
            )
            continue
        # api in v2 format, convert to v3 testcase
        if "request" in test_content and "name" in test_content:
            test_content = ensure_testcase_v3_api(test_content)
        if "config" not in test_content:
            logger.warning(
                f"Invalid testcase/testsuite file: {test_file}\n"
                f"reason: missing config part."
            )
            continue
        elif not isinstance(test_content["config"], Dict):
            logger.warning(
                f"Invalid testcase/testsuite file: {test_file}\n"
                f"reason: config should be dict type, got {test_content['config']}"
            )
            continue
        # ensure path absolute
        test_content.setdefault("config", {})["path"] = test_file
        # testcase: has "teststeps"
        if "teststeps" in test_content:
            try:
                testcase_pytest_path = make_testcase(test_content)
                pytest_files_run_set.add(testcase_pytest_path)
            except exceptions.TestCaseFormatError as ex:
                logger.warning(
                    f"Invalid testcase file: {test_file}\n{type(ex).__name__}: {ex}"
                )
                continue
        # testsuite: has "testcases"
        elif "testcases" in test_content:
            try:
                make_testsuite(test_content)
            except exceptions.TestSuiteFormatError as ex:
                logger.warning(
                    f"Invalid testsuite file: {test_file}\n{type(ex).__name__}: {ex}"
                )
                continue
        # invalid format: neither testcase nor testsuite
        else:
            logger.warning(
                f"Invalid test file: {test_file}\n"
                f"reason: file content is neither testcase nor testsuite"
            )
def main_make(tests_paths: List[Text]) -> List[Text]:
    """Make pytest files for every given path and return the files to run."""
    if not tests_paths:
        return []

    for raw_path in tests_paths:
        # normalize separators and anchor relative paths at the CWD
        path = ensure_path_sep(raw_path)
        if not os.path.isabs(path):
            path = os.path.join(os.getcwd(), path)

        try:
            __make(path)
        except exceptions.MyBaseError as ex:
            logger.error(ex)
            sys.exit(1)

    # run black over every generated pytest file
    format_pytest_with_black(*pytest_files_made_cache_mapping.keys())
    return list(pytest_files_run_set)
def init_make_parser(subparsers):
    """Register the ``make`` sub-command on the given argparse subparsers."""
    make_parser = subparsers.add_parser(
        "make", help="Convert YAML/JSON testcases to pytest cases.",
    )
    make_parser.add_argument(
        "testcase_path", nargs="*", help="Specify YAML/JSON testcase file/folder path"
    )
    return make_parser
| 33.930979 | 113 | 0.645111 |
import os
import string
import subprocess
import sys
from typing import Text, List, Tuple, Dict, Set, NoReturn
import jinja2
from loguru import logger
from sentry_sdk import capture_exception
from httprunner import exceptions, __version__
from httprunner.compat import (
ensure_testcase_v3_api,
ensure_testcase_v3,
convert_variables,
ensure_path_sep,
)
from httprunner.loader import (
load_folder_files,
load_test_file,
load_testcase,
load_testsuite,
load_project_meta,
convert_relative_project_root_dir,
)
from httprunner.response import uniform_validator
from httprunner.utils import merge_variables, is_support_multiprocessing
pytest_files_made_cache_mapping: Dict[Text, Text] = {}
pytest_files_run_set: Set = set()
__TEMPLATE__ = jinja2.Template(
"""# NOTE: Generated By HttpRunner v{{ version }}
# FROM: {{ testcase_path }}
{% if imports_list and diff_levels > 0 %}
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__){% for _ in range(diff_levels) %}.parent{% endfor %}))
{% endif %}
{% if parameters %}
import pytest
from httprunner import Parameters
{% endif %}
from httprunner import HttpRunner, Config, Step, RunRequest, RunTestCase
{% for import_str in imports_list %}
{{ import_str }}
{% endfor %}
class {{ class_name }}(HttpRunner):
{% if parameters %}
@pytest.mark.parametrize("param", Parameters({{parameters}}))
def test_start(self, param):
super().test_start(param)
{% endif %}
config = {{ config_chain_style }}
teststeps = [
{% for step_chain_style in teststeps_chain_style %}
{{ step_chain_style }},
{% endfor %}
]
if __name__ == "__main__":
{{ class_name }}().test_start()
"""
)
def __ensure_absolute(path: Text) -> Text:
    """Return the absolute path of a testcase file; exit(1) if it is missing.

    Relative paths are resolved against the project root directory
    reported by ``load_project_meta``.
    """
    # strip a leading "./" or ".\" so the join below does not duplicate it
    if path.startswith("./"):
        path = path[len("./") :]
    elif path.startswith(".\\"):
        path = path[len(".\\") :]
    path = ensure_path_sep(path)
    project_meta = load_project_meta(path)
    if os.path.isabs(path):
        absolute_path = path
    else:
        absolute_path = os.path.join(project_meta.RootDir, path)
    if not os.path.isfile(absolute_path):
        logger.error(f"Invalid testcase file path: {absolute_path}")
        sys.exit(1)
    return absolute_path
def ensure_file_abs_path_valid(file_abs_path: Text) -> Text:
    """Sanitize a file path so every component is a valid python identifier.

    Each path component (relative to the project root) is transformed:
    a leading digit gets a ``T`` prefix, and spaces/dots/dashes become
    underscores; hidden components (leading ``.``) are kept as-is.
    Returns the original path unchanged when it IS the project root.
    """
    project_meta = load_project_meta(file_abs_path)
    raw_abs_file_name, file_suffix = os.path.splitext(file_abs_path)
    file_suffix = file_suffix.lower()
    raw_file_relative_name = convert_relative_project_root_dir(raw_abs_file_name)
    if raw_file_relative_name == "":
        return file_abs_path
    path_names = []
    for name in raw_file_relative_name.rstrip(os.sep).split(os.sep):
        # python identifiers cannot start with a digit
        if name[0] in string.digits:
            name = f"T{name}"
        # leave hidden directories untouched
        if name.startswith("."):
            pass
        else:
            name = name.replace(" ", "_").replace(".", "_").replace("-", "_")
        path_names.append(name)
    new_file_path = os.path.join(
        project_meta.RootDir, f"{os.sep.join(path_names)}{file_suffix}"
    )
    return new_file_path
def __ensure_testcase_module(path: Text) -> NoReturn:
    """Make sure the directory containing ``path`` is a python package."""
    package_dir = os.path.dirname(path)
    init_file = os.path.join(package_dir, "__init__.py")
    if not os.path.isfile(init_file):
        with open(init_file, "w", encoding="utf-8") as marker:
            marker.write("# NOTICE: Generated By HttpRunner. DO NOT EDIT!\n")
def convert_testcase_path(testcase_abs_path: Text) -> Tuple[Text, Text]:
    """Map a testcase file path to its pytest file path and class-name stem."""
    sanitized_path = ensure_file_abs_path_valid(testcase_abs_path)
    folder = os.path.dirname(sanitized_path)
    stem, _ = os.path.splitext(os.path.basename(sanitized_path))
    # demo_case.yml => demo_case_test.py / DemoCase
    pytest_path = os.path.join(folder, f"{stem}_test.py")
    class_name_stem = stem.title().replace("_", "")
    return pytest_path, class_name_stem
def format_pytest_with_black(*python_paths: Text) -> NoReturn:
    """Run the ``black`` formatter over the generated pytest files.

    Exits the process when black fails or is not installed. On platforms
    where multiprocessing is flaky, files are formatted one at a time.
    """
    logger.info("format pytest cases with black ...")
    try:
        if is_support_multiprocessing() or len(python_paths) <= 1:
            subprocess.run(["black", *python_paths])
        else:
            # one subprocess per file as a workaround
            logger.warning(
                f"this system does not support multiprocessing well, format files one by one ..."
            )
            [subprocess.run(["black", path]) for path in python_paths]
    except subprocess.CalledProcessError as ex:
        capture_exception(ex)
        logger.error(ex)
        sys.exit(1)
    except FileNotFoundError:
        # black binary not on PATH
        err_msg = """
missing dependency tool: black
install black manually and try again:
$ pip install black
"""
        logger.error(err_msg)
        sys.exit(1)
def make_config_chain_style(config: Dict) -> Text:
    """Render a testcase config dict as chained ``Config`` builder code."""
    parts = [f'Config("{config["name"]}")']
    if config["variables"]:
        parts.append(f'.variables(**{config["variables"]})')
    if "base_url" in config:
        parts.append(f'.base_url("{config["base_url"]}")')
    if "verify" in config:
        parts.append(f'.verify({config["verify"]})')
    if "export" in config:
        parts.append(f'.export(*{config["export"]})')
    if "weight" in config:
        parts.append(f'.locust_weight({config["weight"]})')
    return "".join(parts)
def make_request_chain_style(request: Dict) -> Text:
    """Render a request dict as chained ``RunRequest`` call code."""
    fragments = [f'.{request["method"].lower()}("{request["url"]}")']

    if "params" in request:
        fragments.append(f'.with_params(**{request["params"]})')
    if "headers" in request:
        fragments.append(f'.with_headers(**{request["headers"]})')
    if "cookies" in request:
        fragments.append(f'.with_cookies(**{request["cookies"]})')

    if "data" in request:
        data = request["data"]
        if isinstance(data, Text):
            # quote plain strings so the generated code stays valid python
            data = f'"{data}"'
        fragments.append(f".with_data({data})")

    if "json" in request:
        req_json = request["json"]
        if isinstance(req_json, Text):
            req_json = f'"{req_json}"'
        fragments.append(f".with_json({req_json})")

    if "timeout" in request:
        fragments.append(f'.set_timeout({request["timeout"]})')
    if "verify" in request:
        fragments.append(f'.set_verify({request["verify"]})')
    if "allow_redirects" in request:
        fragments.append(f'.set_allow_redirects({request["allow_redirects"]})')
    if "upload" in request:
        fragments.append(f'.upload(**{request["upload"]})')

    return "".join(fragments)
def make_teststep_chain_style(teststep: Dict) -> Text:
    """Render one teststep dict as ``Step(RunRequest(...)...)`` builder code.

    Raises:
        exceptions.TestCaseFormatError: when the step is neither a request
            nor a testcase reference, or a hook has an invalid shape.
    """
    # a step is either a plain request or a reference to another testcase
    if teststep.get("request"):
        step_info = f'RunRequest("{teststep["name"]}")'
    elif teststep.get("testcase"):
        step_info = f'RunTestCase("{teststep["name"]}")'
    else:
        raise exceptions.TestCaseFormatError(f"Invalid teststep: {teststep}")
    if "variables" in teststep:
        variables = teststep["variables"]
        step_info += f".with_variables(**{variables})"
    if "setup_hooks" in teststep:
        setup_hooks = teststep["setup_hooks"]
        for hook in setup_hooks:
            # plain string hook, or one-item dict {assign_var: hook_expr}
            if isinstance(hook, Text):
                step_info += f'.setup_hook("{hook}")'
            elif isinstance(hook, Dict) and len(hook) == 1:
                assign_var_name, hook_content = list(hook.items())[0]
                step_info += f'.setup_hook("{hook_content}", "{assign_var_name}")'
            else:
                raise exceptions.TestCaseFormatError(f"Invalid setup hook: {hook}")
    if teststep.get("request"):
        step_info += make_request_chain_style(teststep["request"])
    elif teststep.get("testcase"):
        # referenced testcase class name was substituted in by make_testcase()
        testcase = teststep["testcase"]
        call_ref_testcase = f".call({testcase})"
        step_info += call_ref_testcase
    if "teardown_hooks" in teststep:
        teardown_hooks = teststep["teardown_hooks"]
        for hook in teardown_hooks:
            if isinstance(hook, Text):
                step_info += f'.teardown_hook("{hook}")'
            elif isinstance(hook, Dict) and len(hook) == 1:
                assign_var_name, hook_content = list(hook.items())[0]
                step_info += f'.teardown_hook("{hook_content}", "{assign_var_name}")'
            else:
                raise exceptions.TestCaseFormatError(f"Invalid teardown hook: {hook}")
    if "extract" in teststep:
        step_info += ".extract()"
        for extract_name, extract_path in teststep["extract"].items():
            step_info += f""".with_jmespath('{extract_path}', '{extract_name}')"""
    if "export" in teststep:
        export: List[Text] = teststep["export"]
        step_info += f".export(*{export})"
    if "validate" in teststep:
        step_info += ".validate()"
        for v in teststep["validate"]:
            validator = uniform_validator(v)
            assert_method = validator["assert"]
            check = validator["check"]
            if '"' in check:
                # e.g. body."user-agent" => 'body."user-agent"'
                check = f"'{check}'"
            else:
                check = f'"{check}"'
            expect = validator["expect"]
            if isinstance(expect, Text):
                expect = f'"{expect}"'
            message = validator["message"]
            if message:
                step_info += f".assert_{assert_method}({check}, {expect}, '{message}')"
            else:
                step_info += f".assert_{assert_method}({check}, {expect})"
    return f"Step({step_info})"
def make_testcase(testcase: Dict, dir_path: Text = None) -> Text:
    """Convert a valid testcase dict to a pytest file; return the file path.

    Recursively generates pytest files for referenced testcases first and
    rewrites each reference into an importable class name. Results are
    cached in ``pytest_files_made_cache_mapping`` so a testcase is only
    generated once per run.

    Args:
        testcase: testcase content in dict format (v2 or v3).
        dir_path: optional target directory for the generated pytest file.
    """
    # ensure compatibility with testcase format v2
    testcase = ensure_testcase_v3(testcase)
    # validate testcase format
    load_testcase(testcase)
    testcase_abs_path = __ensure_absolute(testcase["config"]["path"])
    logger.info(f"start to make testcase: {testcase_abs_path}")
    testcase_python_abs_path, testcase_cls_name = convert_testcase_path(
        testcase_abs_path
    )
    if dir_path:
        testcase_python_abs_path = os.path.join(
            dir_path, os.path.basename(testcase_python_abs_path)
        )
    global pytest_files_made_cache_mapping
    if testcase_python_abs_path in pytest_files_made_cache_mapping:
        return testcase_python_abs_path
    config = testcase["config"]
    config["path"] = convert_relative_project_root_dir(testcase_python_abs_path)
    config["variables"] = convert_variables(
        config.get("variables", {}), testcase_abs_path
    )
    # prepare reference testcase
    imports_list = []
    teststeps = testcase["teststeps"]
    for teststep in teststeps:
        if not teststep.get("testcase"):
            continue
        # make ref testcase pytest file
        ref_testcase_path = __ensure_absolute(teststep["testcase"])
        test_content = load_test_file(ref_testcase_path)
        if not isinstance(test_content, Dict):
            raise exceptions.TestCaseFormatError(f"Invalid teststep: {teststep}")
        # api in v2 format, convert to v3 testcase
        if "request" in test_content and "name" in test_content:
            test_content = ensure_testcase_v3_api(test_content)
        test_content.setdefault("config", {})["path"] = ref_testcase_path
        ref_testcase_python_abs_path = make_testcase(test_content)
        # override testcase export
        ref_testcase_export: List = test_content["config"].get("export", [])
        if ref_testcase_export:
            step_export: List = teststep.setdefault("export", [])
            step_export.extend(ref_testcase_export)
            teststep["export"] = list(set(step_export))
        # prepare ref testcase class name
        ref_testcase_cls_name = pytest_files_made_cache_mapping[
            ref_testcase_python_abs_path
        ]
        teststep["testcase"] = ref_testcase_cls_name
        # prepare import ref testcase
        ref_testcase_python_relative_path = convert_relative_project_root_dir(
            ref_testcase_python_abs_path
        )
        ref_module_name, _ = os.path.splitext(ref_testcase_python_relative_path)
        ref_module_name = ref_module_name.replace(os.sep, ".")
        import_expr = f"from {ref_module_name} import TestCase{ref_testcase_cls_name} as {ref_testcase_cls_name}"
        if import_expr not in imports_list:
            imports_list.append(import_expr)
    testcase_path = convert_relative_project_root_dir(testcase_abs_path)
    # current file compared to ProjectRootDir
    diff_levels = len(testcase_path.split(os.sep))
    data = {
        "version": __version__,
        "testcase_path": testcase_path,
        "diff_levels": diff_levels,
        "class_name": f"TestCase{testcase_cls_name}",
        "imports_list": imports_list,
        "config_chain_style": make_config_chain_style(config),
        "parameters": config.get("parameters"),
        "teststeps_chain_style": [
            make_teststep_chain_style(step) for step in teststeps
        ],
    }
    content = __TEMPLATE__.render(data)
    # ensure new file's directory exists
    dir_path = os.path.dirname(testcase_python_abs_path)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    with open(testcase_python_abs_path, "w", encoding="utf-8") as f:
        f.write(content)
    pytest_files_made_cache_mapping[testcase_python_abs_path] = testcase_cls_name
    __ensure_testcase_module(testcase_python_abs_path)
    logger.info(f"generated testcase: {testcase_python_abs_path}")
    return testcase_python_abs_path
def make_testsuite(testsuite: Dict) -> NoReturn:
    """Convert a valid testsuite dict to a pytest folder with testcases.

    Each referenced testcase is regenerated under a directory named after
    the suite file, with suite-level overrides (name, base_url, verify,
    variables, weight) applied first.
    """
    # validate testsuite format
    load_testsuite(testsuite)
    testsuite_config = testsuite["config"]
    testsuite_path = testsuite_config["path"]
    testsuite_variables = convert_variables(
        testsuite_config.get("variables", {}), testsuite_path
    )
    logger.info(f"start to make testsuite: {testsuite_path}")
    # create directory with testsuite file name, put its testcases under this directory
    testsuite_path = ensure_file_abs_path_valid(testsuite_path)
    testsuite_dir, file_suffix = os.path.splitext(testsuite_path)
    # demo_testsuite.yml => demo_testsuite_yml
    testsuite_dir = f"{testsuite_dir}_{file_suffix.lstrip('.')}"
    for testcase in testsuite["testcases"]:
        # get referenced testcase content
        testcase_file = testcase["testcase"]
        testcase_path = __ensure_absolute(testcase_file)
        testcase_dict = load_test_file(testcase_path)
        testcase_dict.setdefault("config", {})
        testcase_dict["config"]["path"] = testcase_path
        # override testcase name
        testcase_dict["config"]["name"] = testcase["name"]
        # override base_url
        base_url = testsuite_config.get("base_url") or testcase.get("base_url")
        if base_url:
            testcase_dict["config"]["base_url"] = base_url
        # override verify
        if "verify" in testsuite_config:
            testcase_dict["config"]["verify"] = testsuite_config["verify"]
        # override variables
        # testsuite testcase variables > testsuite config variables
        testcase_variables = convert_variables(
            testcase.get("variables", {}), testcase_path
        )
        testcase_variables = merge_variables(testcase_variables, testsuite_variables)
        # testsuite testcase variables > testcase config variables
        testcase_dict["config"]["variables"] = convert_variables(
            testcase_dict["config"].get("variables", {}), testcase_path
        )
        testcase_dict["config"]["variables"].update(testcase_variables)
        # override weight
        if "weight" in testcase:
            testcase_dict["config"]["weight"] = testcase["weight"]
        # make testcase
        testcase_pytest_path = make_testcase(testcase_dict, testsuite_dir)
        pytest_files_run_set.add(testcase_pytest_path)
def __make(tests_path: Text) -> NoReturn:
    """Make pytest file(s) from a testcase/testsuite file or a folder.

    Generated pytest file paths are cached in
    ``pytest_files_made_cache_mapping``; runnable files are collected in
    ``pytest_files_run_set``. Invalid files are skipped with a warning.

    Args:
        tests_path: absolute path of a YAML/JSON file or a folder.

    Raises:
        exceptions.TestcaseNotFound: if ``tests_path`` does not exist.
    """
    logger.info(f"make path: {tests_path}")
    test_files = []
    if os.path.isdir(tests_path):
        files_list = load_folder_files(tests_path)
        test_files.extend(files_list)
    elif os.path.isfile(tests_path):
        test_files.append(tests_path)
    else:
        raise exceptions.TestcaseNotFound(f"Invalid tests path: {tests_path}")
    for test_file in test_files:
        # already a pytest file: collect it for the run directly
        if test_file.lower().endswith("_test.py"):
            pytest_files_run_set.add(test_file)
            continue
        try:
            test_content = load_test_file(test_file)
        except (exceptions.FileNotFound, exceptions.FileFormatError) as ex:
            logger.warning(f"Invalid test file: {test_file}\n{type(ex).__name__}: {ex}")
            continue
        if not isinstance(test_content, Dict):
            logger.warning(
                f"Invalid test file: {test_file}\n"
                f"reason: test content not in dict format."
            )
            continue
        # api in v2 format, convert to v3 testcase
        if "request" in test_content and "name" in test_content:
            test_content = ensure_testcase_v3_api(test_content)
        if "config" not in test_content:
            logger.warning(
                f"Invalid testcase/testsuite file: {test_file}\n"
                f"reason: missing config part."
            )
            continue
        elif not isinstance(test_content["config"], Dict):
            logger.warning(
                f"Invalid testcase/testsuite file: {test_file}\n"
                f"reason: config should be dict type, got {test_content['config']}"
            )
            continue
        # ensure path absolute
        test_content.setdefault("config", {})["path"] = test_file
        # testcase: has "teststeps"
        if "teststeps" in test_content:
            try:
                testcase_pytest_path = make_testcase(test_content)
                pytest_files_run_set.add(testcase_pytest_path)
            except exceptions.TestCaseFormatError as ex:
                logger.warning(
                    f"Invalid testcase file: {test_file}\n{type(ex).__name__}: {ex}"
                )
                continue
        # testsuite: has "testcases"
        elif "testcases" in test_content:
            try:
                make_testsuite(test_content)
            except exceptions.TestSuiteFormatError as ex:
                logger.warning(
                    f"Invalid testsuite file: {test_file}\n{type(ex).__name__}: {ex}"
                )
                continue
        # invalid format: neither testcase nor testsuite
        else:
            logger.warning(
                f"Invalid test file: {test_file}\n"
                f"reason: file content is neither testcase nor testsuite"
            )
def main_make(tests_paths: List[Text]) -> List[Text]:
    """Make pytest files for every given path and return the files to run."""
    if not tests_paths:
        return []

    for raw_path in tests_paths:
        # normalize separators and anchor relative paths at the CWD
        path = ensure_path_sep(raw_path)
        if not os.path.isabs(path):
            path = os.path.join(os.getcwd(), path)

        try:
            __make(path)
        except exceptions.MyBaseError as ex:
            logger.error(ex)
            sys.exit(1)

    # run black over every generated pytest file
    format_pytest_with_black(*pytest_files_made_cache_mapping.keys())
    return list(pytest_files_run_set)
def init_make_parser(subparsers):
    """Register the ``make`` sub-command on the given argparse subparsers."""
    make_parser = subparsers.add_parser(
        "make", help="Convert YAML/JSON testcases to pytest cases.",
    )
    make_parser.add_argument(
        "testcase_path", nargs="*", help="Specify YAML/JSON testcase file/folder path"
    )
    return make_parser
| true | true |
1c3d6aa05c53d101a9a755264ce2aeec7583a535 | 4,807 | py | Python | tools/accuracy_checker/accuracy_checker/annotation_converters/imagenet.py | PinDanil/open_model_zoo | 8538b2769d65d7ca24dd36db0340a9c143583812 | [
"Apache-2.0"
] | 1 | 2021-07-12T07:43:59.000Z | 2021-07-12T07:43:59.000Z | tools/accuracy_checker/accuracy_checker/annotation_converters/imagenet.py | PinDanil/open_model_zoo | 8538b2769d65d7ca24dd36db0340a9c143583812 | [
"Apache-2.0"
] | 3 | 2020-07-20T10:01:14.000Z | 2021-06-07T10:35:52.000Z | tools/accuracy_checker/accuracy_checker/annotation_converters/imagenet.py | ygnn123/open_model_zoo | 9ca5dbeff80464bf5728e8be25daedfe9a9208d7 | [
"Apache-2.0"
] | 1 | 2019-11-14T12:51:15.000Z | 2019-11-14T12:51:15.000Z | """
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pathlib import Path
import numpy as np
from ..config import PathField, BoolField
from ..representation import ClassificationAnnotation
from ..utils import read_txt, get_path, check_file_existence, read_json
from .format_converter import BaseFormatConverter, ConverterReturn, verify_label_map
class ImageNetFormatConverter(BaseFormatConverter):
    """Convert ImageNet-style txt annotations to ClassificationAnnotation.

    Each line of the annotation file contains an image file name and its
    numeric class label separated by whitespace.
    """
    __provider__ = 'imagenet'
    annotation_types = (ClassificationAnnotation, )

    @classmethod
    def parameters(cls):
        configuration_parameters = super().parameters()
        configuration_parameters.update({
            'annotation_file': PathField(description="Path to annotation in txt format."),
            'labels_file': PathField(
                optional=True,
                description="Path to file with word description of labels (synset words)."
            ),
            'has_background': BoolField(
                optional=True, default=False,
                description="Allows to add background label to original labels and"
                            " convert dataset for 1001 classes instead 1000."
            ),
            'images_dir': PathField(
                is_directory=True, optional=True,
                description='path to dataset images, used only for content existence check'
            ),
            'dataset_meta_file': PathField(
                description='path to json file with dataset meta (e.g. label_map, color_encoding)', optional=True
            )
        })
        return configuration_parameters

    def configure(self):
        self.annotation_file = self.get_value_from_config('annotation_file')
        self.labels_file = self.get_value_from_config('labels_file')
        self.has_background = self.get_value_from_config('has_background')
        # fall back to the annotation file's directory for the content check
        self.images_dir = self.get_value_from_config('images_dir') or self.annotation_file.parent
        self.dataset_meta = self.get_value_from_config('dataset_meta_file')

    def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
        """Parse the annotation file into ClassificationAnnotation objects.

        Returns:
            ConverterReturn with the annotation list, dataset meta (or None)
            and content errors (or None when check_content is disabled).
        """
        annotation = []
        content_errors = [] if check_content else None
        original_annotation = read_txt(get_path(self.annotation_file))
        num_iterations = len(original_annotation)
        for image_id, image in enumerate(original_annotation):
            image_name, label = image.split()
            # strip any directory prefix and an optional "...@" prefix
            image_name = Path(image_name).name.split('@')[-1]
            if check_content:
                if not check_file_existence(self.images_dir / image_name):
                    content_errors.append('{}: does not exist'.format(self.images_dir / image_name))
            # shift labels by one when a background class is prepended
            label = np.int64(label) if not self.has_background else np.int64(label) + 1
            annotation.append(ClassificationAnnotation(image_name, label))
            if progress_callback is not None and image_id % progress_interval == 0:
                progress_callback(image_id / num_iterations * 100)
        meta = self._create_meta(self.labels_file, self.dataset_meta, self.has_background) or None
        return ConverterReturn(annotation, meta, content_errors)

    @staticmethod
    def _create_meta(labels_file, dataset_meta, has_background=False):
        """Build dataset meta (label_map, background_label) from the meta
        json file and/or the synset words file.

        Returns an empty dict when no source provides labels; the caller
        maps that to ``None``.
        """
        meta = {}
        # label_map must exist even without labels_file: the has_background
        # branch below mutates it unconditionally
        label_map = {}
        if dataset_meta:
            meta = read_json(dataset_meta)
            # BUG FIX: the membership test must inspect the parsed meta dict,
            # not the meta file *path* (was: `'labels' in dataset_meta`).
            if 'labels' in meta and 'label_map' not in meta:
                labels = ['background'] + meta['labels'] if has_background else meta['labels']
                meta['label_map'] = dict(enumerate(labels))
            elif 'label_map' in meta:
                meta['label_map'] = verify_label_map(meta['label_map'])
            return meta
        if labels_file:
            label_map = {}
            for i, line in enumerate(read_txt(get_path(labels_file))):
                # reserve index 0 for background when requested
                index_for_label = i if not has_background else i + 1
                line = line.strip()
                label = line[line.find(' ') + 1:]
                label_map[index_for_label] = label
            meta['label_map'] = label_map
        if has_background:
            # meta['label_map'] and label_map are the same object when
            # labels_file was parsed, so this also updates the meta entry
            label_map[0] = 'background'
            meta['background_label'] = 0
        return meta
| 42.539823 | 113 | 0.654046 |
from pathlib import Path
import numpy as np
from ..config import PathField, BoolField
from ..representation import ClassificationAnnotation
from ..utils import read_txt, get_path, check_file_existence, read_json
from .format_converter import BaseFormatConverter, ConverterReturn, verify_label_map
class ImageNetFormatConverter(BaseFormatConverter):
    """Convert ImageNet-style txt annotations to ClassificationAnnotation.

    Each line of the annotation file contains an image file name and its
    numeric class label separated by whitespace.
    """
    __provider__ = 'imagenet'
    annotation_types = (ClassificationAnnotation, )

    @classmethod
    def parameters(cls):
        configuration_parameters = super().parameters()
        configuration_parameters.update({
            'annotation_file': PathField(description="Path to annotation in txt format."),
            'labels_file': PathField(
                optional=True,
                description="Path to file with word description of labels (synset words)."
            ),
            'has_background': BoolField(
                optional=True, default=False,
                description="Allows to add background label to original labels and"
                            " convert dataset for 1001 classes instead 1000."
            ),
            'images_dir': PathField(
                is_directory=True, optional=True,
                description='path to dataset images, used only for content existence check'
            ),
            'dataset_meta_file': PathField(
                description='path to json file with dataset meta (e.g. label_map, color_encoding)', optional=True
            )
        })
        return configuration_parameters

    def configure(self):
        self.annotation_file = self.get_value_from_config('annotation_file')
        self.labels_file = self.get_value_from_config('labels_file')
        self.has_background = self.get_value_from_config('has_background')
        # fall back to the annotation file's directory for the content check
        self.images_dir = self.get_value_from_config('images_dir') or self.annotation_file.parent
        self.dataset_meta = self.get_value_from_config('dataset_meta_file')

    def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
        """Parse the annotation file into ClassificationAnnotation objects.

        Returns:
            ConverterReturn with the annotation list, dataset meta (or None)
            and content errors (or None when check_content is disabled).
        """
        annotation = []
        content_errors = [] if check_content else None
        original_annotation = read_txt(get_path(self.annotation_file))
        num_iterations = len(original_annotation)
        for image_id, image in enumerate(original_annotation):
            image_name, label = image.split()
            # strip any directory prefix and an optional "...@" prefix
            image_name = Path(image_name).name.split('@')[-1]
            if check_content:
                if not check_file_existence(self.images_dir / image_name):
                    content_errors.append('{}: does not exist'.format(self.images_dir / image_name))
            # shift labels by one when a background class is prepended
            label = np.int64(label) if not self.has_background else np.int64(label) + 1
            annotation.append(ClassificationAnnotation(image_name, label))
            if progress_callback is not None and image_id % progress_interval == 0:
                progress_callback(image_id / num_iterations * 100)
        meta = self._create_meta(self.labels_file, self.dataset_meta, self.has_background) or None
        return ConverterReturn(annotation, meta, content_errors)

    @staticmethod
    def _create_meta(labels_file, dataset_meta, has_background=False):
        """Build dataset meta (label_map, background_label) from the meta
        json file and/or the synset words file.
        """
        meta = {}
        # label_map must exist even without labels_file: the has_background
        # branch below mutates it unconditionally
        label_map = {}
        if dataset_meta:
            meta = read_json(dataset_meta)
            # BUG FIX: the membership test must inspect the parsed meta dict,
            # not the meta file *path* (was: `'labels' in dataset_meta`).
            if 'labels' in meta and 'label_map' not in meta:
                labels = ['background'] + meta['labels'] if has_background else meta['labels']
                meta['label_map'] = dict(enumerate(labels))
            elif 'label_map' in meta:
                meta['label_map'] = verify_label_map(meta['label_map'])
            return meta
        if labels_file:
            label_map = {}
            for i, line in enumerate(read_txt(get_path(labels_file))):
                # reserve index 0 for background when requested
                index_for_label = i if not has_background else i + 1
                line = line.strip()
                label = line[line.find(' ') + 1:]
                label_map[index_for_label] = label
            meta['label_map'] = label_map
        if has_background:
            # meta['label_map'] and label_map are the same object when
            # labels_file was parsed, so this also updates the meta entry
            label_map[0] = 'background'
            meta['background_label'] = 0
        return meta
| true | true |
1c3d6b6e8f9960007bd5122a6306677557bd11db | 382,917 | py | Python | nfselib/ginfes/v3_01/servico_consultar_situacao_lote_rps_envio_v03.py | Escodoo/nfselib | dc90cf4b6f2fc9db52bbe9485fb0901b56d3aa71 | [
"MIT"
] | null | null | null | nfselib/ginfes/v3_01/servico_consultar_situacao_lote_rps_envio_v03.py | Escodoo/nfselib | dc90cf4b6f2fc9db52bbe9485fb0901b56d3aa71 | [
"MIT"
] | 1 | 2020-10-15T11:42:54.000Z | 2020-11-08T22:03:07.000Z | nfselib/ginfes/v3_01/servico_consultar_situacao_lote_rps_envio_v03.py | Escodoo/nfselib | dc90cf4b6f2fc9db52bbe9485fb0901b56d3aa71 | [
"MIT"
] | 1 | 2020-07-25T00:02:26.000Z | 2020-07-25T00:02:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Thu Dec 13 15:47:48 2018 by generateDS.py version 2.29.2.
# Python 3.7.1 (default, Oct 22 2018, 10:41:28) [GCC 8.2.1 20180831]
#
# Command line options:
# ('--no-namespace-defs', '')
# ('-o', 'nfselib/v3_01/servico_consultar_situacao_lote_rps_envio_v03.py')
#
# Command line arguments:
# schemas/v3_01/servico_consultar_situacao_lote_rps_envio_v03.xsd
#
# Command line:
# /usr/bin/generateDS --no-namespace-defs -o "nfselib/v3_01/servico_consultar_situacao_lote_rps_envio_v03.py" schemas/v3_01/servico_consultar_situacao_lote_rps_envio_v03.xsd
#
# Current working directory (os.getcwd()):
# nfse
#
from __future__ import unicode_literals
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
from builtins import str
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# presumably toggles simpleType facet validation in the generated classes —
# its consumers are outside this chunk; confirm downstream before changing.
Validate_simpletypes_ = True
# Broadest string type for the running interpreter, so isinstance checks
# accept both Python 2 (basestring) and Python 3 (str) strings.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* into an ElementTree document, building a parser on demand."""
    if parser is None:
        # Prefer lxml's ElementTree-compatible parser (it skips comments);
        # fall back to the plain parser when running on stdlib xml.etree,
        # which has no ETCompatXMLParser attribute.
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from nfselib.ginfes.v3_01.generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError as exp:
    class GeneratedsSuper(object):
        """Runtime support base class for the generated binding classes.

        Supplies the gds_format_* / gds_validate_* / gds_parse_* helpers
        used by the export(), build() and validate_* methods of every
        generated element class (generateDS.py runtime).
        """
        # Matches a trailing numeric UTC offset such as '+05:30' or '-14:00'.
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            # Minimal fixed-offset tzinfo for parsed 'Z' / '+hh:mm' suffixes.
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        # --- plain scalar formatting/validation helpers ---
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            return input_data
        # xsd:list values are whitespace-separated tokens; each validator
        # checks every token and reports failures via raise_parse_error().
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return values
        def gds_format_float(self, input_data, input_name=''):
            # Strip trailing zeros from the fixed-point representation.
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            # XSD booleans are lower-case: 'true' / 'false'.
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(
                        node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return values
        # --- xsd:dateTime / xsd:date / xsd:time helpers ---
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            """Format a datetime as ISO-8601, appending 'Z' or '+hh:mm'
            when the value is timezone-aware."""
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            """Parse an ISO-8601 dateTime string, honouring a trailing 'Z'
            or numeric offset; returns an aware or naive datetime."""
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                # Normalize the fractional part to integer microseconds.
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (time_parts[0], micro_seconds, )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            """Format a date as ISO-8601, appending the UTC offset when the
            value carries tzinfo (datetime instances only)."""
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                # datetime.date objects have no tzinfo attribute.
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            """Parse an ISO-8601 date string and return a datetime.date."""
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            """Format a time as hh:mm:ss[.ffffff], plus offset when aware."""
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            """Return True when *target* matches the pattern matrix:
            outer list entries are ANDed, inner alternatives are ORed."""
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    if re_.search(patterns2, target) is not None:
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            """Parse an ISO-8601 time string and return a datetime.time."""
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            """Return the slash-separated tag path from the document root to
            *node* (requires lxml's Element.getparent())."""
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        # Strips the '{namespace}' prefix from Clark-notation tags.
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # Recursively collect tag names from node up to the root.
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            """Resolve an xsi:type attribute on *node* to a generated class
            in this module, falling back to *default_class*."""
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            # Hook for xs:any content; the default implementation ignores it.
            return None
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            """Return *mapping* with keys and values swapped.

            Bug fix: the generated code called dict.iteritems(), which does
            not exist on Python 3; dict.items() behaves identically here on
            both Python 2 and Python 3.
            """
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            # On Python 2, byte strings are encoded with ExternalEncoding;
            # on Python 3 strings pass through unchanged.
            if sys.version_info.major == 2 and not isinstance(instring, unicode):
                return instring.encode(ExternalEncoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            """XML-escape *instring*, coping with str/unicode/other inputs."""
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            # Generated objects compare by exact type and full attribute dict.
            if type(self) != type(other):
                return False
            return self.__dict__ == other.__dict__
        def __ne__(self, other):
            return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    """Look up the '<ClassName>Sub' subclass of *class_* inside *module*.

    Returns the subclass object, or None when *module* does not define one.
    """
    subclass_name = class_.__name__ + 'Sub'
    return getattr(module, subclass_name, None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding applied by GeneratedsSuper.gds_encode() on Python 2.
ExternalEncoding = 'utf-8'
# Splits a Clark-notation tag '{namespace}local' into (namespace, local).
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace when cleaning up string content.
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches complete CDATA sections so quote_xml() can leave them untouched.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write four spaces per nesting *level* to *outfile*.

    Does nothing when pretty-printing is disabled.
    """
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    "Escape markup chars, but do not modify CDATA sections."
    if not inStr:
        return ''
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    cursor = 0
    for match in CDATA_pattern_.finditer(text):
        # Escape the stretch before the CDATA section; keep the section as-is.
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(match.group(0))
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Replace the XML markup characters &, < and > with entity references.

    Bug fix: the escape entities had been lost to HTML-entity decoding,
    turning these into self-replacements (no-ops). '&' must be handled
    first so the entities just inserted are not re-escaped.
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
    """Return *inStr* rendered as a Python string literal.

    Single-quoted (triple-single-quoted for multiline text) when the value
    contains no single quote; otherwise double-quoted with any embedded
    double quotes backslash-escaped.
    """
    text = inStr
    if "'" not in text:
        template = "'''%s'''" if '\n' in text else "'%s'"
        return template % text
    if '"' in text:
        text = text.replace('"', '\\"')
    template = '"""%s"""' if '\n' in text else '"%s"'
    return template % text
def get_all_text_(node):
    """Collect the text content of *node*: its own text plus every child's tail."""
    parts = [node.text] if node.text is not None else []
    parts.extend(child.tail for child in node if child.tail is not None)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Return the value of attribute *attr_name* on *node*, or None.

    A 'prefix:name' attribute is resolved through the node's namespace map
    (lxml only) to its Clark-notation key '{uri}name'.
    """
    attributes = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attributes.get(attr_name)
    if len(parts) == 2:
        prefix, local_name = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attributes.get('{%s}%s' % (uri, local_name, ))
    return None
class GDSParseError(Exception):
    """Raised when an XML value cannot be parsed into the expected type."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *node*, appending its tag and source line."""
    raise GDSParseError(
        '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ))
class MixedContainer:
    """Wrapper for one piece of mixed XML content — raw text, a simple
    typed value, or a nested complex object — preserving document order
    during export."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        # category/content_type take the class constants above.
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is ignored by the
        # generated code; the stored value is returned.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        # Write this piece to *outfile* according to its category.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        # Render a simple typed value as <name>value</name>, using the
        # printf conversion appropriate to content_type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        # Attach this piece to an lxml/ElementTree *element* in place:
        # text goes into .text or the previous sibling's .tail.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        # Return the simple value formatted as element text.
        # NOTE(review): falls through with NameError for TypeNone/TypeText.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        # Write a Python-literal representation (generateDS "literal" mode).
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record for one member (element or attribute) of a generated
    class: its name, XSD data-type chain, container flag, optionality,
    child attributes, choice group and documentation string."""
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None,
                 documentation=""):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
        self.documentation = documentation
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        return self.data_type
    def get_data_type(self):
        # The data type may be a restriction chain (list); the effective
        # type is the last entry, defaulting to xs:string when empty.
        if not isinstance(self.data_type, list):
            return self.data_type
        return self.data_type[-1] if self.data_type else 'xs:string'
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs
    def get_child_attrs(self):
        return self.child_attrs
    def set_choice(self, choice):
        self.choice = choice
    def get_choice(self):
        return self.choice
    def set_optional(self, optional):
        self.optional = optional
    def get_optional(self):
        return self.optional
    def get_documentation(self):
        return self.documentation
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ConsultarSituacaoLoteRpsEnvio(GeneratedsSuper):
    """Root request element for querying the status of an RPS batch.

    Carries the provider identification (Prestador), the batch protocol
    number (Protocolo, max 50 chars) and an optional XML-DSig Signature.
    Generated by generateDS.py from
    servico_consultar_situacao_lote_rps_envio_v03.xsd.
    """
    # Hooks allowing factory() to be redirected to a hand-written subclass.
    subclass = None
    superclass = None
    def __init__(self, Id=None, Prestador=None, Protocolo=None, Signature=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.Prestador = Prestador
        self.Protocolo = Protocolo
        self.validate_tsNumeroProtocolo(self.Protocolo)
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ConsultarSituacaoLoteRpsEnvio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ConsultarSituacaoLoteRpsEnvio.subclass:
            return ConsultarSituacaoLoteRpsEnvio.subclass(*args_, **kwargs_)
        else:
            return ConsultarSituacaoLoteRpsEnvio(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Prestador(self): return self.Prestador
    def set_Prestador(self, Prestador): self.Prestador = Prestador
    def get_Protocolo(self): return self.Protocolo
    def set_Protocolo(self, Protocolo): self.Protocolo = Protocolo
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroProtocolo(self, value):
        # Validate type tsNumeroProtocolo, a restriction on xsd:string.
        # Only warns (does not raise) on violation, and only when the
        # module-level Validate_simpletypes_ switch is on.
        if value is not None and Validate_simpletypes_:
            if len(value) > 50:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNumeroProtocolo' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set (controls whether
        # export() writes a self-closing tag).
        if (
            self.Prestador is not None or
            self.Protocolo is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', namespacedef_='xmlns:tipos="http://www.ginfes.com.br/tipos_v03.xsd"', pretty_print=True):
        # Serialize this element and its children as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConsultarSituacaoLoteRpsEnvio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConsultarSituacaoLoteRpsEnvio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio'):
        # Write the optional Id attribute exactly once.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', fromsubclass_=False, pretty_print=True):
        # Children are written in schema order: Prestador, Protocolo, Signature.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Prestador is not None:
            self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
        if self.Protocolo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Protocolo>%s</Protocolo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Protocolo), input_name='Protocolo')), eol_))
        if self.Signature is not None:
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from a parsed XML element and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one child element by local tag name.
        if nodeName_ == 'Prestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.Prestador = obj_
            obj_.original_tagname_ = 'Prestador'
        elif nodeName_ == 'Protocolo':
            Protocolo_ = child_.text
            Protocolo_ = self.gds_validate_string(Protocolo_, node, 'Protocolo')
            self.Protocolo = Protocolo_
            # validate type tsNumeroProtocolo
            self.validate_tsNumeroProtocolo(self.Protocolo)
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature = obj_
            obj_.original_tagname_ = 'Signature'
# end class ConsultarSituacaoLoteRpsEnvio
class tcCpfCnpj(GeneratedsSuper):
    """Taxpayer identification holding either a Cpf (11 characters) or a
    Cnpj (14 characters). Generated by generateDS.py from the GINFES
    tipos_v03 schema."""
    # Hooks allowing factory() to be redirected to a hand-written subclass.
    subclass = None
    superclass = None
    def __init__(self, Cpf=None, Cnpj=None):
        self.original_tagname_ = None
        self.Cpf = Cpf
        self.validate_tsCpf(self.Cpf)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcCpfCnpj)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcCpfCnpj.subclass:
            return tcCpfCnpj.subclass(*args_, **kwargs_)
        else:
            return tcCpfCnpj(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Cpf(self): return self.Cpf
    def set_Cpf(self, Cpf): self.Cpf = Cpf
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def validate_tsCpf(self, value):
        # Validate type tsCpf, a restriction on xsd:string.
        # Only warns (does not raise) when the length is not exactly 11.
        if value is not None and Validate_simpletypes_:
            if len(value) != 11:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCpf' % {"value" : value.encode("utf-8")} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string.
        # Only warns (does not raise) when the length is not exactly 14.
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when either identification is present (controls whether
        # export() writes a self-closing tag).
        if (
            self.Cpf is not None or
            self.Cnpj is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcCpfCnpj', namespacedef_='', pretty_print=True):
        # Serialize this element and its children as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCpfCnpj')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCpfCnpj')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcCpfCnpj', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCpfCnpj'):
        # tcCpfCnpj declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcCpfCnpj', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Cpf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cpf>%s</Cpf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cpf), input_name='Cpf')), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
    def build(self, node):
        # Populate this object from a parsed XML element and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one child element by local tag name.
        if nodeName_ == 'Cpf':
            Cpf_ = child_.text
            Cpf_ = self.gds_validate_string(Cpf_, node, 'Cpf')
            self.Cpf = Cpf_
            # validate type tsCpf
            self.validate_tsCpf(self.Cpf)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
# end class tcCpfCnpj
class tcEndereco(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Endereco=None, Numero=None, Complemento=None, Bairro=None, CodigoMunicipio=None, Uf=None, Cep=None):
self.original_tagname_ = None
self.Endereco = Endereco
self.validate_tsEndereco(self.Endereco)
self.Numero = Numero
self.validate_tsNumeroEndereco(self.Numero)
self.Complemento = Complemento
self.validate_tsComplementoEndereco(self.Complemento)
self.Bairro = Bairro
self.validate_tsBairro(self.Bairro)
self.CodigoMunicipio = CodigoMunicipio
self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
self.Uf = Uf
self.validate_tsUf(self.Uf)
self.Cep = Cep
self.validate_tsCep(self.Cep)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcEndereco)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcEndereco.subclass:
return tcEndereco.subclass(*args_, **kwargs_)
else:
return tcEndereco(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Endereco(self): return self.Endereco
def set_Endereco(self, Endereco): self.Endereco = Endereco
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_Complemento(self): return self.Complemento
def set_Complemento(self, Complemento): self.Complemento = Complemento
def get_Bairro(self): return self.Bairro
def set_Bairro(self, Bairro): self.Bairro = Bairro
def get_CodigoMunicipio(self): return self.CodigoMunicipio
def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
def get_Uf(self): return self.Uf
def set_Uf(self, Uf): self.Uf = Uf
def get_Cep(self): return self.Cep
def set_Cep(self, Cep): self.Cep = Cep
def validate_tsEndereco(self, value):
# Validate type tsEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 125:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsNumeroEndereco(self, value):
# Validate type tsNumeroEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 10:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsComplementoEndereco(self, value):
# Validate type tsComplementoEndereco, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
def validate_tsBairro(self, value):
# Validate type tsBairro, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 60:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
def validate_tsCodigoMunicipioIbge(self, value):
# Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 7:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
def validate_tsUf(self, value):
# Validate type tsUf, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) != 2:
warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
def validate_tsCep(self, value):
# Validate type tsCep, a restriction on xsd:int.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 8:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCep' % {"value" : value} )
def hasContent_(self):
if (
self.Endereco is not None or
self.Numero is not None or
self.Complemento is not None or
self.Bairro is not None or
self.CodigoMunicipio is not None or
self.Uf is not None or
self.Cep is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcEndereco', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcEndereco')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcEndereco')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcEndereco', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcEndereco'):
pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcEndereco', fromsubclass_=False, pretty_print=True):
        """Write each non-None child element of this object to *outfile*.

        String children are XML-quoted and encoded; integer children are
        formatted via ``gds_format_integer``.
        """
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Endereco is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Endereco>%s</Endereco>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Endereco), input_name='Endereco')), eol_))
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Numero), input_name='Numero')), eol_))
        if self.Complemento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Complemento>%s</Complemento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Complemento), input_name='Complemento')), eol_))
        if self.Bairro is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Bairro>%s</Bairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
        if self.Uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
        if self.Cep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cep>%s</Cep>%s' % (self.gds_format_integer(self.Cep, input_name='Cep'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """tcEndereco declares no XML attributes; nothing to parse."""
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element *child_* to the matching member.

        String members are validated via ``gds_validate_string``; integer
        members are converted with ``int()`` and raise a parse error on
        non-integer text.  Each assignment is followed by its facet validator.
        """
        if nodeName_ == 'Endereco':
            Endereco_ = child_.text
            Endereco_ = self.gds_validate_string(Endereco_, node, 'Endereco')
            self.Endereco = Endereco_
            # validate type tsEndereco
            self.validate_tsEndereco(self.Endereco)
        elif nodeName_ == 'Numero':
            Numero_ = child_.text
            Numero_ = self.gds_validate_string(Numero_, node, 'Numero')
            self.Numero = Numero_
            # validate type tsNumeroEndereco
            self.validate_tsNumeroEndereco(self.Numero)
        elif nodeName_ == 'Complemento':
            Complemento_ = child_.text
            Complemento_ = self.gds_validate_string(Complemento_, node, 'Complemento')
            self.Complemento = Complemento_
            # validate type tsComplementoEndereco
            self.validate_tsComplementoEndereco(self.Complemento)
        elif nodeName_ == 'Bairro':
            Bairro_ = child_.text
            Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro')
            self.Bairro = Bairro_
            # validate type tsBairro
            self.validate_tsBairro(self.Bairro)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        elif nodeName_ == 'Uf':
            Uf_ = child_.text
            Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
            self.Uf = Uf_
            # validate type tsUf
            self.validate_tsUf(self.Uf)
        elif nodeName_ == 'Cep':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Cep')
            self.Cep = ival_
            # validate type tsCep
            self.validate_tsCep(self.Cep)
# end class tcEndereco
class tcContato(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcContato``.

    Holds the optional ``Telefone`` and ``Email`` child elements and provides
    the standard generateDS export/build machinery for them.
    """
    subclass = None
    superclass = None
    def __init__(self, Telefone=None, Email=None):
        self.original_tagname_ = None
        self.Telefone = Telefone
        self.validate_tsTelefone(self.Telefone)
        self.Email = Email
        self.validate_tsEmail(self.Email)
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcContato)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcContato.subclass:
            return tcContato.subclass(*args_, **kwargs_)
        else:
            return tcContato(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Telefone(self): return self.Telefone
    def set_Telefone(self, Telefone): self.Telefone = Telefone
    def get_Email(self): return self.Email
    def set_Email(self, Email): self.Email = Email
    def validate_tsTelefone(self, value):
        # Validate type tsTelefone, a restriction on xsd:string.
        # Facet violations only warn; they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 11:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
    def validate_tsEmail(self, value):
        # Validate type tsEmail, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 80:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.Telefone is not None or
            self.Email is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcContato', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcContato')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcContato')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcContato', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcContato'):
        # tcContato declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcContato', fromsubclass_=False, pretty_print=True):
        # Write each non-None child element, XML-quoted and encoded.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Telefone is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Telefone>%s</Telefone>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Telefone), input_name='Telefone')), eol_))
        if self.Email is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Email>%s</Email>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Email), input_name='Email')), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # tcContato declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'Telefone':
            Telefone_ = child_.text
            Telefone_ = self.gds_validate_string(Telefone_, node, 'Telefone')
            self.Telefone = Telefone_
            # validate type tsTelefone
            self.validate_tsTelefone(self.Telefone)
        elif nodeName_ == 'Email':
            Email_ = child_.text
            Email_ = self.gds_validate_string(Email_, node, 'Email')
            self.Email = Email_
            # validate type tsEmail
            self.validate_tsEmail(self.Email)
# end class tcContato
class tcIdentificacaoOrgaoGerador(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcIdentificacaoOrgaoGerador``.

    Holds the ``CodigoMunicipio`` (integer) and ``Uf`` (string) child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, CodigoMunicipio=None, Uf=None):
        self.original_tagname_ = None
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        self.Uf = Uf
        self.validate_tsUf(self.Uf)
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoOrgaoGerador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoOrgaoGerador.subclass:
            return tcIdentificacaoOrgaoGerador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoOrgaoGerador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def get_Uf(self): return self.Uf
    def set_Uf(self, Uf): self.Uf = Uf
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        # NOTE(review): digit-count approximation of maxInclusive — any value
        # with 7+ digits warns; verify against the schema's actual bound.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def validate_tsUf(self, value):
        # Validate type tsUf, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) != 2:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.CodigoMunicipio is not None or
            self.Uf is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoOrgaoGerador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoOrgaoGerador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoOrgaoGerador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoOrgaoGerador'):
        # No XML attributes are declared for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', fromsubclass_=False, pretty_print=True):
        # Write each non-None child element.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
        if self.Uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        elif nodeName_ == 'Uf':
            Uf_ = child_.text
            Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
            self.Uf = Uf_
            # validate type tsUf
            self.validate_tsUf(self.Uf)
# end class tcIdentificacaoOrgaoGerador
class tcIdentificacaoRps(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcIdentificacaoRps``.

    Holds the RPS identification children: ``Numero`` (non-negative integer),
    ``Serie`` (string) and ``Tipo`` (integer restricted to 1, 2 or 3).
    """
    subclass = None
    superclass = None
    def __init__(self, Numero=None, Serie=None, Tipo=None):
        self.original_tagname_ = None
        self.Numero = Numero
        self.validate_tsNumeroRps(self.Numero)
        self.Serie = Serie
        self.validate_tsSerieRps(self.Serie)
        self.Tipo = Tipo
        self.validate_tsTipoRps(self.Tipo)
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoRps.subclass:
            return tcIdentificacaoRps.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Numero(self): return self.Numero
    def set_Numero(self, Numero): self.Numero = Numero
    def get_Serie(self): return self.Serie
    def set_Serie(self, Serie): self.Serie = Serie
    def get_Tipo(self): return self.Tipo
    def set_Tipo(self, Tipo): self.Tipo = Tipo
    def validate_tsNumeroRps(self, value):
        # Validate type tsNumeroRps, a restriction on xsd:nonNegativeInteger.
        # NOTE(review): digit-count approximation of maxInclusive (15 digits).
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroRps' % {"value" : value} )
    def validate_tsSerieRps(self, value):
        # Validate type tsSerieRps, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
    def validate_tsTipoRps(self, value):
        # Validate type tsTipoRps, a restriction on xsd:byte.
        # NOTE(review): buildChildren stores Tipo as an int (via
        # gds_validate_integer), yet the warning below calls
        # value.encode('utf-8'), which raises AttributeError for int input
        # whenever the pattern check fails — latent bug in the generator
        # template; fix upstream rather than hand-editing generated code.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsTipoRps_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsTipoRps_patterns_, ))
    validate_tsTipoRps_patterns_ = [['^1$|^2$|^3$']]
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.Numero is not None or
            self.Serie is not None or
            self.Tipo is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoRps'):
        # No XML attributes are declared for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', fromsubclass_=False, pretty_print=True):
        # Write each non-None child element.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
        if self.Serie is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Serie>%s</Serie>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Serie), input_name='Serie')), eol_))
        if self.Tipo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Tipo>%s</Tipo>%s' % (self.gds_format_integer(self.Tipo, input_name='Tipo'), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'Numero':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            # Numero is xsd:nonNegativeInteger — reject negative values.
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Numero')
            self.Numero = ival_
            # validate type tsNumeroRps
            self.validate_tsNumeroRps(self.Numero)
        elif nodeName_ == 'Serie':
            Serie_ = child_.text
            Serie_ = self.gds_validate_string(Serie_, node, 'Serie')
            self.Serie = Serie_
            # validate type tsSerieRps
            self.validate_tsSerieRps(self.Serie)
        elif nodeName_ == 'Tipo':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Tipo')
            self.Tipo = ival_
            # validate type tsTipoRps
            self.validate_tsTipoRps(self.Tipo)
# end class tcIdentificacaoRps
class tcIdentificacaoPrestador(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcIdentificacaoPrestador``.

    Holds the ``Cnpj`` and ``InscricaoMunicipal`` child elements.  Unlike the
    sibling classes in this module, ``export`` defaults *namespace_* to
    ``'tipos:'`` and ``exportChildren`` hard-codes the ``tipos:`` prefix on
    its child tags — this looks like a deliberate customization of the
    generated output; keep it in sync with the consuming web service.
    """
    subclass = None
    superclass = None
    def __init__(self, Cnpj=None, InscricaoMunicipal=None):
        self.original_tagname_ = None
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoPrestador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoPrestador.subclass:
            return tcIdentificacaoPrestador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoPrestador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='tipos:', name_='tcIdentificacaoPrestador', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        # Note the non-empty default namespace prefix 'tipos:'.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoPrestador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoPrestador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoPrestador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoPrestador'):
        # No XML attributes are declared for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoPrestador', fromsubclass_=False, pretty_print=True):
        # Write each non-None child element; tags carry a fixed 'tipos:' prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<tipos:Cnpj>%s</tipos:Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<tipos:InscricaoMunicipal>%s</tipos:InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoPrestador
class tcIdentificacaoTomador(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcIdentificacaoTomador``.

    ``CpfCnpj`` is a nested complex element (a ``tcCpfCnpj`` instance built
    in ``buildChildren``); ``InscricaoMunicipal`` is a simple string child.
    """
    subclass = None
    superclass = None
    def __init__(self, CpfCnpj=None, InscricaoMunicipal=None):
        self.original_tagname_ = None
        # CpfCnpj is a complex child; no simple-type facet validation applies.
        self.CpfCnpj = CpfCnpj
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoTomador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoTomador.subclass:
            return tcIdentificacaoTomador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoTomador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CpfCnpj(self): return self.CpfCnpj
    def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.CpfCnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoTomador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoTomador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoTomador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoTomador'):
        # No XML attributes are declared for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', fromsubclass_=False, pretty_print=True):
        # Complex child delegates to its own export(); simple child is inlined.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CpfCnpj is not None:
            self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'CpfCnpj':
            obj_ = tcCpfCnpj.factory()
            obj_.build(child_)
            self.CpfCnpj = obj_
            obj_.original_tagname_ = 'CpfCnpj'
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoTomador
class tcDadosTomador(GeneratedsSuper):
    """Generated binding for the XML complex type ``tcDadosTomador``.

    Aggregates complex children ``IdentificacaoTomador`` (tcIdentificacaoTomador),
    ``Endereco`` (tcEndereco) and ``Contato`` (tcContato), plus the simple
    string child ``RazaoSocial``.
    """
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoTomador=None, RazaoSocial=None, Endereco=None, Contato=None):
        self.original_tagname_ = None
        # Complex children carry their own validation; only the simple-typed
        # RazaoSocial is facet-checked here.
        self.IdentificacaoTomador = IdentificacaoTomador
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        self.Endereco = Endereco
        self.Contato = Contato
    def factory(*args_, **kwargs_):
        # Honor a registered subclass module before falling back to this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosTomador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosTomador.subclass:
            return tcDadosTomador.subclass(*args_, **kwargs_)
        else:
            return tcDadosTomador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_IdentificacaoTomador(self): return self.IdentificacaoTomador
    def set_IdentificacaoTomador(self, IdentificacaoTomador): self.IdentificacaoTomador = IdentificacaoTomador
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_Endereco(self): return self.Endereco
    def set_Endereco(self, Endereco): self.Endereco = Endereco
    def get_Contato(self): return self.Contato
    def set_Contato(self, Contato): self.Contato = Contato
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; drives self-closing-tag export.
        if (
            self.IdentificacaoTomador is not None or
            self.RazaoSocial is not None or
            self.Endereco is not None or
            self.Contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosTomador', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosTomador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosTomador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosTomador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosTomador'):
        # No XML attributes are declared for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosTomador', fromsubclass_=False, pretty_print=True):
        # Complex children delegate to their own export(); RazaoSocial is inlined.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoTomador is not None:
            self.IdentificacaoTomador.export(outfile, level, namespace_, name_='IdentificacaoTomador', pretty_print=pretty_print)
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.Endereco is not None:
            self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
        if self.Contato is not None:
            self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes are declared for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Assign one parsed child element to the matching member.
        if nodeName_ == 'IdentificacaoTomador':
            obj_ = tcIdentificacaoTomador.factory()
            obj_.build(child_)
            self.IdentificacaoTomador = obj_
            obj_.original_tagname_ = 'IdentificacaoTomador'
        elif nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'Endereco':
            obj_ = tcEndereco.factory()
            obj_.build(child_)
            self.Endereco = obj_
            obj_.original_tagname_ = 'Endereco'
        elif nodeName_ == 'Contato':
            obj_ = tcContato.factory()
            obj_.build(child_)
            self.Contato = obj_
            obj_.original_tagname_ = 'Contato'
# end class tcDadosTomador
class tcIdentificacaoIntermediarioServico(GeneratedsSuper):
    """Identification of a service intermediary (ABRASF NFS-e complex type).

    Carries the intermediary's corporate name (``RazaoSocial``), its
    CPF/CNPJ (``CpfCnpj``, a ``tcCpfCnpj`` child object) and its municipal
    registration (``InscricaoMunicipal``).  Like every class in this
    module it exposes the generated-API trio: simple-type validation
    (which only emits warnings, never raises), ``export`` to write itself
    as XML, and ``build`` to repopulate itself from a parsed etree node.

    NOTE(review): this module appears to be auto-generated (generateDS
    style); prefer regenerating from the XSD over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, RazaoSocial=None, CpfCnpj=None, InscricaoMunicipal=None):
        self.original_tagname_ = None
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        self.CpfCnpj = CpfCnpj
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoIntermediarioServico)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoIntermediarioServico.subclass:
            return tcIdentificacaoIntermediarioServico.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoIntermediarioServico(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs; the attributes themselves are public and may
    # be read/written directly.
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_CpfCnpj(self): return self.CpfCnpj
    def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string.
        # Length facets 1..115; violations only warn, they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        # Length facets 1..15; violations only warn, they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.RazaoSocial is not None or
            self.CpfCnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*.

        ``original_tagname_`` (set by a parent's build) overrides *name_*
        so round-tripped documents keep their original element names.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoIntermediarioServico')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoIntermediarioServico')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoIntermediarioServico', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children set: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoIntermediarioServico'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', fromsubclass_=False, pretty_print=True):
        """Write the child elements (schema order) that are not None."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.CpfCnpj is not None:
            # Complex child: delegate to its own export.
            self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        """Populate this object from etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one child element; unknown element names are ignored."""
        if nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'CpfCnpj':
            obj_ = tcCpfCnpj.factory()
            obj_.build(child_)
            self.CpfCnpj = obj_
            obj_.original_tagname_ = 'CpfCnpj'
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoIntermediarioServico
class tcValores(GeneratedsSuper):
    """Monetary breakdown of an NFS-e service (ABRASF complex type tcValores).

    Holds the service amount, deductions, federal withholdings
    (PIS/COFINS/INSS/IR/CSLL), the ISS amounts and retention flag
    (``IssRetido``: 1 = retained, 2 = not retained), other retentions,
    calculation base, rate (``Aliquota``), net invoice value and the
    conditional/unconditional discounts.  Exposes the generated-API trio:
    simple-type validation (warnings only, never raises), ``export`` to
    XML and ``build`` from a parsed etree node.

    Bug fixed relative to the generated original: ``validate_tsSimNao``
    received an int (``IssRetido`` is parsed with ``int()`` in
    ``buildChildren``) but fed it straight to the regex-based pattern
    check and called ``value.encode('utf-8')`` in the warning message,
    crashing (TypeError/AttributeError) instead of warning.  The value is
    now stringified before the pattern check and formatted directly.
    """
    subclass = None
    superclass = None
    # Child elements in schema order, tagged with their simple type so the
    # export/build/hasContent_ loops below can share one code path per kind.
    _children_ = (
        ('ValorServicos', 'valor'),
        ('ValorDeducoes', 'valor'),
        ('ValorPis', 'valor'),
        ('ValorCofins', 'valor'),
        ('ValorInss', 'valor'),
        ('ValorIr', 'valor'),
        ('ValorCsll', 'valor'),
        ('IssRetido', 'simnao'),
        ('ValorIss', 'valor'),
        ('ValorIssRetido', 'valor'),
        ('OutrasRetencoes', 'valor'),
        ('BaseCalculo', 'valor'),
        ('Aliquota', 'aliquota'),
        ('ValorLiquidoNfse', 'valor'),
        ('DescontoIncondicionado', 'valor'),
        ('DescontoCondicionado', 'valor'),
    )
    def __init__(self, ValorServicos=None, ValorDeducoes=None, ValorPis=None, ValorCofins=None, ValorInss=None, ValorIr=None, ValorCsll=None, IssRetido=None, ValorIss=None, ValorIssRetido=None, OutrasRetencoes=None, BaseCalculo=None, Aliquota=None, ValorLiquidoNfse=None, DescontoIncondicionado=None, DescontoCondicionado=None):
        self.original_tagname_ = None
        self.ValorServicos = ValorServicos
        self.validate_tsValor(self.ValorServicos)
        self.ValorDeducoes = ValorDeducoes
        self.validate_tsValor(self.ValorDeducoes)
        self.ValorPis = ValorPis
        self.validate_tsValor(self.ValorPis)
        self.ValorCofins = ValorCofins
        self.validate_tsValor(self.ValorCofins)
        self.ValorInss = ValorInss
        self.validate_tsValor(self.ValorInss)
        self.ValorIr = ValorIr
        self.validate_tsValor(self.ValorIr)
        self.ValorCsll = ValorCsll
        self.validate_tsValor(self.ValorCsll)
        self.IssRetido = IssRetido
        self.validate_tsSimNao(self.IssRetido)
        self.ValorIss = ValorIss
        self.validate_tsValor(self.ValorIss)
        self.ValorIssRetido = ValorIssRetido
        self.validate_tsValor(self.ValorIssRetido)
        self.OutrasRetencoes = OutrasRetencoes
        self.validate_tsValor(self.OutrasRetencoes)
        self.BaseCalculo = BaseCalculo
        self.validate_tsValor(self.BaseCalculo)
        self.Aliquota = Aliquota
        self.validate_tsAliquota(self.Aliquota)
        self.ValorLiquidoNfse = ValorLiquidoNfse
        self.validate_tsValor(self.ValorLiquidoNfse)
        self.DescontoIncondicionado = DescontoIncondicionado
        self.validate_tsValor(self.DescontoIncondicionado)
        self.DescontoCondicionado = DescontoCondicionado
        self.validate_tsValor(self.DescontoCondicionado)
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcValores)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcValores.subclass:
            return tcValores.subclass(*args_, **kwargs_)
        else:
            return tcValores(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs; the attributes themselves are public.
    def get_ValorServicos(self): return self.ValorServicos
    def set_ValorServicos(self, ValorServicos): self.ValorServicos = ValorServicos
    def get_ValorDeducoes(self): return self.ValorDeducoes
    def set_ValorDeducoes(self, ValorDeducoes): self.ValorDeducoes = ValorDeducoes
    def get_ValorPis(self): return self.ValorPis
    def set_ValorPis(self, ValorPis): self.ValorPis = ValorPis
    def get_ValorCofins(self): return self.ValorCofins
    def set_ValorCofins(self, ValorCofins): self.ValorCofins = ValorCofins
    def get_ValorInss(self): return self.ValorInss
    def set_ValorInss(self, ValorInss): self.ValorInss = ValorInss
    def get_ValorIr(self): return self.ValorIr
    def set_ValorIr(self, ValorIr): self.ValorIr = ValorIr
    def get_ValorCsll(self): return self.ValorCsll
    def set_ValorCsll(self, ValorCsll): self.ValorCsll = ValorCsll
    def get_IssRetido(self): return self.IssRetido
    def set_IssRetido(self, IssRetido): self.IssRetido = IssRetido
    def get_ValorIss(self): return self.ValorIss
    def set_ValorIss(self, ValorIss): self.ValorIss = ValorIss
    def get_ValorIssRetido(self): return self.ValorIssRetido
    def set_ValorIssRetido(self, ValorIssRetido): self.ValorIssRetido = ValorIssRetido
    def get_OutrasRetencoes(self): return self.OutrasRetencoes
    def set_OutrasRetencoes(self, OutrasRetencoes): self.OutrasRetencoes = OutrasRetencoes
    def get_BaseCalculo(self): return self.BaseCalculo
    def set_BaseCalculo(self, BaseCalculo): self.BaseCalculo = BaseCalculo
    def get_Aliquota(self): return self.Aliquota
    def set_Aliquota(self, Aliquota): self.Aliquota = Aliquota
    def get_ValorLiquidoNfse(self): return self.ValorLiquidoNfse
    def set_ValorLiquidoNfse(self, ValorLiquidoNfse): self.ValorLiquidoNfse = ValorLiquidoNfse
    def get_DescontoIncondicionado(self): return self.DescontoIncondicionado
    def set_DescontoIncondicionado(self, DescontoIncondicionado): self.DescontoIncondicionado = DescontoIncondicionado
    def get_DescontoCondicionado(self): return self.DescontoCondicionado
    def set_DescontoCondicionado(self, DescontoCondicionado): self.DescontoCondicionado = DescontoCondicionado
    def validate_tsValor(self, value):
        # Validate type tsValor, a restriction on xsd:decimal.
        # NOTE(review): len(str(value)) is the generator's rough proxy for the
        # schema's maxInclusive facet; it also counts '.', '-' and exponent
        # characters - confirm against the XSD before tightening.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
    def validate_tsSimNao(self, value):
        # Validate type tsSimNao, a restriction on xsd:byte (1 = yes, 2 = no).
        if value is not None and Validate_simpletypes_:
            # Bug fix: ``value`` is an int when parsed from XML (see
            # buildChildren), so it must be stringified before the regex
            # pattern check; the original also called value.encode('utf-8')
            # in the warning, which raised AttributeError on int.
            if not self.gds_validate_simple_patterns(
                    self.validate_tsSimNao_patterns_, str(value)):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value, self.validate_tsSimNao_patterns_, ))
    validate_tsSimNao_patterns_ = [['^1$|^2$']]
    def validate_tsAliquota(self, value):
        # Validate type tsAliquota, a restriction on xsd:decimal.
        # NOTE(review): same len(str(value)) proxy caveat as tsValor above.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsAliquota' % {"value" : value} )
            if len(str(value)) >= 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsAliquota' % {"value" : value} )
    def hasContent_(self):
        """Return True when at least one child element is set."""
        return any(getattr(self, name_) is not None for name_, _kind in self._children_)
    def export(self, outfile, level, namespace_='', name_='tcValores', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*.

        ``original_tagname_`` (set by a parent's build) overrides *name_*
        so round-tripped documents keep their original element names.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcValores')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcValores')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcValores', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children set: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcValores'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcValores', fromsubclass_=False, pretty_print=True):
        """Write the non-None child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for tag_, kind_ in self._children_:
            value_ = getattr(self, tag_)
            if value_ is None:
                continue
            showIndent(outfile, level, pretty_print)
            if kind_ == 'simnao':
                # IssRetido is the only integer-valued child.
                text_ = self.gds_format_integer(value_, input_name=tag_)
            else:
                text_ = self.gds_format_float(value_, input_name=tag_)
            outfile.write('<%s>%s</%s>%s' % (tag_, text_, tag_, eol_))
    def build(self, node):
        """Populate this object from etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one child element; unknown element names are ignored."""
        kind_ = dict(self._children_).get(nodeName_)
        if kind_ == 'simnao':
            self.IssRetido = self._parse_int_child(child_, node, nodeName_)
            # validate type tsSimNao
            self.validate_tsSimNao(self.IssRetido)
        elif kind_ == 'aliquota':
            self.Aliquota = self._parse_float_child(child_, node, nodeName_)
            # validate type tsAliquota
            self.validate_tsAliquota(self.Aliquota)
        elif kind_ is not None:
            fval_ = self._parse_float_child(child_, node, nodeName_)
            setattr(self, nodeName_, fval_)
            # validate type tsValor
            self.validate_tsValor(fval_)
    def _parse_float_child(self, child_, node, name_):
        """Parse a child element's text as float; raise a parse error on failure."""
        sval_ = child_.text
        try:
            fval_ = float(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires float or double: %s' % exp)
        return self.gds_validate_float(fval_, node, name_)
    def _parse_int_child(self, child_, node, name_):
        """Parse a child element's text as int; raise a parse error on failure."""
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        return self.gds_validate_integer(ival_, node, name_)
# end class tcValores
class tcDadosServico(GeneratedsSuper):
    """Service data of an NFS-e (ABRASF complex type tcDadosServico).

    Aggregates the monetary values (``Valores``, a ``tcValores`` child
    object), the national service-list item, CNAE code, the municipal
    taxation code, the free-text service description (``Discriminacao``)
    and the IBGE municipality code.  Exposes the generated-API trio:
    simple-type validation (warnings only, never raises), ``export`` to
    XML and ``build`` from a parsed etree node.
    """
    subclass = None
    superclass = None
    def __init__(self, Valores=None, ItemListaServico=None, CodigoCnae=None, CodigoTributacaoMunicipio=None, Discriminacao=None, CodigoMunicipio=None):
        self.original_tagname_ = None
        self.Valores = Valores
        self.ItemListaServico = ItemListaServico
        self.validate_tsItemListaServico(self.ItemListaServico)
        self.CodigoCnae = CodigoCnae
        self.validate_tsCodigoCnae(self.CodigoCnae)
        self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
        self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
        self.Discriminacao = Discriminacao
        self.validate_tsDiscriminacao(self.Discriminacao)
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
    def factory(*args_, **kwargs_):
        """Instantiate this class, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosServico)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosServico.subclass:
            return tcDadosServico.subclass(*args_, **kwargs_)
        else:
            return tcDadosServico(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs; the attributes themselves are public.
    def get_Valores(self): return self.Valores
    def set_Valores(self, Valores): self.Valores = Valores
    def get_ItemListaServico(self): return self.ItemListaServico
    def set_ItemListaServico(self, ItemListaServico): self.ItemListaServico = ItemListaServico
    def get_CodigoCnae(self): return self.CodigoCnae
    def set_CodigoCnae(self, CodigoCnae): self.CodigoCnae = CodigoCnae
    def get_CodigoTributacaoMunicipio(self): return self.CodigoTributacaoMunicipio
    def set_CodigoTributacaoMunicipio(self, CodigoTributacaoMunicipio): self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
    def get_Discriminacao(self): return self.Discriminacao
    def set_Discriminacao(self, Discriminacao): self.Discriminacao = Discriminacao
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def validate_tsItemListaServico(self, value):
        # Validate type tsItemListaServico, a restriction on xsd:string.
        # Length facets 1..5; violations only warn, they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoCnae(self, value):
        # Validate type tsCodigoCnae, a restriction on xsd:int.
        # NOTE(review): len(str(value)) is the generator's rough proxy for the
        # maxInclusive facet (7-digit CNAE codes) - confirm against the XSD.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoCnae' % {"value" : value} )
    def validate_tsCodigoTributacao(self, value):
        # Validate type tsCodigoTributacao, a restriction on xsd:string.
        # Length facets 1..20; violations only warn, they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 20:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
    def validate_tsDiscriminacao(self, value):
        # Validate type tsDiscriminacao, a restriction on xsd:string.
        # Length facets 1..2000; violations only warn, they never raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 2000:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        # NOTE(review): same len(str(value)) proxy caveat as tsCodigoCnae.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.Valores is not None or
            self.ItemListaServico is not None or
            self.CodigoCnae is not None or
            self.CodigoTributacaoMunicipio is not None or
            self.Discriminacao is not None or
            self.CodigoMunicipio is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosServico', namespacedef_='', pretty_print=True):
        """Write this object to *outfile* as an XML element named *name_*.

        ``original_tagname_`` (set by a parent's build) overrides *name_*
        so round-tripped documents keep their original element names.
        """
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosServico')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosServico')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosServico', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children set: emit a self-closing element.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosServico'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosServico', fromsubclass_=False, pretty_print=True):
        """Write the non-None child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Valores is not None:
            # Complex child: delegate to its own export.
            self.Valores.export(outfile, level, namespace_, name_='Valores', pretty_print=pretty_print)
        if self.ItemListaServico is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ItemListaServico>%s</ItemListaServico>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ItemListaServico), input_name='ItemListaServico')), eol_))
        if self.CodigoCnae is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoCnae>%s</CodigoCnae>%s' % (self.gds_format_integer(self.CodigoCnae, input_name='CodigoCnae'), eol_))
        if self.CodigoTributacaoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoTributacaoMunicipio>%s</CodigoTributacaoMunicipio>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoTributacaoMunicipio), input_name='CodigoTributacaoMunicipio')), eol_))
        if self.Discriminacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Discriminacao>%s</Discriminacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
    def build(self, node):
        """Populate this object from etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one child element; unknown element names are ignored."""
        if nodeName_ == 'Valores':
            obj_ = tcValores.factory()
            obj_.build(child_)
            self.Valores = obj_
            obj_.original_tagname_ = 'Valores'
        elif nodeName_ == 'ItemListaServico':
            ItemListaServico_ = child_.text
            ItemListaServico_ = self.gds_validate_string(ItemListaServico_, node, 'ItemListaServico')
            self.ItemListaServico = ItemListaServico_
            # validate type tsItemListaServico
            self.validate_tsItemListaServico(self.ItemListaServico)
        elif nodeName_ == 'CodigoCnae':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoCnae')
            self.CodigoCnae = ival_
            # validate type tsCodigoCnae
            self.validate_tsCodigoCnae(self.CodigoCnae)
        elif nodeName_ == 'CodigoTributacaoMunicipio':
            CodigoTributacaoMunicipio_ = child_.text
            CodigoTributacaoMunicipio_ = self.gds_validate_string(CodigoTributacaoMunicipio_, node, 'CodigoTributacaoMunicipio')
            self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio_
            # validate type tsCodigoTributacao
            self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
        elif nodeName_ == 'Discriminacao':
            Discriminacao_ = child_.text
            Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao')
            self.Discriminacao = Discriminacao_
            # validate type tsDiscriminacao
            self.validate_tsDiscriminacao(self.Discriminacao)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcDadosServico
class tcDadosConstrucaoCivil(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, CodigoObra=None, Art=None):
self.original_tagname_ = None
self.CodigoObra = CodigoObra
self.validate_tsCodigoObra(self.CodigoObra)
self.Art = Art
self.validate_tsArt(self.Art)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcDadosConstrucaoCivil)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcDadosConstrucaoCivil.subclass:
return tcDadosConstrucaoCivil.subclass(*args_, **kwargs_)
else:
return tcDadosConstrucaoCivil(*args_, **kwargs_)
factory = staticmethod(factory)
def get_CodigoObra(self): return self.CodigoObra
def set_CodigoObra(self, CodigoObra): self.CodigoObra = CodigoObra
def get_Art(self): return self.Art
def set_Art(self, Art): self.Art = Art
def validate_tsCodigoObra(self, value):
# Validate type tsCodigoObra, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
def validate_tsArt(self, value):
# Validate type tsArt, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 15:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.CodigoObra is not None or
self.Art is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosConstrucaoCivil')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosConstrucaoCivil')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosConstrucaoCivil', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosConstrucaoCivil'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.CodigoObra is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<CodigoObra>%s</CodigoObra>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoObra), input_name='CodigoObra')), eol_))
if self.Art is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Art>%s</Art>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Art), input_name='Art')), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign one parsed child element (CodigoObra or Art) onto self.

        The text is validated as a string and then checked against the
        corresponding simple-type restriction; unknown element names are
        silently ignored, following generateDS conventions.
        """
        if nodeName_ == 'CodigoObra':
            CodigoObra_ = child_.text
            CodigoObra_ = self.gds_validate_string(CodigoObra_, node, 'CodigoObra')
            self.CodigoObra = CodigoObra_
            # validate type tsCodigoObra
            self.validate_tsCodigoObra(self.CodigoObra)
        elif nodeName_ == 'Art':
            Art_ = child_.text
            Art_ = self.gds_validate_string(Art_, node, 'Art')
            self.Art = Art_
            # validate type tsArt
            self.validate_tsArt(self.Art)
# end class tcDadosConstrucaoCivil
class tcDadosPrestador(GeneratedsSuper):
    """generateDS binding for the XSD complex type ``tcDadosPrestador``.

    Holds service-provider data: an IdentificacaoPrestador sub-object,
    RazaoSocial (string, 1-115 chars), NomeFantasia (string, 1-60 chars),
    plus Endereco and Contato sub-objects.
    NOTE(review): domain meaning (NFSe/ABRASF provider record) is inferred
    from element names — confirm against the municipality's schema.
    """
    # Hooks consulted by factory() so applications can substitute subclasses.
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoPrestador=None, RazaoSocial=None, NomeFantasia=None, Endereco=None, Contato=None):
        self.original_tagname_ = None
        self.IdentificacaoPrestador = IdentificacaoPrestador
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        self.NomeFantasia = NomeFantasia
        self.validate_tsNomeFantasia(self.NomeFantasia)
        self.Endereco = Endereco
        self.Contato = Contato
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosPrestador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosPrestador.subclass:
            return tcDadosPrestador.subclass(*args_, **kwargs_)
        else:
            return tcDadosPrestador(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_IdentificacaoPrestador(self): return self.IdentificacaoPrestador
    def set_IdentificacaoPrestador(self, IdentificacaoPrestador): self.IdentificacaoPrestador = IdentificacaoPrestador
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_NomeFantasia(self): return self.NomeFantasia
    def set_NomeFantasia(self, NomeFantasia): self.NomeFantasia = NomeFantasia
    def get_Endereco(self): return self.Endereco
    def set_Endereco(self, Endereco): self.Endereco = Endereco
    def get_Contato(self): return self.Contato
    def set_Contato(self, Contato): self.Contato = Contato
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def validate_tsNomeFantasia(self, value):
        # Validate type tsNomeFantasia, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 60:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set (export emits children).
        if (
            self.IdentificacaoPrestador is not None or
            self.RazaoSocial is not None or
            self.NomeFantasia is not None or
            self.Endereco is not None or
            self.Contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosPrestador', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element; self-closing when empty.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosPrestador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosPrestador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosPrestador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosPrestador'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosPrestador', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child; complex children delegate to their own export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoPrestador is not None:
            self.IdentificacaoPrestador.export(outfile, level, namespace_, name_='IdentificacaoPrestador', pretty_print=pretty_print)
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.NomeFantasia is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NomeFantasia>%s</NomeFantasia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NomeFantasia), input_name='NomeFantasia')), eol_))
        if self.Endereco is not None:
            self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
        if self.Contato is not None:
            self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from a parsed XML node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one parsed child element onto the matching member.
        if nodeName_ == 'IdentificacaoPrestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.IdentificacaoPrestador = obj_
            obj_.original_tagname_ = 'IdentificacaoPrestador'
        elif nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'NomeFantasia':
            NomeFantasia_ = child_.text
            NomeFantasia_ = self.gds_validate_string(NomeFantasia_, node, 'NomeFantasia')
            self.NomeFantasia = NomeFantasia_
            # validate type tsNomeFantasia
            self.validate_tsNomeFantasia(self.NomeFantasia)
        elif nodeName_ == 'Endereco':
            obj_ = tcEndereco.factory()
            obj_.build(child_)
            self.Endereco = obj_
            obj_.original_tagname_ = 'Endereco'
        elif nodeName_ == 'Contato':
            obj_ = tcContato.factory()
            obj_.build(child_)
            self.Contato = obj_
            obj_.original_tagname_ = 'Contato'
# end class tcDadosPrestador
class tcInfRps(GeneratedsSuper):
    """generateDS binding for the XSD complex type ``tcInfRps``.

    Body of an RPS: identification, emission datetime (parsed from
    ``%Y-%m-%dT%H:%M:%S`` strings), several byte-pattern-restricted codes
    (NaturezaOperacao 1-6, RegimeEspecialTributacao 0-6, the 1/2 flags
    OptanteSimplesNacional / IncentivadorCultural / Status) and the nested
    Servico, Prestador, Tomador, IntermediarioServico and ConstrucaoCivil
    sub-objects, plus an ``Id`` XML attribute (max 255 chars).
    NOTE(review): domain meaning (NFSe/ABRASF RPS record) is inferred from
    element names — confirm against the municipality's schema.
    """
    # Hooks consulted by factory() so applications can substitute subclasses.
    subclass = None
    superclass = None
    def __init__(self, Id=None, IdentificacaoRps=None, DataEmissao=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Status=None, RpsSubstituido=None, Servico=None, Prestador=None, Tomador=None, IntermediarioServico=None, ConstrucaoCivil=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.IdentificacaoRps = IdentificacaoRps
        # Accept either a ready datetime or an ISO-like string for DataEmissao.
        if isinstance(DataEmissao, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = DataEmissao
        self.DataEmissao = initvalue_
        self.NaturezaOperacao = NaturezaOperacao
        self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        self.RegimeEspecialTributacao = RegimeEspecialTributacao
        self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        self.OptanteSimplesNacional = OptanteSimplesNacional
        self.validate_tsSimNao(self.OptanteSimplesNacional)
        self.IncentivadorCultural = IncentivadorCultural
        self.validate_tsSimNao(self.IncentivadorCultural)
        self.Status = Status
        self.validate_tsStatusRps(self.Status)
        self.RpsSubstituido = RpsSubstituido
        self.Servico = Servico
        self.Prestador = Prestador
        self.Tomador = Tomador
        self.IntermediarioServico = IntermediarioServico
        self.ConstrucaoCivil = ConstrucaoCivil
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfRps.subclass:
            return tcInfRps.subclass(*args_, **kwargs_)
        else:
            return tcInfRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_IdentificacaoRps(self): return self.IdentificacaoRps
    def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
    def get_DataEmissao(self): return self.DataEmissao
    def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
    def get_NaturezaOperacao(self): return self.NaturezaOperacao
    def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
    def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
    def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
    def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
    def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
    def get_IncentivadorCultural(self): return self.IncentivadorCultural
    def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
    def get_Status(self): return self.Status
    def set_Status(self, Status): self.Status = Status
    def get_RpsSubstituido(self): return self.RpsSubstituido
    def set_RpsSubstituido(self, RpsSubstituido): self.RpsSubstituido = RpsSubstituido
    def get_Servico(self): return self.Servico
    def set_Servico(self, Servico): self.Servico = Servico
    def get_Prestador(self): return self.Prestador
    def set_Prestador(self, Prestador): self.Prestador = Prestador
    def get_Tomador(self): return self.Tomador
    def set_Tomador(self, Tomador): self.Tomador = Tomador
    def get_IntermediarioServico(self): return self.IntermediarioServico
    def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
    def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
    def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNaturezaOperacao(self, value):
        # Validate type tsNaturezaOperacao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tsNaturezaOperacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsNaturezaOperacao_patterns_, ))
    validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
    def validate_tsRegimeEspecialTributacao(self, value):
        # Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tsRegimeEspecialTributacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsRegimeEspecialTributacao_patterns_, ))
    validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
    def validate_tsSimNao(self, value):
        # Validate type tsSimNao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tsSimNao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsSimNao_patterns_, ))
    validate_tsSimNao_patterns_ = [['^1$|^2$']]
    def validate_tsStatusRps(self, value):
        # Validate type tsStatusRps, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                    self.validate_tsStatusRps_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsStatusRps_patterns_, ))
    validate_tsStatusRps_patterns_ = [['^1$|^2$']]
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set (export emits children).
        if (
            self.IdentificacaoRps is not None or
            self.DataEmissao is not None or
            self.NaturezaOperacao is not None or
            self.RegimeEspecialTributacao is not None or
            self.OptanteSimplesNacional is not None or
            self.IncentivadorCultural is not None or
            self.Status is not None or
            self.RpsSubstituido is not None or
            self.Servico is not None or
            self.Prestador is not None or
            self.Tomador is not None or
            self.IntermediarioServico is not None or
            self.ConstrucaoCivil is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfRps', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element; self-closing when empty.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfRps'):
        # Emit the optional Id attribute exactly once.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfRps', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child; complex children delegate to their own export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoRps is not None:
            self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
        if self.DataEmissao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
        if self.NaturezaOperacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
        if self.RegimeEspecialTributacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
        if self.OptanteSimplesNacional is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
        if self.IncentivadorCultural is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
        if self.Status is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Status>%s</Status>%s' % (self.gds_format_integer(self.Status, input_name='Status'), eol_))
        if self.RpsSubstituido is not None:
            self.RpsSubstituido.export(outfile, level, namespace_, name_='RpsSubstituido', pretty_print=pretty_print)
        if self.Servico is not None:
            self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
        if self.Prestador is not None:
            self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
        if self.Tomador is not None:
            self.Tomador.export(outfile, level, namespace_, name_='Tomador', pretty_print=pretty_print)
        if self.IntermediarioServico is not None:
            self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
        if self.ConstrucaoCivil is not None:
            self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from a parsed XML node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse the optional Id attribute and check its length restriction.
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one parsed child element onto the matching member;
        # integer children raise a parse error on non-numeric text.
        if nodeName_ == 'IdentificacaoRps':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.IdentificacaoRps = obj_
            obj_.original_tagname_ = 'IdentificacaoRps'
        elif nodeName_ == 'DataEmissao':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.DataEmissao = dval_
        elif nodeName_ == 'NaturezaOperacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
            self.NaturezaOperacao = ival_
            # validate type tsNaturezaOperacao
            self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        elif nodeName_ == 'RegimeEspecialTributacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
            self.RegimeEspecialTributacao = ival_
            # validate type tsRegimeEspecialTributacao
            self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        elif nodeName_ == 'OptanteSimplesNacional':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
            self.OptanteSimplesNacional = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.OptanteSimplesNacional)
        elif nodeName_ == 'IncentivadorCultural':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
            self.IncentivadorCultural = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.IncentivadorCultural)
        elif nodeName_ == 'Status':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Status')
            self.Status = ival_
            # validate type tsStatusRps
            self.validate_tsStatusRps(self.Status)
        elif nodeName_ == 'RpsSubstituido':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.RpsSubstituido = obj_
            obj_.original_tagname_ = 'RpsSubstituido'
        elif nodeName_ == 'Servico':
            obj_ = tcDadosServico.factory()
            obj_.build(child_)
            self.Servico = obj_
            obj_.original_tagname_ = 'Servico'
        elif nodeName_ == 'Prestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.Prestador = obj_
            obj_.original_tagname_ = 'Prestador'
        elif nodeName_ == 'Tomador':
            obj_ = tcDadosTomador.factory()
            obj_.build(child_)
            self.Tomador = obj_
            obj_.original_tagname_ = 'Tomador'
        elif nodeName_ == 'IntermediarioServico':
            obj_ = tcIdentificacaoIntermediarioServico.factory()
            obj_.build(child_)
            self.IntermediarioServico = obj_
            obj_.original_tagname_ = 'IntermediarioServico'
        elif nodeName_ == 'ConstrucaoCivil':
            obj_ = tcDadosConstrucaoCivil.factory()
            obj_.build(child_)
            self.ConstrucaoCivil = obj_
            obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfRps
class tcRps(GeneratedsSuper):
    """generateDS binding for the XSD complex type ``tcRps``.

    Wrapper pairing an InfRps payload with an optional Signature element
    (exported under the ``dsig:`` namespace prefix).
    NOTE(review): Signature presumably is an XML-DSig block built by
    SignatureType — confirm against the schema.
    """
    # Hooks consulted by factory() so applications can substitute subclasses.
    subclass = None
    superclass = None
    def __init__(self, InfRps=None, Signature=None):
        self.original_tagname_ = None
        self.InfRps = InfRps
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcRps.subclass:
            return tcRps.subclass(*args_, **kwargs_)
        else:
            return tcRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_InfRps(self): return self.InfRps
    def set_InfRps(self, InfRps): self.InfRps = InfRps
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        # True when at least one child element is set (export emits children).
        if (
            self.InfRps is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcRps', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element; self-closing when empty.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcRps'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcRps', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child; Signature is exported with the dsig: prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.InfRps is not None:
            self.InfRps.export(outfile, level, namespace_, name_='InfRps', pretty_print=pretty_print)
        if self.Signature is not None:
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        # Populate this instance from a parsed XML node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one parsed child element onto the matching member.
        if nodeName_ == 'InfRps':
            obj_ = tcInfRps.factory()
            obj_.build(child_)
            self.InfRps = obj_
            obj_.original_tagname_ = 'InfRps'
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature = obj_
            obj_.original_tagname_ = 'Signature'
# end class tcRps
class tcIdentificacaoNfse(GeneratedsSuper):
    """generateDS binding for the XSD complex type ``tcIdentificacaoNfse``.

    Identifies an issued NFSe: Numero (non-negative integer), Cnpj (string of
    exactly 14 chars), InscricaoMunicipal (string, 1-15 chars) and
    CodigoMunicipio (integer; warning when 7 or more digits).
    NOTE(review): CodigoMunicipio presumably is the IBGE municipality code,
    per the type name tsCodigoMunicipioIbge — confirm against the schema.
    """
    # Hooks consulted by factory() so applications can substitute subclasses.
    subclass = None
    superclass = None
    def __init__(self, Numero=None, Cnpj=None, InscricaoMunicipal=None, CodigoMunicipio=None):
        self.original_tagname_ = None
        self.Numero = Numero
        self.validate_tsNumeroNfse(self.Numero)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoNfse.subclass:
            return tcIdentificacaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Numero(self): return self.Numero
    def set_Numero(self, Numero): self.Numero = Numero
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def validate_tsNumeroNfse(self, value):
        # Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def hasContent_(self):
        # True when at least one child element is set (export emits children).
        if (
            self.Numero is not None or
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None or
            self.CodigoMunicipio is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element; self-closing when empty.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoNfse'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', fromsubclass_=False, pretty_print=True):
        # Emit each non-None child with type-appropriate formatting.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
    def build(self, node):
        # Populate this instance from a parsed XML node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one parsed child element onto the matching member;
        # Numero additionally rejects negative values (xsd:nonNegativeInteger).
        if nodeName_ == 'Numero':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Numero')
            self.Numero = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.Numero)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcIdentificacaoNfse
class tcInfNfse(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, Numero=None, CodigoVerificacao=None, DataEmissao=None, IdentificacaoRps=None, DataEmissaoRps=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Competencia=None, NfseSubstituida=None, OutrasInformacoes=None, Servico=None, ValorCredito=None, PrestadorServico=None, TomadorServico=None, IntermediarioServico=None, OrgaoGerador=None, ConstrucaoCivil=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.Numero = Numero
self.validate_tsNumeroNfse(self.Numero)
self.CodigoVerificacao = CodigoVerificacao
self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
if isinstance(DataEmissao, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataEmissao
self.DataEmissao = initvalue_
self.IdentificacaoRps = IdentificacaoRps
if isinstance(DataEmissaoRps, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataEmissaoRps, '%Y-%m-%d').date()
else:
initvalue_ = DataEmissaoRps
self.DataEmissaoRps = initvalue_
self.NaturezaOperacao = NaturezaOperacao
self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
self.RegimeEspecialTributacao = RegimeEspecialTributacao
self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
self.OptanteSimplesNacional = OptanteSimplesNacional
self.validate_tsSimNao(self.OptanteSimplesNacional)
self.IncentivadorCultural = IncentivadorCultural
self.validate_tsSimNao(self.IncentivadorCultural)
self.Competencia = Competencia
self.NfseSubstituida = NfseSubstituida
self.validate_tsNumeroNfse(self.NfseSubstituida)
self.OutrasInformacoes = OutrasInformacoes
self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
self.Servico = Servico
self.ValorCredito = ValorCredito
self.validate_tsValor(self.ValorCredito)
self.PrestadorServico = PrestadorServico
self.TomadorServico = TomadorServico
self.IntermediarioServico = IntermediarioServico
self.OrgaoGerador = OrgaoGerador
self.ConstrucaoCivil = ConstrucaoCivil
def factory(*args_, **kwargs_):
    """Instantiate tcInfNfse, honouring subclass-module and subclass overrides."""
    # A subclass registered via CurrentSubclassModule_ takes precedence.
    if CurrentSubclassModule_ is not None:
        override = getSubclassFromModule_(
            CurrentSubclassModule_, tcInfNfse)
        if override is not None:
            return override(*args_, **kwargs_)
    # Fall back to an explicitly assigned subclass, then to the class itself.
    target = tcInfNfse.subclass if tcInfNfse.subclass else tcInfNfse
    return target(*args_, **kwargs_)
factory = staticmethod(factory)
# Generated getter/setter pairs (one per member plus the Id attribute).
# Attributes may equally be read and assigned directly; setters perform
# no validation.
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_CodigoVerificacao(self): return self.CodigoVerificacao
def set_CodigoVerificacao(self, CodigoVerificacao): self.CodigoVerificacao = CodigoVerificacao
def get_DataEmissao(self): return self.DataEmissao
def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_DataEmissaoRps(self): return self.DataEmissaoRps
def set_DataEmissaoRps(self, DataEmissaoRps): self.DataEmissaoRps = DataEmissaoRps
def get_NaturezaOperacao(self): return self.NaturezaOperacao
def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
def get_IncentivadorCultural(self): return self.IncentivadorCultural
def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
def get_Competencia(self): return self.Competencia
def set_Competencia(self, Competencia): self.Competencia = Competencia
def get_NfseSubstituida(self): return self.NfseSubstituida
def set_NfseSubstituida(self, NfseSubstituida): self.NfseSubstituida = NfseSubstituida
def get_OutrasInformacoes(self): return self.OutrasInformacoes
def set_OutrasInformacoes(self, OutrasInformacoes): self.OutrasInformacoes = OutrasInformacoes
def get_Servico(self): return self.Servico
def set_Servico(self, Servico): self.Servico = Servico
def get_ValorCredito(self): return self.ValorCredito
def set_ValorCredito(self, ValorCredito): self.ValorCredito = ValorCredito
def get_PrestadorServico(self): return self.PrestadorServico
def set_PrestadorServico(self, PrestadorServico): self.PrestadorServico = PrestadorServico
def get_TomadorServico(self): return self.TomadorServico
def set_TomadorServico(self, TomadorServico): self.TomadorServico = TomadorServico
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_OrgaoGerador(self): return self.OrgaoGerador
def set_OrgaoGerador(self, OrgaoGerador): self.OrgaoGerador = OrgaoGerador
def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
# Simple-type validators. All of them are warn-only (warnings_.warn) and
# are skipped entirely when the module flag Validate_simpletypes_ is falsy.
def validate_tsNumeroNfse(self, value):
    # Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
    if value is not None and Validate_simpletypes_:
        # NOTE(review): the XSD upper bound is rendered here as a digit-count
        # test, so an in-range 15-digit value would still warn — confirm
        # against the schema's maxInclusive facet.
        if len(str(value)) >= 15:
            warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def validate_tsCodigoVerificacao(self, value):
    # Validate type tsCodigoVerificacao, a restriction on xsd:string.
    # Accepts 1..9 characters.
    if value is not None and Validate_simpletypes_:
        if len(value) > 9:
            warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
        if len(value) < 1:
            warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tsNaturezaOperacao(self, value):
    # Validate type tsNaturezaOperacao, a restriction on xsd:byte.
    # Allowed codes are 1 through 6 (see the pattern list below).
    if value is not None and Validate_simpletypes_:
        if not self.gds_validate_simple_patterns(
            self.validate_tsNaturezaOperacao_patterns_, value):
            warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsNaturezaOperacao_patterns_, ))
validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsRegimeEspecialTributacao(self, value):
    # Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
    # Allowed codes are 0 through 6 (see the pattern list below).
    if value is not None and Validate_simpletypes_:
        if not self.gds_validate_simple_patterns(
            self.validate_tsRegimeEspecialTributacao_patterns_, value):
            warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsRegimeEspecialTributacao_patterns_, ))
validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsSimNao(self, value):
    # Validate type tsSimNao, a restriction on xsd:byte.
    # Yes/no flag encoded as 1 or 2.
    if value is not None and Validate_simpletypes_:
        if not self.gds_validate_simple_patterns(
            self.validate_tsSimNao_patterns_, value):
            warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
def validate_tsOutrasInformacoes(self, value):
    # Validate type tsOutrasInformacoes, a restriction on xsd:string.
    # Accepts 1..255 characters.
    if value is not None and Validate_simpletypes_:
        if len(value) > 255:
            warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
        if len(value) < 1:
            warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
def validate_tsValor(self, value):
    # Validate type tsValor, a restriction on xsd:decimal.
    # Must be non-negative.
    if value is not None and Validate_simpletypes_:
        if value < 0:
            warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
        # NOTE(review): upper bound rendered as a digit-count test on the
        # string form (includes the decimal point) — confirm against the XSD.
        if len(str(value)) >= 15:
            warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
def validate_tsIdTag(self, value):
    # Validate type tsIdTag, a restriction on xsd:string.
    # Accepts up to 255 characters; no minimum length.
    if value is not None and Validate_simpletypes_:
        if len(value) > 255:
            warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
    """Report whether any child element of this tcInfNfse carries a value."""
    members = (
        self.Numero, self.CodigoVerificacao, self.DataEmissao,
        self.IdentificacaoRps, self.DataEmissaoRps, self.NaturezaOperacao,
        self.RegimeEspecialTributacao, self.OptanteSimplesNacional,
        self.IncentivadorCultural, self.Competencia, self.NfseSubstituida,
        self.OutrasInformacoes, self.Servico, self.ValorCredito,
        self.PrestadorServico, self.TomadorServico, self.IntermediarioServico,
        self.OrgaoGerador, self.ConstrucaoCivil,
    )
    return any(member is not None for member in members)
def export(self, outfile, level, namespace_='', name_='tcInfNfse', namespacedef_='', pretty_print=True):
    """Serialize this element and its children to outfile as XML.

    A namespace definition registered in GenerateDSNamespaceDefs_ overrides
    the namespacedef_ argument, and original_tagname_ (set when this
    instance was built from a differently named element) overrides name_.
    """
    imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfNfse')
    if imported_ns_def_ is not None:
        namespacedef_ = imported_ns_def_
    if pretty_print:
        eol_ = '\n'
    else:
        eol_ = ''
    if self.original_tagname_ is not None:
        name_ = self.original_tagname_
    showIndent(outfile, level, pretty_print)
    outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
    already_processed = set()
    self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfNfse')
    if self.hasContent_():
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfNfse', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    else:
        # Self-closing tag when no child element is populated.
        outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfNfse'):
    """Write the optional Id attribute, recording it in already_processed."""
    if self.Id is None or 'Id' in already_processed:
        return
    already_processed.add('Id')
    outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
def exportChildren(self, outfile, level, namespace_='', name_='tcInfNfse', fromsubclass_=False, pretty_print=True):
    """Write each non-None member as a child element, in XSD sequence order.

    Scalar members are formatted and escaped through the gds_* helpers;
    complex members delegate to their own export() method.
    """
    if pretty_print:
        eol_ = '\n'
    else:
        eol_ = ''
    if self.Numero is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
    if self.CodigoVerificacao is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<CodigoVerificacao>%s</CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
    if self.DataEmissao is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
    if self.IdentificacaoRps is not None:
        self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
    if self.DataEmissaoRps is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<DataEmissaoRps>%s</DataEmissaoRps>%s' % (self.gds_format_date(self.DataEmissaoRps, input_name='DataEmissaoRps'), eol_))
    if self.NaturezaOperacao is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
    if self.RegimeEspecialTributacao is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
    if self.OptanteSimplesNacional is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
    if self.IncentivadorCultural is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
    if self.Competencia is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<Competencia>%s</Competencia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Competencia), input_name='Competencia')), eol_))
    if self.NfseSubstituida is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<NfseSubstituida>%s</NfseSubstituida>%s' % (self.gds_format_integer(self.NfseSubstituida, input_name='NfseSubstituida'), eol_))
    if self.OutrasInformacoes is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<OutrasInformacoes>%s</OutrasInformacoes>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.OutrasInformacoes), input_name='OutrasInformacoes')), eol_))
    if self.Servico is not None:
        self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
    if self.ValorCredito is not None:
        showIndent(outfile, level, pretty_print)
        outfile.write('<ValorCredito>%s</ValorCredito>%s' % (self.gds_format_float(self.ValorCredito, input_name='ValorCredito'), eol_))
    if self.PrestadorServico is not None:
        self.PrestadorServico.export(outfile, level, namespace_, name_='PrestadorServico', pretty_print=pretty_print)
    if self.TomadorServico is not None:
        self.TomadorServico.export(outfile, level, namespace_, name_='TomadorServico', pretty_print=pretty_print)
    if self.IntermediarioServico is not None:
        self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
    if self.OrgaoGerador is not None:
        self.OrgaoGerador.export(outfile, level, namespace_, name_='OrgaoGerador', pretty_print=pretty_print)
    if self.ConstrucaoCivil is not None:
        self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
def build(self, node):
    """Populate this instance from a parsed XML node and return self."""
    already_processed = set()
    self.buildAttributes(node, node.attrib, already_processed)
    for element in node:
        # Strip any namespace prefix from the tag before dispatching.
        tag = Tag_pattern_.match(element.tag).groups()[-1]
        self.buildChildren(element, node, tag)
    return self
def buildAttributes(self, node, attrs, already_processed):
    """Read the Id attribute from node, validating it as tsIdTag."""
    attr_value = find_attr_value_('Id', node)
    if attr_value is None or 'Id' in already_processed:
        return
    already_processed.add('Id')
    self.Id = attr_value
    self.validate_tsIdTag(self.Id)  # validate type tsIdTag
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    """Populate one member from a parsed child node, keyed on its tag name.

    Integer/float text is converted with explicit error reporting via
    raise_parse_error; complex children are constructed through their
    class factory() and tagged with original_tagname_. Unknown tags are
    silently ignored.
    """
    if nodeName_ == 'Numero':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        if ival_ < 0:
            raise_parse_error(child_, 'requires nonNegativeInteger')
        ival_ = self.gds_validate_integer(ival_, node, 'Numero')
        self.Numero = ival_
        # validate type tsNumeroNfse
        self.validate_tsNumeroNfse(self.Numero)
    elif nodeName_ == 'CodigoVerificacao':
        CodigoVerificacao_ = child_.text
        CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
        self.CodigoVerificacao = CodigoVerificacao_
        # validate type tsCodigoVerificacao
        self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
    elif nodeName_ == 'DataEmissao':
        sval_ = child_.text
        dval_ = self.gds_parse_datetime(sval_)
        self.DataEmissao = dval_
    elif nodeName_ == 'IdentificacaoRps':
        obj_ = tcIdentificacaoRps.factory()
        obj_.build(child_)
        self.IdentificacaoRps = obj_
        obj_.original_tagname_ = 'IdentificacaoRps'
    elif nodeName_ == 'DataEmissaoRps':
        sval_ = child_.text
        dval_ = self.gds_parse_date(sval_)
        self.DataEmissaoRps = dval_
    elif nodeName_ == 'NaturezaOperacao':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
        self.NaturezaOperacao = ival_
        # validate type tsNaturezaOperacao
        self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
    elif nodeName_ == 'RegimeEspecialTributacao':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
        self.RegimeEspecialTributacao = ival_
        # validate type tsRegimeEspecialTributacao
        self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
    elif nodeName_ == 'OptanteSimplesNacional':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
        self.OptanteSimplesNacional = ival_
        # validate type tsSimNao
        self.validate_tsSimNao(self.OptanteSimplesNacional)
    elif nodeName_ == 'IncentivadorCultural':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
        self.IncentivadorCultural = ival_
        # validate type tsSimNao
        self.validate_tsSimNao(self.IncentivadorCultural)
    elif nodeName_ == 'Competencia':
        Competencia_ = child_.text
        Competencia_ = self.gds_validate_string(Competencia_, node, 'Competencia')
        self.Competencia = Competencia_
    elif nodeName_ == 'NfseSubstituida':
        sval_ = child_.text
        try:
            ival_ = int(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires integer: %s' % exp)
        if ival_ < 0:
            raise_parse_error(child_, 'requires nonNegativeInteger')
        ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituida')
        self.NfseSubstituida = ival_
        # validate type tsNumeroNfse
        self.validate_tsNumeroNfse(self.NfseSubstituida)
    elif nodeName_ == 'OutrasInformacoes':
        OutrasInformacoes_ = child_.text
        OutrasInformacoes_ = self.gds_validate_string(OutrasInformacoes_, node, 'OutrasInformacoes')
        self.OutrasInformacoes = OutrasInformacoes_
        # validate type tsOutrasInformacoes
        self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
    elif nodeName_ == 'Servico':
        obj_ = tcDadosServico.factory()
        obj_.build(child_)
        self.Servico = obj_
        obj_.original_tagname_ = 'Servico'
    elif nodeName_ == 'ValorCredito':
        sval_ = child_.text
        try:
            fval_ = float(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires float or double: %s' % exp)
        fval_ = self.gds_validate_float(fval_, node, 'ValorCredito')
        self.ValorCredito = fval_
        # validate type tsValor
        self.validate_tsValor(self.ValorCredito)
    elif nodeName_ == 'PrestadorServico':
        obj_ = tcDadosPrestador.factory()
        obj_.build(child_)
        self.PrestadorServico = obj_
        obj_.original_tagname_ = 'PrestadorServico'
    elif nodeName_ == 'TomadorServico':
        obj_ = tcDadosTomador.factory()
        obj_.build(child_)
        self.TomadorServico = obj_
        obj_.original_tagname_ = 'TomadorServico'
    elif nodeName_ == 'IntermediarioServico':
        obj_ = tcIdentificacaoIntermediarioServico.factory()
        obj_.build(child_)
        self.IntermediarioServico = obj_
        obj_.original_tagname_ = 'IntermediarioServico'
    elif nodeName_ == 'OrgaoGerador':
        obj_ = tcIdentificacaoOrgaoGerador.factory()
        obj_.build(child_)
        self.OrgaoGerador = obj_
        obj_.original_tagname_ = 'OrgaoGerador'
    elif nodeName_ == 'ConstrucaoCivil':
        obj_ = tcDadosConstrucaoCivil.factory()
        obj_.build(child_)
        self.ConstrucaoCivil = obj_
        obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfNfse
class tcNfse(GeneratedsSuper):
    """Generated binding for tcNfse: an InfNfse payload plus an optional
    XML-DSig Signature child."""
    subclass = None
    superclass = None

    def __init__(self, InfNfse=None, Signature=None):
        self.original_tagname_ = None
        self.InfNfse = InfNfse
        self.Signature = Signature

    def factory(*args_, **kwargs_):
        # A subclass registered via CurrentSubclassModule_ takes precedence,
        # then an explicitly assigned subclass, then tcNfse itself.
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tcNfse)
            if override is not None:
                return override(*args_, **kwargs_)
        target = tcNfse.subclass if tcNfse.subclass else tcNfse
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    # Generated accessors; the attributes may also be used directly.
    def get_InfNfse(self): return self.InfNfse
    def set_InfNfse(self, InfNfse): self.InfNfse = InfNfse
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature

    def hasContent_(self):
        # True when at least one child element is populated.
        return self.InfNfse is not None or self.Signature is not None

    def export(self, outfile, level, namespace_='', name_='tcNfse', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcNfse')
        if not self.hasContent_():
            # Self-closing tag when there is nothing to nest.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcNfse', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcNfse'):
        pass  # tcNfse declares no XML attributes

    def exportChildren(self, outfile, level, namespace_='', name_='tcNfse', fromsubclass_=False, pretty_print=True):
        # Children render themselves; the Signature child uses the dsig: prefix.
        if self.InfNfse is not None:
            self.InfNfse.export(outfile, level, namespace_, name_='InfNfse', pretty_print=pretty_print)
        if self.Signature is not None:
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from a parsed XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # tcNfse declares no XML attributes

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'InfNfse':
            built = tcInfNfse.factory()
            built.build(child_)
            self.InfNfse = built
            built.original_tagname_ = 'InfNfse'
        elif nodeName_ == 'Signature':
            built = SignatureType.factory()
            built.build(child_)
            self.Signature = built
            built.original_tagname_ = 'Signature'
# end class tcNfse
class tcInfPedidoCancelamento(GeneratedsSuper):
    """Generated binding for tcInfPedidoCancelamento: identifies the NFSe to
    cancel (IdentificacaoNfse) plus a cancellation code, with an Id XML
    attribute (tsIdTag)."""
    subclass = None
    superclass = None
    def __init__(self, Id=None, IdentificacaoNfse=None, CodigoCancelamento=None):
        self.original_tagname_ = None
        # Id is an XML attribute, not a child element.
        self.Id = _cast(None, Id)
        self.IdentificacaoNfse = IdentificacaoNfse
        self.CodigoCancelamento = CodigoCancelamento
        self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
    def factory(*args_, **kwargs_):
        # Standard generateDS instantiation: subclass-module override first,
        # then an assigned subclass, then the class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfPedidoCancelamento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfPedidoCancelamento.subclass:
            return tcInfPedidoCancelamento.subclass(*args_, **kwargs_)
        else:
            return tcInfPedidoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors; attributes may also be used directly.
    def get_IdentificacaoNfse(self): return self.IdentificacaoNfse
    def set_IdentificacaoNfse(self, IdentificacaoNfse): self.IdentificacaoNfse = IdentificacaoNfse
    def get_CodigoCancelamento(self): return self.CodigoCancelamento
    def set_CodigoCancelamento(self, CodigoCancelamento): self.CodigoCancelamento = CodigoCancelamento
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsCodigoCancelamentoNfse(self, value):
        # Validate type tsCodigoCancelamentoNfse, a restriction on xsd:string.
        # Warn-only check: accepts 1..4 characters.
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        # Warn-only check: accepts up to 255 characters.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is populated.
        if (
            self.IdentificacaoNfse is not None or
            self.CodigoCancelamento is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfPedidoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfPedidoCancelamento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfPedidoCancelamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing tag when no child element is populated.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfPedidoCancelamento'):
        # Writes the optional Id attribute.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', fromsubclass_=False, pretty_print=True):
        # Children in XSD sequence order; scalar text is escaped via quote_xml.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoNfse is not None:
            self.IdentificacaoNfse.export(outfile, level, namespace_, name_='IdentificacaoNfse', pretty_print=pretty_print)
        if self.CodigoCancelamento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoCancelamento>%s</CodigoCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoCancelamento), input_name='CodigoCancelamento')), eol_))
    def build(self, node):
        """Populate this instance from a parsed XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Reads the Id attribute, validating it as tsIdTag.
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Populates one member from a child node; unknown tags are ignored.
        if nodeName_ == 'IdentificacaoNfse':
            obj_ = tcIdentificacaoNfse.factory()
            obj_.build(child_)
            self.IdentificacaoNfse = obj_
            obj_.original_tagname_ = 'IdentificacaoNfse'
        elif nodeName_ == 'CodigoCancelamento':
            CodigoCancelamento_ = child_.text
            CodigoCancelamento_ = self.gds_validate_string(CodigoCancelamento_, node, 'CodigoCancelamento')
            self.CodigoCancelamento = CodigoCancelamento_
            # validate type tsCodigoCancelamentoNfse
            self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
# end class tcInfPedidoCancelamento
class tcPedidoCancelamento(GeneratedsSuper):
    """Generated binding for tcPedidoCancelamento: an InfPedidoCancelamento
    payload plus an optional XML-DSig Signature child."""
    subclass = None
    superclass = None

    def __init__(self, InfPedidoCancelamento=None, Signature=None):
        self.original_tagname_ = None
        self.InfPedidoCancelamento = InfPedidoCancelamento
        self.Signature = Signature

    def factory(*args_, **kwargs_):
        # A subclass registered via CurrentSubclassModule_ takes precedence,
        # then an explicitly assigned subclass, then the class itself.
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tcPedidoCancelamento)
            if override is not None:
                return override(*args_, **kwargs_)
        target = tcPedidoCancelamento.subclass if tcPedidoCancelamento.subclass else tcPedidoCancelamento
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)

    # Generated accessors; the attributes may also be used directly.
    def get_InfPedidoCancelamento(self): return self.InfPedidoCancelamento
    def set_InfPedidoCancelamento(self, InfPedidoCancelamento): self.InfPedidoCancelamento = InfPedidoCancelamento
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature

    def hasContent_(self):
        # True when at least one child element is populated.
        return self.InfPedidoCancelamento is not None or self.Signature is not None

    def export(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcPedidoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcPedidoCancelamento')
        if not self.hasContent_():
            # Self-closing tag when there is nothing to nest.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcPedidoCancelamento', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcPedidoCancelamento'):
        pass  # this type declares no XML attributes

    def exportChildren(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', fromsubclass_=False, pretty_print=True):
        # Children render themselves; the Signature child uses the dsig: prefix.
        if self.InfPedidoCancelamento is not None:
            self.InfPedidoCancelamento.export(outfile, level, namespace_, name_='InfPedidoCancelamento', pretty_print=pretty_print)
        if self.Signature is not None:
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from a parsed XML node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # this type declares no XML attributes

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'InfPedidoCancelamento':
            built = tcInfPedidoCancelamento.factory()
            built.build(child_)
            self.InfPedidoCancelamento = built
            built.original_tagname_ = 'InfPedidoCancelamento'
        elif nodeName_ == 'Signature':
            built = SignatureType.factory()
            built.build(child_)
            self.Signature = built
            built.original_tagname_ = 'Signature'
# end class tcPedidoCancelamento
class tcInfConfirmacaoCancelamento(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Sucesso=None, DataHora=None):
self.original_tagname_ = None
self.Sucesso = Sucesso
if isinstance(DataHora, BaseStrType_):
initvalue_ = datetime_.datetime.strptime(DataHora, '%Y-%m-%dT%H:%M:%S')
else:
initvalue_ = DataHora
self.DataHora = initvalue_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfConfirmacaoCancelamento)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfConfirmacaoCancelamento.subclass:
return tcInfConfirmacaoCancelamento.subclass(*args_, **kwargs_)
else:
return tcInfConfirmacaoCancelamento(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Sucesso(self): return self.Sucesso
def set_Sucesso(self, Sucesso): self.Sucesso = Sucesso
def get_DataHora(self): return self.DataHora
def set_DataHora(self, DataHora): self.DataHora = DataHora
def hasContent_(self):
if (
self.Sucesso is not None or
self.DataHora is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfConfirmacaoCancelamento')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfConfirmacaoCancelamento')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfConfirmacaoCancelamento', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfConfirmacaoCancelamento'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Sucesso is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<Sucesso>%s</Sucesso>%s' % (self.gds_format_boolean(self.Sucesso, input_name='Sucesso'), eol_))
if self.DataHora is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<DataHora>%s</DataHora>%s' % (self.gds_format_datetime(self.DataHora, input_name='DataHora'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element: ``Sucesso`` (xsd:boolean) or ``DataHora`` (xsd:dateTime)."""
        if nodeName_ == 'Sucesso':
            sval_ = child_.text
            # Accept the two lexical forms xsd:boolean allows for each value.
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'Sucesso')
            self.Sucesso = ival_
        elif nodeName_ == 'DataHora':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.DataHora = dval_
# end class tcInfConfirmacaoCancelamento
class tcConfirmacaoCancelamento(GeneratedsSuper):
    """NFSe cancellation confirmation (apparently the ABRASF NFSe schema).

    Pairs the cancellation request (``Pedido``) with its confirmation data
    (``InfConfirmacaoCancelamento``); carries an optional ``Id`` XML
    attribute restricted by ``tsIdTag``. Generated by generateDS —
    regenerate from the XSD rather than editing by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, Pedido=None, InfConfirmacaoCancelamento=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.Pedido = Pedido
        self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcConfirmacaoCancelamento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcConfirmacaoCancelamento.subclass:
            return tcConfirmacaoCancelamento.subclass(*args_, **kwargs_)
        else:
            return tcConfirmacaoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_Pedido(self): return self.Pedido
    def set_Pedido(self, Pedido): self.Pedido = Pedido
    def get_InfConfirmacaoCancelamento(self): return self.InfConfirmacaoCancelamento
    def set_InfConfirmacaoCancelamento(self, InfConfirmacaoCancelamento): self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        # Only warns (does not raise) when the value violates the facet.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is populated.
        if (
            self.Pedido is not None or
            self.InfConfirmacaoCancelamento is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcConfirmacaoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcConfirmacaoCancelamento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcConfirmacaoCancelamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcConfirmacaoCancelamento'):
        # Write the optional Id attribute exactly once.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Pedido is not None:
            self.Pedido.export(outfile, level, namespace_, name_='Pedido', pretty_print=pretty_print)
        if self.InfConfirmacaoCancelamento is not None:
            self.InfConfirmacaoCancelamento.export(outfile, level, namespace_, name_='InfConfirmacaoCancelamento', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Delegate each child element to its generated complex-type class.
        if nodeName_ == 'Pedido':
            obj_ = tcPedidoCancelamento.factory()
            obj_.build(child_)
            self.Pedido = obj_
            obj_.original_tagname_ = 'Pedido'
        elif nodeName_ == 'InfConfirmacaoCancelamento':
            obj_ = tcInfConfirmacaoCancelamento.factory()
            obj_.build(child_)
            self.InfConfirmacaoCancelamento = obj_
            obj_.original_tagname_ = 'InfConfirmacaoCancelamento'
# end class tcConfirmacaoCancelamento
class tcCancelamentoNfse(GeneratedsSuper):
    """NFSe cancellation element: a ``Confirmacao`` plus an optional
    XML-DSig ``Signature`` (exported under the ``dsig:`` prefix).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, Confirmacao=None, Signature=None):
        self.original_tagname_ = None
        self.Confirmacao = Confirmacao
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcCancelamentoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcCancelamentoNfse.subclass:
            return tcCancelamentoNfse.subclass(*args_, **kwargs_)
        else:
            return tcCancelamentoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_Confirmacao(self): return self.Confirmacao
    def set_Confirmacao(self, Confirmacao): self.Confirmacao = Confirmacao
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        # True when any child element is populated.
        if (
            self.Confirmacao is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCancelamentoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCancelamentoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcCancelamentoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCancelamentoNfse'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Confirmacao is not None:
            self.Confirmacao.export(outfile, level, namespace_, name_='Confirmacao', pretty_print=pretty_print)
        if self.Signature is not None:
            # Signature is serialized in the XML-DSig namespace.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Delegate each child element to its generated complex-type class.
        if nodeName_ == 'Confirmacao':
            obj_ = tcConfirmacaoCancelamento.factory()
            obj_.build(child_)
            self.Confirmacao = obj_
            obj_.original_tagname_ = 'Confirmacao'
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature = obj_
            obj_.original_tagname_ = 'Signature'
# end class tcCancelamentoNfse
class tcInfSubstituicaoNfse(GeneratedsSuper):
    """Substitution info for an NFSe: the number of the substituting
    invoice (``NfseSubstituidora``, type ``tsNumeroNfse``) plus an
    optional ``Id`` attribute (type ``tsIdTag``).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, NfseSubstituidora=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.NfseSubstituidora = NfseSubstituidora
        self.validate_tsNumeroNfse(self.NfseSubstituidora)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfSubstituicaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfSubstituicaoNfse.subclass:
            return tcInfSubstituicaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcInfSubstituicaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_NfseSubstituidora(self): return self.NfseSubstituidora
    def set_NfseSubstituidora(self, NfseSubstituidora): self.NfseSubstituidora = NfseSubstituidora
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroNfse(self, value):
        # Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
        # NOTE(review): warns on digit count (>= 15 digits) as the generator's
        # stand-in for the numeric maxInclusive facet; only warns, never raises.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when the single child element is populated.
        if (
            self.NfseSubstituidora is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfSubstituicaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfSubstituicaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfSubstituicaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfSubstituicaoNfse'):
        # Write the optional Id attribute exactly once.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.NfseSubstituidora is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NfseSubstituidora>%s</NfseSubstituidora>%s' % (self.gds_format_integer(self.NfseSubstituidora, input_name='NfseSubstituidora'), eol_))
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse the NfseSubstituidora child as a non-negative integer.
        if nodeName_ == 'NfseSubstituidora':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituidora')
            self.NfseSubstituidora = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.NfseSubstituidora)
# end class tcInfSubstituicaoNfse
class tcSubstituicaoNfse(GeneratedsSuper):
    """NFSe substitution element: the substitution info plus zero or more
    XML-DSig ``Signature`` children (exported under the ``dsig:`` prefix).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, SubstituicaoNfse=None, Signature=None):
        self.original_tagname_ = None
        self.SubstituicaoNfse = SubstituicaoNfse
        # Signature is a repeating element; default to a fresh list per instance.
        if Signature is None:
            self.Signature = []
        else:
            self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcSubstituicaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcSubstituicaoNfse.subclass:
            return tcSubstituicaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcSubstituicaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_SubstituicaoNfse(self): return self.SubstituicaoNfse
    def set_SubstituicaoNfse(self, SubstituicaoNfse): self.SubstituicaoNfse = SubstituicaoNfse
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def add_Signature(self, value): self.Signature.append(value)
    def insert_Signature_at(self, index, value): self.Signature.insert(index, value)
    def replace_Signature_at(self, index, value): self.Signature[index] = value
    def hasContent_(self):
        # True when any child element is populated (Signature list non-empty).
        if (
            self.SubstituicaoNfse is not None or
            self.Signature
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcSubstituicaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcSubstituicaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcSubstituicaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcSubstituicaoNfse'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.SubstituicaoNfse is not None:
            self.SubstituicaoNfse.export(outfile, level, namespace_, name_='SubstituicaoNfse', pretty_print=pretty_print)
        for Signature_ in self.Signature:
            # Each Signature is serialized in the XML-DSig namespace.
            Signature_.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Delegate each child element to its generated complex-type class.
        if nodeName_ == 'SubstituicaoNfse':
            obj_ = tcInfSubstituicaoNfse.factory()
            obj_.build(child_)
            self.SubstituicaoNfse = obj_
            obj_.original_tagname_ = 'SubstituicaoNfse'
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature.append(obj_)
            obj_.original_tagname_ = 'Signature'
# end class tcSubstituicaoNfse
class tcCompNfse(GeneratedsSuper):
    """Composite NFSe record: the invoice itself (``Nfse``) with optional
    cancellation (``NfseCancelamento``) and substitution
    (``NfseSubstituicao``) companions.

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, Nfse=None, NfseCancelamento=None, NfseSubstituicao=None):
        self.original_tagname_ = None
        self.Nfse = Nfse
        self.NfseCancelamento = NfseCancelamento
        self.NfseSubstituicao = NfseSubstituicao
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcCompNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcCompNfse.subclass:
            return tcCompNfse.subclass(*args_, **kwargs_)
        else:
            return tcCompNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_Nfse(self): return self.Nfse
    def set_Nfse(self, Nfse): self.Nfse = Nfse
    def get_NfseCancelamento(self): return self.NfseCancelamento
    def set_NfseCancelamento(self, NfseCancelamento): self.NfseCancelamento = NfseCancelamento
    def get_NfseSubstituicao(self): return self.NfseSubstituicao
    def set_NfseSubstituicao(self, NfseSubstituicao): self.NfseSubstituicao = NfseSubstituicao
    def hasContent_(self):
        # True when any child element is populated.
        if (
            self.Nfse is not None or
            self.NfseCancelamento is not None or
            self.NfseSubstituicao is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcCompNfse', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCompNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCompNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcCompNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCompNfse'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcCompNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Nfse is not None:
            self.Nfse.export(outfile, level, namespace_, name_='Nfse', pretty_print=pretty_print)
        if self.NfseCancelamento is not None:
            self.NfseCancelamento.export(outfile, level, namespace_, name_='NfseCancelamento', pretty_print=pretty_print)
        if self.NfseSubstituicao is not None:
            self.NfseSubstituicao.export(outfile, level, namespace_, name_='NfseSubstituicao', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Delegate each child element to its generated complex-type class.
        if nodeName_ == 'Nfse':
            obj_ = tcNfse.factory()
            obj_.build(child_)
            self.Nfse = obj_
            obj_.original_tagname_ = 'Nfse'
        elif nodeName_ == 'NfseCancelamento':
            obj_ = tcCancelamentoNfse.factory()
            obj_.build(child_)
            self.NfseCancelamento = obj_
            obj_.original_tagname_ = 'NfseCancelamento'
        elif nodeName_ == 'NfseSubstituicao':
            obj_ = tcSubstituicaoNfse.factory()
            obj_.build(child_)
            self.NfseSubstituicao = obj_
            obj_.original_tagname_ = 'NfseSubstituicao'
# end class tcCompNfse
class ListaMensagemRetorno(GeneratedsSuper):
    """List wrapper for repeating ``MensagemRetorno`` elements (service
    return messages).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, MensagemRetorno=None):
        self.original_tagname_ = None
        # Repeating element; default to a fresh list per instance.
        if MensagemRetorno is None:
            self.MensagemRetorno = []
        else:
            self.MensagemRetorno = MensagemRetorno
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListaMensagemRetorno)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListaMensagemRetorno.subclass:
            return ListaMensagemRetorno.subclass(*args_, **kwargs_)
        else:
            return ListaMensagemRetorno(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_MensagemRetorno(self): return self.MensagemRetorno
    def set_MensagemRetorno(self, MensagemRetorno): self.MensagemRetorno = MensagemRetorno
    def add_MensagemRetorno(self, value): self.MensagemRetorno.append(value)
    def insert_MensagemRetorno_at(self, index, value): self.MensagemRetorno.insert(index, value)
    def replace_MensagemRetorno_at(self, index, value): self.MensagemRetorno[index] = value
    def hasContent_(self):
        # True when the message list is non-empty.
        if (
            self.MensagemRetorno
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaMensagemRetorno')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaMensagemRetorno')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ListaMensagemRetorno', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaMensagemRetorno'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for MensagemRetorno_ in self.MensagemRetorno:
            MensagemRetorno_.export(outfile, level, namespace_, name_='MensagemRetorno', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Each MensagemRetorno child is appended to the list.
        if nodeName_ == 'MensagemRetorno':
            obj_ = tcMensagemRetorno.factory()
            obj_.build(child_)
            self.MensagemRetorno.append(obj_)
            obj_.original_tagname_ = 'MensagemRetorno'
# end class ListaMensagemRetorno
class tcMensagemRetorno(GeneratedsSuper):
    """Service return message: a ``Codigo`` (``tsCodigoMensagemAlerta``,
    1-4 chars), a ``Mensagem`` and an optional ``Correcao`` (both
    ``tsDescricaoMensagemAlerta``, 1-200 chars).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, Codigo=None, Mensagem=None, Correcao=None):
        self.original_tagname_ = None
        self.Codigo = Codigo
        self.validate_tsCodigoMensagemAlerta(self.Codigo)
        self.Mensagem = Mensagem
        self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
        self.Correcao = Correcao
        self.validate_tsDescricaoMensagemAlerta(self.Correcao)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcMensagemRetorno)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcMensagemRetorno.subclass:
            return tcMensagemRetorno.subclass(*args_, **kwargs_)
        else:
            return tcMensagemRetorno(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_Codigo(self): return self.Codigo
    def set_Codigo(self, Codigo): self.Codigo = Codigo
    def get_Mensagem(self): return self.Mensagem
    def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
    def get_Correcao(self): return self.Correcao
    def set_Correcao(self, Correcao): self.Correcao = Correcao
    def validate_tsCodigoMensagemAlerta(self, value):
        # Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
        # Only warns (does not raise) when a length facet is violated.
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def validate_tsDescricaoMensagemAlerta(self, value):
        # Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
        # Only warns (does not raise) when a length facet is violated.
        if value is not None and Validate_simpletypes_:
            if len(value) > 200:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is populated.
        if (
            self.Codigo is not None or
            self.Mensagem is not None or
            self.Correcao is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcMensagemRetorno', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetorno')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetorno')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetorno', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetorno'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetorno', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Codigo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
        if self.Mensagem is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
        if self.Correcao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Correcao>%s</Correcao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Correcao), input_name='Correcao')), eol_))
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse the three simple string children, re-validating each facet.
        if nodeName_ == 'Codigo':
            Codigo_ = child_.text
            Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
            self.Codigo = Codigo_
            # validate type tsCodigoMensagemAlerta
            self.validate_tsCodigoMensagemAlerta(self.Codigo)
        elif nodeName_ == 'Mensagem':
            Mensagem_ = child_.text
            Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
            self.Mensagem = Mensagem_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
        elif nodeName_ == 'Correcao':
            Correcao_ = child_.text
            Correcao_ = self.gds_validate_string(Correcao_, node, 'Correcao')
            self.Correcao = Correcao_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Correcao)
# end class tcMensagemRetorno
class tcMensagemRetornoLote(GeneratedsSuper):
    """Per-RPS return message for a batch: identifies the RPS
    (``IdentificacaoRps``) and carries a ``Codigo``
    (``tsCodigoMensagemAlerta``) and ``Mensagem``
    (``tsDescricaoMensagemAlerta``).

    Generated by generateDS — regenerate from the XSD rather than editing
    by hand.
    """
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoRps=None, Codigo=None, Mensagem=None):
        self.original_tagname_ = None
        self.IdentificacaoRps = IdentificacaoRps
        self.Codigo = Codigo
        self.validate_tsCodigoMensagemAlerta(self.Codigo)
        self.Mensagem = Mensagem
        self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one is available, else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcMensagemRetornoLote)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcMensagemRetornoLote.subclass:
            return tcMensagemRetornoLote.subclass(*args_, **kwargs_)
        else:
            return tcMensagemRetornoLote(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors (generateDS convention); direct attribute access also works.
    def get_IdentificacaoRps(self): return self.IdentificacaoRps
    def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
    def get_Codigo(self): return self.Codigo
    def set_Codigo(self, Codigo): self.Codigo = Codigo
    def get_Mensagem(self): return self.Mensagem
    def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
    def validate_tsCodigoMensagemAlerta(self, value):
        # Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
        # Only warns (does not raise) when a length facet is violated.
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def validate_tsDescricaoMensagemAlerta(self, value):
        # Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
        # Only warns (does not raise) when a length facet is violated.
        if value is not None and Validate_simpletypes_:
            if len(value) > 200:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is populated.
        if (
            self.IdentificacaoRps is not None or
            self.Codigo is not None or
            self.Mensagem is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', namespacedef_='', pretty_print=True):
        """Serialize this element (tag, attributes, children) to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetornoLote')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetornoLote')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetornoLote', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetornoLote'):
        # This type declares no XML attributes; nothing to write.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoRps is not None:
            self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
        if self.Codigo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
        if self.Mensagem is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
    def build(self, node):
        """Populate this object from the element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes; nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # IdentificacaoRps is a complex child; Codigo/Mensagem are simple strings.
        if nodeName_ == 'IdentificacaoRps':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.IdentificacaoRps = obj_
            obj_.original_tagname_ = 'IdentificacaoRps'
        elif nodeName_ == 'Codigo':
            Codigo_ = child_.text
            Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
            self.Codigo = Codigo_
            # validate type tsCodigoMensagemAlerta
            self.validate_tsCodigoMensagemAlerta(self.Codigo)
        elif nodeName_ == 'Mensagem':
            Mensagem_ = child_.text
            Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
            self.Mensagem = Mensagem_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
# end class tcMensagemRetornoLote
class tcLoteRps(GeneratedsSuper):
    """Generated binding for the tcLoteRps complex type: a batch ("lote") of
    RPS records identified by lot number, provider CNPJ and municipal
    registration.

    Auto-generated (generateDS-style) class: export* methods serialize to
    XML, build* methods parse from an ElementTree/lxml node, and the
    validate_* methods emit warnings for XSD restriction violations (they
    never raise).
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, NumeroLote=None, Cnpj=None, InscricaoMunicipal=None, QuantidadeRps=None, ListaRps=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.NumeroLote = NumeroLote
        self.validate_tsNumeroLote(self.NumeroLote)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        self.QuantidadeRps = QuantidadeRps
        self.validate_tsQuantidadeRps(self.QuantidadeRps)
        self.ListaRps = ListaRps
    # Factory honors dynamically registered subclasses (CurrentSubclassModule_)
    # before falling back to this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcLoteRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcLoteRps.subclass:
            return tcLoteRps.subclass(*args_, **kwargs_)
        else:
            return tcLoteRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_NumeroLote(self): return self.NumeroLote
    def set_NumeroLote(self, NumeroLote): self.NumeroLote = NumeroLote
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def get_QuantidadeRps(self): return self.QuantidadeRps
    def set_QuantidadeRps(self, QuantidadeRps): self.QuantidadeRps = QuantidadeRps
    def get_ListaRps(self): return self.ListaRps
    def set_ListaRps(self, ListaRps): self.ListaRps = ListaRps
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroLote(self, value):
        # Validate type tsNumeroLote, a restriction on xsd:nonNegativeInteger.
        # Warn-only: digit-count is used as a proxy for the maxInclusive bound.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroLote' % {"value" : value} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string.
        # CNPJ must be exactly 14 characters (digits only, per the schema).
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def validate_tsQuantidadeRps(self, value):
        # Validate type tsQuantidadeRps, a restriction on xsd:int.
        # Warn-only: digit-count is used as a proxy for the maxInclusive bound.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsQuantidadeRps' % {"value" : value} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.NumeroLote is not None or
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None or
            self.QuantidadeRps is not None or
            self.ListaRps is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcLoteRps', namespacedef_='', pretty_print=True):
        # Serialize this object (attributes + children) as an XML element.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcLoteRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-use the tag name seen when this object was parsed.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcLoteRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcLoteRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcLoteRps'):
        # Only the optional Id attribute exists on tcLoteRps.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcLoteRps', fromsubclass_=False, pretty_print=True):
        # Emit children in schema order; skip any that are None.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.NumeroLote is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NumeroLote>%s</NumeroLote>%s' % (self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
        if self.QuantidadeRps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<QuantidadeRps>%s</QuantidadeRps>%s' % (self.gds_format_integer(self.QuantidadeRps, input_name='QuantidadeRps'), eol_))
        if self.ListaRps is not None:
            self.ListaRps.export(outfile, level, namespace_, name_='ListaRps', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an XML element node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse one child element by tag name; unknown tags are ignored.
        if nodeName_ == 'NumeroLote':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote')
            self.NumeroLote = ival_
            # validate type tsNumeroLote
            self.validate_tsNumeroLote(self.NumeroLote)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        elif nodeName_ == 'QuantidadeRps':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'QuantidadeRps')
            self.QuantidadeRps = ival_
            # validate type tsQuantidadeRps
            self.validate_tsQuantidadeRps(self.QuantidadeRps)
        elif nodeName_ == 'ListaRps':
            obj_ = ListaRpsType.factory()
            obj_.build(child_)
            self.ListaRps = obj_
            obj_.original_tagname_ = 'ListaRps'
# end class tcLoteRps
class SignatureType(GeneratedsSuper):
    """Generated binding for ds:SignatureType (W3C XML-Signature).

    Holds SignedInfo, SignatureValue, KeyInfo and zero or more Object
    children.  Children are serialized with the 'ds:' namespace prefix.
    Auto-generated (generateDS-style): export* serializes to XML, build*
    parses from an ElementTree/lxml node.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None, Object=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.SignedInfo = SignedInfo
        self.SignatureValue = SignatureValue
        self.KeyInfo = KeyInfo
        # Object is repeatable; default to a fresh list (never a shared one).
        if Object is None:
            self.Object = []
        else:
            self.Object = Object
    # Factory honors dynamically registered subclasses before this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureType.subclass:
            return SignatureType.subclass(*args_, **kwargs_)
        else:
            return SignatureType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SignedInfo(self): return self.SignedInfo
    def set_SignedInfo(self, SignedInfo): self.SignedInfo = SignedInfo
    def get_SignatureValue(self): return self.SignatureValue
    def set_SignatureValue(self, SignatureValue): self.SignatureValue = SignatureValue
    def get_KeyInfo(self): return self.KeyInfo
    def set_KeyInfo(self, KeyInfo): self.KeyInfo = KeyInfo
    def get_Object(self): return self.Object
    def set_Object(self, Object): self.Object = Object
    def add_Object(self, value): self.Object.append(value)
    def insert_Object_at(self, index, value): self.Object.insert(index, value)
    def replace_Object_at(self, index, value): self.Object[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.SignedInfo is not None or
            self.SignatureValue is not None or
            self.KeyInfo is not None or
            self.Object
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureType', namespacedef_='', pretty_print=True):
        # Serialize this object (attributes + children) as an XML element.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureType'):
        # Only the optional Id attribute exists on SignatureType.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureType', fromsubclass_=False, pretty_print=True):
        # Children carry the XML-DSig 'ds:' namespace prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.SignedInfo is not None:
            self.SignedInfo.export(outfile, level, namespace_='ds:', name_='SignedInfo', pretty_print=pretty_print)
        if self.SignatureValue is not None:
            self.SignatureValue.export(outfile, level, namespace_='ds:', name_='SignatureValue', pretty_print=pretty_print)
        if self.KeyInfo is not None:
            self.KeyInfo.export(outfile, level, namespace_='ds:', name_='KeyInfo', pretty_print=pretty_print)
        for Object_ in self.Object:
            Object_.export(outfile, level, namespace_='ds:', name_='Object', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an XML element node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse one child element by tag name; unknown tags are ignored.
        if nodeName_ == 'SignedInfo':
            obj_ = SignedInfoType.factory()
            obj_.build(child_)
            self.SignedInfo = obj_
            obj_.original_tagname_ = 'SignedInfo'
        elif nodeName_ == 'SignatureValue':
            obj_ = SignatureValueType.factory()
            obj_.build(child_)
            self.SignatureValue = obj_
            obj_.original_tagname_ = 'SignatureValue'
        elif nodeName_ == 'KeyInfo':
            obj_ = KeyInfoType.factory()
            obj_.build(child_)
            self.KeyInfo = obj_
            obj_.original_tagname_ = 'KeyInfo'
        elif nodeName_ == 'Object':
            obj_ = ObjectType.factory()
            obj_.build(child_)
            self.Object.append(obj_)
            obj_.original_tagname_ = 'Object'
# end class SignatureType
class SignatureValueType(GeneratedsSuper):
    """Generated binding for ds:SignatureValueType (W3C XML-Signature).

    A simple-content element: the signature value text is kept in
    valueOf_ and written inline (no child elements).  Auto-generated
    (generateDS-style): export* serializes to XML, build* parses from
    an ElementTree/lxml node.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, valueOf_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.valueOf_ = valueOf_
    # Factory honors dynamically registered subclasses before this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureValueType.subclass:
            return SignatureValueType.subclass(*args_, **kwargs_)
        else:
            return SignatureValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric values (including 0/0.0) count as content; otherwise truthiness.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureValueType', namespacedef_='', pretty_print=True):
        # Serialize; text content is written inline with no indentation/eol
        # before the closing tag (simple-content element).
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureValueType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureValueType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureValueType'):
        # Only the optional Id attribute exists on SignatureValueType.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureValueType', fromsubclass_=False, pretty_print=True):
        # Simple-content type: no child elements to serialize.
        pass
    def build(self, node):
        # Populate from an XML element node; collects all text into valueOf_.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple-content type: child elements are not expected.
        pass
# end class SignatureValueType
class SignedInfoType(GeneratedsSuper):
    """Generated binding for ds:SignedInfoType (W3C XML-Signature).

    Holds CanonicalizationMethod, SignatureMethod and one or more
    Reference children, serialized with the 'ds:' namespace prefix.
    Auto-generated (generateDS-style): export* serializes to XML,
    build* parses from an ElementTree/lxml node.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.CanonicalizationMethod = CanonicalizationMethod
        self.SignatureMethod = SignatureMethod
        # Reference is repeatable; default to a fresh list (never a shared one).
        if Reference is None:
            self.Reference = []
        else:
            self.Reference = Reference
    # Factory honors dynamically registered subclasses before this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignedInfoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignedInfoType.subclass:
            return SignedInfoType.subclass(*args_, **kwargs_)
        else:
            return SignedInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CanonicalizationMethod(self): return self.CanonicalizationMethod
    def set_CanonicalizationMethod(self, CanonicalizationMethod): self.CanonicalizationMethod = CanonicalizationMethod
    def get_SignatureMethod(self): return self.SignatureMethod
    def set_SignatureMethod(self, SignatureMethod): self.SignatureMethod = SignatureMethod
    def get_Reference(self): return self.Reference
    def set_Reference(self, Reference): self.Reference = Reference
    def add_Reference(self, value): self.Reference.append(value)
    def insert_Reference_at(self, index, value): self.Reference.insert(index, value)
    def replace_Reference_at(self, index, value): self.Reference[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.CanonicalizationMethod is not None or
            self.SignatureMethod is not None or
            self.Reference
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignedInfoType', namespacedef_='', pretty_print=True):
        # Serialize this object (attributes + children) as an XML element.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignedInfoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignedInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignedInfoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignedInfoType'):
        # Only the optional Id attribute exists on SignedInfoType.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignedInfoType', fromsubclass_=False, pretty_print=True):
        # Children carry the XML-DSig 'ds:' namespace prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CanonicalizationMethod is not None:
            self.CanonicalizationMethod.export(outfile, level, namespace_='ds:', name_='CanonicalizationMethod', pretty_print=pretty_print)
        if self.SignatureMethod is not None:
            self.SignatureMethod.export(outfile, level, namespace_='ds:', name_='SignatureMethod', pretty_print=pretty_print)
        for Reference_ in self.Reference:
            Reference_.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an XML element node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse one child element by tag name; unknown tags are ignored.
        if nodeName_ == 'CanonicalizationMethod':
            obj_ = CanonicalizationMethodType.factory()
            obj_.build(child_)
            self.CanonicalizationMethod = obj_
            obj_.original_tagname_ = 'CanonicalizationMethod'
        elif nodeName_ == 'SignatureMethod':
            obj_ = SignatureMethodType.factory()
            obj_.build(child_)
            self.SignatureMethod = obj_
            obj_.original_tagname_ = 'SignatureMethod'
        elif nodeName_ == 'Reference':
            obj_ = ReferenceType.factory()
            obj_.build(child_)
            self.Reference.append(obj_)
            obj_.original_tagname_ = 'Reference'
# end class SignedInfoType
class CanonicalizationMethodType(GeneratedsSuper):
    """Generated binding for ds:CanonicalizationMethodType (W3C XML-Signature).

    A mixed-content element carrying an Algorithm attribute plus arbitrary
    (xsd:any) children.  Interleaved text/element content is preserved in
    content_ as MixedContainer items; anytypeobjs_ holds typed "any"
    children.  export* serializes to XML, build* parses from an
    ElementTree/lxml node.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(fix): the generated code assigned valueOf_ twice; once suffices.
        self.valueOf_ = valueOf_
    # Factory honors dynamically registered subclasses before this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CanonicalizationMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CanonicalizationMethodType.subclass:
            return CanonicalizationMethodType.subclass(*args_, **kwargs_)
        else:
            return CanonicalizationMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value):
        # BUG FIX: the generated code did ``self._anytypeobjs_[index] = value``,
        # indexing a non-existent attribute (AttributeError at runtime) and
        # replacing instead of inserting.  Insert into the real list, mirroring
        # insert_*_at on the sibling generated classes.
        self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric valueOf_ (including 0/0.0) counts as content.
        if (
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', namespacedef_='', pretty_print=True):
        # Serialize this object (attributes + mixed content) as an XML element.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalizationMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CanonicalizationMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='CanonicalizationMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='CanonicalizationMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed from content_ to keep text/element order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        # Populate from an XML element node, preserving mixed content; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Wrap any child (and trailing text) in MixedContainer entries.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class CanonicalizationMethodType
class SignatureMethodType(GeneratedsSuper):
    """Generated binding for ds:SignatureMethodType (W3C XML-Signature).

    A mixed-content element with an Algorithm attribute, an optional
    HMACOutputLength child, and arbitrary (xsd:any) children.  Interleaved
    text/element content is preserved in content_ as MixedContainer items.
    export* serializes to XML, build* parses from an ElementTree/lxml node;
    validate_* methods are warn-only.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, HMACOutputLength=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        self.HMACOutputLength = HMACOutputLength
        self.validate_HMACOutputLengthType(self.HMACOutputLength)
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(fix): the generated code assigned valueOf_ twice; once suffices.
        self.valueOf_ = valueOf_
    # Factory honors dynamically registered subclasses before this class.
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureMethodType.subclass:
            return SignatureMethodType.subclass(*args_, **kwargs_)
        else:
            return SignatureMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_HMACOutputLength(self): return self.HMACOutputLength
    def set_HMACOutputLength(self, HMACOutputLength): self.HMACOutputLength = HMACOutputLength
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value):
        # BUG FIX: the generated code did ``self._anytypeobjs_[index] = value``,
        # indexing a non-existent attribute (AttributeError at runtime) and
        # replacing instead of inserting.  Insert into the real list, mirroring
        # insert_*_at on the sibling generated classes.
        self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def validate_HMACOutputLengthType(self, value):
        # Validate type HMACOutputLengthType, a restriction on integer.
        # The schema imposes no checkable facet here; intentionally a no-op.
        if value is not None and Validate_simpletypes_:
            pass
    def hasContent_(self):
        # Numeric valueOf_ (including 0/0.0) counts as content.
        if (
            self.HMACOutputLength is not None or
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureMethodType', namespacedef_='', pretty_print=True):
        # Serialize this object (attributes + mixed content) as an XML element.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed from content_ to keep text/element order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.HMACOutputLength is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<HMACOutputLength>%s</HMACOutputLength>%s' % (self.gds_format_integer(self.HMACOutputLength, input_name='HMACOutputLength'), eol_))
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        # Populate from an XML element node, preserving mixed content; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # HMACOutputLength is parsed as an integer; any other child (and
        # trailing text) is wrapped in MixedContainer entries.
        if nodeName_ == 'HMACOutputLength' and child_.text is not None:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeInteger, 'HMACOutputLength', ival_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignatureMethodType
class ReferenceType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Id=None, URI=None, Type=None, Transforms=None, DigestMethod=None, DigestValue=None):
self.original_tagname_ = None
self.Id = _cast(None, Id)
self.URI = _cast(None, URI)
self.Type = _cast(None, Type)
self.Transforms = Transforms
self.DigestMethod = DigestMethod
self.DigestValue = DigestValue
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, ReferenceType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if ReferenceType.subclass:
return ReferenceType.subclass(*args_, **kwargs_)
else:
return ReferenceType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Transforms(self): return self.Transforms
def set_Transforms(self, Transforms): self.Transforms = Transforms
def get_DigestMethod(self): return self.DigestMethod
def set_DigestMethod(self, DigestMethod): self.DigestMethod = DigestMethod
def get_DigestValue(self): return self.DigestValue
def set_DigestValue(self, DigestValue): self.DigestValue = DigestValue
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_URI(self): return self.URI
def set_URI(self, URI): self.URI = URI
def get_Type(self): return self.Type
def set_Type(self, Type): self.Type = Type
def hasContent_(self):
if (
self.Transforms is not None or
self.DigestMethod is not None or
self.DigestValue is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ReferenceType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReferenceType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReferenceType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ReferenceType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ReferenceType'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
if self.URI is not None and 'URI' not in already_processed:
already_processed.add('URI')
outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
if self.Type is not None and 'Type' not in already_processed:
already_processed.add('Type')
outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ReferenceType', fromsubclass_=False, pretty_print=True):
        """Write the Transforms, DigestMethod and DigestValue children."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
        if self.DigestMethod is not None:
            self.DigestMethod.export(outfile, level, namespace_='ds:', name_='DigestMethod', pretty_print=pretty_print)
        if self.DigestValue is not None:
            showIndent(outfile, level, pretty_print)
            # DigestValue is written base64-encoded by the gds formatter.
            outfile.write('<ds:DigestValue>%s</ds:DigestValue>%s' % (self.gds_format_base64(self.DigestValue, input_name='DigestValue'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
value = find_attr_value_('URI', node)
if value is not None and 'URI' not in already_processed:
already_processed.add('URI')
self.URI = value
value = find_attr_value_('Type', node)
if value is not None and 'Type' not in already_processed:
already_processed.add('Type')
self.Type = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element *child_* (tag *nodeName_*)
        into the matching attribute of this object."""
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
            obj_.original_tagname_ = 'Transforms'
        elif nodeName_ == 'DigestMethod':
            obj_ = DigestMethodType.factory()
            obj_.build(child_)
            self.DigestMethod = obj_
            obj_.original_tagname_ = 'DigestMethod'
        elif nodeName_ == 'DigestValue':
            sval_ = child_.text
            if sval_ is not None:
                try:
                    # DigestValue carries base64 text; decode it to bytes.
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'DigestValue')
            else:
                bval_ = None
            self.DigestValue = bval_
# end class ReferenceType
class TransformsType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``ds:Transforms`` element:
    a container holding an ordered list of ``Transform`` children.

    Follows the generateDS.py code-generation pattern; edits are normally
    lost when the bindings are regenerated.
    """
    # Hooks allowing an external module to substitute a hand-written
    # subclass for this generated one (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Transform=None):
        self.original_tagname_ = None
        # Fresh list per instance (avoids the shared mutable default pitfall).
        if Transform is None:
            self.Transform = []
        else:
            self.Transform = Transform
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TransformsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TransformsType.subclass:
            return TransformsType.subclass(*args_, **kwargs_)
        else:
            return TransformsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors for the Transform child list ---
    def get_Transform(self): return self.Transform
    def set_Transform(self, Transform): self.Transform = Transform
    def add_Transform(self, value): self.Transform.append(value)
    def insert_Transform_at(self, index, value): self.Transform.insert(index, value)
    def replace_Transform_at(self, index, value): self.Transform[index] = value
    def hasContent_(self):
        """Return True when there is at least one Transform child."""
        if (
            self.Transform
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TransformsType', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Prefer the tag name seen while parsing, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TransformsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformsType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TransformsType', fromsubclass_=False, pretty_print=True):
        """Write each Transform child in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Transform_ in self.Transform:
            Transform_.export(outfile, level, namespace_='ds:', name_='Transform', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from parsed element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Append each parsed Transform child to self.Transform."""
        if nodeName_ == 'Transform':
            obj_ = TransformType.factory()
            obj_.build(child_)
            self.Transform.append(obj_)
            obj_.original_tagname_ = 'Transform'
# end class TransformsType
class TransformType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``Transform`` element.

    Mixed-content type: interleaved text, ``XPath`` children and wildcard
    (xs:any) children are preserved in document order in ``content_`` as
    MixedContainer items.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, XPath=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        self.anytypeobjs_ = anytypeobjs_
        if XPath is None:
            self.XPath = []
        else:
            self.XPath = XPath
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): duplicate of the earlier assignment — harmless
        # generator artifact.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TransformType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TransformType.subclass:
            return TransformType.subclass(*args_, **kwargs_)
        else:
            return TransformType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors ---
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_XPath(self): return self.XPath
    def set_XPath(self, XPath): self.XPath = XPath
    def add_XPath(self, value): self.XPath.append(value)
    def insert_XPath_at(self, index, value): self.XPath.insert(index, value)
    def replace_XPath_at(self, index, value): self.XPath[index] = value
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when any child or non-empty text content exists.

        Numeric valueOf_ counts as content even when it equals 0.
        """
        if (
            self.anytypeobjs_ is not None or
            self.XPath or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TransformType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TransformType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformType'):
        """Write the Algorithm XML attribute, at most once."""
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='TransformType', fromsubclass_=False, pretty_print=True):
        """Write mixed content in document order, then any XPath children
        and the wildcard child."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for XPath_ in self.XPath:
            showIndent(outfile, level, pretty_print)
            outfile.write('<XPath>%s</XPath>%s' % (self.gds_encode(self.gds_format_string(quote_xml(XPath_), input_name='XPath')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including mixed text) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        # Leading text before the first child becomes a text item.
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Algorithm attribute from *node*."""
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Record each parsed child (wildcard or XPath) plus tail text in
        self.content_ to preserve document order."""
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        elif nodeName_ == 'XPath' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'XPath', valuestr_)
            self.content_.append(obj_)
        # Text following this child (its "tail") is also mixed content.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class TransformType
class DigestMethodType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``DigestMethod`` element.

    Mixed-content type: interleaved text and wildcard (xs:any) children
    are preserved in document order in ``content_``.

    Fix over the generated original: ``insert_anytypeobjs_`` referenced a
    nonexistent ``self._anytypeobjs_`` (underscore typo), so it raised
    AttributeError on every call; it now inserts into ``self.anytypeobjs_``.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        # Fresh list per instance (avoids the shared mutable default pitfall).
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DigestMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DigestMethodType.subclass:
            return DigestMethodType.subclass(*args_, **kwargs_)
        else:
            return DigestMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors ---
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUGFIX: original assigned into self._anytypeobjs_ (nonexistent).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when any child or non-empty text content exists.

        Numeric valueOf_ counts as content even when it equals 0.
        """
        if (
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='DigestMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DigestMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DigestMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='DigestMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DigestMethodType'):
        """Write the Algorithm XML attribute, at most once."""
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='DigestMethodType', fromsubclass_=False, pretty_print=True):
        """Write mixed content in document order, then wildcard children."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including mixed text) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        # Leading text before the first child becomes a text item.
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Algorithm attribute from *node*."""
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Record each parsed wildcard child plus tail text in
        self.content_ to preserve document order."""
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        # Text following this child (its "tail") is also mixed content.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class DigestMethodType
class KeyInfoType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``ds:KeyInfo`` element.

    Mixed-content type holding lists of KeyName, KeyValue,
    RetrievalMethod, X509Data, PGPData, SPKIData and MgmtData children;
    interleaved text and children are preserved in document order in
    ``content_`` as MixedContainer items.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, KeyName=None, KeyValue=None, RetrievalMethod=None, X509Data=None, PGPData=None, SPKIData=None, MgmtData=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Each child list defaults to a fresh list per instance.
        if KeyName is None:
            self.KeyName = []
        else:
            self.KeyName = KeyName
        if KeyValue is None:
            self.KeyValue = []
        else:
            self.KeyValue = KeyValue
        if RetrievalMethod is None:
            self.RetrievalMethod = []
        else:
            self.RetrievalMethod = RetrievalMethod
        if X509Data is None:
            self.X509Data = []
        else:
            self.X509Data = X509Data
        if PGPData is None:
            self.PGPData = []
        else:
            self.PGPData = PGPData
        if SPKIData is None:
            self.SPKIData = []
        else:
            self.SPKIData = SPKIData
        if MgmtData is None:
            self.MgmtData = []
        else:
            self.MgmtData = MgmtData
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): duplicate of the earlier assignment — harmless
        # generator artifact.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, KeyInfoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if KeyInfoType.subclass:
            return KeyInfoType.subclass(*args_, **kwargs_)
        else:
            return KeyInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors for each child list and attribute ---
    def get_KeyName(self): return self.KeyName
    def set_KeyName(self, KeyName): self.KeyName = KeyName
    def add_KeyName(self, value): self.KeyName.append(value)
    def insert_KeyName_at(self, index, value): self.KeyName.insert(index, value)
    def replace_KeyName_at(self, index, value): self.KeyName[index] = value
    def get_KeyValue(self): return self.KeyValue
    def set_KeyValue(self, KeyValue): self.KeyValue = KeyValue
    def add_KeyValue(self, value): self.KeyValue.append(value)
    def insert_KeyValue_at(self, index, value): self.KeyValue.insert(index, value)
    def replace_KeyValue_at(self, index, value): self.KeyValue[index] = value
    def get_RetrievalMethod(self): return self.RetrievalMethod
    def set_RetrievalMethod(self, RetrievalMethod): self.RetrievalMethod = RetrievalMethod
    def add_RetrievalMethod(self, value): self.RetrievalMethod.append(value)
    def insert_RetrievalMethod_at(self, index, value): self.RetrievalMethod.insert(index, value)
    def replace_RetrievalMethod_at(self, index, value): self.RetrievalMethod[index] = value
    def get_X509Data(self): return self.X509Data
    def set_X509Data(self, X509Data): self.X509Data = X509Data
    def add_X509Data(self, value): self.X509Data.append(value)
    def insert_X509Data_at(self, index, value): self.X509Data.insert(index, value)
    def replace_X509Data_at(self, index, value): self.X509Data[index] = value
    def get_PGPData(self): return self.PGPData
    def set_PGPData(self, PGPData): self.PGPData = PGPData
    def add_PGPData(self, value): self.PGPData.append(value)
    def insert_PGPData_at(self, index, value): self.PGPData.insert(index, value)
    def replace_PGPData_at(self, index, value): self.PGPData[index] = value
    def get_SPKIData(self): return self.SPKIData
    def set_SPKIData(self, SPKIData): self.SPKIData = SPKIData
    def add_SPKIData(self, value): self.SPKIData.append(value)
    def insert_SPKIData_at(self, index, value): self.SPKIData.insert(index, value)
    def replace_SPKIData_at(self, index, value): self.SPKIData[index] = value
    def get_MgmtData(self): return self.MgmtData
    def set_MgmtData(self, MgmtData): self.MgmtData = MgmtData
    def add_MgmtData(self, value): self.MgmtData.append(value)
    def insert_MgmtData_at(self, index, value): self.MgmtData.insert(index, value)
    def replace_MgmtData_at(self, index, value): self.MgmtData[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when any child or non-empty text content exists.

        Numeric valueOf_ counts as content even when it equals 0.
        """
        if (
            self.KeyName or
            self.KeyValue or
            self.RetrievalMethod or
            self.X509Data or
            self.PGPData or
            self.SPKIData or
            self.MgmtData or
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='KeyInfoType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyInfoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='KeyInfoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyInfoType'):
        """Write the Id XML attribute, at most once."""
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='KeyInfoType', fromsubclass_=False, pretty_print=True):
        """Write mixed content in document order, then the typed child
        lists and the wildcard child."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for KeyName_ in self.KeyName:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ds:KeyName>%s</ds:KeyName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(KeyName_), input_name='KeyName')), eol_))
        for KeyValue_ in self.KeyValue:
            KeyValue_.export(outfile, level, namespace_='ds:', name_='KeyValue', pretty_print=pretty_print)
        for RetrievalMethod_ in self.RetrievalMethod:
            RetrievalMethod_.export(outfile, level, namespace_='ds:', name_='RetrievalMethod', pretty_print=pretty_print)
        for X509Data_ in self.X509Data:
            X509Data_.export(outfile, level, namespace_='ds:', name_='X509Data', pretty_print=pretty_print)
        for PGPData_ in self.PGPData:
            PGPData_.export(outfile, level, namespace_='ds:', name_='PGPData', pretty_print=pretty_print)
        for SPKIData_ in self.SPKIData:
            SPKIData_.export(outfile, level, namespace_='ds:', name_='SPKIData', pretty_print=pretty_print)
        for MgmtData_ in self.MgmtData:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ds:MgmtData>%s</ds:MgmtData>%s' % (self.gds_encode(self.gds_format_string(quote_xml(MgmtData_), input_name='MgmtData')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including mixed text) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        # Leading text before the first child becomes a text item.
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Id attribute from *node*."""
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child by tag name into self.content_
        (preserving document order) and, via the add_*/set_* hooks, into
        the matching typed list."""
        if nodeName_ == 'KeyName' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'KeyName', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == 'KeyValue':
            obj_ = KeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'KeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_KeyValue'):
                self.add_KeyValue(obj_.value)
            elif hasattr(self, 'set_KeyValue'):
                self.set_KeyValue(obj_.value)
        elif nodeName_ == 'RetrievalMethod':
            obj_ = RetrievalMethodType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RetrievalMethod', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RetrievalMethod'):
                self.add_RetrievalMethod(obj_.value)
            elif hasattr(self, 'set_RetrievalMethod'):
                self.set_RetrievalMethod(obj_.value)
        elif nodeName_ == 'X509Data':
            obj_ = X509DataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'X509Data', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_X509Data'):
                self.add_X509Data(obj_.value)
            elif hasattr(self, 'set_X509Data'):
                self.set_X509Data(obj_.value)
        elif nodeName_ == 'PGPData':
            obj_ = PGPDataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'PGPData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_PGPData'):
                self.add_PGPData(obj_.value)
            elif hasattr(self, 'set_PGPData'):
                self.set_PGPData(obj_.value)
        elif nodeName_ == 'SPKIData':
            obj_ = SPKIDataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'SPKIData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_SPKIData'):
                self.add_SPKIData(obj_.value)
            elif hasattr(self, 'set_SPKIData'):
                self.set_SPKIData(obj_.value)
        elif nodeName_ == 'MgmtData' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'MgmtData', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        # Text following this child (its "tail") is also mixed content.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class KeyInfoType
class KeyValueType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``KeyValue`` element.

    Mixed-content type holding an optional DSAKeyValue, RSAKeyValue and
    wildcard (xs:any) child; interleaved text and children are preserved
    in document order in ``content_``.
    """
    subclass = None
    superclass = None
    def __init__(self, DSAKeyValue=None, RSAKeyValue=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.DSAKeyValue = DSAKeyValue
        self.RSAKeyValue = RSAKeyValue
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): duplicate of the earlier assignment — harmless
        # generator artifact.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, KeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if KeyValueType.subclass:
            return KeyValueType.subclass(*args_, **kwargs_)
        else:
            return KeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors ---
    def get_DSAKeyValue(self): return self.DSAKeyValue
    def set_DSAKeyValue(self, DSAKeyValue): self.DSAKeyValue = DSAKeyValue
    def get_RSAKeyValue(self): return self.RSAKeyValue
    def set_RSAKeyValue(self, RSAKeyValue): self.RSAKeyValue = RSAKeyValue
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        """Return True when any child or non-empty text content exists.

        Numeric valueOf_ counts as content even when it equals 0.
        """
        if (
            self.DSAKeyValue is not None or
            self.RSAKeyValue is not None or
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='KeyValueType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='KeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyValueType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='KeyValueType', fromsubclass_=False, pretty_print=True):
        """Write mixed content in document order, then the typed and
        wildcard children."""
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DSAKeyValue is not None:
            self.DSAKeyValue.export(outfile, level, namespace_='ds:', name_='DSAKeyValue', pretty_print=pretty_print)
        if self.RSAKeyValue is not None:
            self.RSAKeyValue.export(outfile, level, namespace_='ds:', name_='RSAKeyValue', pretty_print=pretty_print)
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object (including mixed text) from *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        # Leading text before the first child becomes a text item.
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child by tag name into self.content_ and,
        via the add_*/set_* hooks, into the matching attribute."""
        if nodeName_ == 'DSAKeyValue':
            obj_ = DSAKeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'DSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_DSAKeyValue'):
                self.add_DSAKeyValue(obj_.value)
            elif hasattr(self, 'set_DSAKeyValue'):
                self.set_DSAKeyValue(obj_.value)
        elif nodeName_ == 'RSAKeyValue':
            obj_ = RSAKeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RSAKeyValue'):
                self.add_RSAKeyValue(obj_.value)
            elif hasattr(self, 'set_RSAKeyValue'):
                self.set_RSAKeyValue(obj_.value)
        elif nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        # Text following this child (its "tail") is also mixed content.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class KeyValueType
class RetrievalMethodType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``RetrievalMethod``
    element: URI/Type attributes plus an optional Transforms child."""
    subclass = None
    superclass = None
    def __init__(self, URI=None, Type=None, Transforms=None):
        self.original_tagname_ = None
        self.URI = _cast(None, URI)
        self.Type = _cast(None, Type)
        self.Transforms = Transforms
    def factory(*args_, **kwargs_):
        """Instantiate this class or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RetrievalMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RetrievalMethodType.subclass:
            return RetrievalMethodType.subclass(*args_, **kwargs_)
        else:
            return RetrievalMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors ---
    def get_Transforms(self): return self.Transforms
    def set_Transforms(self, Transforms): self.Transforms = Transforms
    def get_URI(self): return self.URI
    def set_URI(self, URI): self.URI = URI
    def get_Type(self): return self.Type
    def set_Type(self, Type): self.Type = Type
    def hasContent_(self):
        """Return True when the Transforms child is present."""
        if (
            self.Transforms is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='RetrievalMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetrievalMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RetrievalMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='RetrievalMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RetrievalMethodType'):
        """Write the URI and Type XML attributes, each at most once."""
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
    def exportChildren(self, outfile, level, namespace_='', name_='RetrievalMethodType', fromsubclass_=False, pretty_print=True):
        """Write the optional Transforms child."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from parsed element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the URI and Type attributes from *node*."""
        value = find_attr_value_('URI', node)
        if value is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            self.URI = value
        value = find_attr_value_('Type', node)
        if value is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            self.Type = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Store the parsed Transforms child, if present."""
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
            obj_.original_tagname_ = 'Transforms'
# end class RetrievalMethodType
class X509DataType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``X509Data`` element.

    Holds repeatable X.509-related children -- issuer/serial pairs,
    subject key identifiers (SKI), subject names, certificates and CRLs
    -- plus a single wildcard child stored in ``anytypeobjs_``.

    NOTE(review): this class is generateDS.py output; prefer
    regenerating from the schema over hand-editing behavior.
    """
    subclass = None
    superclass = None
    def __init__(self, X509IssuerSerial=None, X509SKI=None, X509SubjectName=None, X509Certificate=None, X509CRL=None, anytypeobjs_=None):
        self.original_tagname_ = None
        # Every repeatable child defaults to a fresh list; ``None`` in the
        # signature avoids the shared mutable-default pitfall.
        if X509IssuerSerial is None:
            self.X509IssuerSerial = []
        else:
            self.X509IssuerSerial = X509IssuerSerial
        if X509SKI is None:
            self.X509SKI = []
        else:
            self.X509SKI = X509SKI
        if X509SubjectName is None:
            self.X509SubjectName = []
        else:
            self.X509SubjectName = X509SubjectName
        if X509Certificate is None:
            self.X509Certificate = []
        else:
            self.X509Certificate = X509Certificate
        if X509CRL is None:
            self.X509CRL = []
        else:
            self.X509CRL = X509CRL
        self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, X509DataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if X509DataType.subclass:
            return X509DataType.subclass(*args_, **kwargs_)
        else:
            return X509DataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # -- Generated get/set/add/insert/replace accessors, one group per child list. --
    def get_X509IssuerSerial(self): return self.X509IssuerSerial
    def set_X509IssuerSerial(self, X509IssuerSerial): self.X509IssuerSerial = X509IssuerSerial
    def add_X509IssuerSerial(self, value): self.X509IssuerSerial.append(value)
    def insert_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial.insert(index, value)
    def replace_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial[index] = value
    def get_X509SKI(self): return self.X509SKI
    def set_X509SKI(self, X509SKI): self.X509SKI = X509SKI
    def add_X509SKI(self, value): self.X509SKI.append(value)
    def insert_X509SKI_at(self, index, value): self.X509SKI.insert(index, value)
    def replace_X509SKI_at(self, index, value): self.X509SKI[index] = value
    def get_X509SubjectName(self): return self.X509SubjectName
    def set_X509SubjectName(self, X509SubjectName): self.X509SubjectName = X509SubjectName
    def add_X509SubjectName(self, value): self.X509SubjectName.append(value)
    def insert_X509SubjectName_at(self, index, value): self.X509SubjectName.insert(index, value)
    def replace_X509SubjectName_at(self, index, value): self.X509SubjectName[index] = value
    def get_X509Certificate(self): return self.X509Certificate
    def set_X509Certificate(self, X509Certificate): self.X509Certificate = X509Certificate
    def add_X509Certificate(self, value): self.X509Certificate.append(value)
    def insert_X509Certificate_at(self, index, value): self.X509Certificate.insert(index, value)
    def replace_X509Certificate_at(self, index, value): self.X509Certificate[index] = value
    def get_X509CRL(self): return self.X509CRL
    def set_X509CRL(self, X509CRL): self.X509CRL = X509CRL
    def add_X509CRL(self, value): self.X509CRL.append(value)
    def insert_X509CRL_at(self, index, value): self.X509CRL.insert(index, value)
    def replace_X509CRL_at(self, index, value): self.X509CRL[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def hasContent_(self):
        # True when any child list is non-empty, or the wildcard child is set.
        if (
            self.X509IssuerSerial or
            self.X509SKI or
            self.X509SubjectName or
            self.X509Certificate or
            self.X509CRL or
            self.anytypeobjs_ is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='X509DataType', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to ``outfile``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509DataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The element name observed when parsing wins over the default.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509DataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='X509DataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No children: collapse to a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509DataType'):
        # X509Data carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='X509DataType', fromsubclass_=False, pretty_print=True):
        """Write each child list in schema order; complex children delegate
        to their own ``export``, simple-text children are escaped inline."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for X509IssuerSerial_ in self.X509IssuerSerial:
            X509IssuerSerial_.export(outfile, level, namespace_, name_='X509IssuerSerial', pretty_print=pretty_print)
        for X509SKI_ in self.X509SKI:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509SKI>%s</X509SKI>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SKI_), input_name='X509SKI')), eol_))
        for X509SubjectName_ in self.X509SubjectName:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509SubjectName>%s</X509SubjectName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SubjectName_), input_name='X509SubjectName')), eol_))
        for X509Certificate_ in self.X509Certificate:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509Certificate>%s</X509Certificate>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509Certificate_), input_name='X509Certificate')), eol_))
        for X509CRL_ in self.X509CRL:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509CRL>%s</X509CRL>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509CRL_), input_name='X509CRL')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse for X509Data.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on local tag name; unrecognized children go through the
        # wildcard (anyType) handler.
        if nodeName_ == 'X509IssuerSerial':
            obj_ = X509IssuerSerialType.factory()
            obj_.build(child_)
            self.X509IssuerSerial.append(obj_)
            obj_.original_tagname_ = 'X509IssuerSerial'
        elif nodeName_ == 'X509SKI':
            X509SKI_ = child_.text
            X509SKI_ = self.gds_validate_string(X509SKI_, node, 'X509SKI')
            self.X509SKI.append(X509SKI_)
        elif nodeName_ == 'X509SubjectName':
            X509SubjectName_ = child_.text
            X509SubjectName_ = self.gds_validate_string(X509SubjectName_, node, 'X509SubjectName')
            self.X509SubjectName.append(X509SubjectName_)
        elif nodeName_ == 'X509Certificate':
            X509Certificate_ = child_.text
            X509Certificate_ = self.gds_validate_string(X509Certificate_, node, 'X509Certificate')
            self.X509Certificate.append(X509Certificate_)
        elif nodeName_ == 'X509CRL':
            X509CRL_ = child_.text
            X509CRL_ = self.gds_validate_string(X509CRL_, node, 'X509CRL')
            self.X509CRL.append(X509CRL_)
        else:
            obj_ = self.gds_build_any(child_, 'X509DataType')
            if obj_ is not None:
                # NOTE(review): a second wildcard child overwrites the first
                # (single-slot anytypeobjs_) -- matches the generated schema
                # mapping, not a list as in PGPDataType.
                self.set_anytypeobjs_(obj_)
# end class X509DataType
class X509IssuerSerialType(GeneratedsSuper):
    """Binding for the ds:X509IssuerSerial pair: an issuer distinguished
    name plus the certificate serial number, both optional simple-text
    children."""
    subclass = None
    superclass = None

    def __init__(self, X509IssuerName=None, X509SerialNumber=None):
        self.original_tagname_ = None
        self.X509IssuerName = X509IssuerName
        self.X509SerialNumber = X509SerialNumber

    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(
                CurrentSubclassModule_, X509IssuerSerialType)
            if registered is not None:
                return registered(*args_, **kwargs_)
        maker = X509IssuerSerialType.subclass or X509IssuerSerialType
        return maker(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_X509IssuerName(self):
        return self.X509IssuerName

    def set_X509IssuerName(self, X509IssuerName):
        self.X509IssuerName = X509IssuerName

    def get_X509SerialNumber(self):
        return self.X509SerialNumber

    def set_X509SerialNumber(self, X509SerialNumber):
        self.X509SerialNumber = X509SerialNumber

    def hasContent_(self):
        # Content exists as soon as either child has been set.
        return (self.X509IssuerName is not None or
                self.X509SerialNumber is not None)

    def export(self, outfile, level, namespace_='', name_='X509IssuerSerialType', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` at the given indent level."""
        ns_override = GenerateDSNamespaceDefs_.get('X509IssuerSerialType')
        if ns_override is not None:
            namespacedef_ = ns_override
        end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509IssuerSerialType')
        if not self.hasContent_():
            outfile.write('/>%s' % (end,))
            return
        outfile.write('>%s' % (end,))
        self.exportChildren(outfile, level + 1, namespace_='', name_='X509IssuerSerialType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, end))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509IssuerSerialType'):
        # This type carries no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='X509IssuerSerialType', fromsubclass_=False, pretty_print=True):
        end = '\n' if pretty_print else ''
        # Emit each set child as an escaped simple-text element, in schema order.
        for tag, value in (('X509IssuerName', self.X509IssuerName),
                           ('X509SerialNumber', self.X509SerialNumber)):
            if value is None:
                continue
            showIndent(outfile, level, pretty_print)
            text = self.gds_encode(self.gds_format_string(quote_xml(value), input_name=tag))
            outfile.write('<%s>%s</%s>%s' % (tag, text, tag, end))

    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'X509IssuerName':
            self.X509IssuerName = self.gds_validate_string(
                child_.text, node, 'X509IssuerName')
        elif nodeName_ == 'X509SerialNumber':
            self.X509SerialNumber = self.gds_validate_string(
                child_.text, node, 'X509SerialNumber')
# end class X509IssuerSerialType
class PGPDataType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``PGPData`` element: an optional
    PGP key ID, an optional key packet (both simple-text children), plus
    any number of wildcard (##other) children kept in ``anytypeobjs_``.

    Auto-generated (generateDS.py) structure; only the broken
    ``insert_anytypeobjs_`` accessor has been hand-fixed (see below).
    """
    subclass = None
    superclass = None
    def __init__(self, PGPKeyID=None, PGPKeyPacket=None, anytypeobjs_=None):
        self.original_tagname_ = None
        self.PGPKeyID = PGPKeyID
        self.PGPKeyPacket = PGPKeyPacket
        # Wildcard children default to a fresh list (avoids the shared
        # mutable-default pitfall).
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PGPDataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PGPDataType.subclass:
            return PGPDataType.subclass(*args_, **kwargs_)
        else:
            return PGPDataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_PGPKeyID(self): return self.PGPKeyID
    def set_PGPKeyID(self, PGPKeyID): self.PGPKeyID = PGPKeyID
    def get_PGPKeyPacket(self): return self.PGPKeyPacket
    def set_PGPKeyPacket(self, PGPKeyPacket): self.PGPKeyPacket = PGPKeyPacket
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUGFIX: the generated code read the nonexistent attribute
    # ``self._anytypeobjs_`` (leading underscore) and assigned by index,
    # so every call raised AttributeError.  Insert into the real
    # ``anytypeobjs_`` list instead, matching the ``insert_*_at``
    # accessors generated for the sibling classes.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def hasContent_(self):
        # Content exists if either text child is set or any wildcard
        # child has been collected.
        if (
            self.PGPKeyID is not None or
            self.PGPKeyPacket is not None or
            self.anytypeobjs_
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='PGPDataType', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to ``outfile``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PGPDataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The element name observed when parsing wins over the default.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='PGPDataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='PGPDataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='PGPDataType'):
        # PGPData carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='PGPDataType', fromsubclass_=False, pretty_print=True):
        """Write set children in schema order; wildcard children delegate
        to their own ``export``."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.PGPKeyID is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<PGPKeyID>%s</PGPKeyID>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyID), input_name='PGPKeyID')), eol_))
        if self.PGPKeyPacket is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<PGPKeyPacket>%s</PGPKeyPacket>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyPacket), input_name='PGPKeyPacket')), eol_))
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse for PGPData.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on local tag name; unknown children are collected as
        # wildcard (anyType) objects.
        if nodeName_ == 'PGPKeyID':
            PGPKeyID_ = child_.text
            PGPKeyID_ = self.gds_validate_string(PGPKeyID_, node, 'PGPKeyID')
            self.PGPKeyID = PGPKeyID_
        elif nodeName_ == 'PGPKeyPacket':
            PGPKeyPacket_ = child_.text
            PGPKeyPacket_ = self.gds_validate_string(PGPKeyPacket_, node, 'PGPKeyPacket')
            self.PGPKeyPacket = PGPKeyPacket_
        else:
            obj_ = self.gds_build_any(child_, 'PGPDataType')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
# end class PGPDataType
class SPKIDataType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``SPKIData`` element: a list of
    ``SPKISexp`` text children plus one optional wildcard child.

    NOTE(review): generateDS.py output; prefer regenerating from the
    schema over hand-editing behavior.
    """
    subclass = None
    superclass = None
    def __init__(self, SPKISexp=None, anytypeobjs_=None):
        self.original_tagname_ = None
        # Repeatable SPKISexp defaults to a fresh list (no shared default).
        if SPKISexp is None:
            self.SPKISexp = []
        else:
            self.SPKISexp = SPKISexp
        self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SPKIDataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SPKIDataType.subclass:
            return SPKIDataType.subclass(*args_, **kwargs_)
        else:
            return SPKIDataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # -- Generated accessors. --
    def get_SPKISexp(self): return self.SPKISexp
    def set_SPKISexp(self, SPKISexp): self.SPKISexp = SPKISexp
    def add_SPKISexp(self, value): self.SPKISexp.append(value)
    def insert_SPKISexp_at(self, index, value): self.SPKISexp.insert(index, value)
    def replace_SPKISexp_at(self, index, value): self.SPKISexp[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def hasContent_(self):
        # True when the SPKISexp list is non-empty or a wildcard child is set.
        if (
            self.SPKISexp or
            self.anytypeobjs_ is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SPKIDataType', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to ``outfile``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SPKIDataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The element name observed when parsing wins over the default.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SPKIDataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SPKIDataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SPKIDataType'):
        # SPKIData carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='SPKIDataType', fromsubclass_=False, pretty_print=True):
        """Write each SPKISexp as an escaped text element, then the wildcard."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for SPKISexp_ in self.SPKISexp:
            showIndent(outfile, level, pretty_print)
            outfile.write('<SPKISexp>%s</SPKISexp>%s' % (self.gds_encode(self.gds_format_string(quote_xml(SPKISexp_), input_name='SPKISexp')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to parse for SPKIData.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'SPKISexp':
            SPKISexp_ = child_.text
            SPKISexp_ = self.gds_validate_string(SPKISexp_, node, 'SPKISexp')
            self.SPKISexp.append(SPKISexp_)
        else:
            obj_ = self.gds_build_any(child_, 'SPKIDataType')
            if obj_ is not None:
                # NOTE(review): single-slot wildcard -- a second unknown
                # child overwrites the first.
                self.set_anytypeobjs_(obj_)
# end class SPKIDataType
class ObjectType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``Object`` element.

    A mixed-content container: attributes ``Id``, ``MimeType``,
    ``Encoding``; interleaved text/element content is kept in
    ``content_`` (a list of ``mixedclass_`` wrappers) while the
    concatenated text lands in ``valueOf_``.

    NOTE(review): generateDS.py output; prefer regenerating from the
    schema over hand-editing behavior.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, MimeType=None, Encoding=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.MimeType = _cast(None, MimeType)
        self.Encoding = _cast(None, Encoding)
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        # Wrapper class used for mixed-content items; injectable for tests.
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_  # NOTE(review): duplicate of the assignment above (generator artifact); harmless.
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ObjectType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ObjectType.subclass:
            return ObjectType.subclass(*args_, **kwargs_)
        else:
            return ObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # -- Generated accessors. --
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_MimeType(self): return self.MimeType
    def set_MimeType(self, MimeType): self.MimeType = MimeType
    def get_Encoding(self): return self.Encoding
    def set_Encoding(self, Encoding): self.Encoding = Encoding
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric valueOf_ (even 0 / 0.0) counts as content; otherwise
        # ordinary truthiness applies.
        if (
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ObjectType', namespacedef_='', pretty_print=True):
        """Write this element (attributes and mixed content) as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ObjectType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The element name observed when parsing wins over the default.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ObjectType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ObjectType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ObjectType'):
        # Emit each optional attribute at most once.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
        if self.MimeType is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            outfile.write(' MimeType=%s' % (quote_attrib(self.MimeType), ))
        if self.Encoding is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            outfile.write(' Encoding=%s' % (quote_attrib(self.Encoding), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ObjectType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed in original order from content_.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree ``node``; returns self.

        Captures both the flattened text (``valueOf_``) and the ordered
        mixed content (``content_``).
        """
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
        value = find_attr_value_('MimeType', node)
        if value is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            self.MimeType = value
        value = find_attr_value_('Encoding', node)
        if value is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            self.Encoding = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): ``__ANY__`` is the generator's placeholder for the
        # wildcard child type and likely does not resolve at runtime; the
        # ``nodeName_ == ''`` guard also never matches a real element tag,
        # so this branch appears dead -- confirm before relying on it.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        # Tail text after a child element is kept as a text content item.
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class ObjectType
class ManifestType(GeneratedsSuper):
    """Binding for the ds:Manifest element: an optional ``Id`` attribute
    plus a list of ds:Reference children."""
    subclass = None
    superclass = None

    def __init__(self, Id=None, Reference=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Fresh list per instance; None in the signature avoids the
        # shared mutable-default pitfall.
        self.Reference = [] if Reference is None else Reference

    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            registered = getSubclassFromModule_(
                CurrentSubclassModule_, ManifestType)
            if registered is not None:
                return registered(*args_, **kwargs_)
        maker = ManifestType.subclass or ManifestType
        return maker(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_Reference(self):
        return self.Reference

    def set_Reference(self, Reference):
        self.Reference = Reference

    def add_Reference(self, value):
        self.Reference.append(value)

    def insert_Reference_at(self, index, value):
        self.Reference.insert(index, value)

    def replace_Reference_at(self, index, value):
        self.Reference[index] = value

    def get_Id(self):
        return self.Id

    def set_Id(self, Id):
        self.Id = Id

    def hasContent_(self):
        # Content exists as soon as at least one Reference is present.
        return bool(self.Reference)

    def export(self, outfile, level, namespace_='', name_='ManifestType', namespacedef_='', pretty_print=True):
        """Serialize this element to ``outfile`` at the given indent level."""
        ns_override = GenerateDSNamespaceDefs_.get('ManifestType')
        if ns_override is not None:
            namespacedef_ = ns_override
        end = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ManifestType')
        if not self.hasContent_():
            outfile.write('/>%s' % (end,))
            return
        outfile.write('>%s' % (end,))
        self.exportChildren(outfile, level + 1, namespace_='', name_='ManifestType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, end))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ManifestType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id),))

    def exportChildren(self, outfile, level, namespace_='', name_='ManifestType', fromsubclass_=False, pretty_print=True):
        # Each Reference child serializes itself under the ds: prefix.
        for ref in self.Reference:
            ref.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            local_tag = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, local_tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Reference':
            ref = ReferenceType.factory()
            ref.build(child_)
            ref.original_tagname_ = 'Reference'
            self.Reference.append(ref)
# end class ManifestType
class SignaturePropertiesType(GeneratedsSuper):
    """Generated binding for the XML-DSig ``SignatureProperties`` element:
    an optional ``Id`` attribute plus a list of ``SignatureProperty``
    children.

    NOTE(review): generateDS.py output; prefer regenerating from the
    schema over hand-editing behavior.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, SignatureProperty=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Repeatable child defaults to a fresh list (no shared default).
        if SignatureProperty is None:
            self.SignatureProperty = []
        else:
            self.SignatureProperty = SignatureProperty
    def factory(*args_, **kwargs_):
        """Construct an instance, honoring registered subclass hooks first."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignaturePropertiesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignaturePropertiesType.subclass:
            return SignaturePropertiesType.subclass(*args_, **kwargs_)
        else:
            return SignaturePropertiesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # -- Generated accessors. --
    def get_SignatureProperty(self): return self.SignatureProperty
    def set_SignatureProperty(self, SignatureProperty): self.SignatureProperty = SignatureProperty
    def add_SignatureProperty(self, value): self.SignatureProperty.append(value)
    def insert_SignatureProperty_at(self, index, value): self.SignatureProperty.insert(index, value)
    def replace_SignatureProperty_at(self, index, value): self.SignatureProperty[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one SignatureProperty child is present.
        if (
            self.SignatureProperty
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignaturePropertiesType', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to ``outfile``."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertiesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # The element name observed when parsing wins over the default.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertiesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertiesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertiesType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertiesType', fromsubclass_=False, pretty_print=True):
        # Each child serializes itself under the ds: prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for SignatureProperty_ in self.SignatureProperty:
            SignatureProperty_.export(outfile, level, namespace_='ds:', name_='SignatureProperty', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree ``node``; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'SignatureProperty':
            obj_ = SignaturePropertyType.factory()
            obj_.build(child_)
            self.SignatureProperty.append(obj_)
            obj_.original_tagname_ = 'SignatureProperty'
# end class SignaturePropertiesType
class SignaturePropertyType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, Target=None, Id=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
self.original_tagname_ = None
self.Target = _cast(None, Target)
self.Id = _cast(None, Id)
self.anytypeobjs_ = anytypeobjs_
self.valueOf_ = valueOf_
if mixedclass_ is None:
self.mixedclass_ = MixedContainer
else:
self.mixedclass_ = mixedclass_
if content_ is None:
self.content_ = []
else:
self.content_ = content_
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, SignaturePropertyType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if SignaturePropertyType.subclass:
return SignaturePropertyType.subclass(*args_, **kwargs_)
else:
return SignaturePropertyType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def get_Target(self): return self.Target
def set_Target(self, Target): self.Target = Target
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.anytypeobjs_ is not None or
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='SignaturePropertyType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertyType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertyType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertyType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertyType'):
if self.Target is not None and 'Target' not in already_processed:
already_processed.add('Target')
outfile.write(' Target=%s' % (quote_attrib(self.Target), ))
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertyType', fromsubclass_=False, pretty_print=True):
        """Write mixed content (text/element items) and any wildcard child."""
        if not fromsubclass_:
            # Mixed-content items preserve the original text/element order.
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Leading text becomes the first mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            # Strip any namespace URI from the child's tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('Target', node)
if value is not None and 'Target' not in already_processed:
already_processed.add('Target')
self.Target = value
value = find_attr_value_('Id', node)
if value is not None and 'Id' not in already_processed:
already_processed.add('Id')
self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child node into the mixed-content list."""
        # NOTE(review): '__ANY__' looks like an unresolved generateDS
        # placeholder for wildcard (xs:any) content -- confirm it is
        # actually defined at module level before relying on this branch;
        # as written it would raise NameError when taken.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            # Text following the child element is kept as mixed content.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignaturePropertyType
class DSAKeyValueType(GeneratedsSuper):
    """DSA key material from the XML-Signature KeyValue structure.

    Every child element (P, Q, G, Y, J, Seed, PgenCounter) holds an xsd
    CryptoBinary value: base64 text in the XML, stored here as the
    decoded byte string, or None when absent.

    Improvement over the raw generated code: the seven identical
    decode/validate/export branches are collapsed into loops driven by
    ``_CRYPTO_CHILDREN``; behavior and interface are unchanged.
    """
    subclass = None
    superclass = None
    # Child element names in schema order; each follows the same
    # decode/validate/export pattern.
    _CRYPTO_CHILDREN = ('P', 'Q', 'G', 'Y', 'J', 'Seed', 'PgenCounter')
    def __init__(self, P=None, Q=None, G=None, Y=None, J=None, Seed=None, PgenCounter=None):
        self.original_tagname_ = None
        # Assign and validate each CryptoBinary member uniformly.
        for member, value in zip(self._CRYPTO_CHILDREN,
                                 (P, Q, G, Y, J, Seed, PgenCounter)):
            setattr(self, member, value)
            self.validate_CryptoBinary(value)
    def factory(*args_, **kwargs_):
        """Instantiate DSAKeyValueType, honouring subclass registration."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DSAKeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DSAKeyValueType.subclass:
            return DSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return DSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate, kept for interface compatibility.
    def get_P(self): return self.P
    def set_P(self, P): self.P = P
    def get_Q(self): return self.Q
    def set_Q(self, Q): self.Q = Q
    def get_G(self): return self.G
    def set_G(self, G): self.G = G
    def get_Y(self): return self.Y
    def set_Y(self, Y): self.Y = Y
    def get_J(self): return self.J
    def set_J(self, J): self.J = J
    def get_Seed(self): return self.Seed
    def set_Seed(self, Seed): self.Seed = Seed
    def get_PgenCounter(self): return self.PgenCounter
    def set_PgenCounter(self, PgenCounter): self.PgenCounter = PgenCounter
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        # The schema imposes no extra facets, hence no checks here.
        if value is not None and Validate_simpletypes_:
            pass
    def hasContent_(self):
        """True when at least one child element is present."""
        return any(
            getattr(self, member) is not None
            for member in self._CRYPTO_CHILDREN)
    def export(self, outfile, level, namespace_='', name_='DSAKeyValueType', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DSAKeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='DSAKeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DSAKeyValueType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='DSAKeyValueType', fromsubclass_=False, pretty_print=True):
        """Write each present child as <Name>base64-value</Name>."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for member in self._CRYPTO_CHILDREN:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s>%s</%s>%s' % (
                    member,
                    self.gds_format_base64(value, input_name=member),
                    member, eol_))
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode one base64 child element into its member attribute."""
        if nodeName_ in self._CRYPTO_CHILDREN:
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, nodeName_)
            else:
                bval_ = None
            setattr(self, nodeName_, bval_)
            # validate type CryptoBinary
            self.validate_CryptoBinary(bval_)
# end class DSAKeyValueType
class RSAKeyValueType(GeneratedsSuper):
    """RSA key material (Modulus, Exponent) from XML-Signature KeyValue.

    Both children hold xsd CryptoBinary values: base64 text in the XML,
    stored here as decoded byte strings, or None when absent.

    Improvement over the raw generated code: the duplicated
    decode/export branches are collapsed into loops driven by
    ``_CRYPTO_CHILDREN``; behavior and interface are unchanged.
    """
    subclass = None
    superclass = None
    # Child element names in schema order.
    _CRYPTO_CHILDREN = ('Modulus', 'Exponent')
    def __init__(self, Modulus=None, Exponent=None):
        self.original_tagname_ = None
        for member, value in zip(self._CRYPTO_CHILDREN, (Modulus, Exponent)):
            setattr(self, member, value)
            self.validate_CryptoBinary(value)
    def factory(*args_, **kwargs_):
        """Instantiate RSAKeyValueType, honouring subclass registration."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RSAKeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RSAKeyValueType.subclass:
            return RSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return RSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate, kept for interface compatibility.
    def get_Modulus(self): return self.Modulus
    def set_Modulus(self, Modulus): self.Modulus = Modulus
    def get_Exponent(self): return self.Exponent
    def set_Exponent(self, Exponent): self.Exponent = Exponent
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        # The schema imposes no extra facets, hence no checks here.
        if value is not None and Validate_simpletypes_:
            pass
    def hasContent_(self):
        """True when at least one child element is present."""
        return any(
            getattr(self, member) is not None
            for member in self._CRYPTO_CHILDREN)
    def export(self, outfile, level, namespace_='', name_='RSAKeyValueType', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RSAKeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='RSAKeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RSAKeyValueType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='RSAKeyValueType', fromsubclass_=False, pretty_print=True):
        """Write each present child as <Name>base64-value</Name>."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for member in self._CRYPTO_CHILDREN:
            value = getattr(self, member)
            if value is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s>%s</%s>%s' % (
                    member,
                    self.gds_format_base64(value, input_name=member),
                    member, eol_))
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode one base64 child element into its member attribute."""
        if nodeName_ in self._CRYPTO_CHILDREN:
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, nodeName_)
            else:
                bval_ = None
            setattr(self, nodeName_, bval_)
            # validate type CryptoBinary
            self.validate_CryptoBinary(bval_)
# end class RSAKeyValueType
class ListaRpsType(GeneratedsSuper):
    """Container element holding a list of <Rps> children (tcRps objects)."""
    subclass = None
    superclass = None
    def __init__(self, Rps=None):
        self.original_tagname_ = None
        # Build a fresh list when absent to avoid sharing a mutable default.
        if Rps is None:
            self.Rps = []
        else:
            self.Rps = Rps
    def factory(*args_, **kwargs_):
        # Instantiate ListaRpsType, honouring subclass registration.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListaRpsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListaRpsType.subclass:
            return ListaRpsType.subclass(*args_, **kwargs_)
        else:
            return ListaRpsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor boilerplate for the Rps list.
    def get_Rps(self): return self.Rps
    def set_Rps(self, Rps): self.Rps = Rps
    def add_Rps(self, value): self.Rps.append(value)
    def insert_Rps_at(self, index, value): self.Rps.insert(index, value)
    def replace_Rps_at(self, index, value): self.Rps[index] = value
    def hasContent_(self):
        # Non-empty list means there is something to serialize.
        if (
            self.Rps
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ListaRpsType', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaRpsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaRpsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ListaRpsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaRpsType'):
        # This type declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ListaRpsType', fromsubclass_=False, pretty_print=True):
        """Export each Rps child in list order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Rps_ in self.Rps:
            Rps_.export(outfile, level, namespace_, name_='Rps', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # This type declares no XML attributes.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Each <Rps> child becomes a tcRps appended to the list.
        if nodeName_ == 'Rps':
            obj_ = tcRps.factory()
            obj_.build(child_)
            self.Rps.append(obj_)
            obj_.original_tagname_ = 'Rps'
# end class ListaRpsType
# Registry mapping root tag names to binding classes; generateDS left it
# empty here, so get_root_tag() falls back to module globals.
GDSClassesMapping = {
}
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print command-line usage and exit with status 1."""
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Return (local tag, binding class) for the document root *node*.

    The class is looked up first in GDSClassesMapping, then among this
    module's globals; None when neither knows the tag.
    """
    local_tag = Tag_pattern_.match(node.tag).groups()[-1]
    bound_class = GDSClassesMapping.get(local_tag)
    if bound_class is None:
        bound_class = globals().get(local_tag)
    return local_tag, bound_class
def parse(inFileName, silence=False):
    """Parse the XML file *inFileName* and return the built root object.

    Unless *silence* is true, the re-serialized document is echoed to
    stdout.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root: fall back to this schema's expected root element.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse *inFileName* and additionally rebuild an etree from the bindings.

    Returns (root object, root element, node->element mapping, reverse
    mapping). Unless *silence* is true the rebuilt tree is printed.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root: fall back to this schema's expected root element.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse XML from a string or bytes object and return the root binding.

    Robustness fix: on Python 3 the buffer is a BytesIO, which rejects
    ``str`` input, so text is encoded to UTF-8 first. Bytes input is
    passed through unchanged, and Python 2 behavior is untouched.
    """
    if sys.version_info.major == 2:
        from StringIO import StringIO as IOBuffer
    else:
        from io import BytesIO as IOBuffer
        if isinstance(inString, str):
            # BytesIO requires bytes; accept str for convenience.
            inString = inString.encode('utf-8')
    parser = None
    doc = parsexml_(IOBuffer(inString), parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root: fall back to this schema's expected root element.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse *inFileName* and print Python code that reconstructs the object.

    Returns the built root object.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root: fall back to this schema's expected root element.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('#from servico_consultar_situacao_lote_rps_envio_v03 import *\n\n')
        sys.stdout.write('import servico_consultar_situacao_lote_rps_envio_v03 as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """CLI entry point: parse the single XML file named on the command line."""
    args = sys.argv[1:]
    if len(args) == 1:
        parse(args[0])
    else:
        usage()
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public API of this generated module: XML-Signature support types plus
# the NFSe binding classes (tc* prefix).
__all__ = [
    "CanonicalizationMethodType",
    "ConsultarSituacaoLoteRpsEnvio",
    "DSAKeyValueType",
    "DigestMethodType",
    "KeyInfoType",
    "KeyValueType",
    "ListaMensagemRetorno",
    "ListaRpsType",
    "ManifestType",
    "ObjectType",
    "PGPDataType",
    "RSAKeyValueType",
    "ReferenceType",
    "RetrievalMethodType",
    "SPKIDataType",
    "SignatureMethodType",
    "SignaturePropertiesType",
    "SignaturePropertyType",
    "SignatureType",
    "SignatureValueType",
    "SignedInfoType",
    "TransformType",
    "TransformsType",
    "X509DataType",
    "X509IssuerSerialType",
    "tcCancelamentoNfse",
    "tcCompNfse",
    "tcConfirmacaoCancelamento",
    "tcContato",
    "tcCpfCnpj",
    "tcDadosConstrucaoCivil",
    "tcDadosPrestador",
    "tcDadosServico",
    "tcDadosTomador",
    "tcEndereco",
    "tcIdentificacaoIntermediarioServico",
    "tcIdentificacaoNfse",
    "tcIdentificacaoOrgaoGerador",
    "tcIdentificacaoPrestador",
    "tcIdentificacaoRps",
    "tcIdentificacaoTomador",
    "tcInfConfirmacaoCancelamento",
    "tcInfNfse",
    "tcInfPedidoCancelamento",
    "tcInfRps",
    "tcInfSubstituicaoNfse",
    "tcLoteRps",
    "tcMensagemRetorno",
    "tcMensagemRetornoLote",
    "tcNfse",
    "tcPedidoCancelamento",
    "tcRps",
    "tcSubstituicaoNfse",
    "tcValores"
]
| 48.3359 | 450 | 0.636378 |
from __future__ import unicode_literals
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
from builtins import str
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# Toggle for the generated validate_* hooks on simple types.
Validate_simpletypes_ = True
# Broadest string base type for isinstance checks across Python 2/3.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* and return the document tree.

    When no parser is supplied, prefer lxml's entity-friendly
    ETCompatXMLParser and fall back to the plain XMLParser when the
    ElementTree backend is in use (it has no ETCompatXMLParser).
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
# NOTE(review): this span was corrupted during extraction (the 'try:' and
# the start of the 'from nfselib.g...' line were lost). Reconstructed from
# the generateDS.py template and the surviving module-path fragment
# 'infes.v3_01.generatedsnamespaces' -- confirm the package path against
# the project layout.
try:
    from nfselib.ginfes.v3_01.generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
    GenerateDSNamespaceDefs_ = {}
try:
    from generatedssuper import GeneratedsSuper
except ImportError as exp:
    class GeneratedsSuper(object):
        """Fallback base class for the generated binding classes.

        Supplies the gds_format_* / gds_validate_* / gds_parse_* helpers
        when the shared ``generatedssuper`` module is not importable.
        Fix applied: ``gds_reverse_node_mapping`` used the Python-2-only
        ``dict.iteritems()`` and crashed on Python 3; it now uses
        ``items()``, which works on both.
        """
        # Matches a trailing +HH:MM / -HH:MM timezone suffix.
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            # Minimal fixed-offset tzinfo for parsed zone suffixes.
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return values
        def gds_format_float(self, input_data, input_name=''):
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(
                        node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return values
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            # ISO 8601 with optional fractional seconds and zone suffix.
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (time_parts[0], micro_seconds, )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                # datetime.date has no tzinfo; the AttributeError path
                # simply skips the zone suffix.
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat1 is a list of lists of patterns: AND across the outer
            # list, OR within each inner list.
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    if re_.search(patterns2, target) is not None:
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            # Slash-joined path of tag names from the root to *node*.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # node.getparent() is lxml-specific -- TODO confirm the
            # ElementTree fallback never reaches this helper.
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            # Honour an xsi:type attribute when present (lxml nsmap only).
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            # FIX: dict.iteritems() exists only on Python 2; items()
            # behaves identically here on both major versions.
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            # Python 2 path only; ExternalEncoding is presumably defined
            # elsewhere in the full module -- TODO confirm.
            if sys.version_info.major == 2 and not isinstance(instring, unicode):
                return instring.encode(ExternalEncoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            if type(self) != type(other):
                return False
            return self.__dict__ == other.__dict__
        def __ne__(self, other):
            return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    The subclass is expected to be named "<ClassName>Sub"; returns None
    when *module* defines no such attribute.
    '''
    subclass_name = class_.__name__ + 'Sub'
    return getattr(module, subclass_name) if hasattr(module, subclass_name) else None
# NOTE(review): the Tag_pattern_ assignment was truncated in extraction
# (only "(.*)')" survived); restored to the standard generateDS.py
# pattern that splits an optional '{namespace-uri}' prefix from the
# local tag name -- confirm against the original file.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Module whose <ClassName>Sub classes override the generated ones.
CurrentSubclassModule_ = None
def showIndent(outfile, level, pretty_print=True):
    """Write four spaces of indentation per nesting *level* (no-op when
    pretty-printing is disabled)."""
    if pretty_print:
        outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape *inStr* for XML character data, leaving CDATA sections intact."""
    if not inStr:
        return ''
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s2 = ''
    pos = 0
    # Escape only the text between CDATA sections; copy the sections
    # through unchanged.
    matchobjects = CDATA_pattern_.finditer(s1)
    for mo in matchobjects:
        s3 = s1[pos:mo.start()]
        s2 += quote_xml_aux(s3)
        s2 += s1[mo.start():mo.end()]
        pos = mo.end()
    s3 = s1[pos:]
    s2 += quote_xml_aux(s3)
    return s2
def quote_xml_aux(inStr):
    """Escape the XML special characters &, < and > (ampersand first so
    the generated entities are not double-escaped)."""
    escaped = inStr.replace('&', '&amp;')
    escaped = escaped.replace('<', '&lt;')
    return escaped.replace('>', '&gt;')
def quote_attrib(inStr):
    """Escape *inStr* for use as a quoted XML attribute value.

    Returns the value wrapped in double quotes; when the value itself
    contains a double quote, single-quote wrapping or &quot; escaping is
    used instead.
    """
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if '"' not in text:
        return '"%s"' % text
    if "'" in text:
        return '"%s"' % text.replace('"', '&quot;')
    return "'%s'" % text
def quote_python(inStr):
    """Render *inStr* as a Python string literal, picking a quote style
    that avoids escaping where possible."""
    text = inStr
    if "'" not in text:
        # Single quotes are safe; triple them when the text is multiline.
        return ("'%s'" if '\n' not in text else "'''%s'''") % text
    if '"' in text:
        text = text.replace('"', '\\"')
    return ('"%s"' if '\n' not in text else '"""%s"""') % text
def get_all_text_(node):
    """Concatenate *node*'s leading text with the tail text of each of
    its direct children (the plain-text content of a mixed element)."""
    pieces = [node.text] if node.text is not None else []
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Look up attribute *attr_name* on *node*.

    A plain name is looked up directly; a 'prefix:name' is resolved to
    '{uri}name' via node.nsmap (available on lxml elements only).
    Returns None when absent or unresolvable.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            return attrs.get('{%s}%s' % (namespace, local, ))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when XML input cannot be parsed
    into these generated bindings, e.g. a non-integer where an integer
    element is required."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *node*, appending its tag and source line.

    Relies on lxml's node.sourceline attribute for the line number.
    """
    raise GDSParseError(
        '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ))
class MixedContainer:
    """Holds one piece of mixed XML content -- plain text, a simple typed
    value, or a nested complex object -- tagged with category and
    content-type codes so the content can be re-serialized in document
    order."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        # category/content_type are the Category*/Type* codes above.
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type argument is unused here; the
        # stored value is returned regardless.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
            pretty_print=True):
        """Write this piece of content to *outfile* as XML, dispatching
        on the category code."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write a simple typed value as <name>value</name>, formatted
        per content_type (%d, %f, %g or base64)."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        """Append this piece of content to lxml *element*, merging text
        into .text/.tail as mixed content requires."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the simple value rendered as text, per content_type.
        NOTE(review): TypeNone/TypeText fall through every branch and
        would raise UnboundLocalError -- presumably never reached here."""
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write a Python-literal representation
        (model_.MixedContainer(...)) for generateDS's exportLiteral mode."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata describing one member of a generated class: its name,
    XSD data type (a scalar or a restriction chain), container flag,
    optionality, child attributes, choice group and documentation."""
    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None,
                 documentation=""):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional
        self.documentation = documentation
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        # The raw value: either a scalar type name or the full chain.
        return self.data_type
    def get_data_type(self):
        """Return the effective data type: the last entry of a chain,
        'xs:string' for an empty chain, or the scalar value itself."""
        chain = self.data_type
        if not isinstance(chain, list):
            return chain
        return chain[-1] if chain else 'xs:string'
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs
    def get_child_attrs(self):
        return self.child_attrs
    def set_choice(self, choice):
        self.choice = choice
    def get_choice(self):
        return self.choice
    def set_optional(self, optional):
        self.optional = optional
    def get_optional(self):
        return self.optional
    def get_documentation(self):
        return self.documentation
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ConsultarSituacaoLoteRpsEnvio(GeneratedsSuper):
    """XML binding for the <ConsultarSituacaoLoteRpsEnvio> element.

    Children: Prestador (tcIdentificacaoPrestador), Protocolo (string,
    max 50 chars) and an optional XML-DSig Signature; plus an Id
    attribute. Presumably the request used to query the processing
    status of a previously submitted RPS batch -- confirm against the
    Ginfes tipos_v03 schema documentation.
    """
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Id=None, Prestador=None, Protocolo=None, Signature=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.Prestador = Prestador
        self.Protocolo = Protocolo
        self.validate_tsNumeroProtocolo(self.Protocolo)
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one exists (via
        # CurrentSubclassModule_ or the .subclass hook); else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ConsultarSituacaoLoteRpsEnvio)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ConsultarSituacaoLoteRpsEnvio.subclass:
            return ConsultarSituacaoLoteRpsEnvio.subclass(*args_, **kwargs_)
        else:
            return ConsultarSituacaoLoteRpsEnvio(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for each child element / attribute.
    def get_Prestador(self): return self.Prestador
    def set_Prestador(self, Prestador): self.Prestador = Prestador
    def get_Protocolo(self): return self.Protocolo
    def set_Protocolo(self, Protocolo): self.Protocolo = Protocolo
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroProtocolo(self, value):
        # Validate type tsNumeroProtocolo, a restriction on xsd:string
        # (maxLength 50). Emits a warning; does not raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 50:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNumeroProtocolo' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; controls <x/> vs <x>...</x>.
        if (
            self.Prestador is not None or
            self.Protocolo is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', namespacedef_='xmlns:tipos="http://www.ginfes.com.br/tipos_v03.xsd"', pretty_print=True):
        """Serialize this object as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ConsultarSituacaoLoteRpsEnvio')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConsultarSituacaoLoteRpsEnvio')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ConsultarSituacaoLoteRpsEnvio', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Prestador is not None:
            self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
        if self.Protocolo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Protocolo>%s</Protocolo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Protocolo), input_name='Protocolo')), eol_))
        if self.Signature is not None:
            # The signature element is emitted in the dsig: namespace.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Prestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.Prestador = obj_
            obj_.original_tagname_ = 'Prestador'
        elif nodeName_ == 'Protocolo':
            Protocolo_ = child_.text
            Protocolo_ = self.gds_validate_string(Protocolo_, node, 'Protocolo')
            self.Protocolo = Protocolo_
            # validate type tsNumeroProtocolo
            self.validate_tsNumeroProtocolo(self.Protocolo)
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature = obj_
            obj_.original_tagname_ = 'Signature'
# end class ConsultarSituacaoLoteRpsEnvio
class tcCpfCnpj(GeneratedsSuper):
    """XML binding for the tcCpfCnpj complex type: a taxpayer identifier
    given either as Cpf (11-character string) or Cnpj (14-character
    string)."""
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Cpf=None, Cnpj=None):
        self.original_tagname_ = None
        self.Cpf = Cpf
        self.validate_tsCpf(self.Cpf)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one exists; else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcCpfCnpj)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcCpfCnpj.subclass:
            return tcCpfCnpj.subclass(*args_, **kwargs_)
        else:
            return tcCpfCnpj(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_Cpf(self): return self.Cpf
    def set_Cpf(self, Cpf): self.Cpf = Cpf
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def validate_tsCpf(self, value):
        # Validate type tsCpf, a restriction on xsd:string (length == 11).
        # Emits a warning; does not raise.
        if value is not None and Validate_simpletypes_:
            if len(value) != 11:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCpf' % {"value" : value.encode("utf-8")} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string (length == 14).
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; controls <x/> vs <x>...</x>.
        if (
            self.Cpf is not None or
            self.Cnpj is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcCpfCnpj', namespacedef_='', pretty_print=True):
        """Serialize this object as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCpfCnpj')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCpfCnpj')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcCpfCnpj', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCpfCnpj'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcCpfCnpj', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Cpf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cpf>%s</Cpf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cpf), input_name='Cpf')), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Cpf':
            Cpf_ = child_.text
            Cpf_ = self.gds_validate_string(Cpf_, node, 'Cpf')
            self.Cpf = Cpf_
            # validate type tsCpf
            self.validate_tsCpf(self.Cpf)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
# end class tcCpfCnpj
class tcEndereco(GeneratedsSuper):
    """XML binding for the tcEndereco complex type: a street address with
    Endereco (street), Numero, Complemento, Bairro (district),
    CodigoMunicipio (integer IBGE municipality code), Uf (2-letter state
    abbreviation) and Cep (integer postal code)."""
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Endereco=None, Numero=None, Complemento=None, Bairro=None, CodigoMunicipio=None, Uf=None, Cep=None):
        self.original_tagname_ = None
        self.Endereco = Endereco
        self.validate_tsEndereco(self.Endereco)
        self.Numero = Numero
        self.validate_tsNumeroEndereco(self.Numero)
        self.Complemento = Complemento
        self.validate_tsComplementoEndereco(self.Complemento)
        self.Bairro = Bairro
        self.validate_tsBairro(self.Bairro)
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        self.Uf = Uf
        self.validate_tsUf(self.Uf)
        self.Cep = Cep
        self.validate_tsCep(self.Cep)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one exists; else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcEndereco)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcEndereco.subclass:
            return tcEndereco.subclass(*args_, **kwargs_)
        else:
            return tcEndereco(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_Endereco(self): return self.Endereco
    def set_Endereco(self, Endereco): self.Endereco = Endereco
    def get_Numero(self): return self.Numero
    def set_Numero(self, Numero): self.Numero = Numero
    def get_Complemento(self): return self.Complemento
    def set_Complemento(self, Complemento): self.Complemento = Complemento
    def get_Bairro(self): return self.Bairro
    def set_Bairro(self, Bairro): self.Bairro = Bairro
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def get_Uf(self): return self.Uf
    def set_Uf(self, Uf): self.Uf = Uf
    def get_Cep(self): return self.Cep
    def set_Cep(self, Cep): self.Cep = Cep
    def validate_tsEndereco(self, value):
        # Validate type tsEndereco, a restriction on xsd:string (1..125 chars).
        # All validators below warn (via warnings_) rather than raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 125:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEndereco' % {"value" : value.encode("utf-8")} )
    def validate_tsNumeroEndereco(self, value):
        # Validate type tsNumeroEndereco, a restriction on xsd:string (1..10 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 10:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNumeroEndereco' % {"value" : value.encode("utf-8")} )
    def validate_tsComplementoEndereco(self, value):
        # Validate type tsComplementoEndereco, a restriction on xsd:string (1..60 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 60:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsComplementoEndereco' % {"value" : value.encode("utf-8")} )
    def validate_tsBairro(self, value):
        # Validate type tsBairro, a restriction on xsd:string (1..60 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 60:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsBairro' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoMunicipio(self, value): pass
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        # NOTE(review): digit-count check approximating the schema's
        # maxInclusive bound (at most 6 digits).
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def validate_tsUf(self, value):
        # Validate type tsUf, a restriction on xsd:string (length == 2).
        if value is not None and Validate_simpletypes_:
            if len(value) != 2:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
    def validate_tsCep(self, value):
        # Validate type tsCep, a restriction on xsd:int (at most 7 digits
        # by this check -- see NOTE on validate_tsCodigoMunicipioIbge).
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 8:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCep' % {"value" : value} )
    def hasContent_(self):
        # True when any child element is set; controls <x/> vs <x>...</x>.
        if (
            self.Endereco is not None or
            self.Numero is not None or
            self.Complemento is not None or
            self.Bairro is not None or
            self.CodigoMunicipio is not None or
            self.Uf is not None or
            self.Cep is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcEndereco', namespacedef_='', pretty_print=True):
        """Serialize this object as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcEndereco')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcEndereco')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcEndereco', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcEndereco'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcEndereco', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Endereco is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Endereco>%s</Endereco>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Endereco), input_name='Endereco')), eol_))
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Numero), input_name='Numero')), eol_))
        if self.Complemento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Complemento>%s</Complemento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Complemento), input_name='Complemento')), eol_))
        if self.Bairro is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Bairro>%s</Bairro>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
        if self.Uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
        if self.Cep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cep>%s</Cep>%s' % (self.gds_format_integer(self.Cep, input_name='Cep'), eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Endereco':
            Endereco_ = child_.text
            Endereco_ = self.gds_validate_string(Endereco_, node, 'Endereco')
            self.Endereco = Endereco_
            # validate type tsEndereco
            self.validate_tsEndereco(self.Endereco)
        elif nodeName_ == 'Numero':
            Numero_ = child_.text
            Numero_ = self.gds_validate_string(Numero_, node, 'Numero')
            self.Numero = Numero_
            # validate type tsNumeroEndereco
            self.validate_tsNumeroEndereco(self.Numero)
        elif nodeName_ == 'Complemento':
            Complemento_ = child_.text
            Complemento_ = self.gds_validate_string(Complemento_, node, 'Complemento')
            self.Complemento = Complemento_
            # validate type tsComplementoEndereco
            self.validate_tsComplementoEndereco(self.Complemento)
        elif nodeName_ == 'Bairro':
            Bairro_ = child_.text
            Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro')
            self.Bairro = Bairro_
            # validate type tsBairro
            self.validate_tsBairro(self.Bairro)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                # Non-integer content is a hard parse error.
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        elif nodeName_ == 'Uf':
            Uf_ = child_.text
            Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
            self.Uf = Uf_
            # validate type tsUf
            self.validate_tsUf(self.Uf)
        elif nodeName_ == 'Cep':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Cep')
            self.Cep = ival_
            # validate type tsCep
            self.validate_tsCep(self.Cep)
# end class tcEndereco
class tcContato(GeneratedsSuper):
    """XML binding for the tcContato complex type: contact data with an
    optional Telefone (1..11 chars) and Email (1..80 chars)."""
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, Telefone=None, Email=None):
        self.original_tagname_ = None
        self.Telefone = Telefone
        self.validate_tsTelefone(self.Telefone)
        self.Email = Email
        self.validate_tsEmail(self.Email)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one exists; else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcContato)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcContato.subclass:
            return tcContato.subclass(*args_, **kwargs_)
        else:
            return tcContato(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_Telefone(self): return self.Telefone
    def set_Telefone(self, Telefone): self.Telefone = Telefone
    def get_Email(self): return self.Email
    def set_Email(self, Email): self.Email = Email
    def validate_tsTelefone(self, value):
        # Validate type tsTelefone, a restriction on xsd:string (1..11 chars).
        # Validators warn (via warnings_) rather than raise.
        if value is not None and Validate_simpletypes_:
            if len(value) > 11:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsTelefone' % {"value" : value.encode("utf-8")} )
    def validate_tsEmail(self, value):
        # Validate type tsEmail, a restriction on xsd:string (1..80 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 80:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsEmail' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; controls <x/> vs <x>...</x>.
        if (
            self.Telefone is not None or
            self.Email is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcContato', namespacedef_='', pretty_print=True):
        """Serialize this object as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcContato')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcContato')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcContato', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcContato'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcContato', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Telefone is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Telefone>%s</Telefone>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Telefone), input_name='Telefone')), eol_))
        if self.Email is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Email>%s</Email>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Email), input_name='Email')), eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Telefone':
            Telefone_ = child_.text
            Telefone_ = self.gds_validate_string(Telefone_, node, 'Telefone')
            self.Telefone = Telefone_
            # validate type tsTelefone
            self.validate_tsTelefone(self.Telefone)
        elif nodeName_ == 'Email':
            Email_ = child_.text
            Email_ = self.gds_validate_string(Email_, node, 'Email')
            self.Email = Email_
            # validate type tsEmail
            self.validate_tsEmail(self.Email)
# end class tcContato
class tcIdentificacaoOrgaoGerador(GeneratedsSuper):
    """XML binding for the tcIdentificacaoOrgaoGerador complex type:
    identifies an issuing body by CodigoMunicipio (integer IBGE
    municipality code) and Uf (2-letter state abbreviation)."""
    # Hooks for the generateDS subclassing mechanism (see factory()).
    subclass = None
    superclass = None
    def __init__(self, CodigoMunicipio=None, Uf=None):
        self.original_tagname_ = None
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        self.Uf = Uf
        self.validate_tsUf(self.Uf)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one exists; else this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoOrgaoGerador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoOrgaoGerador.subclass:
            return tcIdentificacaoOrgaoGerador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoOrgaoGerador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def get_Uf(self): return self.Uf
    def set_Uf(self, Uf): self.Uf = Uf
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        # NOTE(review): digit-count check approximating the schema's
        # maxInclusive bound (at most 6 digits). Warns; does not raise.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def validate_tsUf(self, value):
        # Validate type tsUf, a restriction on xsd:string (length == 2).
        if value is not None and Validate_simpletypes_:
            if len(value) != 2:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsUf' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when any child element is set; controls <x/> vs <x>...</x>.
        if (
            self.CodigoMunicipio is not None or
            self.Uf is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', namespacedef_='', pretty_print=True):
        """Serialize this object as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoOrgaoGerador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-export under the tag name the object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoOrgaoGerador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoOrgaoGerador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoOrgaoGerador'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoOrgaoGerador', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
        if self.Uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Uf>%s</Uf>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Uf), input_name='Uf')), eol_))
    def build(self, node):
        """Populate this object from an lxml element *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                # Non-integer content is a hard parse error.
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
        elif nodeName_ == 'Uf':
            Uf_ = child_.text
            Uf_ = self.gds_validate_string(Uf_, node, 'Uf')
            self.Uf = Uf_
            # validate type tsUf
            self.validate_tsUf(self.Uf)
# end class tcIdentificacaoOrgaoGerador
class tcIdentificacaoRps(GeneratedsSuper):
    """Identification of an RPS (Recibo Provisorio de Servicos): number,
    series and type, as used by the Brazilian NFS-e (ABRASF) XML layout.

    generateDS-style binding class: validators only emit warnings (they
    never raise); export()/build() serialize to and parse from XML.
    """
    subclass = None
    superclass = None
    def __init__(self, Numero=None, Serie=None, Tipo=None):
        # Each member is validated on assignment (warning-only checks).
        self.original_tagname_ = None
        self.Numero = Numero
        self.validate_tsNumeroRps(self.Numero)
        self.Serie = Serie
        self.validate_tsSerieRps(self.Serie)
        self.Tipo = Tipo
        self.validate_tsTipoRps(self.Tipo)
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoRps.subclass:
            return tcIdentificacaoRps.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for generateDS API compatibility.
    def get_Numero(self): return self.Numero
    def set_Numero(self, Numero): self.Numero = Numero
    def get_Serie(self): return self.Serie
    def set_Serie(self, Serie): self.Serie = Serie
    def get_Tipo(self): return self.Tipo
    def set_Tipo(self, Tipo): self.Tipo = Tipo
    def validate_tsNumeroRps(self, value):
        # Validate type tsNumeroRps, a restriction on xsd:nonNegativeInteger.
        # NOTE(review): digit count is used as a proxy for the xsd
        # maxInclusive bound; a 15-digit value at the bound may warn --
        # confirm against the XSD.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroRps' % {"value" : value} )
    def validate_tsSerieRps(self, value):
        # Validate type tsSerieRps, a restriction on xsd:string (1-5 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsSerieRps' % {"value" : value.encode("utf-8")} )
    def validate_tsTipoRps(self, value):
        # Validate type tsTipoRps, a restriction on xsd:byte (values 1, 2, 3).
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsTipoRps_patterns_, value):
                # Fix: buildChildren stores Tipo as an int, and int has no
                # .encode(); the original 'value.encode("utf-8")' raised
                # AttributeError whenever this warning path was taken.
                # str() handles both int and str values safely.
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (str(value), self.validate_tsTipoRps_patterns_, ))
    validate_tsTipoRps_patterns_ = [['^1$|^2$|^3$']]
    def hasContent_(self):
        # True when at least one child element is set.
        if (
            self.Numero is not None or
            self.Serie is not None or
            self.Tipo is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoRps'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoRps', fromsubclass_=False, pretty_print=True):
        # Write each non-None member as a child element.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
        if self.Serie is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Serie>%s</Serie>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Serie), input_name='Serie')), eol_))
        if self.Tipo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Tipo>%s</Tipo>%s' % (self.gds_format_integer(self.Tipo, input_name='Tipo'), eol_))
    def build(self, node):
        # Populate this object from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Numero must be a non-negative integer; Serie is a string;
        # Tipo is an integer code (1, 2 or 3).
        if nodeName_ == 'Numero':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Numero')
            self.Numero = ival_
            # validate type tsNumeroRps
            self.validate_tsNumeroRps(self.Numero)
        elif nodeName_ == 'Serie':
            Serie_ = child_.text
            Serie_ = self.gds_validate_string(Serie_, node, 'Serie')
            self.Serie = Serie_
            # validate type tsSerieRps
            self.validate_tsSerieRps(self.Serie)
        elif nodeName_ == 'Tipo':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Tipo')
            self.Tipo = ival_
            # validate type tsTipoRps
            self.validate_tsTipoRps(self.Tipo)
# end class tcIdentificacaoRps
class tcIdentificacaoPrestador(GeneratedsSuper):
    """Identification of the service provider (prestador): CNPJ plus an
    optional municipal registration.  generateDS-style XML binding class
    for the Brazilian NFS-e (ABRASF) layout.
    """
    subclass = None
    superclass = None
    def __init__(self, Cnpj=None, InscricaoMunicipal=None):
        # Members are validated on assignment (warning-only checks).
        self.original_tagname_ = None
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoPrestador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoPrestador.subclass:
            return tcIdentificacaoPrestador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoPrestador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for generateDS API compatibility.
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string (exactly 14 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string
        # (1-15 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set.
        if (
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='tipos:', name_='tcIdentificacaoPrestador', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        # NOTE(review): unlike the sibling classes here, this one defaults
        # namespace_ to 'tipos:' and hard-codes that prefix on its child
        # tags in exportChildren -- looks hand-patched; confirm intended.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoPrestador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoPrestador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoPrestador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoPrestador'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoPrestador', fromsubclass_=False, pretty_print=True):
        # Child tags carry a hard-coded 'tipos:' prefix (see NOTE in export).
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<tipos:Cnpj>%s</tipos:Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<tipos:InscricaoMunicipal>%s</tipos:InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        # Populate this object from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Both children are simple strings; unknown tags are ignored.
        if nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoPrestador
class tcIdentificacaoTomador(GeneratedsSuper):
    """Identification of the service taker (tomador): CPF/CNPJ plus an
    optional municipal registration.  generateDS-style XML binding class
    for the Brazilian NFS-e (ABRASF) layout.
    """
    subclass = None
    superclass = None
    def __init__(self, CpfCnpj=None, InscricaoMunicipal=None):
        self.original_tagname_ = None
        # CpfCnpj is a complex type (tcCpfCnpj), so no simple-type check.
        self.CpfCnpj = CpfCnpj
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoTomador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoTomador.subclass:
            return tcIdentificacaoTomador.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoTomador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for generateDS API compatibility.
    def get_CpfCnpj(self): return self.CpfCnpj
    def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string
        # (1-15 chars).  Warning-only, never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set.
        if (
            self.CpfCnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoTomador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoTomador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoTomador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoTomador'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoTomador', fromsubclass_=False, pretty_print=True):
        # CpfCnpj delegates to the nested object's own export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CpfCnpj is not None:
            self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        # Populate this object from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # CpfCnpj is parsed into a nested tcCpfCnpj object.
        if nodeName_ == 'CpfCnpj':
            obj_ = tcCpfCnpj.factory()
            obj_.build(child_)
            self.CpfCnpj = obj_
            obj_.original_tagname_ = 'CpfCnpj'
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoTomador
class tcDadosTomador(GeneratedsSuper):
    """Full service-taker record: identification, company name, address
    and contact.  generateDS-style XML binding class for the Brazilian
    NFS-e (ABRASF) layout.
    """
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoTomador=None, RazaoSocial=None, Endereco=None, Contato=None):
        self.original_tagname_ = None
        # IdentificacaoTomador, Endereco and Contato are complex types;
        # only RazaoSocial gets a simple-type check.
        self.IdentificacaoTomador = IdentificacaoTomador
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        self.Endereco = Endereco
        self.Contato = Contato
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosTomador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosTomador.subclass:
            return tcDadosTomador.subclass(*args_, **kwargs_)
        else:
            return tcDadosTomador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for generateDS API compatibility.
    def get_IdentificacaoTomador(self): return self.IdentificacaoTomador
    def set_IdentificacaoTomador(self, IdentificacaoTomador): self.IdentificacaoTomador = IdentificacaoTomador
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_Endereco(self): return self.Endereco
    def set_Endereco(self, Endereco): self.Endereco = Endereco
    def get_Contato(self): return self.Contato
    def set_Contato(self, Contato): self.Contato = Contato
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string
        # (1-115 chars).  Warning-only, never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set.
        if (
            self.IdentificacaoTomador is not None or
            self.RazaoSocial is not None or
            self.Endereco is not None or
            self.Contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosTomador', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosTomador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosTomador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosTomador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosTomador'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosTomador', fromsubclass_=False, pretty_print=True):
        # Complex children delegate to their own export(); RazaoSocial is
        # written inline as an escaped string.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoTomador is not None:
            self.IdentificacaoTomador.export(outfile, level, namespace_, name_='IdentificacaoTomador', pretty_print=pretty_print)
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.Endereco is not None:
            self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
        if self.Contato is not None:
            self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Complex children are parsed into nested binding objects.
        if nodeName_ == 'IdentificacaoTomador':
            obj_ = tcIdentificacaoTomador.factory()
            obj_.build(child_)
            self.IdentificacaoTomador = obj_
            obj_.original_tagname_ = 'IdentificacaoTomador'
        elif nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'Endereco':
            obj_ = tcEndereco.factory()
            obj_.build(child_)
            self.Endereco = obj_
            obj_.original_tagname_ = 'Endereco'
        elif nodeName_ == 'Contato':
            obj_ = tcContato.factory()
            obj_.build(child_)
            self.Contato = obj_
            obj_.original_tagname_ = 'Contato'
# end class tcDadosTomador
class tcIdentificacaoIntermediarioServico(GeneratedsSuper):
    """Identification of a service intermediary: company name, CPF/CNPJ
    and optional municipal registration.  generateDS-style XML binding
    class for the Brazilian NFS-e (ABRASF) layout.
    """
    subclass = None
    superclass = None
    def __init__(self, RazaoSocial=None, CpfCnpj=None, InscricaoMunicipal=None):
        self.original_tagname_ = None
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        # CpfCnpj is a complex type (tcCpfCnpj), so no simple-type check.
        self.CpfCnpj = CpfCnpj
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoIntermediarioServico)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoIntermediarioServico.subclass:
            return tcIdentificacaoIntermediarioServico.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoIntermediarioServico(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Plain accessors kept for generateDS API compatibility.
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_CpfCnpj(self): return self.CpfCnpj
    def set_CpfCnpj(self, CpfCnpj): self.CpfCnpj = CpfCnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string
        # (1-115 chars).  Warning-only, never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string
        # (1-15 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set.
        if (
            self.RazaoSocial is not None or
            self.CpfCnpj is not None or
            self.InscricaoMunicipal is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoIntermediarioServico')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoIntermediarioServico')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoIntermediarioServico', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoIntermediarioServico'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoIntermediarioServico', fromsubclass_=False, pretty_print=True):
        # CpfCnpj delegates to the nested object's own export().
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.CpfCnpj is not None:
            self.CpfCnpj.export(outfile, level, namespace_, name_='CpfCnpj', pretty_print=pretty_print)
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
    def build(self, node):
        # Populate this object from an ElementTree node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # CpfCnpj is parsed into a nested tcCpfCnpj object.
        if nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'CpfCnpj':
            obj_ = tcCpfCnpj.factory()
            obj_.build(child_)
            self.CpfCnpj = obj_
            obj_.original_tagname_ = 'CpfCnpj'
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
# end class tcIdentificacaoIntermediarioServico
class tcValores(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, ValorServicos=None, ValorDeducoes=None, ValorPis=None, ValorCofins=None, ValorInss=None, ValorIr=None, ValorCsll=None, IssRetido=None, ValorIss=None, ValorIssRetido=None, OutrasRetencoes=None, BaseCalculo=None, Aliquota=None, ValorLiquidoNfse=None, DescontoIncondicionado=None, DescontoCondicionado=None):
        """Monetary values block of an NFS-e invoice.

        Every monetary member is a tsValor decimal; IssRetido is a
        tsSimNao flag and Aliquota a tsAliquota rate.  Each member is
        validated on assignment; the validators only emit warnings.
        """
        self.original_tagname_ = None
        self.ValorServicos = ValorServicos
        self.validate_tsValor(self.ValorServicos)
        self.ValorDeducoes = ValorDeducoes
        self.validate_tsValor(self.ValorDeducoes)
        self.ValorPis = ValorPis
        self.validate_tsValor(self.ValorPis)
        self.ValorCofins = ValorCofins
        self.validate_tsValor(self.ValorCofins)
        self.ValorInss = ValorInss
        self.validate_tsValor(self.ValorInss)
        self.ValorIr = ValorIr
        self.validate_tsValor(self.ValorIr)
        self.ValorCsll = ValorCsll
        self.validate_tsValor(self.ValorCsll)
        self.IssRetido = IssRetido
        self.validate_tsSimNao(self.IssRetido)
        self.ValorIss = ValorIss
        self.validate_tsValor(self.ValorIss)
        self.ValorIssRetido = ValorIssRetido
        self.validate_tsValor(self.ValorIssRetido)
        self.OutrasRetencoes = OutrasRetencoes
        self.validate_tsValor(self.OutrasRetencoes)
        self.BaseCalculo = BaseCalculo
        self.validate_tsValor(self.BaseCalculo)
        self.Aliquota = Aliquota
        self.validate_tsAliquota(self.Aliquota)
        self.ValorLiquidoNfse = ValorLiquidoNfse
        self.validate_tsValor(self.ValorLiquidoNfse)
        self.DescontoIncondicionado = DescontoIncondicionado
        self.validate_tsValor(self.DescontoIncondicionado)
        self.DescontoCondicionado = DescontoCondicionado
        self.validate_tsValor(self.DescontoCondicionado)
    def factory(*args_, **kwargs_):
        # Build an instance, honoring an externally registered subclass.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcValores)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcValores.subclass:
            return tcValores.subclass(*args_, **kwargs_)
        else:
            return tcValores(*args_, **kwargs_)
    factory = staticmethod(factory)
def get_ValorServicos(self): return self.ValorServicos
def set_ValorServicos(self, ValorServicos): self.ValorServicos = ValorServicos
def get_ValorDeducoes(self): return self.ValorDeducoes
def set_ValorDeducoes(self, ValorDeducoes): self.ValorDeducoes = ValorDeducoes
def get_ValorPis(self): return self.ValorPis
def set_ValorPis(self, ValorPis): self.ValorPis = ValorPis
def get_ValorCofins(self): return self.ValorCofins
def set_ValorCofins(self, ValorCofins): self.ValorCofins = ValorCofins
def get_ValorInss(self): return self.ValorInss
def set_ValorInss(self, ValorInss): self.ValorInss = ValorInss
def get_ValorIr(self): return self.ValorIr
def set_ValorIr(self, ValorIr): self.ValorIr = ValorIr
def get_ValorCsll(self): return self.ValorCsll
def set_ValorCsll(self, ValorCsll): self.ValorCsll = ValorCsll
def get_IssRetido(self): return self.IssRetido
def set_IssRetido(self, IssRetido): self.IssRetido = IssRetido
def get_ValorIss(self): return self.ValorIss
def set_ValorIss(self, ValorIss): self.ValorIss = ValorIss
def get_ValorIssRetido(self): return self.ValorIssRetido
def set_ValorIssRetido(self, ValorIssRetido): self.ValorIssRetido = ValorIssRetido
def get_OutrasRetencoes(self): return self.OutrasRetencoes
def set_OutrasRetencoes(self, OutrasRetencoes): self.OutrasRetencoes = OutrasRetencoes
def get_BaseCalculo(self): return self.BaseCalculo
def set_BaseCalculo(self, BaseCalculo): self.BaseCalculo = BaseCalculo
def get_Aliquota(self): return self.Aliquota
def set_Aliquota(self, Aliquota): self.Aliquota = Aliquota
def get_ValorLiquidoNfse(self): return self.ValorLiquidoNfse
def set_ValorLiquidoNfse(self, ValorLiquidoNfse): self.ValorLiquidoNfse = ValorLiquidoNfse
def get_DescontoIncondicionado(self): return self.DescontoIncondicionado
def set_DescontoIncondicionado(self, DescontoIncondicionado): self.DescontoIncondicionado = DescontoIncondicionado
def get_DescontoCondicionado(self): return self.DescontoCondicionado
def set_DescontoCondicionado(self, DescontoCondicionado): self.DescontoCondicionado = DescontoCondicionado
    def validate_tsValor(self, value):
        # Validate type tsValor, a restriction on xsd:decimal.
        # Warns (never raises) on negative values; digit count is used as
        # a proxy for the xsd maxInclusive bound -- TODO confirm vs XSD.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
def validate_tsSimNao(self, value):
# Validate type tsSimNao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsSimNao_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
    def validate_tsAliquota(self, value):
        # Validate type tsAliquota, a restriction on xsd:decimal.
        # Warns (never raises) on negative rates; digit count is used as
        # a proxy for the xsd maxInclusive bound -- TODO confirm vs XSD.
        if value is not None and Validate_simpletypes_:
            if value < 0:
                warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsAliquota' % {"value" : value} )
            if len(str(value)) >= 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsAliquota' % {"value" : value} )
def hasContent_(self):
if (
self.ValorServicos is not None or
self.ValorDeducoes is not None or
self.ValorPis is not None or
self.ValorCofins is not None or
self.ValorInss is not None or
self.ValorIr is not None or
self.ValorCsll is not None or
self.IssRetido is not None or
self.ValorIss is not None or
self.ValorIssRetido is not None or
self.OutrasRetencoes is not None or
self.BaseCalculo is not None or
self.Aliquota is not None or
self.ValorLiquidoNfse is not None or
self.DescontoIncondicionado is not None or
self.DescontoCondicionado is not None
):
return True
else:
return False
    def export(self, outfile, level, namespace_='', name_='tcValores', namespacedef_='', pretty_print=True):
        # Serialize this element (and its children) as XML to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcValores')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcValores')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcValores', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcValores'):
        # No XML attributes are defined for this element type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcValores', fromsubclass_=False, pretty_print=True):
        # Write each non-None member as a child element; monetary members
        # use gds_format_float, IssRetido uses gds_format_integer.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ValorServicos is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorServicos>%s</ValorServicos>%s' % (self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), eol_))
        if self.ValorDeducoes is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorDeducoes>%s</ValorDeducoes>%s' % (self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), eol_))
        if self.ValorPis is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorPis>%s</ValorPis>%s' % (self.gds_format_float(self.ValorPis, input_name='ValorPis'), eol_))
        if self.ValorCofins is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorCofins>%s</ValorCofins>%s' % (self.gds_format_float(self.ValorCofins, input_name='ValorCofins'), eol_))
        if self.ValorInss is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorInss>%s</ValorInss>%s' % (self.gds_format_float(self.ValorInss, input_name='ValorInss'), eol_))
        if self.ValorIr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorIr>%s</ValorIr>%s' % (self.gds_format_float(self.ValorIr, input_name='ValorIr'), eol_))
        if self.ValorCsll is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorCsll>%s</ValorCsll>%s' % (self.gds_format_float(self.ValorCsll, input_name='ValorCsll'), eol_))
        if self.IssRetido is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<IssRetido>%s</IssRetido>%s' % (self.gds_format_integer(self.IssRetido, input_name='IssRetido'), eol_))
        if self.ValorIss is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorIss>%s</ValorIss>%s' % (self.gds_format_float(self.ValorIss, input_name='ValorIss'), eol_))
        if self.ValorIssRetido is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorIssRetido>%s</ValorIssRetido>%s' % (self.gds_format_float(self.ValorIssRetido, input_name='ValorIssRetido'), eol_))
        if self.OutrasRetencoes is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<OutrasRetencoes>%s</OutrasRetencoes>%s' % (self.gds_format_float(self.OutrasRetencoes, input_name='OutrasRetencoes'), eol_))
        if self.BaseCalculo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<BaseCalculo>%s</BaseCalculo>%s' % (self.gds_format_float(self.BaseCalculo, input_name='BaseCalculo'), eol_))
        if self.Aliquota is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Aliquota>%s</Aliquota>%s' % (self.gds_format_float(self.Aliquota, input_name='Aliquota'), eol_))
        if self.ValorLiquidoNfse is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorLiquidoNfse>%s</ValorLiquidoNfse>%s' % (self.gds_format_float(self.ValorLiquidoNfse, input_name='ValorLiquidoNfse'), eol_))
        if self.DescontoIncondicionado is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DescontoIncondicionado>%s</DescontoIncondicionado>%s' % (self.gds_format_float(self.DescontoIncondicionado, input_name='DescontoIncondicionado'), eol_))
        if self.DescontoCondicionado is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DescontoCondicionado>%s</DescontoCondicionado>%s' % (self.gds_format_float(self.DescontoCondicionado, input_name='DescontoCondicionado'), eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'ValorServicos':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorServicos')
self.ValorServicos = fval_
# validate type tsValor
self.validate_tsValor(self.ValorServicos)
elif nodeName_ == 'ValorDeducoes':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes')
self.ValorDeducoes = fval_
# validate type tsValor
self.validate_tsValor(self.ValorDeducoes)
elif nodeName_ == 'ValorPis':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorPis')
self.ValorPis = fval_
# validate type tsValor
self.validate_tsValor(self.ValorPis)
elif nodeName_ == 'ValorCofins':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCofins')
self.ValorCofins = fval_
# validate type tsValor
self.validate_tsValor(self.ValorCofins)
elif nodeName_ == 'ValorInss':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorInss')
self.ValorInss = fval_
# validate type tsValor
self.validate_tsValor(self.ValorInss)
elif nodeName_ == 'ValorIr':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIr')
self.ValorIr = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIr)
elif nodeName_ == 'ValorCsll':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorCsll')
self.ValorCsll = fval_
# validate type tsValor
self.validate_tsValor(self.ValorCsll)
elif nodeName_ == 'IssRetido':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'IssRetido')
self.IssRetido = ival_
# validate type tsSimNao
self.validate_tsSimNao(self.IssRetido)
elif nodeName_ == 'ValorIss':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIss')
self.ValorIss = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIss)
elif nodeName_ == 'ValorIssRetido':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorIssRetido')
self.ValorIssRetido = fval_
# validate type tsValor
self.validate_tsValor(self.ValorIssRetido)
elif nodeName_ == 'OutrasRetencoes':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'OutrasRetencoes')
self.OutrasRetencoes = fval_
# validate type tsValor
self.validate_tsValor(self.OutrasRetencoes)
elif nodeName_ == 'BaseCalculo':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'BaseCalculo')
self.BaseCalculo = fval_
# validate type tsValor
self.validate_tsValor(self.BaseCalculo)
elif nodeName_ == 'Aliquota':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'Aliquota')
self.Aliquota = fval_
# validate type tsAliquota
self.validate_tsAliquota(self.Aliquota)
elif nodeName_ == 'ValorLiquidoNfse':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'ValorLiquidoNfse')
self.ValorLiquidoNfse = fval_
# validate type tsValor
self.validate_tsValor(self.ValorLiquidoNfse)
elif nodeName_ == 'DescontoIncondicionado':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'DescontoIncondicionado')
self.DescontoIncondicionado = fval_
# validate type tsValor
self.validate_tsValor(self.DescontoIncondicionado)
elif nodeName_ == 'DescontoCondicionado':
sval_ = child_.text
try:
fval_ = float(sval_)
except (TypeError, ValueError) as exp:
raise_parse_error(child_, 'requires float or double: %s' % exp)
fval_ = self.gds_validate_float(fval_, node, 'DescontoCondicionado')
self.DescontoCondicionado = fval_
# validate type tsValor
self.validate_tsValor(self.DescontoCondicionado)
# end class tcValores
class tcDadosServico(GeneratedsSuper):
    """Service data of an RPS (ABRASF NFS-e schema).

    Carries the amounts (Valores), the service-list item, the CNAE code,
    the municipal taxation code, the free-text service description and
    the IBGE code of the municipality where the service was rendered.
    Simple-type validators only emit warnings_; they never raise.
    """
    subclass = None
    superclass = None
    def __init__(self, Valores=None, ItemListaServico=None, CodigoCnae=None, CodigoTributacaoMunicipio=None, Discriminacao=None, CodigoMunicipio=None):
        self.original_tagname_ = None
        self.Valores = Valores
        self.ItemListaServico = ItemListaServico
        self.validate_tsItemListaServico(self.ItemListaServico)
        self.CodigoCnae = CodigoCnae
        self.validate_tsCodigoCnae(self.CodigoCnae)
        self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
        self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
        self.Discriminacao = Discriminacao
        self.validate_tsDiscriminacao(self.Discriminacao)
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
    def factory(*args_, **kwargs_):
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosServico)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosServico.subclass:
            return tcDadosServico.subclass(*args_, **kwargs_)
        else:
            return tcDadosServico(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Valores(self): return self.Valores
    def set_Valores(self, Valores): self.Valores = Valores
    def get_ItemListaServico(self): return self.ItemListaServico
    def set_ItemListaServico(self, ItemListaServico): self.ItemListaServico = ItemListaServico
    def get_CodigoCnae(self): return self.CodigoCnae
    def set_CodigoCnae(self, CodigoCnae): self.CodigoCnae = CodigoCnae
    def get_CodigoTributacaoMunicipio(self): return self.CodigoTributacaoMunicipio
    def set_CodigoTributacaoMunicipio(self, CodigoTributacaoMunicipio): self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio
    def get_Discriminacao(self): return self.Discriminacao
    def set_Discriminacao(self, Discriminacao): self.Discriminacao = Discriminacao
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def validate_tsItemListaServico(self, value):
        # Validate type tsItemListaServico, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 5:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsItemListaServico' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoCnae(self, value):
        # Validate type tsCodigoCnae, a restriction on xsd:int.
        # Fix: the generated check used ``len(str(value)) >= 7``, which
        # warned on every valid 7-digit CNAE subclass code; a value only
        # exceeds the restriction when it has more than 7 digits.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) > 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoCnae' % {"value" : value} )
    def validate_tsCodigoTributacao(self, value):
        # Validate type tsCodigoTributacao, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 20:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoTributacao' % {"value" : value.encode("utf-8")} )
    def validate_tsDiscriminacao(self, value):
        # Validate type tsDiscriminacao, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 2000:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDiscriminacao' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int.
        # Fix: the generated check used ``len(str(value)) >= 7``, which
        # warned on every valid IBGE municipality code (all are 7 digits,
        # e.g. 3550308 for Sao Paulo); only values longer than 7 digits
        # actually exceed the restriction.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) > 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def hasContent_(self):
        # True when at least one child element is set (element serializes
        # as <tag>...</tag> rather than the self-closing form).
        if (
            self.Valores is not None or
            self.ItemListaServico is not None or
            self.CodigoCnae is not None or
            self.CodigoTributacaoMunicipio is not None or
            self.Discriminacao is not None or
            self.CodigoMunicipio is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosServico', namespacedef_='', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosServico')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosServico')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosServico', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosServico'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosServico', fromsubclass_=False, pretty_print=True):
        """Write the set child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Valores is not None:
            self.Valores.export(outfile, level, namespace_, name_='Valores', pretty_print=pretty_print)
        if self.ItemListaServico is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ItemListaServico>%s</ItemListaServico>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.ItemListaServico), input_name='ItemListaServico')), eol_))
        if self.CodigoCnae is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoCnae>%s</CodigoCnae>%s' % (self.gds_format_integer(self.CodigoCnae, input_name='CodigoCnae'), eol_))
        if self.CodigoTributacaoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoTributacaoMunicipio>%s</CodigoTributacaoMunicipio>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoTributacaoMunicipio), input_name='CodigoTributacaoMunicipio')), eol_))
        if self.Discriminacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Discriminacao>%s</Discriminacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
    def build(self, node):
        """Populate this object from an element-tree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode one child element into the matching attribute."""
        if nodeName_ == 'Valores':
            obj_ = tcValores.factory()
            obj_.build(child_)
            self.Valores = obj_
            obj_.original_tagname_ = 'Valores'
        elif nodeName_ == 'ItemListaServico':
            ItemListaServico_ = child_.text
            ItemListaServico_ = self.gds_validate_string(ItemListaServico_, node, 'ItemListaServico')
            self.ItemListaServico = ItemListaServico_
            # validate type tsItemListaServico
            self.validate_tsItemListaServico(self.ItemListaServico)
        elif nodeName_ == 'CodigoCnae':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoCnae')
            self.CodigoCnae = ival_
            # validate type tsCodigoCnae
            self.validate_tsCodigoCnae(self.CodigoCnae)
        elif nodeName_ == 'CodigoTributacaoMunicipio':
            CodigoTributacaoMunicipio_ = child_.text
            CodigoTributacaoMunicipio_ = self.gds_validate_string(CodigoTributacaoMunicipio_, node, 'CodigoTributacaoMunicipio')
            self.CodigoTributacaoMunicipio = CodigoTributacaoMunicipio_
            # validate type tsCodigoTributacao
            self.validate_tsCodigoTributacao(self.CodigoTributacaoMunicipio)
        elif nodeName_ == 'Discriminacao':
            Discriminacao_ = child_.text
            Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao')
            self.Discriminacao = Discriminacao_
            # validate type tsDiscriminacao
            self.validate_tsDiscriminacao(self.Discriminacao)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcDadosServico
class tcDadosConstrucaoCivil(GeneratedsSuper):
    """Construction-work reference of an RPS: the registered work code
    (CodigoObra) and the ART number (Art), both strings of 1-15 chars.
    """
    subclass = None
    superclass = None
    def __init__(self, CodigoObra=None, Art=None):
        self.original_tagname_ = None
        self.CodigoObra = CodigoObra
        self.validate_tsCodigoObra(self.CodigoObra)
        self.Art = Art
        self.validate_tsArt(self.Art)
    def factory(*args_, **kwargs_):
        # Honor a subclass override registered in an external module first,
        # then the class-level subclass hook, then the class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosConstrucaoCivil)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosConstrucaoCivil.subclass:
            return tcDadosConstrucaoCivil.subclass(*args_, **kwargs_)
        return tcDadosConstrucaoCivil(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CodigoObra(self): return self.CodigoObra
    def set_CodigoObra(self, CodigoObra): self.CodigoObra = CodigoObra
    def get_Art(self): return self.Art
    def set_Art(self, Art): self.Art = Art
    def validate_tsCodigoObra(self, value):
        """Warn when *value* violates tsCodigoObra (string, length 1..15)."""
        if value is None or not Validate_simpletypes_:
            return
        if len(value) > 15:
            warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
        if len(value) < 1:
            warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoObra' % {"value" : value.encode("utf-8")} )
    def validate_tsArt(self, value):
        """Warn when *value* violates tsArt (string, length 1..15)."""
        if value is None or not Validate_simpletypes_:
            return
        if len(value) > 15:
            warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
        if len(value) < 1:
            warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsArt' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when any child element is set."""
        return self.CodigoObra is not None or self.Art is not None
    def export(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', namespacedef_='', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosConstrucaoCivil')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosConstrucaoCivil')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosConstrucaoCivil', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosConstrucaoCivil'):
        """tcDadosConstrucaoCivil carries no XML attributes."""
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosConstrucaoCivil', fromsubclass_=False, pretty_print=True):
        """Write the set child elements in schema order."""
        eol_ = '\n' if pretty_print else ''
        if self.CodigoObra is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoObra>%s</CodigoObra>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoObra), input_name='CodigoObra')), eol_))
        if self.Art is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Art>%s</Art>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Art), input_name='Art')), eol_))
    def build(self, node):
        """Populate this object from an element-tree *node*; return self."""
        seen_attrs = set()
        self.buildAttributes(node, node.attrib, seen_attrs)
        for child_node in node:
            tag_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """tcDadosConstrucaoCivil carries no XML attributes."""
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Assign and validate one decoded child element."""
        if nodeName_ == 'CodigoObra':
            decoded = self.gds_validate_string(child_.text, node, 'CodigoObra')
            self.CodigoObra = decoded
            # validate type tsCodigoObra
            self.validate_tsCodigoObra(self.CodigoObra)
        elif nodeName_ == 'Art':
            decoded = self.gds_validate_string(child_.text, node, 'Art')
            self.Art = decoded
            # validate type tsArt
            self.validate_tsArt(self.Art)
# end class tcDadosConstrucaoCivil
class tcDadosPrestador(GeneratedsSuper):
    """Provider (prestador) data of an NFS-e: identification, corporate
    name (RazaoSocial), trade name (NomeFantasia), address and contact.
    IdentificacaoPrestador, Endereco and Contato are nested complex types
    that serialize themselves via their own export()/build() methods.
    """
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoPrestador=None, RazaoSocial=None, NomeFantasia=None, Endereco=None, Contato=None):
        self.original_tagname_ = None
        self.IdentificacaoPrestador = IdentificacaoPrestador
        self.RazaoSocial = RazaoSocial
        self.validate_tsRazaoSocial(self.RazaoSocial)
        self.NomeFantasia = NomeFantasia
        self.validate_tsNomeFantasia(self.NomeFantasia)
        self.Endereco = Endereco
        self.Contato = Contato
    def factory(*args_, **kwargs_):
        """Instantiate tcDadosPrestador or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcDadosPrestador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcDadosPrestador.subclass:
            return tcDadosPrestador.subclass(*args_, **kwargs_)
        else:
            return tcDadosPrestador(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs; the attributes may also be used directly.
    def get_IdentificacaoPrestador(self): return self.IdentificacaoPrestador
    def set_IdentificacaoPrestador(self, IdentificacaoPrestador): self.IdentificacaoPrestador = IdentificacaoPrestador
    def get_RazaoSocial(self): return self.RazaoSocial
    def set_RazaoSocial(self, RazaoSocial): self.RazaoSocial = RazaoSocial
    def get_NomeFantasia(self): return self.NomeFantasia
    def set_NomeFantasia(self, NomeFantasia): self.NomeFantasia = NomeFantasia
    def get_Endereco(self): return self.Endereco
    def set_Endereco(self, Endereco): self.Endereco = Endereco
    def get_Contato(self): return self.Contato
    def set_Contato(self, Contato): self.Contato = Contato
    def validate_tsRazaoSocial(self, value):
        # Validate type tsRazaoSocial, a restriction on xsd:string.
        # Emits warnings_ only; never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 115:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsRazaoSocial' % {"value" : value.encode("utf-8")} )
    def validate_tsNomeFantasia(self, value):
        # Validate type tsNomeFantasia, a restriction on xsd:string.
        # Emits warnings_ only; never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 60:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsNomeFantasia' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set; controls whether
        # export() writes <tag>...</tag> or the self-closing form.
        if (
            self.IdentificacaoPrestador is not None or
            self.RazaoSocial is not None or
            self.NomeFantasia is not None or
            self.Endereco is not None or
            self.Contato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcDadosPrestador', namespacedef_='', pretty_print=True):
        """Serialize this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcDadosPrestador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcDadosPrestador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcDadosPrestador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcDadosPrestador'):
        # No XML attributes defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcDadosPrestador', fromsubclass_=False, pretty_print=True):
        """Write the set child elements in schema order; nested complex
        types delegate to their own export()."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoPrestador is not None:
            self.IdentificacaoPrestador.export(outfile, level, namespace_, name_='IdentificacaoPrestador', pretty_print=pretty_print)
        if self.RazaoSocial is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RazaoSocial>%s</RazaoSocial>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), eol_))
        if self.NomeFantasia is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NomeFantasia>%s</NomeFantasia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.NomeFantasia), input_name='NomeFantasia')), eol_))
        if self.Endereco is not None:
            self.Endereco.export(outfile, level, namespace_, name_='Endereco', pretty_print=pretty_print)
        if self.Contato is not None:
            self.Contato.export(outfile, level, namespace_, name_='Contato', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an element-tree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes defined for this type.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Decode one child element into the matching attribute."""
        if nodeName_ == 'IdentificacaoPrestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.IdentificacaoPrestador = obj_
            obj_.original_tagname_ = 'IdentificacaoPrestador'
        elif nodeName_ == 'RazaoSocial':
            RazaoSocial_ = child_.text
            RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial')
            self.RazaoSocial = RazaoSocial_
            # validate type tsRazaoSocial
            self.validate_tsRazaoSocial(self.RazaoSocial)
        elif nodeName_ == 'NomeFantasia':
            NomeFantasia_ = child_.text
            NomeFantasia_ = self.gds_validate_string(NomeFantasia_, node, 'NomeFantasia')
            self.NomeFantasia = NomeFantasia_
            # validate type tsNomeFantasia
            self.validate_tsNomeFantasia(self.NomeFantasia)
        elif nodeName_ == 'Endereco':
            obj_ = tcEndereco.factory()
            obj_.build(child_)
            self.Endereco = obj_
            obj_.original_tagname_ = 'Endereco'
        elif nodeName_ == 'Contato':
            obj_ = tcContato.factory()
            obj_.build(child_)
            self.Contato = obj_
            obj_.original_tagname_ = 'Contato'
# end class tcDadosPrestador
class tcInfRps(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, Id=None, IdentificacaoRps=None, DataEmissao=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Status=None, RpsSubstituido=None, Servico=None, Prestador=None, Tomador=None, IntermediarioServico=None, ConstrucaoCivil=None):
        """Build the RPS payload (tcInfRps).

        A string DataEmissao is parsed with the naive format
        '%Y-%m-%dT%H:%M:%S' (no timezone handling); otherwise the value is
        stored as given. Simple-valued fields are run through their
        warning-only validators at construction time.
        """
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.IdentificacaoRps = IdentificacaoRps
        if isinstance(DataEmissao, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = DataEmissao
        self.DataEmissao = initvalue_
        self.NaturezaOperacao = NaturezaOperacao
        self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        self.RegimeEspecialTributacao = RegimeEspecialTributacao
        self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        self.OptanteSimplesNacional = OptanteSimplesNacional
        self.validate_tsSimNao(self.OptanteSimplesNacional)
        self.IncentivadorCultural = IncentivadorCultural
        self.validate_tsSimNao(self.IncentivadorCultural)
        self.Status = Status
        self.validate_tsStatusRps(self.Status)
        self.RpsSubstituido = RpsSubstituido
        self.Servico = Servico
        self.Prestador = Prestador
        self.Tomador = Tomador
        self.IntermediarioServico = IntermediarioServico
        self.ConstrucaoCivil = ConstrucaoCivil
    def factory(*args_, **kwargs_):
        """Instantiate tcInfRps or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfRps.subclass:
            return tcInfRps.subclass(*args_, **kwargs_)
        else:
            return tcInfRps(*args_, **kwargs_)
    factory = staticmethod(factory)
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_DataEmissao(self): return self.DataEmissao
def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
def get_NaturezaOperacao(self): return self.NaturezaOperacao
def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
def get_IncentivadorCultural(self): return self.IncentivadorCultural
def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
def get_Status(self): return self.Status
def set_Status(self, Status): self.Status = Status
def get_RpsSubstituido(self): return self.RpsSubstituido
def set_RpsSubstituido(self, RpsSubstituido): self.RpsSubstituido = RpsSubstituido
def get_Servico(self): return self.Servico
def set_Servico(self, Servico): self.Servico = Servico
def get_Prestador(self): return self.Prestador
def set_Prestador(self, Prestador): self.Prestador = Prestador
def get_Tomador(self): return self.Tomador
def set_Tomador(self, Tomador): self.Tomador = Tomador
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
    # --- simple-type validators: emit warnings_ on mismatch, never raise ---
    def validate_tsNaturezaOperacao(self, value):
        # Validate type tsNaturezaOperacao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsNaturezaOperacao_patterns_, value):
                # NOTE(review): exportChildren writes this field as an integer,
                # but value.encode on the failure path assumes a str — confirm
                # callers pass strings, or this warn would raise AttributeError.
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsNaturezaOperacao_patterns_, ))
    # allowed values 1-6 (nature of the operation)
    validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
    def validate_tsRegimeEspecialTributacao(self, value):
        # Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsRegimeEspecialTributacao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsRegimeEspecialTributacao_patterns_, ))
    # allowed values 0-6 (special taxation regime)
    validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
    def validate_tsSimNao(self, value):
        # Validate type tsSimNao, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsSimNao_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsSimNao_patterns_, ))
    # 1 = yes, 2 = no
    validate_tsSimNao_patterns_ = [['^1$|^2$']]
    def validate_tsStatusRps(self, value):
        # Validate type tsStatusRps, a restriction on xsd:byte.
        if value is not None and Validate_simpletypes_:
            if not self.gds_validate_simple_patterns(
                self.validate_tsStatusRps_patterns_, value):
                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsStatusRps_patterns_, ))
    validate_tsStatusRps_patterns_ = [['^1$|^2$']]
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element is set; controls whether
        # export() writes <tag>...</tag> or the self-closing form.
        if (
            self.IdentificacaoRps is not None or
            self.DataEmissao is not None or
            self.NaturezaOperacao is not None or
            self.RegimeEspecialTributacao is not None or
            self.OptanteSimplesNacional is not None or
            self.IncentivadorCultural is not None or
            self.Status is not None or
            self.RpsSubstituido is not None or
            self.Servico is not None or
            self.Prestador is not None or
            self.Tomador is not None or
            self.IntermediarioServico is not None or
            self.ConstrucaoCivil is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfRps', namespacedef_='', pretty_print=True):
        """Serialize this element (attributes and children) as XML to
        *outfile*, honoring original_tagname_ and registered namespace
        definitions."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfRps'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfRps', fromsubclass_=False, pretty_print=True):
        # Serialize each non-None child element in schema order.  Simple
        # values (datetime/integer) are formatted inline; complex children
        # delegate to their own export() method.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoRps is not None:
            self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
        if self.DataEmissao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
        if self.NaturezaOperacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
        if self.RegimeEspecialTributacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
        if self.OptanteSimplesNacional is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
        if self.IncentivadorCultural is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
        if self.Status is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Status>%s</Status>%s' % (self.gds_format_integer(self.Status, input_name='Status'), eol_))
        if self.RpsSubstituido is not None:
            self.RpsSubstituido.export(outfile, level, namespace_, name_='RpsSubstituido', pretty_print=pretty_print)
        if self.Servico is not None:
            self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
        if self.Prestador is not None:
            self.Prestador.export(outfile, level, namespace_, name_='Prestador', pretty_print=pretty_print)
        if self.Tomador is not None:
            self.Tomador.export(outfile, level, namespace_, name_='Tomador', pretty_print=pretty_print)
        if self.IntermediarioServico is not None:
            self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
        if self.ConstrucaoCivil is not None:
            self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read the optional Id attribute from the element node; names added
        # to `already_processed` are skipped by subclass implementations.
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child element's local tag name.  Simple-typed
        # children (datetime, integers) are parsed and validated in place;
        # complex children are built through the matching generated class
        # factory, which also records the original tag name for re-export.
        if nodeName_ == 'IdentificacaoRps':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.IdentificacaoRps = obj_
            obj_.original_tagname_ = 'IdentificacaoRps'
        elif nodeName_ == 'DataEmissao':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.DataEmissao = dval_
        elif nodeName_ == 'NaturezaOperacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
            self.NaturezaOperacao = ival_
            # validate type tsNaturezaOperacao
            self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        elif nodeName_ == 'RegimeEspecialTributacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
            self.RegimeEspecialTributacao = ival_
            # validate type tsRegimeEspecialTributacao
            self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        elif nodeName_ == 'OptanteSimplesNacional':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
            self.OptanteSimplesNacional = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.OptanteSimplesNacional)
        elif nodeName_ == 'IncentivadorCultural':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
            self.IncentivadorCultural = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.IncentivadorCultural)
        elif nodeName_ == 'Status':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'Status')
            self.Status = ival_
            # validate type tsStatusRps
            self.validate_tsStatusRps(self.Status)
        elif nodeName_ == 'RpsSubstituido':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.RpsSubstituido = obj_
            obj_.original_tagname_ = 'RpsSubstituido'
        elif nodeName_ == 'Servico':
            obj_ = tcDadosServico.factory()
            obj_.build(child_)
            self.Servico = obj_
            obj_.original_tagname_ = 'Servico'
        elif nodeName_ == 'Prestador':
            obj_ = tcIdentificacaoPrestador.factory()
            obj_.build(child_)
            self.Prestador = obj_
            obj_.original_tagname_ = 'Prestador'
        elif nodeName_ == 'Tomador':
            obj_ = tcDadosTomador.factory()
            obj_.build(child_)
            self.Tomador = obj_
            obj_.original_tagname_ = 'Tomador'
        elif nodeName_ == 'IntermediarioServico':
            obj_ = tcIdentificacaoIntermediarioServico.factory()
            obj_.build(child_)
            self.IntermediarioServico = obj_
            obj_.original_tagname_ = 'IntermediarioServico'
        elif nodeName_ == 'ConstrucaoCivil':
            obj_ = tcDadosConstrucaoCivil.factory()
            obj_.build(child_)
            self.ConstrucaoCivil = obj_
            obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfRps
class tcRps(GeneratedsSuper):
    """XML binding for the tcRps complex type (an RPS wrapper holding the
    invoice data in InfRps plus an optional XML-DSig Signature element).
    Generated-style class: use factory() to construct, build() to parse
    from an element node, export() to serialize back to XML.
    """
    subclass = None
    superclass = None
    def __init__(self, InfRps=None, Signature=None):
        self.original_tagname_ = None
        self.InfRps = InfRps
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Honour, in order: an externally registered subclass module, the
        # class-level `subclass` hook, and finally tcRps itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcRps.subclass:
            return tcRps.subclass(*args_, **kwargs_)
        else:
            return tcRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_InfRps(self): return self.InfRps
    def set_InfRps(self, InfRps): self.InfRps = InfRps
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        # True when any child element is set (element needs open/close tags).
        if (
            self.InfRps is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcRps', namespacedef_='', pretty_print=True):
        # Serialize this element to `outfile` at the given indent level.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcRps'):
        # tcRps declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcRps', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.InfRps is not None:
            self.InfRps.export(outfile, level, namespace_, name_='InfRps', pretty_print=pretty_print)
        if self.Signature is not None:
            # The signature element is written under the dsig: prefix.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        # Populate from an element node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Delegate each known child tag to its generated class factory.
        if nodeName_ == 'InfRps':
            obj_ = tcInfRps.factory()
            obj_.build(child_)
            self.InfRps = obj_
            obj_.original_tagname_ = 'InfRps'
        elif nodeName_ == 'Signature':
            obj_ = SignatureType.factory()
            obj_.build(child_)
            self.Signature = obj_
            obj_.original_tagname_ = 'Signature'
# end class tcRps
class tcIdentificacaoNfse(GeneratedsSuper):
    """XML binding for tcIdentificacaoNfse: identifies an issued NFS-e by
    its number, the provider's CNPJ and municipal registration, and the
    IBGE municipality code.  Validation only warns; it never raises.
    """
    subclass = None
    superclass = None
    def __init__(self, Numero=None, Cnpj=None, InscricaoMunicipal=None, CodigoMunicipio=None):
        self.original_tagname_ = None
        self.Numero = Numero
        self.validate_tsNumeroNfse(self.Numero)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        self.CodigoMunicipio = CodigoMunicipio
        self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
    def factory(*args_, **kwargs_):
        # Honour, in order: a registered subclass module, the class-level
        # `subclass` hook, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcIdentificacaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcIdentificacaoNfse.subclass:
            return tcIdentificacaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcIdentificacaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Numero(self): return self.Numero
    def set_Numero(self, Numero): self.Numero = Numero
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def get_CodigoMunicipio(self): return self.CodigoMunicipio
    def set_CodigoMunicipio(self, CodigoMunicipio): self.CodigoMunicipio = CodigoMunicipio
    def validate_tsNumeroNfse(self, value):
        # Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
        # The maxInclusive bound is approximated by the decimal-string length.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string (exactly 14 chars).
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string
        # (length between 1 and 15 characters).
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def validate_tsCodigoMunicipioIbge(self, value):
        # Validate type tsCodigoMunicipioIbge, a restriction on xsd:int
        # (bound approximated by the decimal-string length: max 6 digits).
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 7:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsCodigoMunicipioIbge' % {"value" : value} )
    def hasContent_(self):
        # True when any child element is set (element needs open/close tags).
        if (
            self.Numero is not None or
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None or
            self.CodigoMunicipio is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', namespacedef_='', pretty_print=True):
        # Serialize this element to `outfile` at the given indent level.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcIdentificacaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcIdentificacaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcIdentificacaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcIdentificacaoNfse'):
        # tcIdentificacaoNfse declares no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcIdentificacaoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
        if self.CodigoMunicipio is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoMunicipio>%s</CodigoMunicipio>%s' % (self.gds_format_integer(self.CodigoMunicipio, input_name='CodigoMunicipio'), eol_))
    def build(self, node):
        # Populate from an element node and return self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Parse each known child tag; integers are range-checked, strings
        # are validated against the schema's length restrictions.
        if nodeName_ == 'Numero':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Numero')
            self.Numero = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.Numero)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        elif nodeName_ == 'CodigoMunicipio':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'CodigoMunicipio')
            self.CodigoMunicipio = ival_
            # validate type tsCodigoMunicipioIbge
            self.validate_tsCodigoMunicipioIbge(self.CodigoMunicipio)
# end class tcIdentificacaoNfse
class tcInfNfse(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, Id=None, Numero=None, CodigoVerificacao=None, DataEmissao=None, IdentificacaoRps=None, DataEmissaoRps=None, NaturezaOperacao=None, RegimeEspecialTributacao=None, OptanteSimplesNacional=None, IncentivadorCultural=None, Competencia=None, NfseSubstituida=None, OutrasInformacoes=None, Servico=None, ValorCredito=None, PrestadorServico=None, TomadorServico=None, IntermediarioServico=None, OrgaoGerador=None, ConstrucaoCivil=None):
        # Each simple-typed member is validated on assignment (warnings
        # only).  String date/datetime arguments are parsed eagerly:
        # DataEmissao as ISO datetime, DataEmissaoRps as ISO date.
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.Numero = Numero
        self.validate_tsNumeroNfse(self.Numero)
        self.CodigoVerificacao = CodigoVerificacao
        self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
        # Accept either a datetime object or a '%Y-%m-%dT%H:%M:%S' string.
        if isinstance(DataEmissao, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = DataEmissao
        self.DataEmissao = initvalue_
        self.IdentificacaoRps = IdentificacaoRps
        # Accept either a date object or a '%Y-%m-%d' string.
        if isinstance(DataEmissaoRps, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DataEmissaoRps, '%Y-%m-%d').date()
        else:
            initvalue_ = DataEmissaoRps
        self.DataEmissaoRps = initvalue_
        self.NaturezaOperacao = NaturezaOperacao
        self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        self.RegimeEspecialTributacao = RegimeEspecialTributacao
        self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        self.OptanteSimplesNacional = OptanteSimplesNacional
        self.validate_tsSimNao(self.OptanteSimplesNacional)
        self.IncentivadorCultural = IncentivadorCultural
        self.validate_tsSimNao(self.IncentivadorCultural)
        self.Competencia = Competencia
        self.NfseSubstituida = NfseSubstituida
        self.validate_tsNumeroNfse(self.NfseSubstituida)
        self.OutrasInformacoes = OutrasInformacoes
        self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
        self.Servico = Servico
        self.ValorCredito = ValorCredito
        self.validate_tsValor(self.ValorCredito)
        self.PrestadorServico = PrestadorServico
        self.TomadorServico = TomadorServico
        self.IntermediarioServico = IntermediarioServico
        self.OrgaoGerador = OrgaoGerador
        self.ConstrucaoCivil = ConstrucaoCivil
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, tcInfNfse)
if subclass is not None:
return subclass(*args_, **kwargs_)
if tcInfNfse.subclass:
return tcInfNfse.subclass(*args_, **kwargs_)
else:
return tcInfNfse(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Numero(self): return self.Numero
def set_Numero(self, Numero): self.Numero = Numero
def get_CodigoVerificacao(self): return self.CodigoVerificacao
def set_CodigoVerificacao(self, CodigoVerificacao): self.CodigoVerificacao = CodigoVerificacao
def get_DataEmissao(self): return self.DataEmissao
def set_DataEmissao(self, DataEmissao): self.DataEmissao = DataEmissao
def get_IdentificacaoRps(self): return self.IdentificacaoRps
def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
def get_DataEmissaoRps(self): return self.DataEmissaoRps
def set_DataEmissaoRps(self, DataEmissaoRps): self.DataEmissaoRps = DataEmissaoRps
def get_NaturezaOperacao(self): return self.NaturezaOperacao
def set_NaturezaOperacao(self, NaturezaOperacao): self.NaturezaOperacao = NaturezaOperacao
def get_RegimeEspecialTributacao(self): return self.RegimeEspecialTributacao
def set_RegimeEspecialTributacao(self, RegimeEspecialTributacao): self.RegimeEspecialTributacao = RegimeEspecialTributacao
def get_OptanteSimplesNacional(self): return self.OptanteSimplesNacional
def set_OptanteSimplesNacional(self, OptanteSimplesNacional): self.OptanteSimplesNacional = OptanteSimplesNacional
def get_IncentivadorCultural(self): return self.IncentivadorCultural
def set_IncentivadorCultural(self, IncentivadorCultural): self.IncentivadorCultural = IncentivadorCultural
def get_Competencia(self): return self.Competencia
def set_Competencia(self, Competencia): self.Competencia = Competencia
def get_NfseSubstituida(self): return self.NfseSubstituida
def set_NfseSubstituida(self, NfseSubstituida): self.NfseSubstituida = NfseSubstituida
def get_OutrasInformacoes(self): return self.OutrasInformacoes
def set_OutrasInformacoes(self, OutrasInformacoes): self.OutrasInformacoes = OutrasInformacoes
def get_Servico(self): return self.Servico
def set_Servico(self, Servico): self.Servico = Servico
def get_ValorCredito(self): return self.ValorCredito
def set_ValorCredito(self, ValorCredito): self.ValorCredito = ValorCredito
def get_PrestadorServico(self): return self.PrestadorServico
def set_PrestadorServico(self, PrestadorServico): self.PrestadorServico = PrestadorServico
def get_TomadorServico(self): return self.TomadorServico
def set_TomadorServico(self, TomadorServico): self.TomadorServico = TomadorServico
def get_IntermediarioServico(self): return self.IntermediarioServico
def set_IntermediarioServico(self, IntermediarioServico): self.IntermediarioServico = IntermediarioServico
def get_OrgaoGerador(self): return self.OrgaoGerador
def set_OrgaoGerador(self, OrgaoGerador): self.OrgaoGerador = OrgaoGerador
def get_ConstrucaoCivil(self): return self.ConstrucaoCivil
def set_ConstrucaoCivil(self, ConstrucaoCivil): self.ConstrucaoCivil = ConstrucaoCivil
def get_Id(self): return self.Id
def set_Id(self, Id): self.Id = Id
def validate_tsNumeroNfse(self, value):
# Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
if value is not None and Validate_simpletypes_:
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
def validate_tsCodigoVerificacao(self, value):
# Validate type tsCodigoVerificacao, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 9:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoVerificacao' % {"value" : value.encode("utf-8")} )
def validate_tsNaturezaOperacao(self, value):
# Validate type tsNaturezaOperacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsNaturezaOperacao_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsNaturezaOperacao_patterns_, ))
validate_tsNaturezaOperacao_patterns_ = [['^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsRegimeEspecialTributacao(self, value):
# Validate type tsRegimeEspecialTributacao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsRegimeEspecialTributacao_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsRegimeEspecialTributacao_patterns_, ))
validate_tsRegimeEspecialTributacao_patterns_ = [['^0$|^1$|^2$|^3$|^4$|^5$|^6$']]
def validate_tsSimNao(self, value):
# Validate type tsSimNao, a restriction on xsd:byte.
if value is not None and Validate_simpletypes_:
if not self.gds_validate_simple_patterns(
self.validate_tsSimNao_patterns_, value):
warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tsSimNao_patterns_, ))
validate_tsSimNao_patterns_ = [['^1$|^2$']]
def validate_tsOutrasInformacoes(self, value):
# Validate type tsOutrasInformacoes, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
if len(value) < 1:
warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsOutrasInformacoes' % {"value" : value.encode("utf-8")} )
def validate_tsValor(self, value):
# Validate type tsValor, a restriction on xsd:decimal.
if value is not None and Validate_simpletypes_:
if value < 0:
warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tsValor' % {"value" : value} )
if len(str(value)) >= 15:
warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsValor' % {"value" : value} )
def validate_tsIdTag(self, value):
# Validate type tsIdTag, a restriction on xsd:string.
if value is not None and Validate_simpletypes_:
if len(value) > 255:
warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
def hasContent_(self):
if (
self.Numero is not None or
self.CodigoVerificacao is not None or
self.DataEmissao is not None or
self.IdentificacaoRps is not None or
self.DataEmissaoRps is not None or
self.NaturezaOperacao is not None or
self.RegimeEspecialTributacao is not None or
self.OptanteSimplesNacional is not None or
self.IncentivadorCultural is not None or
self.Competencia is not None or
self.NfseSubstituida is not None or
self.OutrasInformacoes is not None or
self.Servico is not None or
self.ValorCredito is not None or
self.PrestadorServico is not None or
self.TomadorServico is not None or
self.IntermediarioServico is not None or
self.OrgaoGerador is not None or
self.ConstrucaoCivil is not None
):
return True
else:
return False
    def export(self, outfile, level, namespace_='', name_='tcInfNfse', namespacedef_='', pretty_print=True):
        # Serialize this tcInfNfse element to `outfile` as XML at the given
        # indentation `level`.  Attributes are written first; children only
        # when hasContent_() is true, otherwise an empty-element tag.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Parsed instances remember the tag they were read from; prefer it.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfNfse'):
if self.Id is not None and 'Id' not in already_processed:
already_processed.add('Id')
outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfNfse', fromsubclass_=False, pretty_print=True):
        # Serialize each non-None child element in schema order.  Simple
        # values (datetime/date/integer/string/decimal) are formatted
        # inline; complex children delegate to their own export() method.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Numero is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Numero>%s</Numero>%s' % (self.gds_format_integer(self.Numero, input_name='Numero'), eol_))
        if self.CodigoVerificacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoVerificacao>%s</CodigoVerificacao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), eol_))
        if self.DataEmissao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DataEmissao>%s</DataEmissao>%s' % (self.gds_format_datetime(self.DataEmissao, input_name='DataEmissao'), eol_))
        if self.IdentificacaoRps is not None:
            self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
        if self.DataEmissaoRps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DataEmissaoRps>%s</DataEmissaoRps>%s' % (self.gds_format_date(self.DataEmissaoRps, input_name='DataEmissaoRps'), eol_))
        if self.NaturezaOperacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NaturezaOperacao>%s</NaturezaOperacao>%s' % (self.gds_format_integer(self.NaturezaOperacao, input_name='NaturezaOperacao'), eol_))
        if self.RegimeEspecialTributacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<RegimeEspecialTributacao>%s</RegimeEspecialTributacao>%s' % (self.gds_format_integer(self.RegimeEspecialTributacao, input_name='RegimeEspecialTributacao'), eol_))
        if self.OptanteSimplesNacional is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<OptanteSimplesNacional>%s</OptanteSimplesNacional>%s' % (self.gds_format_integer(self.OptanteSimplesNacional, input_name='OptanteSimplesNacional'), eol_))
        if self.IncentivadorCultural is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<IncentivadorCultural>%s</IncentivadorCultural>%s' % (self.gds_format_integer(self.IncentivadorCultural, input_name='IncentivadorCultural'), eol_))
        if self.Competencia is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Competencia>%s</Competencia>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Competencia), input_name='Competencia')), eol_))
        if self.NfseSubstituida is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NfseSubstituida>%s</NfseSubstituida>%s' % (self.gds_format_integer(self.NfseSubstituida, input_name='NfseSubstituida'), eol_))
        if self.OutrasInformacoes is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<OutrasInformacoes>%s</OutrasInformacoes>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.OutrasInformacoes), input_name='OutrasInformacoes')), eol_))
        if self.Servico is not None:
            self.Servico.export(outfile, level, namespace_, name_='Servico', pretty_print=pretty_print)
        if self.ValorCredito is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ValorCredito>%s</ValorCredito>%s' % (self.gds_format_float(self.ValorCredito, input_name='ValorCredito'), eol_))
        if self.PrestadorServico is not None:
            self.PrestadorServico.export(outfile, level, namespace_, name_='PrestadorServico', pretty_print=pretty_print)
        if self.TomadorServico is not None:
            self.TomadorServico.export(outfile, level, namespace_, name_='TomadorServico', pretty_print=pretty_print)
        if self.IntermediarioServico is not None:
            self.IntermediarioServico.export(outfile, level, namespace_, name_='IntermediarioServico', pretty_print=pretty_print)
        if self.OrgaoGerador is not None:
            self.OrgaoGerador.export(outfile, level, namespace_, name_='OrgaoGerador', pretty_print=pretty_print)
        if self.ConstrucaoCivil is not None:
            self.ConstrucaoCivil.export(outfile, level, namespace_, name_='ConstrucaoCivil', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # Read the optional Id attribute from the element node; names added
        # to `already_processed` are skipped by subclass implementations.
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one tcInfNfse field from the parsed child element *child_*.

        Dispatches on the element's local tag name (*nodeName_*): integer,
        string and date/datetime leaves are converted and validated in
        place, while complex children are built through the matching tc*
        class factory.  Tag names not listed here are silently ignored
        (generateDS convention for generated bindings).
        """
        if nodeName_ == 'Numero':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'Numero')
            self.Numero = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.Numero)
        elif nodeName_ == 'CodigoVerificacao':
            CodigoVerificacao_ = child_.text
            CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao')
            self.CodigoVerificacao = CodigoVerificacao_
            # validate type tsCodigoVerificacao
            self.validate_tsCodigoVerificacao(self.CodigoVerificacao)
        elif nodeName_ == 'DataEmissao':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.DataEmissao = dval_
        elif nodeName_ == 'IdentificacaoRps':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.IdentificacaoRps = obj_
            obj_.original_tagname_ = 'IdentificacaoRps'
        elif nodeName_ == 'DataEmissaoRps':
            # Date only (no time component), unlike DataEmissao above.
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.DataEmissaoRps = dval_
        elif nodeName_ == 'NaturezaOperacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'NaturezaOperacao')
            self.NaturezaOperacao = ival_
            # validate type tsNaturezaOperacao
            self.validate_tsNaturezaOperacao(self.NaturezaOperacao)
        elif nodeName_ == 'RegimeEspecialTributacao':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'RegimeEspecialTributacao')
            self.RegimeEspecialTributacao = ival_
            # validate type tsRegimeEspecialTributacao
            self.validate_tsRegimeEspecialTributacao(self.RegimeEspecialTributacao)
        elif nodeName_ == 'OptanteSimplesNacional':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'OptanteSimplesNacional')
            self.OptanteSimplesNacional = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.OptanteSimplesNacional)
        elif nodeName_ == 'IncentivadorCultural':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'IncentivadorCultural')
            self.IncentivadorCultural = ival_
            # validate type tsSimNao
            self.validate_tsSimNao(self.IncentivadorCultural)
        elif nodeName_ == 'Competencia':
            # NOTE(review): Competencia is kept as a raw string (no date
            # parsing) — confirm downstream consumers expect that.
            Competencia_ = child_.text
            Competencia_ = self.gds_validate_string(Competencia_, node, 'Competencia')
            self.Competencia = Competencia_
        elif nodeName_ == 'NfseSubstituida':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituida')
            self.NfseSubstituida = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.NfseSubstituida)
        elif nodeName_ == 'OutrasInformacoes':
            OutrasInformacoes_ = child_.text
            OutrasInformacoes_ = self.gds_validate_string(OutrasInformacoes_, node, 'OutrasInformacoes')
            self.OutrasInformacoes = OutrasInformacoes_
            # validate type tsOutrasInformacoes
            self.validate_tsOutrasInformacoes(self.OutrasInformacoes)
        elif nodeName_ == 'Servico':
            obj_ = tcDadosServico.factory()
            obj_.build(child_)
            self.Servico = obj_
            obj_.original_tagname_ = 'Servico'
        elif nodeName_ == 'ValorCredito':
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'ValorCredito')
            self.ValorCredito = fval_
            # validate type tsValor
            self.validate_tsValor(self.ValorCredito)
        elif nodeName_ == 'PrestadorServico':
            obj_ = tcDadosPrestador.factory()
            obj_.build(child_)
            self.PrestadorServico = obj_
            obj_.original_tagname_ = 'PrestadorServico'
        elif nodeName_ == 'TomadorServico':
            obj_ = tcDadosTomador.factory()
            obj_.build(child_)
            self.TomadorServico = obj_
            obj_.original_tagname_ = 'TomadorServico'
        elif nodeName_ == 'IntermediarioServico':
            obj_ = tcIdentificacaoIntermediarioServico.factory()
            obj_.build(child_)
            self.IntermediarioServico = obj_
            obj_.original_tagname_ = 'IntermediarioServico'
        elif nodeName_ == 'OrgaoGerador':
            obj_ = tcIdentificacaoOrgaoGerador.factory()
            obj_.build(child_)
            self.OrgaoGerador = obj_
            obj_.original_tagname_ = 'OrgaoGerador'
        elif nodeName_ == 'ConstrucaoCivil':
            obj_ = tcDadosConstrucaoCivil.factory()
            obj_.build(child_)
            self.ConstrucaoCivil = obj_
            obj_.original_tagname_ = 'ConstrucaoCivil'
# end class tcInfNfse
class tcNfse(GeneratedsSuper):
    """Complete NFS-e document: the invoice payload (``InfNfse``) together
    with its optional XML digital signature (``Signature``).
    """
    subclass = None
    superclass = None

    def __init__(self, InfNfse=None, Signature=None):
        self.original_tagname_ = None
        self.InfNfse = InfNfse
        self.Signature = Signature

    def factory(*args_, **kwargs_):
        """Instantiate tcNfse, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tcNfse)
            if override is not None:
                return override(*args_, **kwargs_)
        if tcNfse.subclass:
            return tcNfse.subclass(*args_, **kwargs_)
        return tcNfse(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_InfNfse(self):
        return self.InfNfse

    def set_InfNfse(self, InfNfse):
        self.InfNfse = InfNfse

    def get_Signature(self):
        return self.Signature

    def set_Signature(self, Signature):
        self.Signature = Signature

    def hasContent_(self):
        # Element is non-empty when either child is present.
        return (self.InfNfse is not None or
                self.Signature is not None)

    def export(self, outfile, level, namespace_='', name_='tcNfse', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_,
            (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcNfse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcNfse', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcNfse'):
        # tcNfse carries no XML attributes.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tcNfse', fromsubclass_=False, pretty_print=True):
        if self.InfNfse is not None:
            self.InfNfse.export(outfile, level, namespace_, name_='InfNfse', pretty_print=pretty_print)
        if self.Signature is not None:
            # Signatures are always emitted in the dsig namespace.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each recognised child tag to the class that parses it.
        child_types = {
            'InfNfse': tcInfNfse,
            'Signature': SignatureType,
        }
        ctype = child_types.get(nodeName_)
        if ctype is not None:
            obj_ = ctype.factory().build(child_)
            obj_.original_tagname_ = nodeName_
            setattr(self, nodeName_, obj_)
# end class tcNfse
class tcInfPedidoCancelamento(GeneratedsSuper):
    """Payload of an NFS-e cancellation request: which invoice to cancel
    (``IdentificacaoNfse``) and the cancellation reason code
    (``CodigoCancelamento``), plus an optional XML ``Id`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, IdentificacaoNfse=None, CodigoCancelamento=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.IdentificacaoNfse = IdentificacaoNfse
        self.CodigoCancelamento = CodigoCancelamento
        # The reason code is validated eagerly at construction time.
        self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
    def factory(*args_, **kwargs_):
        """Instantiate the class, honouring registered subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfPedidoCancelamento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfPedidoCancelamento.subclass:
            return tcInfPedidoCancelamento.subclass(*args_, **kwargs_)
        else:
            return tcInfPedidoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_IdentificacaoNfse(self): return self.IdentificacaoNfse
    def set_IdentificacaoNfse(self, IdentificacaoNfse): self.IdentificacaoNfse = IdentificacaoNfse
    def get_CodigoCancelamento(self): return self.CodigoCancelamento
    def set_CodigoCancelamento(self, CodigoCancelamento): self.CodigoCancelamento = CodigoCancelamento
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsCodigoCancelamentoNfse(self, value):
        # Validate type tsCodigoCancelamentoNfse, a restriction on xsd:string.
        # Emits warnings only; never raises.
        # NOTE(review): under Python 3 the %-format renders
        # value.encode("utf-8") as b'...' in the warning text — cosmetic
        # only, but confirm the intended Python target.
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoCancelamentoNfse' % {"value" : value.encode("utf-8")} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialised.
        if (
            self.IdentificacaoNfse is not None or
            self.CodigoCancelamento is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfPedidoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfPedidoCancelamento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfPedidoCancelamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfPedidoCancelamento'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfPedidoCancelamento', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoNfse is not None:
            self.IdentificacaoNfse.export(outfile, level, namespace_, name_='IdentificacaoNfse', pretty_print=pretty_print)
        if self.CodigoCancelamento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<CodigoCancelamento>%s</CodigoCancelamento>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.CodigoCancelamento), input_name='CodigoCancelamento')), eol_))
    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field from the child element; unknown tags ignored."""
        if nodeName_ == 'IdentificacaoNfse':
            obj_ = tcIdentificacaoNfse.factory()
            obj_.build(child_)
            self.IdentificacaoNfse = obj_
            obj_.original_tagname_ = 'IdentificacaoNfse'
        elif nodeName_ == 'CodigoCancelamento':
            CodigoCancelamento_ = child_.text
            CodigoCancelamento_ = self.gds_validate_string(CodigoCancelamento_, node, 'CodigoCancelamento')
            self.CodigoCancelamento = CodigoCancelamento_
            # validate type tsCodigoCancelamentoNfse
            self.validate_tsCodigoCancelamentoNfse(self.CodigoCancelamento)
# end class tcInfPedidoCancelamento
class tcPedidoCancelamento(GeneratedsSuper):
    """Cancellation request wrapper: the ``InfPedidoCancelamento`` payload
    plus an optional XML digital signature (``Signature``).
    """
    subclass = None
    superclass = None

    def __init__(self, InfPedidoCancelamento=None, Signature=None):
        self.original_tagname_ = None
        self.InfPedidoCancelamento = InfPedidoCancelamento
        self.Signature = Signature

    def factory(*args_, **kwargs_):
        """Instantiate tcPedidoCancelamento, honouring subclass overrides."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tcPedidoCancelamento)
            if override is not None:
                return override(*args_, **kwargs_)
        if tcPedidoCancelamento.subclass:
            return tcPedidoCancelamento.subclass(*args_, **kwargs_)
        return tcPedidoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_InfPedidoCancelamento(self):
        return self.InfPedidoCancelamento

    def set_InfPedidoCancelamento(self, InfPedidoCancelamento):
        self.InfPedidoCancelamento = InfPedidoCancelamento

    def get_Signature(self):
        return self.Signature

    def set_Signature(self, Signature):
        self.Signature = Signature

    def hasContent_(self):
        # Element is non-empty when either child is present.
        return (self.InfPedidoCancelamento is not None or
                self.Signature is not None)

    def export(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcPedidoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_,
            (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcPedidoCancelamento')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcPedidoCancelamento', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcPedidoCancelamento'):
        # No XML attributes on this type.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tcPedidoCancelamento', fromsubclass_=False, pretty_print=True):
        if self.InfPedidoCancelamento is not None:
            self.InfPedidoCancelamento.export(outfile, level, namespace_, name_='InfPedidoCancelamento', pretty_print=pretty_print)
        if self.Signature is not None:
            # Signatures are always emitted in the dsig namespace.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each recognised child tag to the class that parses it.
        child_types = {
            'InfPedidoCancelamento': tcInfPedidoCancelamento,
            'Signature': SignatureType,
        }
        ctype = child_types.get(nodeName_)
        if ctype is not None:
            obj_ = ctype.factory().build(child_)
            obj_.original_tagname_ = nodeName_
            setattr(self, nodeName_, obj_)
# end class tcPedidoCancelamento
class tcInfConfirmacaoCancelamento(GeneratedsSuper):
    """Outcome of a cancellation request: a success flag (``Sucesso``) and
    the confirmation timestamp (``DataHora``).
    """
    subclass = None
    superclass = None
    def __init__(self, Sucesso=None, DataHora=None):
        self.original_tagname_ = None
        self.Sucesso = Sucesso
        # NOTE(review): string DataHora values must match exactly
        # 'YYYY-MM-DDTHH:MM:SS' — no fractional seconds or timezone
        # offset.  Confirm upstream always supplies this shape.
        if isinstance(DataHora, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(DataHora, '%Y-%m-%dT%H:%M:%S')
        else:
            initvalue_ = DataHora
        self.DataHora = initvalue_
    def factory(*args_, **kwargs_):
        """Instantiate the class, honouring registered subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfConfirmacaoCancelamento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfConfirmacaoCancelamento.subclass:
            return tcInfConfirmacaoCancelamento.subclass(*args_, **kwargs_)
        else:
            return tcInfConfirmacaoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Sucesso(self): return self.Sucesso
    def set_Sucesso(self, Sucesso): self.Sucesso = Sucesso
    def get_DataHora(self): return self.DataHora
    def set_DataHora(self, DataHora): self.DataHora = DataHora
    def hasContent_(self):
        # True when at least one child element would be serialised.
        if (
            self.Sucesso is not None or
            self.DataHora is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfConfirmacaoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfConfirmacaoCancelamento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfConfirmacaoCancelamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfConfirmacaoCancelamento'):
        # No XML attributes on this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Sucesso is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Sucesso>%s</Sucesso>%s' % (self.gds_format_boolean(self.Sucesso, input_name='Sucesso'), eol_))
        if self.DataHora is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<DataHora>%s</DataHora>%s' % (self.gds_format_datetime(self.DataHora, input_name='DataHora'), eol_))
    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field from the child element; unknown tags ignored."""
        if nodeName_ == 'Sucesso':
            # Only the literal strings 'true'/'1'/'false'/'0' are accepted.
            sval_ = child_.text
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'Sucesso')
            self.Sucesso = ival_
        elif nodeName_ == 'DataHora':
            sval_ = child_.text
            dval_ = self.gds_parse_datetime(sval_)
            self.DataHora = dval_
# end class tcInfConfirmacaoCancelamento
class tcConfirmacaoCancelamento(GeneratedsSuper):
    """Pairs the original cancellation request (``Pedido``) with its
    confirmation details (``InfConfirmacaoCancelamento``); carries an
    optional XML ``Id`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, Pedido=None, InfConfirmacaoCancelamento=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.Pedido = Pedido
        self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
    def factory(*args_, **kwargs_):
        """Instantiate the class, honouring registered subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcConfirmacaoCancelamento)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcConfirmacaoCancelamento.subclass:
            return tcConfirmacaoCancelamento.subclass(*args_, **kwargs_)
        else:
            return tcConfirmacaoCancelamento(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Pedido(self): return self.Pedido
    def set_Pedido(self, Pedido): self.Pedido = Pedido
    def get_InfConfirmacaoCancelamento(self): return self.InfConfirmacaoCancelamento
    def set_InfConfirmacaoCancelamento(self, InfConfirmacaoCancelamento): self.InfConfirmacaoCancelamento = InfConfirmacaoCancelamento
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        # Emits warnings only; never raises.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialised.
        if (
            self.Pedido is not None or
            self.InfConfirmacaoCancelamento is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcConfirmacaoCancelamento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcConfirmacaoCancelamento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcConfirmacaoCancelamento', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcConfirmacaoCancelamento'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcConfirmacaoCancelamento', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Pedido is not None:
            self.Pedido.export(outfile, level, namespace_, name_='Pedido', pretty_print=pretty_print)
        if self.InfConfirmacaoCancelamento is not None:
            self.InfConfirmacaoCancelamento.export(outfile, level, namespace_, name_='InfConfirmacaoCancelamento', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate one field from the child element; unknown tags ignored."""
        if nodeName_ == 'Pedido':
            obj_ = tcPedidoCancelamento.factory()
            obj_.build(child_)
            self.Pedido = obj_
            obj_.original_tagname_ = 'Pedido'
        elif nodeName_ == 'InfConfirmacaoCancelamento':
            obj_ = tcInfConfirmacaoCancelamento.factory()
            obj_.build(child_)
            self.InfConfirmacaoCancelamento = obj_
            obj_.original_tagname_ = 'InfConfirmacaoCancelamento'
# end class tcConfirmacaoCancelamento
class tcCancelamentoNfse(GeneratedsSuper):
    """NFS-e cancellation record: the cancellation confirmation
    (``Confirmacao``) plus an optional XML digital signature.
    """
    subclass = None
    superclass = None

    def __init__(self, Confirmacao=None, Signature=None):
        self.original_tagname_ = None
        self.Confirmacao = Confirmacao
        self.Signature = Signature

    def factory(*args_, **kwargs_):
        """Instantiate tcCancelamentoNfse, honouring subclass overrides."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tcCancelamentoNfse)
            if override is not None:
                return override(*args_, **kwargs_)
        if tcCancelamentoNfse.subclass:
            return tcCancelamentoNfse.subclass(*args_, **kwargs_)
        return tcCancelamentoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)

    def get_Confirmacao(self):
        return self.Confirmacao

    def set_Confirmacao(self, Confirmacao):
        self.Confirmacao = Confirmacao

    def get_Signature(self):
        return self.Signature

    def set_Signature(self, Signature):
        self.Signature = Signature

    def hasContent_(self):
        # Element is non-empty when either child is present.
        return (self.Confirmacao is not None or
                self.Signature is not None)

    def export(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCancelamentoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (
            namespace_, name_,
            (' ' + namespacedef_) if namespacedef_ else ''))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCancelamentoNfse')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tcCancelamentoNfse', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCancelamentoNfse'):
        # No XML attributes on this type.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tcCancelamentoNfse', fromsubclass_=False, pretty_print=True):
        if self.Confirmacao is not None:
            self.Confirmacao.export(outfile, level, namespace_, name_='Confirmacao', pretty_print=pretty_print)
        if self.Signature is not None:
            # Signatures are always emitted in the dsig namespace.
            self.Signature.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        seen = set()
        self.buildAttributes(node, node.attrib, seen)
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each recognised child tag to the class that parses it.
        child_types = {
            'Confirmacao': tcConfirmacaoCancelamento,
            'Signature': SignatureType,
        }
        ctype = child_types.get(nodeName_)
        if ctype is not None:
            obj_ = ctype.factory().build(child_)
            obj_.original_tagname_ = nodeName_
            setattr(self, nodeName_, obj_)
# end class tcCancelamentoNfse
class tcInfSubstituicaoNfse(GeneratedsSuper):
    """Substitution record: the number of the NFS-e that replaces this one
    (``NfseSubstituidora``), with an optional XML ``Id`` attribute.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, NfseSubstituidora=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.NfseSubstituidora = NfseSubstituidora
        # The replacing-invoice number is validated eagerly at construction.
        self.validate_tsNumeroNfse(self.NfseSubstituidora)
    def factory(*args_, **kwargs_):
        """Instantiate the class, honouring registered subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcInfSubstituicaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcInfSubstituicaoNfse.subclass:
            return tcInfSubstituicaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcInfSubstituicaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_NfseSubstituidora(self): return self.NfseSubstituidora
    def set_NfseSubstituidora(self, NfseSubstituidora): self.NfseSubstituidora = NfseSubstituidora
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroNfse(self, value):
        # Validate type tsNumeroNfse, a restriction on xsd:nonNegativeInteger.
        # Emits warnings only; never raises.
        # NOTE(review): the digit-count check approximates the schema's
        # maxInclusive bound, so any value of 15 or more digits warns —
        # a 15-digit value at or below the actual maximum also triggers
        # it.  TODO confirm the intended bound against the XSD.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroNfse' % {"value" : value} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when the single child element would be serialised.
        if (
            self.NfseSubstituidora is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', namespacedef_='', pretty_print=True):
        """Write this element (attributes and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcInfSubstituicaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcInfSubstituicaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcInfSubstituicaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcInfSubstituicaoNfse'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcInfSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.NfseSubstituidora is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NfseSubstituidora>%s</NfseSubstituidora>%s' % (self.gds_format_integer(self.NfseSubstituidora, input_name='NfseSubstituidora'), eol_))
    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Populate the single field from the child element; others ignored."""
        if nodeName_ == 'NfseSubstituidora':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'NfseSubstituidora')
            self.NfseSubstituidora = ival_
            # validate type tsNumeroNfse
            self.validate_tsNumeroNfse(self.NfseSubstituidora)
# end class tcInfSubstituicaoNfse
class tcSubstituicaoNfse(GeneratedsSuper):
    """Substitution wrapper: the ``SubstituicaoNfse`` payload plus zero or
    more XML digital signatures.  Unlike the other wrapper types here,
    ``Signature`` is a list (repeated element).
    """
    subclass = None
    superclass = None
    def __init__(self, SubstituicaoNfse=None, Signature=None):
        self.original_tagname_ = None
        self.SubstituicaoNfse = SubstituicaoNfse
        # Default to a fresh list per instance (never share a mutable default).
        if Signature is None:
            self.Signature = []
        else:
            self.Signature = Signature
    def factory(*args_, **kwargs_):
        """Instantiate the class, honouring registered subclass overrides."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcSubstituicaoNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcSubstituicaoNfse.subclass:
            return tcSubstituicaoNfse.subclass(*args_, **kwargs_)
        else:
            return tcSubstituicaoNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SubstituicaoNfse(self): return self.SubstituicaoNfse
    def set_SubstituicaoNfse(self, SubstituicaoNfse): self.SubstituicaoNfse = SubstituicaoNfse
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def add_Signature(self, value): self.Signature.append(value)
    def insert_Signature_at(self, index, value): self.Signature.insert(index, value)
    def replace_Signature_at(self, index, value): self.Signature[index] = value
    def hasContent_(self):
        # True when the payload is set or any signature is present.
        if (
            self.SubstituicaoNfse is not None or
            self.Signature
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcSubstituicaoNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcSubstituicaoNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcSubstituicaoNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcSubstituicaoNfse'):
        # No XML attributes on this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcSubstituicaoNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.SubstituicaoNfse is not None:
            self.SubstituicaoNfse.export(outfile, level, namespace_, name_='SubstituicaoNfse', pretty_print=pretty_print)
        # Each signature is emitted in the dsig namespace, in list order.
        for Signature_ in self.Signature:
            Signature_.export(outfile, level, namespace_='dsig:', name_='Signature', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'SubstituicaoNfse':
obj_ = tcInfSubstituicaoNfse.factory()
obj_.build(child_)
self.SubstituicaoNfse = obj_
obj_.original_tagname_ = 'SubstituicaoNfse'
elif nodeName_ == 'Signature':
obj_ = SignatureType.factory()
obj_.build(child_)
self.Signature.append(obj_)
obj_.original_tagname_ = 'Signature'
# end class tcSubstituicaoNfse
class tcCompNfse(GeneratedsSuper):
    """Generated binding for the 'tcCompNfse' complex type: a composite of
    an NFS-e (`Nfse`) and its optional cancellation (`NfseCancelamento`)
    and substitution (`NfseSubstituicao`) records.

    Auto-generated (generateDS-style) code; only documentation added.
    """
    subclass = None
    superclass = None
    def __init__(self, Nfse=None, NfseCancelamento=None, NfseSubstituicao=None):
        self.original_tagname_ = None
        self.Nfse = Nfse
        self.NfseCancelamento = NfseCancelamento
        self.NfseSubstituicao = NfseSubstituicao
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcCompNfse)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcCompNfse.subclass:
            return tcCompNfse.subclass(*args_, **kwargs_)
        else:
            return tcCompNfse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the three optional child elements.
    def get_Nfse(self): return self.Nfse
    def set_Nfse(self, Nfse): self.Nfse = Nfse
    def get_NfseCancelamento(self): return self.NfseCancelamento
    def set_NfseCancelamento(self, NfseCancelamento): self.NfseCancelamento = NfseCancelamento
    def get_NfseSubstituicao(self): return self.NfseSubstituicao
    def set_NfseSubstituicao(self, NfseSubstituicao): self.NfseSubstituicao = NfseSubstituicao
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.Nfse is not None or
            self.NfseCancelamento is not None or
            self.NfseSubstituicao is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcCompNfse', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcCompNfse')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcCompNfse')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcCompNfse', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcCompNfse'):
        # This type carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcCompNfse', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Nfse is not None:
            self.Nfse.export(outfile, level, namespace_, name_='Nfse', pretty_print=pretty_print)
        if self.NfseCancelamento is not None:
            self.NfseCancelamento.export(outfile, level, namespace_, name_='NfseCancelamento', pretty_print=pretty_print)
        if self.NfseSubstituicao is not None:
            self.NfseSubstituicao.export(outfile, level, namespace_, name_='NfseSubstituicao', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name and build the matching sub-object.
        if nodeName_ == 'Nfse':
            obj_ = tcNfse.factory()
            obj_.build(child_)
            self.Nfse = obj_
            obj_.original_tagname_ = 'Nfse'
        elif nodeName_ == 'NfseCancelamento':
            obj_ = tcCancelamentoNfse.factory()
            obj_.build(child_)
            self.NfseCancelamento = obj_
            obj_.original_tagname_ = 'NfseCancelamento'
        elif nodeName_ == 'NfseSubstituicao':
            obj_ = tcSubstituicaoNfse.factory()
            obj_.build(child_)
            self.NfseSubstituicao = obj_
            obj_.original_tagname_ = 'NfseSubstituicao'
# end class tcCompNfse
class ListaMensagemRetorno(GeneratedsSuper):
    """Generated binding for 'ListaMensagemRetorno': a list of
    MensagemRetorno (return/error message) elements.

    Auto-generated (generateDS-style) code; only documentation added.
    """
    subclass = None
    superclass = None
    def __init__(self, MensagemRetorno=None):
        self.original_tagname_ = None
        if MensagemRetorno is None:
            self.MensagemRetorno = []
        else:
            self.MensagemRetorno = MensagemRetorno
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ListaMensagemRetorno)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ListaMensagemRetorno.subclass:
            return ListaMensagemRetorno.subclass(*args_, **kwargs_)
        else:
            return ListaMensagemRetorno(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors and list mutators for the repeated child element.
    def get_MensagemRetorno(self): return self.MensagemRetorno
    def set_MensagemRetorno(self, MensagemRetorno): self.MensagemRetorno = MensagemRetorno
    def add_MensagemRetorno(self, value): self.MensagemRetorno.append(value)
    def insert_MensagemRetorno_at(self, index, value): self.MensagemRetorno.insert(index, value)
    def replace_MensagemRetorno_at(self, index, value): self.MensagemRetorno[index] = value
    def hasContent_(self):
        # True when the message list is non-empty.
        if (
            self.MensagemRetorno
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaMensagemRetorno')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaMensagemRetorno')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ListaMensagemRetorno', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaMensagemRetorno'):
        # This type carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='ListaMensagemRetorno', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for MensagemRetorno_ in self.MensagemRetorno:
            MensagemRetorno_.export(outfile, level, namespace_, name_='MensagemRetorno', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Collect each <MensagemRetorno> child into the message list.
        if nodeName_ == 'MensagemRetorno':
            obj_ = tcMensagemRetorno.factory()
            obj_.build(child_)
            self.MensagemRetorno.append(obj_)
            obj_.original_tagname_ = 'MensagemRetorno'
# end class ListaMensagemRetorno
class tcMensagemRetorno(GeneratedsSuper):
    """Generated binding for 'tcMensagemRetorno': one return message with a
    short code (`Codigo`, 1-4 chars), the message text (`Mensagem`) and a
    suggested correction (`Correcao`), both limited to 200 chars.

    Auto-generated (generateDS-style) code; only documentation added.
    Validation only issues warnings (via warnings_) — it never raises.
    """
    subclass = None
    superclass = None
    def __init__(self, Codigo=None, Mensagem=None, Correcao=None):
        self.original_tagname_ = None
        self.Codigo = Codigo
        self.validate_tsCodigoMensagemAlerta(self.Codigo)
        self.Mensagem = Mensagem
        self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
        self.Correcao = Correcao
        self.validate_tsDescricaoMensagemAlerta(self.Correcao)
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcMensagemRetorno)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcMensagemRetorno.subclass:
            return tcMensagemRetorno.subclass(*args_, **kwargs_)
        else:
            return tcMensagemRetorno(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the three text child elements.
    def get_Codigo(self): return self.Codigo
    def set_Codigo(self, Codigo): self.Codigo = Codigo
    def get_Mensagem(self): return self.Mensagem
    def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
    def get_Correcao(self): return self.Correcao
    def set_Correcao(self, Correcao): self.Correcao = Correcao
    def validate_tsCodigoMensagemAlerta(self, value):
        # Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
        # (length 1..4; violations only warn, they do not raise)
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def validate_tsDescricaoMensagemAlerta(self, value):
        # Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
        # (length 1..200; violations only warn, they do not raise)
        if value is not None and Validate_simpletypes_:
            if len(value) > 200:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.Codigo is not None or
            self.Mensagem is not None or
            self.Correcao is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcMensagemRetorno', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetorno')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetorno')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetorno', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetorno'):
        # This type carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetorno', fromsubclass_=False, pretty_print=True):
        # Each text child is XML-escaped (quote_xml) and encoded before writing.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Codigo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
        if self.Mensagem is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
        if self.Correcao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Correcao>%s</Correcao>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Correcao), input_name='Correcao')), eol_))
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag name; store the text and re-validate it.
        if nodeName_ == 'Codigo':
            Codigo_ = child_.text
            Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
            self.Codigo = Codigo_
            # validate type tsCodigoMensagemAlerta
            self.validate_tsCodigoMensagemAlerta(self.Codigo)
        elif nodeName_ == 'Mensagem':
            Mensagem_ = child_.text
            Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
            self.Mensagem = Mensagem_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
        elif nodeName_ == 'Correcao':
            Correcao_ = child_.text
            Correcao_ = self.gds_validate_string(Correcao_, node, 'Correcao')
            self.Correcao = Correcao_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Correcao)
# end class tcMensagemRetorno
class tcMensagemRetornoLote(GeneratedsSuper):
    """Generated binding for 'tcMensagemRetornoLote': a per-RPS return
    message within a batch — the RPS identification plus a message code
    (`Codigo`) and text (`Mensagem`).

    Auto-generated (generateDS-style) code; only documentation added.
    Validation only issues warnings (via warnings_) — it never raises.
    """
    subclass = None
    superclass = None
    def __init__(self, IdentificacaoRps=None, Codigo=None, Mensagem=None):
        self.original_tagname_ = None
        self.IdentificacaoRps = IdentificacaoRps
        self.Codigo = Codigo
        self.validate_tsCodigoMensagemAlerta(self.Codigo)
        self.Mensagem = Mensagem
        self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcMensagemRetornoLote)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcMensagemRetornoLote.subclass:
            return tcMensagemRetornoLote.subclass(*args_, **kwargs_)
        else:
            return tcMensagemRetornoLote(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the child elements.
    def get_IdentificacaoRps(self): return self.IdentificacaoRps
    def set_IdentificacaoRps(self, IdentificacaoRps): self.IdentificacaoRps = IdentificacaoRps
    def get_Codigo(self): return self.Codigo
    def set_Codigo(self, Codigo): self.Codigo = Codigo
    def get_Mensagem(self): return self.Mensagem
    def set_Mensagem(self, Mensagem): self.Mensagem = Mensagem
    def validate_tsCodigoMensagemAlerta(self, value):
        # Validate type tsCodigoMensagemAlerta, a restriction on xsd:string.
        # (length 1..4; violations only warn, they do not raise)
        if value is not None and Validate_simpletypes_:
            if len(value) > 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsCodigoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def validate_tsDescricaoMensagemAlerta(self, value):
        # Validate type tsDescricaoMensagemAlerta, a restriction on xsd:string.
        # (length 1..200; violations only warn, they do not raise)
        if value is not None and Validate_simpletypes_:
            if len(value) > 200:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsDescricaoMensagemAlerta' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.IdentificacaoRps is not None or
            self.Codigo is not None or
            self.Mensagem is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcMensagemRetornoLote')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcMensagemRetornoLote')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcMensagemRetornoLote', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcMensagemRetornoLote'):
        # This type carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tcMensagemRetornoLote', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.IdentificacaoRps is not None:
            self.IdentificacaoRps.export(outfile, level, namespace_, name_='IdentificacaoRps', pretty_print=pretty_print)
        if self.Codigo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Codigo>%s</Codigo>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Codigo), input_name='Codigo')), eol_))
        if self.Mensagem is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Mensagem>%s</Mensagem>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Mensagem), input_name='Mensagem')), eol_))
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag name; store the value and re-validate it.
        if nodeName_ == 'IdentificacaoRps':
            obj_ = tcIdentificacaoRps.factory()
            obj_.build(child_)
            self.IdentificacaoRps = obj_
            obj_.original_tagname_ = 'IdentificacaoRps'
        elif nodeName_ == 'Codigo':
            Codigo_ = child_.text
            Codigo_ = self.gds_validate_string(Codigo_, node, 'Codigo')
            self.Codigo = Codigo_
            # validate type tsCodigoMensagemAlerta
            self.validate_tsCodigoMensagemAlerta(self.Codigo)
        elif nodeName_ == 'Mensagem':
            Mensagem_ = child_.text
            Mensagem_ = self.gds_validate_string(Mensagem_, node, 'Mensagem')
            self.Mensagem = Mensagem_
            # validate type tsDescricaoMensagemAlerta
            self.validate_tsDescricaoMensagemAlerta(self.Mensagem)
# end class tcMensagemRetornoLote
class tcLoteRps(GeneratedsSuper):
    """Generated binding for 'tcLoteRps': an RPS batch (lote) — batch
    number, provider CNPJ and municipal registration, the RPS count and
    the list of RPS records — plus an optional 'Id' XML attribute.

    Auto-generated (generateDS-style) code; only documentation added.
    Validation only issues warnings (via warnings_) — it never raises.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, NumeroLote=None, Cnpj=None, InscricaoMunicipal=None, QuantidadeRps=None, ListaRps=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.NumeroLote = NumeroLote
        self.validate_tsNumeroLote(self.NumeroLote)
        self.Cnpj = Cnpj
        self.validate_tsCnpj(self.Cnpj)
        self.InscricaoMunicipal = InscricaoMunicipal
        self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        self.QuantidadeRps = QuantidadeRps
        self.validate_tsQuantidadeRps(self.QuantidadeRps)
        self.ListaRps = ListaRps
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tcLoteRps)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tcLoteRps.subclass:
            return tcLoteRps.subclass(*args_, **kwargs_)
        else:
            return tcLoteRps(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the child elements and the 'Id' attribute.
    def get_NumeroLote(self): return self.NumeroLote
    def set_NumeroLote(self, NumeroLote): self.NumeroLote = NumeroLote
    def get_Cnpj(self): return self.Cnpj
    def set_Cnpj(self, Cnpj): self.Cnpj = Cnpj
    def get_InscricaoMunicipal(self): return self.InscricaoMunicipal
    def set_InscricaoMunicipal(self, InscricaoMunicipal): self.InscricaoMunicipal = InscricaoMunicipal
    def get_QuantidadeRps(self): return self.QuantidadeRps
    def set_QuantidadeRps(self, QuantidadeRps): self.QuantidadeRps = QuantidadeRps
    def get_ListaRps(self): return self.ListaRps
    def set_ListaRps(self, ListaRps): self.ListaRps = ListaRps
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def validate_tsNumeroLote(self, value):
        # Validate type tsNumeroLote, a restriction on xsd:nonNegativeInteger.
        # NOTE: the generator approximates the xsd maxInclusive bound by
        # digit count (>= 15 digits warns); violations only warn.
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsNumeroLote' % {"value" : value} )
    def validate_tsCnpj(self, value):
        # Validate type tsCnpj, a restriction on xsd:string.
        # (exactly 14 characters; violations only warn)
        if value is not None and Validate_simpletypes_:
            if len(value) != 14:
                warnings_.warn('Value "%(value)s" does not match xsd length restriction on tsCnpj' % {"value" : value.encode("utf-8")} )
    def validate_tsInscricaoMunicipal(self, value):
        # Validate type tsInscricaoMunicipal, a restriction on xsd:string.
        # (length 1..15; violations only warn)
        if value is not None and Validate_simpletypes_:
            if len(value) > 15:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
            if len(value) < 1:
                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tsInscricaoMunicipal' % {"value" : value.encode("utf-8")} )
    def validate_tsQuantidadeRps(self, value):
        # Validate type tsQuantidadeRps, a restriction on xsd:int.
        # NOTE: maxInclusive approximated by digit count (>= 4 digits warns).
        if value is not None and Validate_simpletypes_:
            if len(str(value)) >= 4:
                warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tsQuantidadeRps' % {"value" : value} )
    def validate_tsIdTag(self, value):
        # Validate type tsIdTag, a restriction on xsd:string.
        # (max length 255; violations only warn)
        if value is not None and Validate_simpletypes_:
            if len(value) > 255:
                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tsIdTag' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.NumeroLote is not None or
            self.Cnpj is not None or
            self.InscricaoMunicipal is not None or
            self.QuantidadeRps is not None or
            self.ListaRps is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tcLoteRps', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tcLoteRps')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tcLoteRps')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tcLoteRps', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tcLoteRps'):
        # Write the optional 'Id' XML attribute.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='tcLoteRps', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.NumeroLote is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<NumeroLote>%s</NumeroLote>%s' % (self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), eol_))
        if self.Cnpj is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<Cnpj>%s</Cnpj>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.Cnpj), input_name='Cnpj')), eol_))
        if self.InscricaoMunicipal is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<InscricaoMunicipal>%s</InscricaoMunicipal>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), eol_))
        if self.QuantidadeRps is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<QuantidadeRps>%s</QuantidadeRps>%s' % (self.gds_format_integer(self.QuantidadeRps, input_name='QuantidadeRps'), eol_))
        if self.ListaRps is not None:
            self.ListaRps.export(outfile, level, namespace_, name_='ListaRps', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse the optional 'Id' XML attribute.
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
            self.validate_tsIdTag(self.Id)    # validate type tsIdTag
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag name; parse/convert the value and re-validate.
        if nodeName_ == 'NumeroLote':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            if ival_ < 0:
                raise_parse_error(child_, 'requires nonNegativeInteger')
            ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote')
            self.NumeroLote = ival_
            # validate type tsNumeroLote
            self.validate_tsNumeroLote(self.NumeroLote)
        elif nodeName_ == 'Cnpj':
            Cnpj_ = child_.text
            Cnpj_ = self.gds_validate_string(Cnpj_, node, 'Cnpj')
            self.Cnpj = Cnpj_
            # validate type tsCnpj
            self.validate_tsCnpj(self.Cnpj)
        elif nodeName_ == 'InscricaoMunicipal':
            InscricaoMunicipal_ = child_.text
            InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal')
            self.InscricaoMunicipal = InscricaoMunicipal_
            # validate type tsInscricaoMunicipal
            self.validate_tsInscricaoMunicipal(self.InscricaoMunicipal)
        elif nodeName_ == 'QuantidadeRps':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'QuantidadeRps')
            self.QuantidadeRps = ival_
            # validate type tsQuantidadeRps
            self.validate_tsQuantidadeRps(self.QuantidadeRps)
        elif nodeName_ == 'ListaRps':
            obj_ = ListaRpsType.factory()
            obj_.build(child_)
            self.ListaRps = obj_
            obj_.original_tagname_ = 'ListaRps'
# end class tcLoteRps
class SignatureType(GeneratedsSuper):
    """Generated binding for the XML-DSig 'SignatureType': SignedInfo,
    SignatureValue, optional KeyInfo, repeated Object children, and an
    optional 'Id' XML attribute. Children are serialized with the 'ds:'
    namespace prefix.

    Auto-generated (generateDS-style) code; only documentation added.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None, Object=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.SignedInfo = SignedInfo
        self.SignatureValue = SignatureValue
        self.KeyInfo = KeyInfo
        if Object is None:
            self.Object = []
        else:
            self.Object = Object
    def factory(*args_, **kwargs_):
        # Instantiate this class or a registered subclass (generateDS hook).
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureType.subclass:
            return SignatureType.subclass(*args_, **kwargs_)
        else:
            return SignatureType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors and list mutators for children and the 'Id' attribute.
    def get_SignedInfo(self): return self.SignedInfo
    def set_SignedInfo(self, SignedInfo): self.SignedInfo = SignedInfo
    def get_SignatureValue(self): return self.SignatureValue
    def set_SignatureValue(self, SignatureValue): self.SignatureValue = SignatureValue
    def get_KeyInfo(self): return self.KeyInfo
    def set_KeyInfo(self, KeyInfo): self.KeyInfo = KeyInfo
    def get_Object(self): return self.Object
    def set_Object(self, Object): self.Object = Object
    def add_Object(self, value): self.Object.append(value)
    def insert_Object_at(self, index, value): self.Object.insert(index, value)
    def replace_Object_at(self, index, value): self.Object[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one child element would be serialized.
        if (
            self.SignedInfo is not None or
            self.SignatureValue is not None or
            self.KeyInfo is not None or
            self.Object
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureType', namespacedef_='', pretty_print=True):
        # Serialize this object as an XML element to `outfile`.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureType'):
        # Write the optional 'Id' XML attribute.
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureType', fromsubclass_=False, pretty_print=True):
        # All children are emitted under the 'ds:' namespace prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.SignedInfo is not None:
            self.SignedInfo.export(outfile, level, namespace_='ds:', name_='SignedInfo', pretty_print=pretty_print)
        if self.SignatureValue is not None:
            self.SignatureValue.export(outfile, level, namespace_='ds:', name_='SignatureValue', pretty_print=pretty_print)
        if self.KeyInfo is not None:
            self.KeyInfo.export(outfile, level, namespace_='ds:', name_='KeyInfo', pretty_print=pretty_print)
        for Object_ in self.Object:
            Object_.export(outfile, level, namespace_='ds:', name_='Object', pretty_print=pretty_print)
    def build(self, node):
        # Populate this object from an ElementTree/lxml node; returns self.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # Parse the optional 'Id' XML attribute (no type validation here).
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child tag name and build the matching sub-object.
        if nodeName_ == 'SignedInfo':
            obj_ = SignedInfoType.factory()
            obj_.build(child_)
            self.SignedInfo = obj_
            obj_.original_tagname_ = 'SignedInfo'
        elif nodeName_ == 'SignatureValue':
            obj_ = SignatureValueType.factory()
            obj_.build(child_)
            self.SignatureValue = obj_
            obj_.original_tagname_ = 'SignatureValue'
        elif nodeName_ == 'KeyInfo':
            obj_ = KeyInfoType.factory()
            obj_.build(child_)
            self.KeyInfo = obj_
            obj_.original_tagname_ = 'KeyInfo'
        elif nodeName_ == 'Object':
            obj_ = ObjectType.factory()
            obj_.build(child_)
            self.Object.append(obj_)
            obj_.original_tagname_ = 'Object'
# end class SignatureType
class SignatureValueType(GeneratedsSuper):
    """Binding for the XML-DSig ``SignatureValue`` element.

    Holds the element's text content in ``valueOf_`` (the base64 signature
    string per the ds: schema — not decoded here) plus an optional ``Id``
    attribute.  Auto-generated class; getters/setters mirror the schema.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, valueOf_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass module, then the
        # class-level ``subclass`` hook, before this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureValueType.subclass:
            return SignatureValueType.subclass(*args_, **kwargs_)
        else:
            return SignatureValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0 still counts as content; only None/'' suppress the body.
        if (
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureValueType', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile*, self-closing when empty."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureValueType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(self.convert_unicode(self.valueOf_))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureValueType', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureValueType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureValueType', fromsubclass_=False, pretty_print=True):
        # Simple-content element: no child elements to emit.
        pass
    def build(self, node):
        """Populate from *node*: attributes, text content, then children."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class SignatureValueType
class SignedInfoType(GeneratedsSuper):
    """Binding for the XML-DSig ``SignedInfo`` element.

    Children: one ``CanonicalizationMethod``, one ``SignatureMethod`` and a
    repeating list of ``Reference`` elements; optional ``Id`` attribute.
    Auto-generated class.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.CanonicalizationMethod = CanonicalizationMethod
        self.SignatureMethod = SignatureMethod
        # Fresh list per instance when no Reference sequence is supplied.
        if Reference is None:
            self.Reference = []
        else:
            self.Reference = Reference
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignedInfoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignedInfoType.subclass:
            return SignedInfoType.subclass(*args_, **kwargs_)
        else:
            return SignedInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_CanonicalizationMethod(self): return self.CanonicalizationMethod
    def set_CanonicalizationMethod(self, CanonicalizationMethod): self.CanonicalizationMethod = CanonicalizationMethod
    def get_SignatureMethod(self): return self.SignatureMethod
    def set_SignatureMethod(self, SignatureMethod): self.SignatureMethod = SignatureMethod
    def get_Reference(self): return self.Reference
    def set_Reference(self, Reference): self.Reference = Reference
    def add_Reference(self, value): self.Reference.append(value)
    def insert_Reference_at(self, index, value): self.Reference.insert(index, value)
    def replace_Reference_at(self, index, value): self.Reference[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        if (
            self.CanonicalizationMethod is not None or
            self.SignatureMethod is not None or
            self.Reference
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignedInfoType', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignedInfoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignedInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignedInfoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignedInfoType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignedInfoType', fromsubclass_=False, pretty_print=True):
        # Children are emitted in schema order under the ds: prefix.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.CanonicalizationMethod is not None:
            self.CanonicalizationMethod.export(outfile, level, namespace_='ds:', name_='CanonicalizationMethod', pretty_print=pretty_print)
        if self.SignatureMethod is not None:
            self.SignatureMethod.export(outfile, level, namespace_='ds:', name_='SignatureMethod', pretty_print=pretty_print)
        for Reference_ in self.Reference:
            Reference_.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*; returns ``self`` for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'CanonicalizationMethod':
            obj_ = CanonicalizationMethodType.factory()
            obj_.build(child_)
            self.CanonicalizationMethod = obj_
            obj_.original_tagname_ = 'CanonicalizationMethod'
        elif nodeName_ == 'SignatureMethod':
            obj_ = SignatureMethodType.factory()
            obj_.build(child_)
            self.SignatureMethod = obj_
            obj_.original_tagname_ = 'SignatureMethod'
        elif nodeName_ == 'Reference':
            obj_ = ReferenceType.factory()
            obj_.build(child_)
            self.Reference.append(obj_)
            obj_.original_tagname_ = 'Reference'
# end class SignedInfoType
class CanonicalizationMethodType(GeneratedsSuper):
    """Binding for the XML-DSig ``CanonicalizationMethod`` element.

    A mixed-content element: an ``Algorithm`` attribute plus arbitrary
    wildcard children tracked both in ``anytypeobjs_`` and, interleaved with
    text, in ``content_`` (a list of MixedContainer items).  Auto-generated
    class with two hand fixes: ``insert_anytypeobjs_`` previously wrote to a
    nonexistent ``self._anytypeobjs_`` and replaced instead of inserting,
    and ``__init__`` assigned ``valueOf_`` twice.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        # Fresh list per instance when no wildcard children are supplied.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, CanonicalizationMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if CanonicalizationMethodType.subclass:
            return CanonicalizationMethodType.subclass(*args_, **kwargs_)
        else:
            return CanonicalizationMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: was ``self._anytypeobjs_[index] = value`` (AttributeError, and
    # replace semantics rather than insert).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0 still counts as content; only None/'' suppress the body.
        if (
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalizationMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CanonicalizationMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='CanonicalizationMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='CanonicalizationMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed in document order from content_.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*, preserving mixed text/element order."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): the wildcard branch compares against '' and calls
        # __ANY__ — a generateDS placeholder; this branch is effectively
        # unreachable for named children. Kept as generated.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class CanonicalizationMethodType
class SignatureMethodType(GeneratedsSuper):
    """Binding for the XML-DSig ``SignatureMethod`` element.

    Mixed-content element with an ``Algorithm`` attribute, an optional
    integer ``HMACOutputLength`` child, and wildcard children tracked in
    ``anytypeobjs_``/``content_``.  Auto-generated class with two hand
    fixes: ``insert_anytypeobjs_`` previously wrote to a nonexistent
    ``self._anytypeobjs_`` and replaced instead of inserting, and
    ``__init__`` assigned ``valueOf_`` twice.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, HMACOutputLength=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        self.HMACOutputLength = HMACOutputLength
        self.validate_HMACOutputLengthType(self.HMACOutputLength)
        # Fresh list per instance when no wildcard children are supplied.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignatureMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignatureMethodType.subclass:
            return SignatureMethodType.subclass(*args_, **kwargs_)
        else:
            return SignatureMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_HMACOutputLength(self): return self.HMACOutputLength
    def set_HMACOutputLength(self, HMACOutputLength): self.HMACOutputLength = HMACOutputLength
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: was ``self._anytypeobjs_[index] = value`` (AttributeError, and
    # replace semantics rather than insert).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def validate_HMACOutputLengthType(self, value):
        # Validate type HMACOutputLengthType, a restriction on integer.
        # Generated as a no-op placeholder; extend here if needed.
        if value is not None and Validate_simpletypes_:
            pass
    def hasContent_(self):
        # Numeric 0 still counts as content; only None/'' suppress the body.
        if (
            self.HMACOutputLength is not None or
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignatureMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignatureMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignatureMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignatureMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed in document order from content_.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.HMACOutputLength is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<HMACOutputLength>%s</HMACOutputLength>%s' % (self.gds_format_integer(self.HMACOutputLength, input_name='HMACOutputLength'), eol_))
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*, preserving mixed text/element order."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'HMACOutputLength' and child_.text is not None:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeInteger, 'HMACOutputLength', ival_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            # generateDS wildcard placeholder (__ANY__); effectively
            # unreachable for named children. Kept as generated.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignatureMethodType
class ReferenceType(GeneratedsSuper):
    """Binding for the XML-DSig ``Reference`` element.

    Attributes ``Id``, ``URI``, ``Type``; children ``Transforms``,
    ``DigestMethod`` and base64 ``DigestValue`` (stored decoded as bytes).
    Auto-generated class.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, URI=None, Type=None, Transforms=None, DigestMethod=None, DigestValue=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.URI = _cast(None, URI)
        self.Type = _cast(None, Type)
        self.Transforms = Transforms
        self.DigestMethod = DigestMethod
        self.DigestValue = DigestValue
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ReferenceType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ReferenceType.subclass:
            return ReferenceType.subclass(*args_, **kwargs_)
        else:
            return ReferenceType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Transforms(self): return self.Transforms
    def set_Transforms(self, Transforms): self.Transforms = Transforms
    def get_DigestMethod(self): return self.DigestMethod
    def set_DigestMethod(self, DigestMethod): self.DigestMethod = DigestMethod
    def get_DigestValue(self): return self.DigestValue
    def set_DigestValue(self, DigestValue): self.DigestValue = DigestValue
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_URI(self): return self.URI
    def set_URI(self, URI): self.URI = URI
    def get_Type(self): return self.Type
    def set_Type(self, Type): self.Type = Type
    def hasContent_(self):
        if (
            self.Transforms is not None or
            self.DigestMethod is not None or
            self.DigestValue is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ReferenceType', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReferenceType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReferenceType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ReferenceType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ReferenceType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ReferenceType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
        if self.DigestMethod is not None:
            self.DigestMethod.export(outfile, level, namespace_='ds:', name_='DigestMethod', pretty_print=pretty_print)
        if self.DigestValue is not None:
            # DigestValue is re-encoded to base64 text on output.
            showIndent(outfile, level, pretty_print)
            outfile.write('<ds:DigestValue>%s</ds:DigestValue>%s' % (self.gds_format_base64(self.DigestValue, input_name='DigestValue'), eol_))
    def build(self, node):
        """Populate from *node*; returns ``self`` for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
        value = find_attr_value_('URI', node)
        if value is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            self.URI = value
        value = find_attr_value_('Type', node)
        if value is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            self.Type = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
            obj_.original_tagname_ = 'Transforms'
        elif nodeName_ == 'DigestMethod':
            obj_ = DigestMethodType.factory()
            obj_.build(child_)
            self.DigestMethod = obj_
            obj_.original_tagname_ = 'DigestMethod'
        elif nodeName_ == 'DigestValue':
            # Decode the base64 text payload; parse errors are reported
            # with element context via raise_parse_error.
            sval_ = child_.text
            if sval_ is not None:
                try:
                    bval_ = base64.b64decode(sval_)
                except (TypeError, ValueError) as exp:
                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
                bval_ = self.gds_validate_base64(bval_, node, 'DigestValue')
            else:
                bval_ = None
            self.DigestValue = bval_
# end class ReferenceType
class TransformsType(GeneratedsSuper):
    """Binding for the XML-DSig ``Transforms`` element: a repeating list
    of ``Transform`` children and nothing else.  Auto-generated class."""
    subclass = None
    superclass = None
    def __init__(self, Transform=None):
        self.original_tagname_ = None
        # Fresh list per instance when no Transform sequence is supplied.
        if Transform is None:
            self.Transform = []
        else:
            self.Transform = Transform
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TransformsType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TransformsType.subclass:
            return TransformsType.subclass(*args_, **kwargs_)
        else:
            return TransformsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Transform(self): return self.Transform
    def set_Transform(self, Transform): self.Transform = Transform
    def add_Transform(self, value): self.Transform.append(value)
    def insert_Transform_at(self, index, value): self.Transform.insert(index, value)
    def replace_Transform_at(self, index, value): self.Transform[index] = value
    def hasContent_(self):
        if (
            self.Transform
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TransformsType', namespacedef_='', pretty_print=True):
        """Serialize this element and its Transform children to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformsType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TransformsType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformsType'):
        # Transforms carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TransformsType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Transform_ in self.Transform:
            Transform_.export(outfile, level, namespace_='ds:', name_='Transform', pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*; returns ``self`` for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Transform':
            obj_ = TransformType.factory()
            obj_.build(child_)
            self.Transform.append(obj_)
            obj_.original_tagname_ = 'Transform'
# end class TransformsType
class TransformType(GeneratedsSuper):
    """Binding for the XML-DSig ``Transform`` element.

    Mixed-content element with an ``Algorithm`` attribute, repeating
    string ``XPath`` children, and a single wildcard slot
    (``anytypeobjs_`` here is a scalar, unlike the list-valued variants
    in sibling classes).  Auto-generated class.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, XPath=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        self.anytypeobjs_ = anytypeobjs_
        # Fresh list per instance when no XPath sequence is supplied.
        if XPath is None:
            self.XPath = []
        else:
            self.XPath = XPath
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TransformType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TransformType.subclass:
            return TransformType.subclass(*args_, **kwargs_)
        else:
            return TransformType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_XPath(self): return self.XPath
    def set_XPath(self, XPath): self.XPath = XPath
    def add_XPath(self, value): self.XPath.append(value)
    def insert_XPath_at(self, index, value): self.XPath.insert(index, value)
    def replace_XPath_at(self, index, value): self.XPath[index] = value
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0 still counts as content; only None/'' suppress the body.
        if (
            self.anytypeobjs_ is not None or
            self.XPath or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TransformType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TransformType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TransformType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TransformType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='TransformType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed in document order from content_.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for XPath_ in self.XPath:
            showIndent(outfile, level, pretty_print)
            outfile.write('<XPath>%s</XPath>%s' % (self.gds_encode(self.gds_format_string(quote_xml(XPath_), input_name='XPath')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*, preserving mixed text/element order."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): the '' branch with __ANY__ is a generateDS wildcard
        # placeholder; effectively unreachable for named children.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        elif nodeName_ == 'XPath' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'XPath', valuestr_)
            self.content_.append(obj_)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class TransformType
class DigestMethodType(GeneratedsSuper):
    """Binding for the XML-DSig ``DigestMethod`` element.

    Mixed-content element with an ``Algorithm`` attribute and wildcard
    children tracked in ``anytypeobjs_``/``content_``.  Auto-generated
    class with two hand fixes: ``insert_anytypeobjs_`` previously wrote to
    a nonexistent ``self._anytypeobjs_`` and replaced instead of
    inserting, and ``__init__`` assigned ``valueOf_`` twice.
    """
    subclass = None
    superclass = None
    def __init__(self, Algorithm=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Algorithm = _cast(None, Algorithm)
        # Fresh list per instance when no wildcard children are supplied.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
    def factory(*args_, **kwargs_):
        # Subclass-module hook, then class hook, then this class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DigestMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DigestMethodType.subclass:
            return DigestMethodType.subclass(*args_, **kwargs_)
        else:
            return DigestMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: was ``self._anytypeobjs_[index] = value`` (AttributeError, and
    # replace semantics rather than insert).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def get_Algorithm(self): return self.Algorithm
    def set_Algorithm(self, Algorithm): self.Algorithm = Algorithm
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric 0 still counts as content; only None/'' suppress the body.
        if (
            self.anytypeobjs_ or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='DigestMethodType', namespacedef_='', pretty_print=True):
        """Serialize this element and its mixed content to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DigestMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DigestMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='DigestMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DigestMethodType'):
        if self.Algorithm is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            outfile.write(' Algorithm=%s' % (quote_attrib(self.Algorithm), ))
    def exportChildren(self, outfile, level, namespace_='', name_='DigestMethodType', fromsubclass_=False, pretty_print=True):
        # Mixed content is replayed in document order from content_.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate from *node*, preserving mixed text/element order."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Algorithm', node)
        if value is not None and 'Algorithm' not in already_processed:
            already_processed.add('Algorithm')
            self.Algorithm = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # NOTE(review): the '' branch with __ANY__ is a generateDS wildcard
        # placeholder; effectively unreachable for named children.
        if nodeName_ == '':
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class DigestMethodType
class KeyInfoType(GeneratedsSuper):
    """Binding for the XML Digital Signature ``ds:KeyInfoType``: a
    mixed-content container holding any combination of KeyName, KeyValue,
    RetrievalMethod, X509Data, PGPData, SPKIData, MgmtData and wildcard
    children, plus an optional Id attribute.  Child order is preserved via
    ``content_`` (a list of MixedContainer items) so documents round-trip.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, KeyName=None, KeyValue=None, RetrievalMethod=None, X509Data=None, PGPData=None, SPKIData=None, MgmtData=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Each repeating child defaults to a fresh empty list (never a shared
        # mutable default argument).
        if KeyName is None:
            self.KeyName = []
        else:
            self.KeyName = KeyName
        if KeyValue is None:
            self.KeyValue = []
        else:
            self.KeyValue = KeyValue
        if RetrievalMethod is None:
            self.RetrievalMethod = []
        else:
            self.RetrievalMethod = RetrievalMethod
        if X509Data is None:
            self.X509Data = []
        else:
            self.X509Data = X509Data
        if PGPData is None:
            self.PGPData = []
        else:
            self.PGPData = PGPData
        if SPKIData is None:
            self.SPKIData = []
        else:
            self.SPKIData = SPKIData
        if MgmtData is None:
            self.MgmtData = []
        else:
            self.MgmtData = MgmtData
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): valueOf_ is assigned twice by the generator; harmless.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer a subclass registered in CurrentSubclassModule_, then the
        # class-level ``subclass`` hook, else build a plain instance.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, KeyInfoType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if KeyInfoType.subclass:
            return KeyInfoType.subclass(*args_, **kwargs_)
        else:
            return KeyInfoType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors for each repeating child element ---
    def get_KeyName(self): return self.KeyName
    def set_KeyName(self, KeyName): self.KeyName = KeyName
    def add_KeyName(self, value): self.KeyName.append(value)
    def insert_KeyName_at(self, index, value): self.KeyName.insert(index, value)
    def replace_KeyName_at(self, index, value): self.KeyName[index] = value
    def get_KeyValue(self): return self.KeyValue
    def set_KeyValue(self, KeyValue): self.KeyValue = KeyValue
    def add_KeyValue(self, value): self.KeyValue.append(value)
    def insert_KeyValue_at(self, index, value): self.KeyValue.insert(index, value)
    def replace_KeyValue_at(self, index, value): self.KeyValue[index] = value
    def get_RetrievalMethod(self): return self.RetrievalMethod
    def set_RetrievalMethod(self, RetrievalMethod): self.RetrievalMethod = RetrievalMethod
    def add_RetrievalMethod(self, value): self.RetrievalMethod.append(value)
    def insert_RetrievalMethod_at(self, index, value): self.RetrievalMethod.insert(index, value)
    def replace_RetrievalMethod_at(self, index, value): self.RetrievalMethod[index] = value
    def get_X509Data(self): return self.X509Data
    def set_X509Data(self, X509Data): self.X509Data = X509Data
    def add_X509Data(self, value): self.X509Data.append(value)
    def insert_X509Data_at(self, index, value): self.X509Data.insert(index, value)
    def replace_X509Data_at(self, index, value): self.X509Data[index] = value
    def get_PGPData(self): return self.PGPData
    def set_PGPData(self, PGPData): self.PGPData = PGPData
    def add_PGPData(self, value): self.PGPData.append(value)
    def insert_PGPData_at(self, index, value): self.PGPData.insert(index, value)
    def replace_PGPData_at(self, index, value): self.PGPData[index] = value
    def get_SPKIData(self): return self.SPKIData
    def set_SPKIData(self, SPKIData): self.SPKIData = SPKIData
    def add_SPKIData(self, value): self.SPKIData.append(value)
    def insert_SPKIData_at(self, index, value): self.SPKIData.insert(index, value)
    def replace_SPKIData_at(self, index, value): self.SPKIData[index] = value
    def get_MgmtData(self): return self.MgmtData
    def set_MgmtData(self, MgmtData): self.MgmtData = MgmtData
    def add_MgmtData(self, value): self.MgmtData.append(value)
    def insert_MgmtData_at(self, index, value): self.MgmtData.insert(index, value)
    def replace_MgmtData_at(self, index, value): self.MgmtData[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when any child list is non-empty, a wildcard child exists, or
        # there is a text value (numeric 0/0.0 still counts as content).
        if (
            self.KeyName or
            self.KeyValue or
            self.RetrievalMethod or
            self.X509Data or
            self.PGPData or
            self.SPKIData or
            self.MgmtData or
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='KeyInfoType', namespacedef_='', pretty_print=True):
        # Serialize this element; registered namespace defs and the tag name
        # captured at parse time override the defaults passed in.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyInfoType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyInfoType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='KeyInfoType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyInfoType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='KeyInfoType', fromsubclass_=False, pretty_print=True):
        # Mixed content first (preserves parse order), then each child list
        # grouped by element name in schema order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for KeyName_ in self.KeyName:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ds:KeyName>%s</ds:KeyName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(KeyName_), input_name='KeyName')), eol_))
        for KeyValue_ in self.KeyValue:
            KeyValue_.export(outfile, level, namespace_='ds:', name_='KeyValue', pretty_print=pretty_print)
        for RetrievalMethod_ in self.RetrievalMethod:
            RetrievalMethod_.export(outfile, level, namespace_='ds:', name_='RetrievalMethod', pretty_print=pretty_print)
        for X509Data_ in self.X509Data:
            X509Data_.export(outfile, level, namespace_='ds:', name_='X509Data', pretty_print=pretty_print)
        for PGPData_ in self.PGPData:
            PGPData_.export(outfile, level, namespace_='ds:', name_='PGPData', pretty_print=pretty_print)
        for SPKIData_ in self.SPKIData:
            SPKIData_.export(outfile, level, namespace_='ds:', name_='SPKIData', pretty_print=pretty_print)
        for MgmtData_ in self.MgmtData:
            showIndent(outfile, level, pretty_print)
            outfile.write('<ds:MgmtData>%s</ds:MgmtData>%s' % (self.gds_encode(self.gds_format_string(quote_xml(MgmtData_), input_name='MgmtData')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        # Populate from an ElementTree node; leading text becomes a
        # MixedContainer so mixed content round-trips.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on the child's local tag name.  Each complex child is
        # wrapped in a MixedContainer (preserving order) AND appended to its
        # typed list via the add_*/set_* hooks.
        if nodeName_ == 'KeyName' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'KeyName', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == 'KeyValue':
            obj_ = KeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'KeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_KeyValue'):
                self.add_KeyValue(obj_.value)
            elif hasattr(self, 'set_KeyValue'):
                self.set_KeyValue(obj_.value)
        elif nodeName_ == 'RetrievalMethod':
            obj_ = RetrievalMethodType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RetrievalMethod', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RetrievalMethod'):
                self.add_RetrievalMethod(obj_.value)
            elif hasattr(self, 'set_RetrievalMethod'):
                self.set_RetrievalMethod(obj_.value)
        elif nodeName_ == 'X509Data':
            obj_ = X509DataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'X509Data', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_X509Data'):
                self.add_X509Data(obj_.value)
            elif hasattr(self, 'set_X509Data'):
                self.set_X509Data(obj_.value)
        elif nodeName_ == 'PGPData':
            obj_ = PGPDataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'PGPData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_PGPData'):
                self.add_PGPData(obj_.value)
            elif hasattr(self, 'set_PGPData'):
                self.set_PGPData(obj_.value)
        elif nodeName_ == 'SPKIData':
            obj_ = SPKIDataType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'SPKIData', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_SPKIData'):
                self.add_SPKIData(obj_.value)
            elif hasattr(self, 'set_SPKIData'):
                self.set_SPKIData(obj_.value)
        elif nodeName_ == 'MgmtData' and child_.text is not None:
            valuestr_ = child_.text
            obj_ = self.mixedclass_(MixedContainer.CategorySimple,
                MixedContainer.TypeString, 'MgmtData', valuestr_)
            self.content_.append(obj_)
        elif nodeName_ == '':
            # Wildcard (xs:any) child; ``__ANY__`` is bound elsewhere in
            # this generated module.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class KeyInfoType
class KeyValueType(GeneratedsSuper):
    """Binding for ``ds:KeyValueType``: holds a single DSAKeyValue or
    RSAKeyValue (or a wildcard child).  Mixed content order is preserved
    via ``content_``.
    """
    subclass = None
    superclass = None
    def __init__(self, DSAKeyValue=None, RSAKeyValue=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.DSAKeyValue = DSAKeyValue
        self.RSAKeyValue = RSAKeyValue
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): valueOf_ is assigned twice by the generator; harmless.
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass module, then the ``subclass`` hook.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, KeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if KeyValueType.subclass:
            return KeyValueType.subclass(*args_, **kwargs_)
        else:
            return KeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_DSAKeyValue(self): return self.DSAKeyValue
    def set_DSAKeyValue(self, DSAKeyValue): self.DSAKeyValue = DSAKeyValue
    def get_RSAKeyValue(self): return self.RSAKeyValue
    def set_RSAKeyValue(self, RSAKeyValue): self.RSAKeyValue = RSAKeyValue
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # True when any child or a text value would be serialized.
        if (
            self.DSAKeyValue is not None or
            self.RSAKeyValue is not None or
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='KeyValueType', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='KeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='KeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='KeyValueType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='KeyValueType', fromsubclass_=False, pretty_print=True):
        # Mixed content first (parse order), then the typed children.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.DSAKeyValue is not None:
            self.DSAKeyValue.export(outfile, level, namespace_='ds:', name_='DSAKeyValue', pretty_print=pretty_print)
        if self.RSAKeyValue is not None:
            self.RSAKeyValue.export(outfile, level, namespace_='ds:', name_='RSAKeyValue', pretty_print=pretty_print)
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on local tag name; wrap in MixedContainer to keep order.
        if nodeName_ == 'DSAKeyValue':
            obj_ = DSAKeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'DSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_DSAKeyValue'):
                self.add_DSAKeyValue(obj_.value)
            elif hasattr(self, 'set_DSAKeyValue'):
                self.set_DSAKeyValue(obj_.value)
        elif nodeName_ == 'RSAKeyValue':
            obj_ = RSAKeyValueType.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, 'RSAKeyValue', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_RSAKeyValue'):
                self.add_RSAKeyValue(obj_.value)
            elif hasattr(self, 'set_RSAKeyValue'):
                self.set_RSAKeyValue(obj_.value)
        elif nodeName_ == '':
            # Wildcard (xs:any) child; ``__ANY__`` is bound elsewhere.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class KeyValueType
class RetrievalMethodType(GeneratedsSuper):
    """Binding for ``ds:RetrievalMethodType``: a URI/Type attribute pair
    referencing key data elsewhere, with an optional Transforms child.
    """
    subclass = None
    superclass = None
    def __init__(self, URI=None, Type=None, Transforms=None):
        self.original_tagname_ = None
        self.URI = _cast(None, URI)
        self.Type = _cast(None, Type)
        self.Transforms = Transforms
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass module, then the ``subclass`` hook.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RetrievalMethodType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RetrievalMethodType.subclass:
            return RetrievalMethodType.subclass(*args_, **kwargs_)
        else:
            return RetrievalMethodType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Transforms(self): return self.Transforms
    def set_Transforms(self, Transforms): self.Transforms = Transforms
    def get_URI(self): return self.URI
    def set_URI(self, URI): self.URI = URI
    def get_Type(self): return self.Type
    def set_Type(self, Type): self.Type = Type
    def hasContent_(self):
        # Only the Transforms child determines element content; URI/Type
        # are attributes and do not count.
        if (
            self.Transforms is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='RetrievalMethodType', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetrievalMethodType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RetrievalMethodType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='RetrievalMethodType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RetrievalMethodType'):
        if self.URI is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            outfile.write(' URI=%s' % (quote_attrib(self.URI), ))
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (quote_attrib(self.Type), ))
    def exportChildren(self, outfile, level, namespace_='', name_='RetrievalMethodType', fromsubclass_=False, pretty_print=True):
        # NOTE(review): eol_ is computed but unused here (generator artifact).
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.Transforms is not None:
            self.Transforms.export(outfile, level, namespace_='ds:', name_='Transforms', pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('URI', node)
        if value is not None and 'URI' not in already_processed:
            already_processed.add('URI')
            self.URI = value
        value = find_attr_value_('Type', node)
        if value is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            self.Type = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Transforms':
            obj_ = TransformsType.factory()
            obj_.build(child_)
            self.Transforms = obj_
            obj_.original_tagname_ = 'Transforms'
# end class RetrievalMethodType
class X509DataType(GeneratedsSuper):
    """Binding for ``ds:X509DataType``: repeating X509IssuerSerial, X509SKI,
    X509SubjectName, X509Certificate and X509CRL children plus an optional
    wildcard child.  Not mixed content (no ``content_`` list).
    """
    subclass = None
    superclass = None
    def __init__(self, X509IssuerSerial=None, X509SKI=None, X509SubjectName=None, X509Certificate=None, X509CRL=None, anytypeobjs_=None):
        self.original_tagname_ = None
        # Each repeating child defaults to a fresh empty list.
        if X509IssuerSerial is None:
            self.X509IssuerSerial = []
        else:
            self.X509IssuerSerial = X509IssuerSerial
        if X509SKI is None:
            self.X509SKI = []
        else:
            self.X509SKI = X509SKI
        if X509SubjectName is None:
            self.X509SubjectName = []
        else:
            self.X509SubjectName = X509SubjectName
        if X509Certificate is None:
            self.X509Certificate = []
        else:
            self.X509Certificate = X509Certificate
        if X509CRL is None:
            self.X509CRL = []
        else:
            self.X509CRL = X509CRL
        self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass module, then the ``subclass`` hook.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, X509DataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if X509DataType.subclass:
            return X509DataType.subclass(*args_, **kwargs_)
        else:
            return X509DataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generateDS-style accessors for each repeating child element ---
    def get_X509IssuerSerial(self): return self.X509IssuerSerial
    def set_X509IssuerSerial(self, X509IssuerSerial): self.X509IssuerSerial = X509IssuerSerial
    def add_X509IssuerSerial(self, value): self.X509IssuerSerial.append(value)
    def insert_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial.insert(index, value)
    def replace_X509IssuerSerial_at(self, index, value): self.X509IssuerSerial[index] = value
    def get_X509SKI(self): return self.X509SKI
    def set_X509SKI(self, X509SKI): self.X509SKI = X509SKI
    def add_X509SKI(self, value): self.X509SKI.append(value)
    def insert_X509SKI_at(self, index, value): self.X509SKI.insert(index, value)
    def replace_X509SKI_at(self, index, value): self.X509SKI[index] = value
    def get_X509SubjectName(self): return self.X509SubjectName
    def set_X509SubjectName(self, X509SubjectName): self.X509SubjectName = X509SubjectName
    def add_X509SubjectName(self, value): self.X509SubjectName.append(value)
    def insert_X509SubjectName_at(self, index, value): self.X509SubjectName.insert(index, value)
    def replace_X509SubjectName_at(self, index, value): self.X509SubjectName[index] = value
    def get_X509Certificate(self): return self.X509Certificate
    def set_X509Certificate(self, X509Certificate): self.X509Certificate = X509Certificate
    def add_X509Certificate(self, value): self.X509Certificate.append(value)
    def insert_X509Certificate_at(self, index, value): self.X509Certificate.insert(index, value)
    def replace_X509Certificate_at(self, index, value): self.X509Certificate[index] = value
    def get_X509CRL(self): return self.X509CRL
    def set_X509CRL(self, X509CRL): self.X509CRL = X509CRL
    def add_X509CRL(self, value): self.X509CRL.append(value)
    def insert_X509CRL_at(self, index, value): self.X509CRL.insert(index, value)
    def replace_X509CRL_at(self, index, value): self.X509CRL[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def hasContent_(self):
        # True when any child list is non-empty or a wildcard child exists.
        if (
            self.X509IssuerSerial or
            self.X509SKI or
            self.X509SubjectName or
            self.X509Certificate or
            self.X509CRL or
            self.anytypeobjs_ is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='X509DataType', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509DataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509DataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='X509DataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509DataType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='X509DataType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for X509IssuerSerial_ in self.X509IssuerSerial:
            X509IssuerSerial_.export(outfile, level, namespace_, name_='X509IssuerSerial', pretty_print=pretty_print)
        for X509SKI_ in self.X509SKI:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509SKI>%s</X509SKI>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SKI_), input_name='X509SKI')), eol_))
        for X509SubjectName_ in self.X509SubjectName:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509SubjectName>%s</X509SubjectName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509SubjectName_), input_name='X509SubjectName')), eol_))
        for X509Certificate_ in self.X509Certificate:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509Certificate>%s</X509Certificate>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509Certificate_), input_name='X509Certificate')), eol_))
        for X509CRL_ in self.X509CRL:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509CRL>%s</X509CRL>%s' % (self.gds_encode(self.gds_format_string(quote_xml(X509CRL_), input_name='X509CRL')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple string children are validated then appended; any unknown
        # tag falls through to the generic wildcard builder.
        if nodeName_ == 'X509IssuerSerial':
            obj_ = X509IssuerSerialType.factory()
            obj_.build(child_)
            self.X509IssuerSerial.append(obj_)
            obj_.original_tagname_ = 'X509IssuerSerial'
        elif nodeName_ == 'X509SKI':
            X509SKI_ = child_.text
            X509SKI_ = self.gds_validate_string(X509SKI_, node, 'X509SKI')
            self.X509SKI.append(X509SKI_)
        elif nodeName_ == 'X509SubjectName':
            X509SubjectName_ = child_.text
            X509SubjectName_ = self.gds_validate_string(X509SubjectName_, node, 'X509SubjectName')
            self.X509SubjectName.append(X509SubjectName_)
        elif nodeName_ == 'X509Certificate':
            X509Certificate_ = child_.text
            X509Certificate_ = self.gds_validate_string(X509Certificate_, node, 'X509Certificate')
            self.X509Certificate.append(X509Certificate_)
        elif nodeName_ == 'X509CRL':
            X509CRL_ = child_.text
            X509CRL_ = self.gds_validate_string(X509CRL_, node, 'X509CRL')
            self.X509CRL.append(X509CRL_)
        else:
            obj_ = self.gds_build_any(child_, 'X509DataType')
            if obj_ is not None:
                self.set_anytypeobjs_(obj_)
# end class X509DataType
class X509IssuerSerialType(GeneratedsSuper):
    """Binding for ``ds:X509IssuerSerialType``: an issuer distinguished
    name plus a certificate serial number, serialized as two simple child
    elements with no XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, X509IssuerName=None, X509SerialNumber=None):
        self.original_tagname_ = None
        self.X509IssuerName = X509IssuerName
        self.X509SerialNumber = X509SerialNumber
    def factory(*args_, **kwargs_):
        # Honor a subclass registered via CurrentSubclassModule_ first,
        # then the class-level ``subclass`` hook, else a plain instance.
        if CurrentSubclassModule_ is not None:
            registered_ = getSubclassFromModule_(
                CurrentSubclassModule_, X509IssuerSerialType)
            if registered_ is not None:
                return registered_(*args_, **kwargs_)
        if X509IssuerSerialType.subclass:
            return X509IssuerSerialType.subclass(*args_, **kwargs_)
        return X509IssuerSerialType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_X509IssuerName(self):
        return self.X509IssuerName
    def set_X509IssuerName(self, X509IssuerName):
        self.X509IssuerName = X509IssuerName
    def get_X509SerialNumber(self):
        return self.X509SerialNumber
    def set_X509SerialNumber(self, X509SerialNumber):
        self.X509SerialNumber = X509SerialNumber
    def hasContent_(self):
        # True when at least one child element would be emitted.
        return (
            self.X509IssuerName is not None
            or self.X509SerialNumber is not None
        )
    def export(self, outfile, level, namespace_='', name_='X509IssuerSerialType', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509IssuerSerialType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='X509IssuerSerialType')
        if not self.hasContent_():
            # Nothing inside: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='X509IssuerSerialType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='X509IssuerSerialType'):
        # No XML attributes are defined for this type.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='X509IssuerSerialType', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.X509IssuerName is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509IssuerName>%s</X509IssuerName>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.X509IssuerName), input_name='X509IssuerName')), eol_))
        if self.X509SerialNumber is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<X509SerialNumber>%s</X509SerialNumber>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.X509SerialNumber), input_name='X509SerialNumber')), eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            local_name_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, local_name_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'X509IssuerName':
            self.X509IssuerName = self.gds_validate_string(
                child_.text, node, 'X509IssuerName')
        elif nodeName_ == 'X509SerialNumber':
            self.X509SerialNumber = self.gds_validate_string(
                child_.text, node, 'X509SerialNumber')
# end class X509IssuerSerialType
class PGPDataType(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, PGPKeyID=None, PGPKeyPacket=None, anytypeobjs_=None):
        # PGPKeyID / PGPKeyPacket are simple string children; wildcard
        # children default to a fresh empty list.
        self.original_tagname_ = None
        self.PGPKeyID = PGPKeyID
        self.PGPKeyPacket = PGPKeyPacket
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass module, then the ``subclass`` hook.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PGPDataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PGPDataType.subclass:
            return PGPDataType.subclass(*args_, **kwargs_)
        else:
            return PGPDataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_PGPKeyID(self): return self.PGPKeyID
    def set_PGPKeyID(self, PGPKeyID): self.PGPKeyID = PGPKeyID
    def get_PGPKeyPacket(self): return self.PGPKeyPacket
    def set_PGPKeyPacket(self, PGPKeyPacket): self.PGPKeyPacket = PGPKeyPacket
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def hasContent_(self):
if (
self.PGPKeyID is not None or
self.PGPKeyPacket is not None or
self.anytypeobjs_
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='PGPDataType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('PGPDataType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='PGPDataType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='PGPDataType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='PGPDataType'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='PGPDataType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.PGPKeyID is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<PGPKeyID>%s</PGPKeyID>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyID), input_name='PGPKeyID')), eol_))
if self.PGPKeyPacket is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<PGPKeyPacket>%s</PGPKeyPacket>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.PGPKeyPacket), input_name='PGPKeyPacket')), eol_))
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_, pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'PGPKeyID':
PGPKeyID_ = child_.text
PGPKeyID_ = self.gds_validate_string(PGPKeyID_, node, 'PGPKeyID')
self.PGPKeyID = PGPKeyID_
elif nodeName_ == 'PGPKeyPacket':
PGPKeyPacket_ = child_.text
PGPKeyPacket_ = self.gds_validate_string(PGPKeyPacket_, node, 'PGPKeyPacket')
self.PGPKeyPacket = PGPKeyPacket_
else:
obj_ = self.gds_build_any(child_, 'PGPDataType')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class PGPDataType
class SPKIDataType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``SPKIDataType`` complex type.

    Carries a list of ``SPKISexp`` text values plus a single optional
    wildcard extension element (``anytypeobjs_``).
    """
    subclass = None
    superclass = None
    def __init__(self, SPKISexp=None, anytypeobjs_=None):
        self.original_tagname_ = None
        # Default to a fresh list so instances never share mutable state.
        if SPKISexp is None:
            self.SPKISexp = []
        else:
            self.SPKISexp = SPKISexp
        self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SPKIDataType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SPKIDataType.subclass:
            return SPKIDataType.subclass(*args_, **kwargs_)
        else:
            return SPKIDataType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SPKISexp(self): return self.SPKISexp
    def set_SPKISexp(self, SPKISexp): self.SPKISexp = SPKISexp
    def add_SPKISexp(self, value): self.SPKISexp.append(value)
    def insert_SPKISexp_at(self, index, value): self.SPKISexp.insert(index, value)
    def replace_SPKISexp_at(self, index, value): self.SPKISexp[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def hasContent_(self):
        # True when there is anything to serialize inside the element.
        if (
            self.SPKISexp or
            self.anytypeobjs_ is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SPKIDataType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SPKIDataType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SPKIDataType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SPKIDataType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SPKIDataType'):
        # SPKIDataType defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='SPKIDataType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for SPKISexp_ in self.SPKISexp:
            showIndent(outfile, level, pretty_print)
            outfile.write('<SPKISexp>%s</SPKISexp>%s' % (self.gds_encode(self.gds_format_string(quote_xml(SPKISexp_), input_name='SPKISexp')), eol_))
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'SPKISexp':
            SPKISexp_ = child_.text
            SPKISexp_ = self.gds_validate_string(SPKISexp_, node, 'SPKISexp')
            self.SPKISexp.append(SPKISexp_)
        else:
            # Any unrecognized child becomes the single wildcard object.
            obj_ = self.gds_build_any(child_, 'SPKIDataType')
            if obj_ is not None:
                self.set_anytypeobjs_(obj_)
# end class SPKIDataType
class ObjectType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``ObjectType`` complex type.

    A mixed-content element with optional ``Id``, ``MimeType`` and
    ``Encoding`` attributes; interleaved text and child elements are
    recorded in ``content_`` as ``MixedContainer`` entries.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, MimeType=None, Encoding=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.MimeType = _cast(None, MimeType)
        self.Encoding = _cast(None, Encoding)
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): valueOf_ was already assigned above; this second
        # assignment is redundant but harmless (generated code).
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ObjectType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ObjectType.subclass:
            return ObjectType.subclass(*args_, **kwargs_)
        else:
            return ObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_MimeType(self): return self.MimeType
    def set_MimeType(self, MimeType): self.MimeType = MimeType
    def get_Encoding(self): return self.Encoding
    def set_Encoding(self, Encoding): self.Encoding = Encoding
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric valueOf_ (including 0/0.0) counts as content; other
        # values are tested for truthiness.
        if (
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='ObjectType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ObjectType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ObjectType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='ObjectType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ObjectType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
        if self.MimeType is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            outfile.write(' MimeType=%s' % (quote_attrib(self.MimeType), ))
        if self.Encoding is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            outfile.write(' Encoding=%s' % (quote_attrib(self.Encoding), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ObjectType', fromsubclass_=False, pretty_print=True):
        # Mixed content: replay the recorded text/element items in order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Record leading text as a mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
        value = find_attr_value_('MimeType', node)
        if value is not None and 'MimeType' not in already_processed:
            already_processed.add('MimeType')
            self.MimeType = value
        value = find_attr_value_('Encoding', node)
        if value is not None and 'Encoding' not in already_processed:
            already_processed.add('Encoding')
            self.Encoding = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # __ANY__ is presumably a wildcard handler defined elsewhere
            # in this generated module — TODO confirm.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            # Record trailing text after the child as mixed content.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class ObjectType
class ManifestType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``ManifestType`` complex
    type: an optional ``Id`` attribute plus a list of ``Reference``
    children.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, Reference=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Fresh list per instance when no Reference collection is given.
        self.Reference = [] if Reference is None else Reference
    def factory(*args_, **kwargs_):
        # A registered subclass takes precedence over the generated class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ManifestType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = ManifestType.subclass or ManifestType
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Reference(self):
        return self.Reference
    def set_Reference(self, Reference):
        self.Reference = Reference
    def add_Reference(self, value):
        self.Reference.append(value)
    def insert_Reference_at(self, index, value):
        self.Reference.insert(index, value)
    def replace_Reference_at(self, index, value):
        self.Reference[index] = value
    def get_Id(self):
        return self.Id
    def set_Id(self, Id):
        self.Id = Id
    def hasContent_(self):
        # Content exists iff at least one Reference child is present.
        return bool(self.Reference)
    def export(self, outfile, level, namespace_='', name_='ManifestType', namespacedef_='', pretty_print=True):
        """Serialize this instance as XML to *outfile* at indent *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ManifestType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        nsdef_part_ = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, nsdef_part_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ManifestType')
        if not self.hasContent_():
            # Empty element: emit the self-closing form and stop.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='ManifestType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ManifestType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ManifestType', fromsubclass_=False, pretty_print=True):
        for ref_ in self.Reference:
            ref_.export(outfile, level, namespace_='ds:', name_='Reference', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            tag_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Reference':
            ref_ = ReferenceType.factory()
            ref_.build(child_)
            self.Reference.append(ref_)
            ref_.original_tagname_ = 'Reference'
# end class ManifestType
class SignaturePropertiesType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``SignaturePropertiesType``
    complex type: an optional ``Id`` attribute plus a list of
    ``SignatureProperty`` children.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, SignatureProperty=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        # Default to a fresh list so instances never share mutable state.
        if SignatureProperty is None:
            self.SignatureProperty = []
        else:
            self.SignatureProperty = SignatureProperty
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignaturePropertiesType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignaturePropertiesType.subclass:
            return SignaturePropertiesType.subclass(*args_, **kwargs_)
        else:
            return SignaturePropertiesType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_SignatureProperty(self): return self.SignatureProperty
    def set_SignatureProperty(self, SignatureProperty): self.SignatureProperty = SignatureProperty
    def add_SignatureProperty(self, value): self.SignatureProperty.append(value)
    def insert_SignatureProperty_at(self, index, value): self.SignatureProperty.insert(index, value)
    def replace_SignatureProperty_at(self, index, value): self.SignatureProperty[index] = value
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        # True when at least one SignatureProperty child is present.
        if (
            self.SignatureProperty
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignaturePropertiesType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertiesType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertiesType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertiesType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertiesType'):
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertiesType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for SignatureProperty_ in self.SignatureProperty:
            SignatureProperty_.export(outfile, level, namespace_='ds:', name_='SignatureProperty', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'SignatureProperty':
            obj_ = SignaturePropertyType.factory()
            obj_.build(child_)
            self.SignatureProperty.append(obj_)
            obj_.original_tagname_ = 'SignatureProperty'
# end class SignaturePropertiesType
class SignaturePropertyType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``SignaturePropertyType``
    complex type.

    A mixed-content element with a ``Target`` and optional ``Id``
    attribute; interleaved text and child elements are recorded in
    ``content_`` as ``MixedContainer`` entries.
    """
    subclass = None
    superclass = None
    def __init__(self, Target=None, Id=None, anytypeobjs_=None, valueOf_=None, mixedclass_=None, content_=None):
        self.original_tagname_ = None
        self.Target = _cast(None, Target)
        self.Id = _cast(None, Id)
        self.anytypeobjs_ = anytypeobjs_
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        # NOTE(review): valueOf_ was already assigned above; this second
        # assignment is redundant but harmless (generated code).
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SignaturePropertyType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if SignaturePropertyType.subclass:
            return SignaturePropertyType.subclass(*args_, **kwargs_)
        else:
            return SignaturePropertyType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def get_Target(self): return self.Target
    def set_Target(self, Target): self.Target = Target
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def hasContent_(self):
        # Numeric valueOf_ (including 0/0.0) counts as content; other
        # values are tested for truthiness.
        if (
            self.anytypeobjs_ is not None or
            (1 if type(self.valueOf_) in [int,float] else self.valueOf_)
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='SignaturePropertyType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignaturePropertyType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SignaturePropertyType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='SignaturePropertyType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SignaturePropertyType'):
        if self.Target is not None and 'Target' not in already_processed:
            already_processed.add('Target')
            outfile.write(' Target=%s' % (quote_attrib(self.Target), ))
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (quote_attrib(self.Id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SignaturePropertyType', fromsubclass_=False, pretty_print=True):
        # Mixed content: replay the recorded text/element items in order.
        if not fromsubclass_:
            for item_ in self.content_:
                item_.export(outfile, level, item_.name, namespace_, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.anytypeobjs_ is not None:
            self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            # Record leading text as a mixed-content item.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('Target', node)
        if value is not None and 'Target' not in already_processed:
            already_processed.add('Target')
            self.Target = value
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == '':
            # __ANY__ is presumably a wildcard handler defined elsewhere
            # in this generated module — TODO confirm.
            obj_ = __ANY__.factory()
            obj_.build(child_)
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex,
                MixedContainer.TypeNone, '', obj_)
            self.content_.append(obj_)
            if hasattr(self, 'add_'):
                self.add_(obj_.value)
            elif hasattr(self, 'set_'):
                self.set_(obj_.value)
        if not fromsubclass_ and child_.tail is not None:
            # Record trailing text after the child as mixed content.
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
# end class SignaturePropertyType
class DSAKeyValueType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``DSAKeyValueType``.

    Every field (P, Q, G, Y, J, Seed, PgenCounter) is a ``CryptoBinary``
    (base64Binary) value held as decoded bytes; ``None`` means absent.
    """
    subclass = None
    superclass = None
    # Child element names in schema order; each holds a CryptoBinary value.
    # Used to collapse the previously duplicated per-field export/parse code.
    _crypto_fields_ = ('P', 'Q', 'G', 'Y', 'J', 'Seed', 'PgenCounter')
    def __init__(self, P=None, Q=None, G=None, Y=None, J=None, Seed=None, PgenCounter=None):
        self.original_tagname_ = None
        self.P = P
        self.validate_CryptoBinary(self.P)
        self.Q = Q
        self.validate_CryptoBinary(self.Q)
        self.G = G
        self.validate_CryptoBinary(self.G)
        self.Y = Y
        self.validate_CryptoBinary(self.Y)
        self.J = J
        self.validate_CryptoBinary(self.J)
        self.Seed = Seed
        self.validate_CryptoBinary(self.Seed)
        self.PgenCounter = PgenCounter
        self.validate_CryptoBinary(self.PgenCounter)
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, DSAKeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if DSAKeyValueType.subclass:
            return DSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return DSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_P(self): return self.P
    def set_P(self, P): self.P = P
    def get_Q(self): return self.Q
    def set_Q(self, Q): self.Q = Q
    def get_G(self): return self.G
    def set_G(self, G): self.G = G
    def get_Y(self): return self.Y
    def set_Y(self, Y): self.Y = Y
    def get_J(self): return self.J
    def set_J(self, J): self.J = J
    def get_Seed(self): return self.Seed
    def set_Seed(self, Seed): self.Seed = Seed
    def get_PgenCounter(self): return self.PgenCounter
    def set_PgenCounter(self, PgenCounter): self.PgenCounter = PgenCounter
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        # The generator emitted no constraints, so this is a no-op.
        if value is not None and Validate_simpletypes_:
            pass
    def _decode_base64_child_(self, child_, node, name_):
        """Decode *child_*'s text as base64, or None when the element is empty.

        Raises a parse error (via raise_parse_error) on malformed input.
        """
        sval_ = child_.text
        if sval_ is None:
            return None
        try:
            bval_ = base64.b64decode(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
        return self.gds_validate_base64(bval_, node, name_)
    def hasContent_(self):
        # True when any CryptoBinary field is present.
        if any(getattr(self, tag_) is not None for tag_ in self._crypto_fields_):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='DSAKeyValueType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('DSAKeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='DSAKeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DSAKeyValueType'):
        # DSAKeyValueType defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='DSAKeyValueType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Emit each present CryptoBinary field as <Tag>base64</Tag>,
        # producing byte-identical output to the previous unrolled code.
        for tag_ in self._crypto_fields_:
            val_ = getattr(self, tag_)
            if val_ is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s>%s</%s>%s' % (tag_, self.gds_format_base64(val_, input_name=tag_), tag_, eol_))
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All seven children follow the same decode-and-validate pattern;
        # previously this was seven copies of the same 12-line stanza.
        if nodeName_ in self._crypto_fields_:
            bval_ = self._decode_base64_child_(child_, node, nodeName_)
            setattr(self, nodeName_, bval_)
            # validate type CryptoBinary
            self.validate_CryptoBinary(bval_)
# end class DSAKeyValueType
class RSAKeyValueType(GeneratedsSuper):
    """Generated binding for the XML-Signature ``RSAKeyValueType``.

    ``Modulus`` and ``Exponent`` are ``CryptoBinary`` (base64Binary)
    values held as decoded bytes; ``None`` means absent.
    """
    subclass = None
    superclass = None
    # Child element names in schema order; each holds a CryptoBinary value.
    # Used to collapse the previously duplicated per-field export/parse code.
    _crypto_fields_ = ('Modulus', 'Exponent')
    def __init__(self, Modulus=None, Exponent=None):
        self.original_tagname_ = None
        self.Modulus = Modulus
        self.validate_CryptoBinary(self.Modulus)
        self.Exponent = Exponent
        self.validate_CryptoBinary(self.Exponent)
    def factory(*args_, **kwargs_):
        # Prefer an externally registered subclass when one is configured.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RSAKeyValueType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RSAKeyValueType.subclass:
            return RSAKeyValueType.subclass(*args_, **kwargs_)
        else:
            return RSAKeyValueType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Modulus(self): return self.Modulus
    def set_Modulus(self, Modulus): self.Modulus = Modulus
    def get_Exponent(self): return self.Exponent
    def set_Exponent(self, Exponent): self.Exponent = Exponent
    def validate_CryptoBinary(self, value):
        # Validate type CryptoBinary, a restriction on base64Binary.
        # The generator emitted no constraints, so this is a no-op.
        if value is not None and Validate_simpletypes_:
            pass
    def _decode_base64_child_(self, child_, node, name_):
        """Decode *child_*'s text as base64, or None when the element is empty.

        Raises a parse error (via raise_parse_error) on malformed input.
        """
        sval_ = child_.text
        if sval_ is None:
            return None
        try:
            bval_ = base64.b64decode(sval_)
        except (TypeError, ValueError) as exp:
            raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
        return self.gds_validate_base64(bval_, node, name_)
    def hasContent_(self):
        # True when either CryptoBinary field is present.
        if (
            self.Modulus is not None or
            self.Exponent is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='RSAKeyValueType', namespacedef_='', pretty_print=True):
        """Write this instance as XML to *outfile*, indented to *level*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RSAKeyValueType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RSAKeyValueType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='RSAKeyValueType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing form when the element is empty.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RSAKeyValueType'):
        # RSAKeyValueType defines no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='RSAKeyValueType', fromsubclass_=False, pretty_print=True):
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Emit each present CryptoBinary field as <Tag>base64</Tag>,
        # producing byte-identical output to the previous unrolled code.
        for tag_ in self._crypto_fields_:
            val_ = getattr(self, tag_)
            if val_ is not None:
                showIndent(outfile, level, pretty_print)
                outfile.write('<%s>%s</%s>%s' % (tag_, self.gds_format_base64(val_, input_name=tag_), tag_, eol_))
    def build(self, node):
        """Populate this instance from element-tree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Both children follow the same decode-and-validate pattern;
        # previously this was two copies of the same 12-line stanza.
        if nodeName_ in self._crypto_fields_:
            bval_ = self._decode_base64_child_(child_, node, nodeName_)
            setattr(self, nodeName_, bval_)
            # validate type CryptoBinary
            self.validate_CryptoBinary(bval_)
# end class RSAKeyValueType
class ListaRpsType(GeneratedsSuper):
    """Container element holding a list of ``Rps`` children."""
    subclass = None
    superclass = None

    def __init__(self, Rps=None):
        self.original_tagname_ = None
        # Create a fresh list per instance to avoid a shared mutable default.
        self.Rps = [] if Rps is None else Rps

    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate ListaRpsType or a registered subclass."""
        if CurrentSubclassModule_ is not None:
            derived = getSubclassFromModule_(
                CurrentSubclassModule_, ListaRpsType)
            if derived is not None:
                return derived(*args_, **kwargs_)
        cls = ListaRpsType.subclass or ListaRpsType
        return cls(*args_, **kwargs_)

    def get_Rps(self):
        """Return the list of Rps children."""
        return self.Rps

    def set_Rps(self, Rps):
        """Replace the list of Rps children."""
        self.Rps = Rps

    def add_Rps(self, value):
        """Append one Rps child."""
        self.Rps.append(value)

    def insert_Rps_at(self, index, value):
        """Insert one Rps child at *index*."""
        self.Rps.insert(index, value)

    def replace_Rps_at(self, index, value):
        """Overwrite the Rps child at *index*."""
        self.Rps[index] = value

    def hasContent_(self):
        """Return True when at least one Rps child is present."""
        return bool(self.Rps)

    def export(self, outfile, level, namespace_='', name_='ListaRpsType', namespacedef_='', pretty_print=True):
        """Serialize this element and its Rps children as XML to *outfile*."""
        # A registered namespace definition overrides the caller's value.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ListaRpsType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        # Preserve the tag name seen when the object was built.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ListaRpsType')
        if not self.hasContent_():
            # No children: emit a self-closing tag and stop.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='ListaRpsType', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ListaRpsType'):
        """No XML attributes are defined for this type; nothing to write."""
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='ListaRpsType', fromsubclass_=False, pretty_print=True):
        """Write every Rps child in order."""
        for rps_item in self.Rps:
            rps_item.export(outfile, level, namespace_, name_='Rps', pretty_print=pretty_print)

    def build(self, node):
        """Populate this instance from an XML element *node* and return self."""
        seen_attrs = set()
        self.buildAttributes(node, node.attrib, seen_attrs)
        for child_node in node:
            # Strip any namespace prefix from the child tag.
            child_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        """No XML attributes are defined for this type; nothing to read."""
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build one child; only 'Rps' children are recognized."""
        if nodeName_ != 'Rps':
            return
        obj_ = tcRps.factory()
        obj_.build(child_)
        self.Rps.append(obj_)
        obj_.original_tagname_ = 'Rps'
# end class ListaRpsType
# Map of XML root-element tag names to their generated classes.
# Left empty here; get_root_tag() falls back to a globals() lookup.
GDSClassesMapping = {
}
# Help text printed by usage() when the CLI is invoked incorrectly.
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the CLI usage text and exit with status 1."""
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Map an XML root element to ``(tag_name, generated_class)``.

    Falls back to a module-global lookup when the tag is not listed in
    GDSClassesMapping; the class may be None for unknown tags.
    """
    # Strip any namespace prefix from the root tag.
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    rootClass = GDSClassesMapping.get(tag)
    if rootClass is None:
        rootClass = globals().get(tag)
    return tag, rootClass
def parse(inFileName, silence=False):
    """Parse the XML file *inFileName* and return the built root object.

    Unless *silence* is true, the regenerated document is echoed to
    stdout with an XML declaration.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to this module's primary type.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    """Parse *inFileName* and also build an etree representation.

    Returns ``(rootObj, rootElement, mapping, reverse_mapping)`` where
    *mapping* links generated objects to etree nodes and back.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to this module's primary type.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse XML from the byte string *inString* and return the root object.

    Unless *silence* is true, the regenerated document is echoed to stdout.
    """
    # Pick the buffer type matching the interpreter's string model.
    if sys.version_info.major == 2:
        from StringIO import StringIO as IOBuffer
    else:
        from io import BytesIO as IOBuffer
    parser = None
    doc = parsexml_(IOBuffer(inString), parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to this module's primary type.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='')
    return rootObj
def parseLiteral(inFileName, silence=False):
    """Parse *inFileName* and echo equivalent Python constructor code.

    Returns the built root object; the printed code, written unless
    *silence* is true, can recreate it via this module.
    """
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unknown root tag: fall back to this module's primary type.
        rootTag = 'ConsultarSituacaoLoteRpsEnvio'
        rootClass = ConsultarSituacaoLoteRpsEnvio
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('#from servico_consultar_situacao_lote_rps_envio_v03 import *\n\n')
        sys.stdout.write('import servico_consultar_situacao_lote_rps_envio_v03 as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """CLI entry point: parse the single XML file named on the command line."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()  # prints help and exits with status 1
    else:
        parse(args[0])
# Run the CLI only when executed as a script, not when imported.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public API of this generated module: every schema type exported by
# `from <module> import *`.
__all__ = [
    "CanonicalizationMethodType",
    "ConsultarSituacaoLoteRpsEnvio",
    "DSAKeyValueType",
    "DigestMethodType",
    "KeyInfoType",
    "KeyValueType",
    "ListaMensagemRetorno",
    "ListaRpsType",
    "ManifestType",
    "ObjectType",
    "PGPDataType",
    "RSAKeyValueType",
    "ReferenceType",
    "RetrievalMethodType",
    "SPKIDataType",
    "SignatureMethodType",
    "SignaturePropertiesType",
    "SignaturePropertyType",
    "SignatureType",
    "SignatureValueType",
    "SignedInfoType",
    "TransformType",
    "TransformsType",
    "X509DataType",
    "X509IssuerSerialType",
    "tcCancelamentoNfse",
    "tcCompNfse",
    "tcConfirmacaoCancelamento",
    "tcContato",
    "tcCpfCnpj",
    "tcDadosConstrucaoCivil",
    "tcDadosPrestador",
    "tcDadosServico",
    "tcDadosTomador",
    "tcEndereco",
    "tcIdentificacaoIntermediarioServico",
    "tcIdentificacaoNfse",
    "tcIdentificacaoOrgaoGerador",
    "tcIdentificacaoPrestador",
    "tcIdentificacaoRps",
    "tcIdentificacaoTomador",
    "tcInfConfirmacaoCancelamento",
    "tcInfNfse",
    "tcInfPedidoCancelamento",
    "tcInfRps",
    "tcInfSubstituicaoNfse",
    "tcLoteRps",
    "tcMensagemRetorno",
    "tcMensagemRetornoLote",
    "tcNfse",
    "tcPedidoCancelamento",
    "tcRps",
    "tcSubstituicaoNfse",
    "tcValores"
]
| true | true |
1c3d6bd5c855108ef14e3d5e779ee2ba8e527e77 | 32,717 | py | Python | pyaer/device.py | duguyue100/pyaer-beta | c000c6676ae870978c86d491ed704cb69bc1c12e | [
"MIT"
] | 23 | 2017-12-15T10:18:29.000Z | 2022-03-10T16:39:14.000Z | pyaer/device.py | duguyue100/pyaer-beta | c000c6676ae870978c86d491ed704cb69bc1c12e | [
"MIT"
] | 18 | 2018-04-22T11:41:26.000Z | 2022-03-02T14:51:59.000Z | pyaer/device.py | duguyue100/pyaer-beta | c000c6676ae870978c86d491ed704cb69bc1c12e | [
"MIT"
] | 5 | 2018-08-10T10:53:02.000Z | 2022-02-08T17:01:42.000Z | """Generic Device.
Author: Yuhuang Hu
Email : duguyue100@gmail.com
"""
from __future__ import print_function, absolute_import
import abc
from pyaer import libcaer
class USBDevice(object):
    """Base class for all USB devices.

    This class is the base of DVS128, DAVIS240, DAVIS346 and DYNAPSE.
    """
    def __init__(self):
        """Device."""
        self.handle = None
        # functions for get events number and packet functions
        self.get_event_number_funcs = {
            libcaer.POLARITY_EVENT:
                libcaer.caerEventPacketHeaderGetEventNumber,
            libcaer.SPECIAL_EVENT:
                libcaer.caerEventPacketHeaderGetEventNumber,
            libcaer.IMU6_EVENT:
                libcaer.caerEventPacketHeaderGetEventNumber,
            libcaer.IMU9_EVENT:
                libcaer.caerEventPacketHeaderGetEventNumber,
            libcaer.SPIKE_EVENT:
                libcaer.caerEventPacketHeaderGetEventNumber
        }
        self.get_event_packet_funcs = {
            libcaer.POLARITY_EVENT:
                libcaer.caerPolarityEventPacketFromPacketHeader,
            libcaer.SPECIAL_EVENT:
                libcaer.caerSpecialEventPacketFromPacketHeader,
            libcaer.FRAME_EVENT:
                libcaer.caerFrameEventPacketFromPacketHeader,
            libcaer.IMU6_EVENT:
                libcaer.caerIMU6EventPacketFromPacketHeader,
            libcaer.IMU9_EVENT:
                libcaer.caerIMU9EventPacketFromPacketHeader,
            libcaer.SPIKE_EVENT:
                libcaer.caerSpikeEventPacketFromPacketHeader
        }

    @abc.abstractmethod
    def obtain_device_info(self, handle):
        """Obtain device handle.

        This abstract method should be implemented in all derived classes.
        This method collects the general information about the USB device
        such as the width and height of the camera or the serial number
        of the device.

        # Arguments
            handle: `caerDeviceHandle`<br/>
                a valid device handle that can be used with the other
                `libcaer` functions, or `None` on error.
        """
        return

    @abc.abstractmethod
    def get_event(self):
        """Get Event.

        This abstract method should be implemented in all derived classes.
        This method returns a packet of events according to the type of
        the sensor.
        """
        return

    def open(self,
             device_type,
             device_id=1,
             bus_number_restrict=0,
             dev_address_restrict=0,
             serial_number=""):
        """Open USB deivce.

        # Arguments
            device_type: `int`<br/>
                Device type:<br/>
                `libcaer.CAER_DEVICE_DVS128`,
                `libcaer.CAER_DEVICE_EDVS`,
                `libcaer.CAER_DEVICE_DAVIS`,
                `libcaer.CAER_DEVICE_DAVIS_FX2`,
                `libcaer.CAER_DEVICE_DAVIS_FX3`,
                `libcaer.CAER_DEVICE_DAVIS_RPI`,
                `libcaer.CAER_DEVICE_DYNAPSE`.
            device_id: `int`<br/>
                a unique ID to identify the device from others.
                Will be used as the source for EventPackets being
                generate from its data.<br/>
                `default is 1`.
            bus_number_restrict: `int`<br/>
                restrict the search for viable devices to only this USB
                bus number.<br/>
                `default is 0`.
            dev_address_restrict: `int`<br/>
                restrict the search for viable devices to only this USB
                device address.<br/>
                `default is 0`.
            serial_number: `str`<br/>
                restrict the search for viable devices to only devices which do
                possess the given Serial Number in their USB
                SerialNumber descriptor.<br/>
                `default is ""`
        """
        self.handle = libcaer.caerDeviceOpen(
            device_id, device_type, bus_number_restrict,
            dev_address_restrict, serial_number)
        if self.handle is None:
            raise ValueError("The device is failed to open.")

    def close(self):
        """Close USB device.

        This method closes an opened USB device if the respective
        handle is not None.
        """
        if self.handle is not None:
            libcaer.caerDeviceClose(self.handle)

    def shutdown(self):
        """Shutdown device.

        This method is a combination of `data_stop` and `close`.
        This is a preferred way of shutting down a device.
        """
        self.data_stop()
        self.close()

    def data_start(self):
        """Start data transmission.

        # Returns
            flag: `bool`<br/>
                Return `True` if the data transmission is
                initialized successfully. Otherwise `False`.
        """
        # TODO figure out the parameter meaning
        if self.handle is not None:
            data_start_success = libcaer.caerDeviceDataStart(
                self.handle, None, None, None, None, None)
            return data_start_success
        else:
            return False

    def data_stop(self):
        """Stop data transmission.

        This method stops the data transmission only.
        Note that this method does not destroy the respective
        device `handle`.
        """
        libcaer.caerDeviceDataStop(self.handle)

    def send_default_config(self):
        """Send default configuration.

        Each type of devices has a set of default configurations (e.g. bias)
        that are pre-defined in the `libcaer` library.
        Note that the default configuration might not be suitable for your
        needs.

        # Returns
            flag: `bool`<br/>
                return `True` if the default config is set successfully,
                `False` otherwise.
        """
        if self.handle is not None:
            send_success = libcaer.caerDeviceSendDefaultConfig(self.handle)
            return send_success
        else:
            return False

    def set_max_container_packet_size(self, max_packet_size=0):
        """Set max container packet size.

        # Arguments
            max_packet_size: `int`<br/>
                set the maximum number of events any of a packet container's
                packets may hold before it's made available to the user.
                Set to zero to disable.<br/>
                The default is `0`.
        """
        return self.set_config(
            libcaer.CAER_HOST_CONFIG_PACKETS,
            libcaer.CAER_HOST_CONFIG_PACKETS_MAX_CONTAINER_PACKET_SIZE,
            max_packet_size)

    def set_max_container_interval(self, max_packet_interval=10000):
        """Set max packet interval.

        # Arguments
            max_packet_interval: `int`<br/>
                set the time interval between subsequent packet containers.
                Must be at least 1 microsecond.
                The value is in microseconds, and is checked across all
                types of events contained in the EventPacketContainer.<br/>
                The default is `10000` (10ms or 100 packets/s)
        """
        return self.set_config(
            libcaer.CAER_HOST_CONFIG_PACKETS,
            libcaer.CAER_HOST_CONFIG_PACKETS_MAX_CONTAINER_INTERVAL,
            max_packet_interval)

    def set_data_exchange_blocking(self, exchange_blocking=True):
        """Set data exchange blocking.

        # Arguments
            exchange_blocking: `bool`<br/>
                whether to start all the data producer modules on the device
                (DVS, APS, Mux, ...) automatically when starting the
                data transfer thread with `caerDeviceDataStart()` or not.
                If disabled, be aware you will have to start the right modules
                manually, which can be useful if you need precise control
                over which ones are running at any time.<br/>
                The default is `True`.
        """
        return self.set_config(
            libcaer.CAER_HOST_CONFIG_DATAEXCHANGE,
            libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING,
            exchange_blocking)

    def set_config(self, mod_addr, param_addr, param):
        """Set configuration.

        The main function of setting configurations (e.g., bias).

        # Arguments
            mod_addr: `int`<br/>
                a module address, used to specify which configuration module
                one wants to update. Negative addresses are used for host-side
                configuration, while positive addresses (including zero) are
                used for device-side configuration.
            param_addr: `int`<br/>
                a parameter address, to select a specific parameter to update
                from this particular configuration module.
                Only positive numbers
                (including zero) are allowed.
            param: `int` or `bool`<br/>
                a configuration parameter's new value.

        # Returns
            flag: `bool`<br/>
                returns `True` if the config is set successfully,
                `False` otherwise.
        """
        if self.handle is not None:
            set_success = libcaer.caerDeviceConfigSet(
                self.handle, mod_addr, param_addr, param)
            return set_success
        else:
            return False

    def get_config(self, mod_addr, param_addr):
        """Get Configuration.

        # Arguments
            mod_addr: `int`<br/>
                a module address, used to specify which configuration module
                one wants to update. Negative addresses are used for host-side
                configuration, while positive addresses (including zero) are
                used for device-side configuration.
            param_addr: `int`<br/>
                a parameter address, to select a specific parameter to update
                from this particular configuration module.
                Only positive numbers
                (including zero) are allowed.

        # Returns
            param: `int` or `bool`<br/>
                a configuration parameter's new value. Returns None
                if the handle is not valid.
        """
        if self.handle is not None:
            return libcaer.caerDeviceConfigGet(
                self.handle, mod_addr, param_addr)
        else:
            return None

    def get_packet_container(self):
        """Get event packet container.

        # Returns
            packet_container: `caerEventPacketContainer`<br/>
                a container that consists of event packets.
            packet_number: `int`<br/>
                number of event packet in the container.
        """
        packet_container = libcaer.caerDeviceDataGet(self.handle)
        if packet_container is not None:
            packet_number = \
                libcaer.caerEventPacketContainerGetEventPacketsNumber(
                    packet_container)
            return packet_container, packet_number
        else:
            return None, None

    def get_packet_header(self, packet_container, idx):
        """Get a single packet header.

        # Arguments
            packet_container: `caerEventPacketContainer`<br/>
                the event packet container
            idx: `int`<br/>
                the index of the packet header

        # Returns
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet
            packet_type: `caerEventPacketType`<br/>
                the type of the event packet
        """
        packet_header = \
            libcaer.caerEventPacketContainerGetEventPacket(
                packet_container, idx)
        if packet_header is None:
            return (None, None)
        else:
            packet_type = libcaer.caerEventPacketHeaderGetEventType(
                packet_header)
            return packet_header, packet_type

    def get_event_packet(self, packet_header, packet_type):
        """Get event packet from packet header.

        # Arguments
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet.
            packet_type: `caerEventPacketType`<br/>
                the type of the event packet, can be one of the following:
                `libcaer.POLARITY_EVENT`,
                `libcaer.SPECIAL_EVENT`,
                `libcaer.FRAME_EVENT`,
                `libcaer.IMU6_EVENT`,
                `libcaer.SPIKE_EVENT`

        # Returns
            num_events: `int`<br/>
                number of events, return None if there is no events.
            event_packet: `caerEventPacket`<br/>
                a packet of events that are ready to be read.
        """
        num_events = self.get_event_number_funcs[packet_type](
            packet_header) if packet_type in self.get_event_number_funcs \
            else None
        event_packet = self.get_event_packet_funcs[packet_type](
            packet_header) if packet_type in self.get_event_packet_funcs \
            else None
        return num_events, event_packet

    def get_polarity_event(self, packet_header):
        """Get a packet of polarity event.

        # Arguments
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (N, 4) where N
                is the number of events in the event packet.
                Each row in the array represents a single polarity event.
                The first number is the timestamp.
                The second number is the X position of the event.
                The third number is the Y position of the event.
                The fourth number represents the polarity of the event
                (positive or negative).
            num_events: `int`<br/>
                number of the polarity events available in the packet.
        """
        num_events, polarity = self.get_event_packet(
            packet_header, libcaer.POLARITY_EVENT)
        # TODO: to implement a noise filtering process
        # or reimplement this function into specific classes
        events = libcaer.get_polarity_event(
            polarity, num_events*4).reshape(num_events, 4)
        return events, num_events

    def get_polarity_hist(self, packet_header, device_type=None):
        """Get the positive and negative histogram for a packet."""
        num_events, polarity = self.get_event_packet(
            packet_header, libcaer.POLARITY_EVENT)
        if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
            hist = libcaer.get_polarity_event_histogram_240(
                polarity, num_events)
        elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
            hist = libcaer.get_polarity_event_histogram_346(
                polarity, num_events)
        elif device_type == "DVS128":
            hist = libcaer.get_polarity_event_histogram_128(
                polarity, num_events)
        elif device_type == libcaer.DVXPLORER_CHIP_ID:
            hist = libcaer.get_polarity_event_histogram_dvxplorer(
                polarity, num_events)
        elif device_type == libcaer.DVXPLORER_LITE_CHIP_ID:
            hist = libcaer.get_polarity_event_histogram_dvxplorer_lite(
                polarity, num_events)
        else:
            return None, 0
        return hist, num_events

    def get_counter_neuron_event(self, packet_header, device_type=None):
        """Get the positive and negative histogram for a packet."""
        num_events, polarity = self.get_event_packet(
            packet_header, libcaer.POLARITY_EVENT)
        if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
            hist = libcaer.get_counter_neuron_frame_240(
                polarity, num_events)
        elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
            hist = libcaer.get_polarity_event_histogram_346(
                polarity, num_events)
        elif device_type == "DVS128":
            hist = libcaer.get_polarity_event_histogram_128(
                polarity, num_events)
        else:
            return None, 0
        return hist, num_events

    def get_special_event(self, packet_header):
        """Get a packet of special event.

        # Arguments
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (N, 2) where N
                is the number of events in the event packet.
                Each row in the array represents a single special event.
                The first value is the timestamp of the event.
                The second value is the special event data.
            num_events: `int`<br/>
                number of the special events in the packet.
        """
        num_events, special = self.get_event_packet(
            packet_header, libcaer.SPECIAL_EVENT)
        events = libcaer.get_special_event(
            special, num_events*2).reshape(num_events, 2)
        return events, num_events

    def get_frame_event(self, packet_header, device_type=None,
                        aps_filter_type=libcaer.MONO):
        """Get a packet of frame event.

        # Arguments
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet

        # Returns
            frame_mat: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (height, width).
                The height and width of the APS frame is determined by
                the specific DAVIS device (e.g., DAVIS240 will have
                a 180x240 APS frame.
                For DAVIS346Red that has RGB outputs, the output array
                has the shape of (height, width, 3)
            frame_ts: `int`<br/>
                the APS frame timestamp.
        """
        _, frame = self.get_event_packet(packet_header, libcaer.FRAME_EVENT)
        first_event = libcaer.caerFrameEventPacketGetEventConst(frame, 0)
        frame_ts = libcaer.caerFrameEventGetTimestamp64(first_event, frame)
        if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
            frame_mat = libcaer.get_frame_event_240(first_event)
        elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
            frame_mat = libcaer.get_frame_event_346(first_event) \
                if aps_filter_type == libcaer.MONO else \
                libcaer.get_rgb_frame_event_346(first_event)
        else:
            # Generic path: query the frame dimensions from the event itself.
            Y_range = libcaer.caerFrameEventGetLengthY(first_event)
            X_range = libcaer.caerFrameEventGetLengthX(first_event)
            frame_mat = libcaer.get_frame_event(
                first_event, Y_range*X_range).reshape(Y_range, X_range) \
                if aps_filter_type == libcaer.MONO else \
                libcaer.get_frame_event(
                    first_event, Y_range*X_range*3).reshape(
                        Y_range, X_range, 3)
        return frame_mat, frame_ts

    def get_imu6_event(self, packet_header):
        """Get IMU6 event.

        # Arguments
            packet_header: `caerEventPacketHeader`
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (N, 8) where N
                is the number of IMU6 events in the packet.
                Each row of the array consists a single IMU6 event.
                The first value is the timestamp of the event.
                The next three values are accelerations on the X, Y, and Z
                axes. The next three values are angular velocity
                on the X, Y and Z axes.
                The last value is the temperature in Celsius scale.
            num_events: `int`<br/>
                number of the IMU6 events.
        """
        num_events, imu = self.get_event_packet(
            packet_header, libcaer.IMU6_EVENT)
        events = libcaer.get_imu6_event(
            imu, num_events*8).reshape(num_events, 8)
        return events, num_events

    def get_imu9_event(self, packet_header):
        """Get IMU9 event.

        # Arguments
            packet_header: `caerEventPacketHeader`
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (N, 11) where N
                is the number of IMU9 events in the packet.
                Each row of the array consists a single IMU9 event.
                The first value is the timestamp of the event.
                The next three values are accelerations on the X, Y, and Z
                axes. The next three values are angular velocity
                on the X, Y and Z axes. The next three values are
                X, Y, Z axis compass heading.
                The last value is the temperature in Celsius scale.
            num_events: `int`<br/>
                number of the IMU9 events.
        """
        num_events, imu = self.get_event_packet(
            packet_header, libcaer.IMU9_EVENT)
        events = libcaer.get_imu9_event(
            imu, num_events*11).reshape(num_events, 11)
        return events, num_events

    def get_spike_event(self, packet_header):
        """Get Spike Event.

        # Arguments
            packet_header: `caerEventPacketHeader`
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`<br/>
                a 2-D array that has the shape of (N, 4) where N
                is the number of spike events in the packet.
                Each row of the array has a single spike event.
                The first value is the timestamp of the event.
                The second value is the neuron ID.
                The third value is the chip ID.
                The last value is the source core ID.
            num_events: `int`<br/>
                the number of the spike events.
        """
        # BUGFIX: was `self.SPIKE_EVENT`, but no such attribute is ever
        # defined on this class (AttributeError at runtime). The constant
        # lives in libcaer, as used by every other accessor above.
        num_events, spike = self.get_event_packet(
            packet_header, libcaer.SPIKE_EVENT)
        events = libcaer.get_spike_event(
            spike, num_events*4).reshape(num_events, 4)
        return events, num_events
class SerialDevice(object):
    """Base class for serial devices.

    The base class for devices that use the serial port.
    eDVS is the only current supported device in this family.
    """
    def __init__(self):
        """Device."""
        # Populated by open(); all other methods require a valid handle.
        self.handle = None
    @abc.abstractmethod
    def obtain_device_info(self, handle):
        """Obtain device handle.

        This abstract method should be implemented in all derived classes.
        This method collects the general information about the USB device
        such as the width and height of the camera or the serial number
        of the device.

        # Arguments
            handle: `caerDeviceHandle`<br/>
                a valid device handle that can be used with the other
                `libcaer` functions, or `None` on error.
        """
        return
    @abc.abstractmethod
    def get_event(self):
        """Get Event.

        This abstract method should be implemented in all derived classes.
        This method returns a packet of events according to the type of
        the sensor.
        """
        return
    def open(self,
             device_type,
             device_id=1,
             serial_port_name="/dev/ttyUSB0",
             serial_baud_rate=libcaer.CAER_HOST_CONFIG_SERIAL_BAUD_RATE_12M):
        """Open USB deivce.

        # Arguments
            device_type: `int`<br/>
                Device type:<br/>
                `libcaer.CAER_DEVICE_DVS128`,
                `libcaer.CAER_DEVICE_EDVS`,
                `libcaer.CAER_DEVICE_DAVIS`,
                `libcaer.CAER_DEVICE_DAVIS_FX2`,
                `libcaer.CAER_DEVICE_DAVIS_FX3`,
                `libcaer.CAER_DEVICE_DAVIS_RPI`,
                `libcaer.CAER_DEVICE_DYNAPSE`.
            device_id: `int`<br/>
                a unique ID to identify the device from others.
                Will be used as the source for EventPackets being
                generate from its data.<br/>
                `default is 1`.
            serial_port_name: `str`<br/>
                name of the serial port device to open.<br/>
                `default is /dev/ttyUSB0`
            serial_baud_rate: `uint32_t`
                baud-rate for serial port communication.<br/>
                `default is 12M`
        """
        # Serial variant of caerDeviceOpen: addresses the device by port
        # name and baud rate instead of USB bus/address.
        self.handle = libcaer.caerDeviceOpenSerial(
            device_id, device_type, serial_port_name,
            serial_baud_rate)
        if self.handle is None:
            raise ValueError("The device is failed to open.")
    def close(self):
        """Close USB device.

        This method closes an opened USB device if the respective
        handle is not None.
        """
        if self.handle is not None:
            libcaer.caerDeviceClose(self.handle)
    def shutdown(self):
        """Shutdown device.

        This method is a combination of `data_stop` and `close`.
        This is a preferred way of shutting down a device.
        """
        self.data_stop()
        self.close()
    def data_start(self):
        """Start data transmission.

        # Returns
            flag: `bool`<br/>
                Return `True` if the data transmission is
                initialized successfully. Otherwise `False`.
        """
        # TODO figure out the parameter meaning
        if self.handle is not None:
            data_start_success = libcaer.caerDeviceDataStart(
                self.handle, None, None, None, None, None)
            return data_start_success
        else:
            return False
    def data_stop(self):
        """Stop data transmission.

        This method stops the data transmission only.
        Note that this method does not destroy the respective
        device `handle`.
        """
        libcaer.caerDeviceDataStop(self.handle)
    def send_default_config(self):
        """Send default configuration.

        Each type of devices has a set of default configurations (e.g. bias)
        that are pre-defined in the `libcaer` library.
        Note that the default configuration might not be suitable for your
        needs.

        # Returns
            flag: `bool`<br/>
                return `True` if the default config is set successfully,
                `False` otherwise.
        """
        if self.handle is not None:
            send_success = libcaer.caerDeviceSendDefaultConfig(self.handle)
            return send_success
        else:
            return False
    def set_data_exchange_blocking(self, exchange_blocking=True):
        """Set data exchange blocking.

        # Arguments
            exchange_blocking: `bool`<br/>
                whether to start all the data producer modules on the device
                (DVS, APS, Mux, ...) automatically when starting the
                data transfer thread with `caerDeviceDataStart()` or not.
                If disabled, be aware you will have to start the right modules
                manually, which can be useful if you need precise control
                over which ones are running at any time.<br/>
                The default is `True`.
        """
        return self.set_config(
            libcaer.CAER_HOST_CONFIG_DATAEXCHANGE,
            libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING,
            exchange_blocking)
    def set_config(self, mod_addr, param_addr, param):
        """Set configuration.

        The main function of setting configurations (e.g., bias).

        # Arguments
            mod_addr: `int`<br/>
                a module address, used to specify which configuration module
                one wants to update. Negative addresses are used for host-side
                configuration, while positive addresses (including zero) are
                used for device-side configuration.
            param_addr: `int`<br/>
                a parameter address, to select a specific parameter to update
                from this particular configuration module.
                Only positive numbers
                (including zero) are allowed.
            param: `int` or `bool`<br/>
                a configuration parameter's new value.

        # Returns
            flag: `bool`<br/>
                returns `True` if the config is set successfully,
                `False` otherwise.
        """
        if self.handle is not None:
            set_success = libcaer.caerDeviceConfigSet(
                self.handle, mod_addr, param_addr, param)
            return set_success
        else:
            return False
    def get_config(self, mod_addr, param_addr):
        """Get Configuration.

        # Arguments
            mod_addr: `int`<br/>
                a module address, used to specify which configuration module
                one wants to update. Negative addresses are used for host-side
                configuration, while positive addresses (including zero) are
                used for device-side configuration.
            param_addr: `int`<br/>
                a parameter address, to select a specific parameter to update
                from this particular configuration module.
                Only positive numbers
                (including zero) are allowed.

        # Returns
            param: `int` or `bool`<br/>
                a configuration parameter's new value. Returns None
                if the handle is not valid.
        """
        if self.handle is not None:
            return libcaer.caerDeviceConfigGet(
                self.handle, mod_addr, param_addr)
        else:
            return None
    def get_packet_container(self):
        """Get event packet container.

        # Returns
            packet_container: `caerEventPacketContainer`<br/>
                a container that consists of event packets.
            packet_number: `int`<br/>
                number of event packet in the container.
        """
        packet_container = libcaer.caerDeviceDataGet(self.handle)
        if packet_container is not None:
            packet_number = \
                libcaer.caerEventPacketContainerGetEventPacketsNumber(
                    packet_container)
            return packet_container, packet_number
        else:
            return None, None
    def get_packet_header(self, packet_container, idx):
        """Get a single packet header.

        # Arguments
            packet_container: `caerEventPacketContainer`<br/>
                the event packet container
            idx: `int`<br/>
                the index of the packet header

        # Returns
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet
            packet_type: `caerEventPacketType`<br/>
                the type of the event packet
        """
        packet_header = \
            libcaer.caerEventPacketContainerGetEventPacket(
                packet_container, idx)
        if packet_header is None:
            return (None, None)
        else:
            packet_type = libcaer.caerEventPacketHeaderGetEventType(
                packet_header)
            return packet_header, packet_type
    def get_polarity_event(self, packet_header):
        """Get a packet of polarity event.

        # Arguments
            packet_header: `caerEventPacketHeader`<br/>
                the header that represents a event packet

        # Returns
            events: `numpy.ndarray`
                a 2-D array that has the shape of (N, 4) where N
                is the number of events in the event packet.
                Each row in the array represents a single polarity event.
                The first number is the timestamp.
                The second number is the X position of the event.
                The third number is the Y position of the event.
                The fourth number represents the polarity of the event
                (positive or negative).
            num_events: `int`
                number of the polarity events available in the packet.
        """
        # NOTE(review): unlike USBDevice, this calls the libcaer accessors
        # directly rather than going through a get_event_packet dispatch.
        num_events = libcaer.caerEventPacketHeaderGetEventNumber(
            packet_header)
        polarity = libcaer.caerPolarityEventPacketFromPacketHeader(
            packet_header)
        events = libcaer.get_polarity_event(
            polarity, num_events*4).reshape(num_events, 4)
        return events, num_events
| 37.866898 | 79 | 0.590488 | from __future__ import print_function, absolute_import
import abc
from pyaer import libcaer
class USBDevice(object):
def __init__(self):
self.handle = None
self.get_event_number_funcs = {
libcaer.POLARITY_EVENT:
libcaer.caerEventPacketHeaderGetEventNumber,
libcaer.SPECIAL_EVENT:
libcaer.caerEventPacketHeaderGetEventNumber,
libcaer.IMU6_EVENT:
libcaer.caerEventPacketHeaderGetEventNumber,
libcaer.IMU9_EVENT:
libcaer.caerEventPacketHeaderGetEventNumber,
libcaer.SPIKE_EVENT:
libcaer.caerEventPacketHeaderGetEventNumber
}
self.get_event_packet_funcs = {
libcaer.POLARITY_EVENT:
libcaer.caerPolarityEventPacketFromPacketHeader,
libcaer.SPECIAL_EVENT:
libcaer.caerSpecialEventPacketFromPacketHeader,
libcaer.FRAME_EVENT:
libcaer.caerFrameEventPacketFromPacketHeader,
libcaer.IMU6_EVENT:
libcaer.caerIMU6EventPacketFromPacketHeader,
libcaer.IMU9_EVENT:
libcaer.caerIMU9EventPacketFromPacketHeader,
libcaer.SPIKE_EVENT:
libcaer.caerSpikeEventPacketFromPacketHeader
}
@abc.abstractmethod
def obtain_device_info(self, handle):
return
@abc.abstractmethod
def get_event(self):
return
def open(self,
device_type,
device_id=1,
bus_number_restrict=0,
dev_address_restrict=0,
serial_number=""):
self.handle = libcaer.caerDeviceOpen(
device_id, device_type, bus_number_restrict,
dev_address_restrict, serial_number)
if self.handle is None:
raise ValueError("The device is failed to open.")
def close(self):
if self.handle is not None:
libcaer.caerDeviceClose(self.handle)
def shutdown(self):
self.data_stop()
self.close()
def data_start(self):
if self.handle is not None:
data_start_success = libcaer.caerDeviceDataStart(
self.handle, None, None, None, None, None)
return data_start_success
else:
return False
def data_stop(self):
libcaer.caerDeviceDataStop(self.handle)
def send_default_config(self):
if self.handle is not None:
send_success = libcaer.caerDeviceSendDefaultConfig(self.handle)
return send_success
else:
return False
def set_max_container_packet_size(self, max_packet_size=0):
return self.set_config(
libcaer.CAER_HOST_CONFIG_PACKETS,
libcaer.CAER_HOST_CONFIG_PACKETS_MAX_CONTAINER_PACKET_SIZE,
max_packet_size)
def set_max_container_interval(self, max_packet_interval=10000):
return self.set_config(
libcaer.CAER_HOST_CONFIG_PACKETS,
libcaer.CAER_HOST_CONFIG_PACKETS_MAX_CONTAINER_INTERVAL,
max_packet_interval)
def set_data_exchange_blocking(self, exchange_blocking=True):
return self.set_config(
libcaer.CAER_HOST_CONFIG_DATAEXCHANGE,
libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING,
exchange_blocking)
def set_config(self, mod_addr, param_addr, param):
if self.handle is not None:
set_success = libcaer.caerDeviceConfigSet(
self.handle, mod_addr, param_addr, param)
return set_success
else:
return False
def get_config(self, mod_addr, param_addr):
if self.handle is not None:
return libcaer.caerDeviceConfigGet(
self.handle, mod_addr, param_addr)
else:
return None
def get_packet_container(self):
packet_container = libcaer.caerDeviceDataGet(self.handle)
if packet_container is not None:
packet_number = \
libcaer.caerEventPacketContainerGetEventPacketsNumber(
packet_container)
return packet_container, packet_number
else:
return None, None
def get_packet_header(self, packet_container, idx):
packet_header = \
libcaer.caerEventPacketContainerGetEventPacket(
packet_container, idx)
if packet_header is None:
return (None, None)
else:
packet_type = libcaer.caerEventPacketHeaderGetEventType(
packet_header)
return packet_header, packet_type
def get_event_packet(self, packet_header, packet_type):
num_events = self.get_event_number_funcs[packet_type](
packet_header) if packet_type in self.get_event_number_funcs \
else None
event_packet = self.get_event_packet_funcs[packet_type](
packet_header) if packet_type in self.get_event_packet_funcs \
else None
return num_events, event_packet
def get_polarity_event(self, packet_header):
num_events, polarity = self.get_event_packet(
packet_header, libcaer.POLARITY_EVENT)
events = libcaer.get_polarity_event(
polarity, num_events*4).reshape(num_events, 4)
return events, num_events
def get_polarity_hist(self, packet_header, device_type=None):
num_events, polarity = self.get_event_packet(
packet_header, libcaer.POLARITY_EVENT)
if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
hist = libcaer.get_polarity_event_histogram_240(
polarity, num_events)
elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
hist = libcaer.get_polarity_event_histogram_346(
polarity, num_events)
elif device_type == "DVS128":
hist = libcaer.get_polarity_event_histogram_128(
polarity, num_events)
elif device_type == libcaer.DVXPLORER_CHIP_ID:
hist = libcaer.get_polarity_event_histogram_dvxplorer(
polarity, num_events)
elif device_type == libcaer.DVXPLORER_LITE_CHIP_ID:
hist = libcaer.get_polarity_event_histogram_dvxplorer_lite(
polarity, num_events)
else:
return None, 0
return hist, num_events
def get_counter_neuron_event(self, packet_header, device_type=None):
num_events, polarity = self.get_event_packet(
packet_header, libcaer.POLARITY_EVENT)
if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
hist = libcaer.get_counter_neuron_frame_240(
polarity, num_events)
elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
hist = libcaer.get_polarity_event_histogram_346(
polarity, num_events)
elif device_type == "DVS128":
hist = libcaer.get_polarity_event_histogram_128(
polarity, num_events)
else:
return None, 0
return hist, num_events
def get_special_event(self, packet_header):
num_events, special = self.get_event_packet(
packet_header, libcaer.SPECIAL_EVENT)
events = libcaer.get_special_event(
special, num_events*2).reshape(num_events, 2)
return events, num_events
def get_frame_event(self, packet_header, device_type=None,
aps_filter_type=libcaer.MONO):
_, frame = self.get_event_packet(packet_header, libcaer.FRAME_EVENT)
first_event = libcaer.caerFrameEventPacketGetEventConst(frame, 0)
frame_ts = libcaer.caerFrameEventGetTimestamp64(first_event, frame)
if device_type == libcaer.DAVIS_CHIP_DAVIS240C:
frame_mat = libcaer.get_frame_event_240(first_event)
elif device_type == libcaer.DAVIS_CHIP_DAVIS346B:
frame_mat = libcaer.get_frame_event_346(first_event) \
if aps_filter_type == libcaer.MONO else \
libcaer.get_rgb_frame_event_346(first_event)
else:
Y_range = libcaer.caerFrameEventGetLengthY(first_event)
X_range = libcaer.caerFrameEventGetLengthX(first_event)
frame_mat = libcaer.get_frame_event(
first_event, Y_range*X_range).reshape(Y_range, X_range) \
if aps_filter_type == libcaer.MONO else \
libcaer.get_frame_event(
first_event, Y_range*X_range*3).reshape(
Y_range, X_range, 3)
return frame_mat, frame_ts
def get_imu6_event(self, packet_header):
num_events, imu = self.get_event_packet(
packet_header, libcaer.IMU6_EVENT)
events = libcaer.get_imu6_event(
imu, num_events*8).reshape(num_events, 8)
return events, num_events
def get_imu9_event(self, packet_header):
num_events, imu = self.get_event_packet(
packet_header, libcaer.IMU9_EVENT)
events = libcaer.get_imu9_event(
imu, num_events*11).reshape(num_events, 11)
return events, num_events
def get_spike_event(self, packet_header):
num_events, spike = self.get_event_packet(
packet_header, self.SPIKE_EVENT)
events = libcaer.get_spike_event(
spike, num_events*4).reshape(num_events, 4)
return events, num_events
class SerialDevice(object):
def __init__(self):
self.handle = None
@abc.abstractmethod
def obtain_device_info(self, handle):
return
@abc.abstractmethod
def get_event(self):
return
def open(self,
device_type,
device_id=1,
serial_port_name="/dev/ttyUSB0",
serial_baud_rate=libcaer.CAER_HOST_CONFIG_SERIAL_BAUD_RATE_12M):
self.handle = libcaer.caerDeviceOpenSerial(
device_id, device_type, serial_port_name,
serial_baud_rate)
if self.handle is None:
raise ValueError("The device is failed to open.")
def close(self):
if self.handle is not None:
libcaer.caerDeviceClose(self.handle)
def shutdown(self):
self.data_stop()
self.close()
def data_start(self):
if self.handle is not None:
data_start_success = libcaer.caerDeviceDataStart(
self.handle, None, None, None, None, None)
return data_start_success
else:
return False
def data_stop(self):
libcaer.caerDeviceDataStop(self.handle)
def send_default_config(self):
if self.handle is not None:
send_success = libcaer.caerDeviceSendDefaultConfig(self.handle)
return send_success
else:
return False
def set_data_exchange_blocking(self, exchange_blocking=True):
return self.set_config(
libcaer.CAER_HOST_CONFIG_DATAEXCHANGE,
libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING,
exchange_blocking)
def set_config(self, mod_addr, param_addr, param):
if self.handle is not None:
set_success = libcaer.caerDeviceConfigSet(
self.handle, mod_addr, param_addr, param)
return set_success
else:
return False
def get_config(self, mod_addr, param_addr):
if self.handle is not None:
return libcaer.caerDeviceConfigGet(
self.handle, mod_addr, param_addr)
else:
return None
def get_packet_container(self):
packet_container = libcaer.caerDeviceDataGet(self.handle)
if packet_container is not None:
packet_number = \
libcaer.caerEventPacketContainerGetEventPacketsNumber(
packet_container)
return packet_container, packet_number
else:
return None, None
def get_packet_header(self, packet_container, idx):
packet_header = \
libcaer.caerEventPacketContainerGetEventPacket(
packet_container, idx)
if packet_header is None:
return (None, None)
else:
packet_type = libcaer.caerEventPacketHeaderGetEventType(
packet_header)
return packet_header, packet_type
def get_polarity_event(self, packet_header):
num_events = libcaer.caerEventPacketHeaderGetEventNumber(
packet_header)
polarity = libcaer.caerPolarityEventPacketFromPacketHeader(
packet_header)
events = libcaer.get_polarity_event(
polarity, num_events*4).reshape(num_events, 4)
return events, num_events
| true | true |
1c3d6cebcfe98ad3da6657a63bccc75c58791058 | 1,047 | py | Python | yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py | mythwm/yardstick | ea13581f450c9c44f6f73d383e6a192697a95cc1 | [
"Apache-2.0"
] | null | null | null | yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py | mythwm/yardstick | ea13581f450c9c44f6f73d383e6a192697a95cc1 | [
"Apache-2.0"
] | null | null | null | yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py | mythwm/yardstick | ea13581f450c9c44f6f73d383e6a192697a95cc1 | [
"Apache-2.0"
] | null | null | null | ##############################################################################
# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import mock
import unittest
from yardstick.benchmark.scenarios.lib import create_keypair
class CreateKeypairTestCase(unittest.TestCase):
    """Unit test for the CreateKeypair scenario."""

    @mock.patch.object(create_keypair, 'paramiko')
    @mock.patch.object(create_keypair, 'op_utils')
    def test_create_keypair(self, mock_op_utils, *args):
        """Running the scenario should delegate key creation to op_utils."""
        scenario_cfg = {
            "options": {
                'key_name': 'yardstick_key',
                'key_path': '/tmp/yardstick_key',
            }
        }
        scenario = create_keypair.CreateKeypair(scenario_cfg, {})
        scenario.run({})
        mock_op_utils.create_keypair.assert_called_once()
1c3d6eb61bbde7806f9df08892a26c211fe45661 | 478 | py | Python | src/posts/migrations/0009_auto_20200530_1911.py | arabindamahato/blog | 419bdb9c9e650d5c4b41efae0bfcf504703e60bf | [
"MIT"
] | null | null | null | src/posts/migrations/0009_auto_20200530_1911.py | arabindamahato/blog | 419bdb9c9e650d5c4b41efae0bfcf504703e60bf | [
"MIT"
] | 9 | 2020-06-06T01:46:35.000Z | 2022-02-10T15:01:07.000Z | src/posts/migrations/0009_auto_20200530_1911.py | arabindamahato/just_django | 419bdb9c9e650d5c4b41efae0bfcf504703e60bf | [
"MIT"
] | null | null | null | # Generated by Django 2.2.7 on 2020-05-30 13:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter Comment.post to add ``related_name='comments'``."""

    dependencies = [
        ('posts', '0008_comment'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='post',
            # related_name='comments' exposes the reverse accessor
            # ``post.comments`` instead of the default ``comment_set``.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='posts.Post'),
        ),
    ]
| 23.9 | 123 | 0.633891 |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('posts', '0008_comment'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='posts.Post'),
),
]
| true | true |
1c3d6f7dd2178f52483813d47460bc84ca34328c | 1,627 | py | Python | msk/exceptions.py | andlo/mycroft-skills-kit | cef589a0aeebdb04c68c55138cb4c58c5c76c492 | [
"Apache-2.0"
] | 1 | 2020-08-13T12:21:56.000Z | 2020-08-13T12:21:56.000Z | msk/exceptions.py | andlo/mycroft-skills-kit | cef589a0aeebdb04c68c55138cb4c58c5c76c492 | [
"Apache-2.0"
] | 3 | 2021-06-08T21:06:32.000Z | 2022-01-13T02:22:38.000Z | msk/exceptions.py | andlo/mycroft-skills-kit | cef589a0aeebdb04c68c55138cb4c58c5c76c492 | [
"Apache-2.0"
] | 1 | 2020-08-13T12:21:58.000Z | 2020-08-13T12:21:58.000Z | # Copyright (c) 2018 Mycroft AI, Inc.
#
# This file is part of Mycroft Light
# (see https://github.com/MatthewScholefield/mycroft-light).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from msm import MsmException
class MskException(MsmException):
    """Base class for all errors raised by msk."""
    pass
class AlreadyUpdated(MskException):
    """Raised to indicate the target was already updated."""
    pass
class GithubRepoExists(MskException):
    """Raised when a GitHub repository already exists.

    Base class for the more specific SkillNameTaken and
    UnrelatedGithubHistory errors defined below.
    """
    pass
class NotUploaded(MskException):
    """Raised to indicate the skill has not been uploaded."""
    pass
class PRModified(MskException):
    """Raised to indicate the pull request was modified."""
    pass
class SkillNameTaken(GithubRepoExists):
    """Raised when you try to submit a skill with an already taken unique
    name.
    """
    pass
class UnrelatedGithubHistory(GithubRepoExists):
    """Raised when you try to update a remote with unrelated commit
    history.
    """
    pass
class NoGitRepository(MskException):
    """Raised when a skill cannot be updated because it does not belong to
    any git repo.
    """
    pass
| 24.651515 | 75 | 0.739398 |
from msm import MsmException
class MskException(MsmException):
pass
class AlreadyUpdated(MskException):
pass
class GithubRepoExists(MskException):
pass
class NotUploaded(MskException):
pass
class PRModified(MskException):
pass
class SkillNameTaken(GithubRepoExists):
pass
class UnrelatedGithubHistory(GithubRepoExists):
pass
class NoGitRepository(MskException):
pass
| true | true |
1c3d7184f6210c7a0767abe3eb809e89c9d400b9 | 13,971 | py | Python | hipi/relabelling_replay_buffer.py | muell-monster/google-research | 04d2024f4723bc4be3d639a668c19fb1f6a31478 | [
"Apache-2.0"
] | 3 | 2021-01-18T04:46:49.000Z | 2021-03-05T09:21:40.000Z | hipi/relabelling_replay_buffer.py | Alfaxad/google-research | 2c0043ecd507e75e2df9973a3015daf9253e1467 | [
"Apache-2.0"
] | 7 | 2021-11-10T19:44:38.000Z | 2022-02-10T06:48:39.000Z | hipi/relabelling_replay_buffer.py | Alfaxad/google-research | 2c0043ecd507e75e2df9973a3015daf9253e1467 | [
"Apache-2.0"
] | 4 | 2021-02-08T10:25:45.000Z | 2021-04-17T14:46:26.000Z | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Replay buffer that performs relabeling."""
import gin
import numpy as np
import tensorflow as tf
from tf_agents.replay_buffers import tf_uniform_replay_buffer
from tf_agents.utils import common
@gin.configurable
class RelabellingReplayBuffer(tf_uniform_replay_buffer.TFUniformReplayBuffer):
  """A replay buffer that relabels experience."""

  def __init__(self, *args, **kwargs):
    """Initialize the replay buffer.

    Args:
      *args: Arguments.
      **kwargs: Keyword arguments.

    Additional arguments:
      task_distribution: an instance of multitask.TaskDistribution.
      sample_batch_size: (int) the batch size.
      num_parallel_calls: (int) number of parallel calls for sampling.
      num_future_states: (int) number of future states to consider for
        future state relabeling.
      actor: the actor network.
      critic: the critic network.
      gamma: (float) the discount factor.
      relabel_type: (str) indicator of the relabeling strategy.
      candidate_task_type: (str) within each back, should we use the states,
        next_states, or originally commanded tasks as possible tasks when
        relabeling.
      relabel_prob: (float) fraction of experience to relabel when sampling.
      keep_current_goal: (bool) for ``last'' and ``final'' relabeling,
        should we add both the originally commanded task and the relabeled
        task when inserting new experience into the replay buffer.
      normalize_cols: (bool) Normalizing the columns has the effect of
        including the partition function.
    """
    # Pop the relabeling-specific kwargs so the remaining args/kwargs can be
    # forwarded untouched to TFUniformReplayBuffer below.
    self._task_distribution = kwargs.pop("task_distribution")
    self._sample_batch_size = kwargs.pop("sample_batch_size")
    self._num_parallel_calls = kwargs.pop("num_parallel_calls")
    self._num_future_states = kwargs.pop("num_future_states", 4)
    self._actor = kwargs.pop("actor")
    self._critic = kwargs.pop("critic")
    self._gamma = kwargs.pop("gamma")
    self._relabel_type = kwargs.pop("relabel_type", None)
    assert self._relabel_type in [None, "last", "future", "soft", "random"]
    self._candidate_task_type = kwargs.pop("candidate_task_type", "states")
    assert self._candidate_task_type in ["states", "next_states", "tasks"]
    self._relabel_prob = kwargs.pop("relabel_prob", 1.0)
    self._keep_current_goal = kwargs.pop("keep_current_goal", False)
    self._normalize_cols = kwargs.pop("normalize_cols", True)
    # Dataset iterator; created lazily on the first get_batch() call.
    self._iterator = None
    super(RelabellingReplayBuffer, self).__init__(*args, **kwargs)

  def get_batch(self):
    """Sample a batch of 2-step experience and relabel it.

    Returns:
      experience: a batched trajectory of length T=2. When a relabel_type
        is configured, the observations carry the relabeled tasks and the
        rewards/discounts are recomputed for those tasks.
    """
    if self._iterator is None:
      dataset = self.as_dataset(
          sample_batch_size=self._sample_batch_size,
          num_parallel_calls=self._num_parallel_calls,
          num_steps=2,
      ).prefetch(3)
      self._iterator = iter(dataset)
    experience, unused_info = next(self._iterator)
    if self._relabel_type in ["soft", "random"]:
      experience = self._soft_relabel(experience)
    elif self._relabel_type in ["last", "future"]:
      # Reassign the next_states to have the same goal as the current states
      _, tasks = self._task_distribution.split(experience.observation[:, 0])
      next_states, _ = self._task_distribution.split(experience.observation[:,
                                                                            1])
      next_states_and_tasks = self._task_distribution.combine(
          next_states, tasks)
      new_observation = tf.concat(
          [
              experience.observation[:, 0][:, None], next_states_and_tasks[:,
                                                                           None]
          ],
          axis=1,
      )
      assert new_observation.shape == experience.observation.shape
      experience = experience.replace(observation=new_observation)
    if self._relabel_type is not None:
      # Recompute rewards and done flags
      states, tasks = self._task_distribution.split(experience.observation[:,
                                                                           0])
      next_states, next_tasks = self._task_distribution.split(
          experience.observation[:, 1])
      rewards, dones = self._task_distribution.evaluate(states,
                                                        experience.action[:, 0],
                                                        tasks)
      # Strictly speaking, we don't need to relabel the next rewards and next
      # dones because they end up being thrown away. Only the current rewards
      # and dones end up being important.
      next_rewards, next_dones = self._task_distribution.evaluate(
          next_states, experience.action[:, 1], next_tasks)

      new_rewards = tf.concat([rewards[:, None], next_rewards[:, None]], axis=1)
      new_dones = tf.concat([dones[:, None], next_dones[:, None]], axis=1)
      # 0 if episode is done, 1 if episode is continuing
      new_discount = 1.0 - tf.cast(new_dones, tf.float32)
      assert new_rewards.shape == experience.reward.shape
      assert new_discount.shape == experience.discount.shape
      experience = experience.replace(reward=new_rewards, discount=new_discount)
    return experience

  def _soft_relabel(self, experience):
    """Reassigns tasks to each state and next state.

    Does not recompute the rewards or done flags.

    Args:
      experience: The experience that we want to relabel with inverse RL.

    Returns:
      relabeled_experience: The relabeled experience.
    """
    # Subclasses implement the actual relabeling strategy.
    raise NotImplementedError

  def _add_batch(self, items):
    """Adds a trajectory to the replay buffer."""
    assert items[0].is_first()
    for item in items:
      # The items are batched already, so we remove the first dimension.
      assert item.observation.shape[1:] == self.data_spec.observation.shape
      super(RelabellingReplayBuffer, self)._add_batch(item)
class GoalRelabellingReplayBuffer(RelabellingReplayBuffer):
  """Implements a replay buffer for relabeling goals."""

  def _add_batch(self, items):
    """Adds a trajectory to the replay buffer, relabeling goals on insert.

    For "future" relabeling, each transition is stored with goals derived
    from randomly chosen later states of the same trajectory; for "last"
    relabeling, the goal is derived from the final state. When
    ``keep_current_goal`` is set, the originally commanded goal is stored
    as well.

    Args:
      items: list of (already batched) trajectory steps forming one episode.
    """
    batch_size = len(items)
    if self._relabel_type in ["future", "last"]:
      relabelled_items = []
      for i in range(batch_size):
        if self._relabel_type == "future":
          relabel_indices = np.random.randint(
              i, batch_size, size=self._num_future_states)
        else:
          relabel_indices = [batch_size - 1]
        if self._keep_current_goal:
          relabelled_items.append(items[i])
        for j in relabel_indices:
          state, _ = self._task_distribution.split(items[i].observation)
          next_state, _ = self._task_distribution.split(items[j].observation)
          task = self._task_distribution.state_to_task(next_state)
          state_and_task = self._task_distribution.combine(state, task)
          new_item = items[i].replace(observation=state_and_task)
          relabelled_items.append(new_item)
      items = relabelled_items
    super(GoalRelabellingReplayBuffer, self)._add_batch(items)

  @tf.function
  def _soft_relabel(self, experience):
    """Samples relabeled tasks from the (soft) task posterior.

    Rewards and done flags are NOT recomputed here; get_batch() does that
    after relabeling.

    Args:
      experience: batched trajectory with T=2 steps whose observations are
        combined (state, task) vectors.

    Returns:
      experience: the same trajectory with observations rewritten to carry
        the sampled tasks.
    """
    # experience.observation.shape = [B x T=2 x obs_dim+state_dim]
    states, orig_tasks = self._task_distribution.split(
        experience.observation[:, 0])
    if self._task_distribution.tasks is None:
      tasks = orig_tasks
    else:
      tasks = tf.constant(self._task_distribution.tasks, dtype=tf.float32)
    next_states, _ = self._task_distribution.split(experience.observation[:, 1])
    if self._candidate_task_type == "states":
      candidate_tasks = self._task_distribution.state_to_task(states)
    elif self._candidate_task_type == "next_states":
      candidate_tasks = self._task_distribution.state_to_task(next_states)
    else:
      assert self._candidate_task_type == "tasks"
      candidate_tasks = tasks

    actions = experience.action[:, 0]
    num_tasks = tasks.shape[0]
    batch_size = states.shape[0]
    task_dim = tasks.shape[1]
    obs_dim = states.shape[1]
    action_dim = actions.shape[1]
    action_spec = self._actor.output_tensor_spec
    # Tile every (state, action) pair against every candidate task so the
    # critic/actor can score all B x num_tasks combinations in one pass.
    states_tiled = tf.tile(states[:, None], [1, num_tasks, 1])  # B x B x D
    states_tiled = tf.reshape(states_tiled,
                              [batch_size * num_tasks, obs_dim])  # B*B x D
    actions_tiled = tf.tile(actions[:, None], [1, num_tasks, 1])  # B x B x D
    actions_tiled = tf.reshape(actions_tiled,
                               [batch_size * num_tasks, action_dim])  # B*B x D
    tasks_tiled = tf.tile(tasks[None], [batch_size, 1, 1])  # B x B x D
    tasks_tiled = tf.reshape(tasks_tiled,
                             [batch_size * num_tasks, task_dim])  # B*B x D

    next_states_tiled = tf.tile(next_states[:, None], [1, num_tasks, 1])
    next_states_tiled = tf.reshape(next_states_tiled,
                                   [batch_size * num_tasks, obs_dim])  # B*B x D
    next_relabelled_obs = self._task_distribution.combine(
        next_states_tiled, tasks_tiled)

    sampled_actions_tiled = self._actor(
        next_relabelled_obs, step_type=(), network_state=())[0].sample()
    critic_input = (next_relabelled_obs, sampled_actions_tiled)
    q_vals, _ = self._critic(critic_input, training=False)
    q_vals_vec = tf.reshape(q_vals, (batch_size, num_tasks))

    rewards, dones = self._task_distribution.evaluate(states_tiled,
                                                      actions_tiled,
                                                      tasks_tiled)
    dones = tf.cast(dones, tf.float32)
    rewards_vec = tf.reshape(rewards, (batch_size, num_tasks))
    dones_vec = tf.reshape(dones, (batch_size, num_tasks))

    relabelled_obs = self._task_distribution.combine(states_tiled, tasks_tiled)
    action_distribution = self._actor(
        relabelled_obs, step_type=(), network_state=())[0]
    log_pi = common.log_probability(action_distribution, actions_tiled,
                                    action_spec)
    log_pi_vec = tf.reshape(log_pi, (batch_size, num_tasks))
    # Relabeling logits: immediate reward minus log-likelihood correction
    # plus the discounted bootstrapped value of the candidate task.
    logits_vec = (
        rewards_vec - log_pi_vec + self._gamma * (1.0 - dones_vec) * q_vals_vec)
    if self._relabel_type == "random":
      logits_vec = tf.ones_like(logits_vec)  # Hack to make sampling random

    if self._normalize_cols:
      logits_vec = logits_vec - tf.math.reduce_logsumexp(
          logits_vec, axis=0)[None]
    relabel_indices = tf.random.categorical(logits=logits_vec, num_samples=1)

    ### Metrics
    global_step = tf.compat.v1.train.get_or_create_global_step()
    orig_indices = tf.range(
        self._sample_batch_size, dtype=relabel_indices.dtype)
    with tf.name_scope("relabelling"):
      # How often are the originally commanded goals most optimal?
      opt_indices = tf.argmax(logits_vec, axis=1)
      orig_is_opt = opt_indices == orig_indices
      orig_opt_frac = tf.reduce_mean(tf.cast(orig_is_opt, tf.float32))
      tf.compat.v2.summary.scalar(
          name="orig_task_optimal", data=orig_opt_frac, step=global_step)

      # How often is the relabelled goal optimal?
      # The relabel_indices are [B, 1], so we need to remove the extra dim.
      relabel_is_opt = tf.squeeze(relabel_indices) == orig_indices
      relabel_opt_frac = tf.reduce_mean(tf.cast(relabel_is_opt, tf.float32))
      tf.compat.v2.summary.scalar(
          name="relabel_task_optimal", data=relabel_opt_frac, step=global_step)

      # What are the average Q values of the original tasks?
      if batch_size == num_tasks:
        indices = tf.transpose(tf.stack([orig_indices, orig_indices], axis=0))
        orig_q_vals = tf.gather_nd(logits_vec, indices)
        tf.compat.v2.summary.scalar(
            name="orig_q_vals",
            data=tf.reduce_mean(orig_q_vals),
            step=global_step,
        )

      # What are the average Q values of the relabelled tasks?
      indices = tf.transpose(
          tf.stack([orig_indices, tf.squeeze(relabel_indices)], axis=0))
      relabel_q_vals = tf.gather_nd(logits_vec, indices)
      tf.compat.v2.summary.scalar(
          name="relabel_q_vals",
          data=tf.reduce_mean(relabel_q_vals),
          step=global_step,
      )

      max_q = tf.reduce_max(logits_vec, axis=1)
      tf.compat.v2.summary.scalar(
          name="max_q", data=tf.reduce_mean(max_q), step=global_step)
    ### End metrics

    # For both state-centric and goal-centric relabelling, the implementation of
    # mixing is the same: we randomly replace some of the indices with the
    # diagonal.
    relabelled_tasks = tf.gather(candidate_tasks, tf.squeeze(relabel_indices))

    if self._relabel_prob == 0:
      relabelled_tasks = orig_tasks
    elif 0 < self._relabel_prob < 1:
      # Bug fix: tf.log was removed in TF 2.x; tf.math.log exists in both
      # 1.x and 2.x and matches the tf.math.* usage above.
      logits = tf.math.log([1.0 - self._relabel_prob, self._relabel_prob])
      mask = tf.squeeze(
          tf.random.categorical(
              logits[None], num_samples=self._sample_batch_size))
      mask = tf.cast(mask, tf.float32)[:, None]
      # Bug fix: `mask` is 1 with probability `relabel_prob` (it samples
      # category 1 of [log(1-p), log(p)]), so it must select the
      # *relabelled* tasks. The previous formula
      # (mask * orig + (1-mask) * relabelled) inverted the mixture, keeping
      # the original task with probability `relabel_prob`, inconsistent
      # with the relabel_prob == 0 and == 1 code paths.
      relabelled_tasks = (1.0 - mask) * orig_tasks + mask * relabelled_tasks

    states_and_tasks = self._task_distribution.combine(states, relabelled_tasks)
    next_states_and_tasks = self._task_distribution.combine(
        next_states, relabelled_tasks)
    new_observation = tf.concat(
        [states_and_tasks[:, None], next_states_and_tasks[:, None]], axis=1)
    assert new_observation.shape == experience.observation.shape
    experience = experience.replace(observation=new_observation)
    return experience
| 44.493631 | 80 | 0.671319 |
import gin
import numpy as np
import tensorflow as tf
from tf_agents.replay_buffers import tf_uniform_replay_buffer
from tf_agents.utils import common
@gin.configurable
class RelabellingReplayBuffer(tf_uniform_replay_buffer.TFUniformReplayBuffer):
def __init__(self, *args, **kwargs):
self._task_distribution = kwargs.pop("task_distribution")
self._sample_batch_size = kwargs.pop("sample_batch_size")
self._num_parallel_calls = kwargs.pop("num_parallel_calls")
self._num_future_states = kwargs.pop("num_future_states", 4)
self._actor = kwargs.pop("actor")
self._critic = kwargs.pop("critic")
self._gamma = kwargs.pop("gamma")
self._relabel_type = kwargs.pop("relabel_type", None)
assert self._relabel_type in [None, "last", "future", "soft", "random"]
self._candidate_task_type = kwargs.pop("candidate_task_type", "states")
assert self._candidate_task_type in ["states", "next_states", "tasks"]
self._relabel_prob = kwargs.pop("relabel_prob", 1.0)
self._keep_current_goal = kwargs.pop("keep_current_goal", False)
self._normalize_cols = kwargs.pop("normalize_cols", True)
self._iterator = None
super(RelabellingReplayBuffer, self).__init__(*args, **kwargs)
def get_batch(self):
if self._iterator is None:
dataset = self.as_dataset(
sample_batch_size=self._sample_batch_size,
num_parallel_calls=self._num_parallel_calls,
num_steps=2,
).prefetch(3)
self._iterator = iter(dataset)
experience, unused_info = next(self._iterator)
if self._relabel_type in ["soft", "random"]:
experience = self._soft_relabel(experience)
elif self._relabel_type in ["last", "future"]:
_, tasks = self._task_distribution.split(experience.observation[:, 0])
next_states, _ = self._task_distribution.split(experience.observation[:,
1])
next_states_and_tasks = self._task_distribution.combine(
next_states, tasks)
new_observation = tf.concat(
[
experience.observation[:, 0][:, None], next_states_and_tasks[:,
None]
],
axis=1,
)
assert new_observation.shape == experience.observation.shape
experience = experience.replace(observation=new_observation)
if self._relabel_type is not None:
states, tasks = self._task_distribution.split(experience.observation[:,
0])
next_states, next_tasks = self._task_distribution.split(
experience.observation[:, 1])
rewards, dones = self._task_distribution.evaluate(states,
experience.action[:, 0],
tasks)
# dones because they end up being thrown away. Only the current rewards
# and dones end up being important.
next_rewards, next_dones = self._task_distribution.evaluate(
next_states, experience.action[:, 1], next_tasks)
new_rewards = tf.concat([rewards[:, None], next_rewards[:, None]], axis=1)
new_dones = tf.concat([dones[:, None], next_dones[:, None]], axis=1)
# 0 if episode is done, 1 if episode is continuing
new_discount = 1.0 - tf.cast(new_dones, tf.float32)
assert new_rewards.shape == experience.reward.shape
assert new_discount.shape == experience.discount.shape
experience = experience.replace(reward=new_rewards, discount=new_discount)
return experience
def _soft_relabel(self, experience):
raise NotImplementedError
def _add_batch(self, items):
assert items[0].is_first()
for item in items:
# The items are batched already, so we remove the first dimension.
assert item.observation.shape[1:] == self.data_spec.observation.shape
super(RelabellingReplayBuffer, self)._add_batch(item)
class GoalRelabellingReplayBuffer(RelabellingReplayBuffer):
    """Replay buffer that relabels the task/goal portion of stored experience.

    Two relabelling paths:
      * "future" / "last": applied at insertion time in `_add_batch`, by
        adding copies of each trajectory whose task is derived from a later
        state in the same batch.
      * "soft" / "random": applied at sampling time via `_soft_relabel`
        (called from the base class), which scores candidate tasks with the
        actor/critic and samples one replacement task per batch row.
    """

    def _add_batch(self, items):
        """Insert `items`, optionally expanded with goal-relabelled copies.

        For "future", each trajectory i spawns `_num_future_states` copies
        whose task comes from a state at index >= i in the batch; for
        "last", the task comes from the final state of the batch.  The
        original trajectory is kept only when `_keep_current_goal` is set.
        """
        batch_size = len(items)
        if self._relabel_type in ["future", "last"]:
            relabelled_items = []
            for i in range(batch_size):
                if self._relabel_type == "future":
                    # Sample goal states from the remainder of the batch.
                    relabel_indices = np.random.randint(
                        i, batch_size, size=self._num_future_states)
                else:
                    relabel_indices = [batch_size - 1]
                if self._keep_current_goal:
                    relabelled_items.append(items[i])
                for j in relabel_indices:
                    # Replace trajectory i's task with the task induced by
                    # trajectory j's state.
                    state, _ = self._task_distribution.split(items[i].observation)
                    next_state, _ = self._task_distribution.split(items[j].observation)
                    task = self._task_distribution.state_to_task(next_state)
                    state_and_task = self._task_distribution.combine(state, task)
                    new_item = items[i].replace(observation=state_and_task)
                    relabelled_items.append(new_item)
            items = relabelled_items
        super(GoalRelabellingReplayBuffer, self)._add_batch(items)

    @tf.function
    def _soft_relabel(self, experience):
        """Relabel `experience` by sampling one new task per batch element.

        Each (state, action, candidate-task) pair is scored with
        logit = r(s, a, w) - log pi(a|s, w) + gamma * (1 - done) * Q(s', a', w)
        and the replacement task is sampled from those logits (uniform when
        `_relabel_type == "random"`).

        Args:
            experience: batched two-step trajectory; observation shape is
                [B, T=2, obs_dim + task_dim].

        Returns:
            The experience with both timesteps' observations rebuilt from
            the relabelled tasks.
        """
        # experience.observation.shape = [B x T=2 x obs_dim+state_dim]
        states, orig_tasks = self._task_distribution.split(
            experience.observation[:, 0])
        if self._task_distribution.tasks is None:
            tasks = orig_tasks
        else:
            tasks = tf.constant(self._task_distribution.tasks, dtype=tf.float32)
        next_states, _ = self._task_distribution.split(experience.observation[:, 1])
        # Candidate tasks can come from current states, next states, or the
        # task set itself.
        if self._candidate_task_type == "states":
            candidate_tasks = self._task_distribution.state_to_task(states)
        elif self._candidate_task_type == "next_states":
            candidate_tasks = self._task_distribution.state_to_task(next_states)
        else:
            assert self._candidate_task_type == "tasks"
            candidate_tasks = tasks
        actions = experience.action[:, 0]
        num_tasks = tasks.shape[0]
        batch_size = states.shape[0]
        task_dim = tasks.shape[1]
        obs_dim = states.shape[1]
        action_dim = actions.shape[1]
        action_spec = self._actor.output_tensor_spec
        # Tile so every batch row is paired with every candidate task.
        states_tiled = tf.tile(states[:, None], [1, num_tasks, 1])  # B x B x D
        states_tiled = tf.reshape(states_tiled,
                                  [batch_size * num_tasks, obs_dim])  # B*B x D
        actions_tiled = tf.tile(actions[:, None], [1, num_tasks, 1])  # B x B x D
        actions_tiled = tf.reshape(actions_tiled,
                                   [batch_size * num_tasks, action_dim])  # B*B x D
        tasks_tiled = tf.tile(tasks[None], [batch_size, 1, 1])  # B x B x D
        tasks_tiled = tf.reshape(tasks_tiled,
                                 [batch_size * num_tasks, task_dim])  # B*B x D
        next_states_tiled = tf.tile(next_states[:, None], [1, num_tasks, 1])
        next_states_tiled = tf.reshape(next_states_tiled,
                                       [batch_size * num_tasks, obs_dim])  # B*B x D
        # Bootstrap term: Q(s', a', w) with a' sampled from the actor.
        next_relabelled_obs = self._task_distribution.combine(
            next_states_tiled, tasks_tiled)
        sampled_actions_tiled = self._actor(
            next_relabelled_obs, step_type=(), network_state=())[0].sample()
        critic_input = (next_relabelled_obs, sampled_actions_tiled)
        q_vals, _ = self._critic(critic_input, training=False)
        q_vals_vec = tf.reshape(q_vals, (batch_size, num_tasks))
        rewards, dones = self._task_distribution.evaluate(states_tiled,
                                                          actions_tiled,
                                                          tasks_tiled)
        dones = tf.cast(dones, tf.float32)
        rewards_vec = tf.reshape(rewards, (batch_size, num_tasks))
        dones_vec = tf.reshape(dones, (batch_size, num_tasks))
        # Log-likelihood of the stored action under each candidate task.
        relabelled_obs = self._task_distribution.combine(states_tiled, tasks_tiled)
        action_distribution = self._actor(
            relabelled_obs, step_type=(), network_state=())[0]
        log_pi = common.log_probability(action_distribution, actions_tiled,
                                        action_spec)
        log_pi_vec = tf.reshape(log_pi, (batch_size, num_tasks))
        logits_vec = (
            rewards_vec - log_pi_vec + self._gamma * (1.0 - dones_vec) * q_vals_vec)
        if self._relabel_type == "random":
            logits_vec = tf.ones_like(logits_vec)  # Hack to make sampling random
        if self._normalize_cols:
            # Normalize over the batch axis so each task's column sums to 1.
            logits_vec = logits_vec - tf.math.reduce_logsumexp(
                logits_vec, axis=0)[None]
        relabel_indices = tf.random.categorical(logits=logits_vec, num_samples=1)
        ### Metrics
        global_step = tf.compat.v1.train.get_or_create_global_step()
        orig_indices = tf.range(
            self._sample_batch_size, dtype=relabel_indices.dtype)
        with tf.name_scope("relabelling"):
            # How often are the originally commanded goals most optimal?
            opt_indices = tf.argmax(logits_vec, axis=1)
            orig_is_opt = opt_indices == orig_indices
            orig_opt_frac = tf.reduce_mean(tf.cast(orig_is_opt, tf.float32))
            tf.compat.v2.summary.scalar(
                name="orig_task_optimal", data=orig_opt_frac, step=global_step)
            # How often is the relabelled goal optimal?
            # The relabel_indices are [B, 1], so we need to remove the extra dim.
            relabel_is_opt = tf.squeeze(relabel_indices) == orig_indices
            relabel_opt_frac = tf.reduce_mean(tf.cast(relabel_is_opt, tf.float32))
            tf.compat.v2.summary.scalar(
                name="relabel_task_optimal", data=relabel_opt_frac, step=global_step)
            # What are the average Q values of the original tasks?
            # Only meaningful when row i's original task is column i.
            if batch_size == num_tasks:
                indices = tf.transpose(tf.stack([orig_indices, orig_indices], axis=0))
                orig_q_vals = tf.gather_nd(logits_vec, indices)
                tf.compat.v2.summary.scalar(
                    name="orig_q_vals",
                    data=tf.reduce_mean(orig_q_vals),
                    step=global_step,
                )
            # What are the average Q values of the relabelled tasks?
            indices = tf.transpose(
                tf.stack([orig_indices, tf.squeeze(relabel_indices)], axis=0))
            relabel_q_vals = tf.gather_nd(logits_vec, indices)
            tf.compat.v2.summary.scalar(
                name="relabel_q_vals",
                data=tf.reduce_mean(relabel_q_vals),
                step=global_step,
            )
            max_q = tf.reduce_max(logits_vec, axis=1)
            tf.compat.v2.summary.scalar(
                name="max_q", data=tf.reduce_mean(max_q), step=global_step)
        ### End metrics
        # For both state-centric and goal-centric relabelling, the implementation of
        # mixing is the same: we randomly replace some of the indices with the
        # diagonal.
        relabelled_tasks = tf.gather(candidate_tasks, tf.squeeze(relabel_indices))
        if self._relabel_prob == 0:
            relabelled_tasks = orig_tasks
        elif 0 < self._relabel_prob < 1:
            # NOTE(review): tf.log is TF1-style API (tf.math.log in TF2); this
            # file mixes tf.compat.v1/v2 — confirm the targeted TF version.
            logits = tf.log([1.0 - self._relabel_prob, self._relabel_prob])
            # NOTE(review): mask==1 is drawn with probability _relabel_prob and
            # KEEPS the original task, while the endpoints (0 -> all original,
            # 1 -> all relabelled) imply the opposite — verify the intended
            # direction of _relabel_prob.
            mask = tf.squeeze(
                tf.random.categorical(
                    logits[None], num_samples=self._sample_batch_size))
            mask = tf.cast(mask, tf.float32)[:, None]
            relabelled_tasks = mask * orig_tasks + (1 - mask) * relabelled_tasks
        # Rebuild both timesteps' observations with the chosen tasks.
        states_and_tasks = self._task_distribution.combine(states, relabelled_tasks)
        next_states_and_tasks = self._task_distribution.combine(
            next_states, relabelled_tasks)
        new_observation = tf.concat(
            [states_and_tasks[:, None], next_states_and_tasks[:, None]], axis=1)
        assert new_observation.shape == experience.observation.shape
        experience = experience.replace(observation=new_observation)
        return experience
| true | true |
1c3d71c61f025e4d128508f7268321617ae891cc | 1,325 | py | Python | molecule/python/test/test_currency_balance_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | molecule/python/test/test_currency_balance_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | molecule/python/test/test_currency_balance_api.py | sumit4-ttn/SDK | b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Molecule API Documentation
The Hydrogen Molecule API # noqa: E501
OpenAPI spec version: 1.3.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import molecule_api
from molecule_api.api.currency_balance_api import CurrencyBalanceApi # noqa: E501
from molecule_api.rest import ApiException
class TestCurrencyBalanceApi(unittest.TestCase):
    """CurrencyBalanceApi unit test stubs (swagger-codegen generated)."""

    def setUp(self):
        # A fresh API client per test case.
        self.api = molecule_api.api.currency_balance_api.CurrencyBalanceApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_get_currency_balance_all_using_get(self):
        """Test case for get_currency_balance_all_using_get

        Fetch Currency Balance list  # noqa: E501
        """
        # TODO: exercise the endpoint once fixtures/credentials are available.
        pass

    def test_get_currency_balance_using_get(self):
        """Test case for get_currency_balance_using_get

        Fetch Currency Balance details  # noqa: E501
        """
        # TODO: exercise the endpoint once fixtures/credentials are available.
        pass

    def test_update_currency_balance_using_post(self):
        """Test case for update_currency_balance_using_post

        Update(Fetch) latest Currency Balance  # noqa: E501
        """
        # TODO: exercise the endpoint once fixtures/credentials are available.
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 23.660714 | 91 | 0.701887 |
from __future__ import absolute_import
import unittest
import molecule_api
from molecule_api.api.currency_balance_api import CurrencyBalanceApi
from molecule_api.rest import ApiException
class TestCurrencyBalanceApi(unittest.TestCase):
    """Unit test stubs for CurrencyBalanceApi."""

    def setUp(self):
        # A fresh API client per test case.
        self.api = molecule_api.api.currency_balance_api.CurrencyBalanceApi()

    def tearDown(self):
        pass

    def test_get_currency_balance_all_using_get(self):
        """Stub: fetch Currency Balance list."""
        pass

    def test_get_currency_balance_using_get(self):
        """Stub: fetch Currency Balance details."""
        pass

    def test_update_currency_balance_using_post(self):
        """Stub: update (fetch) latest Currency Balance."""
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| true | true |
1c3d71e3800ea8228572573f5e1ab76021d0d583 | 4,790 | py | Python | ijson/common.py | KOLANICH/ijson | 7728b1d9718461a68b2ff001a8513d83f6e88ca0 | [
"BSD-3-Clause"
] | null | null | null | ijson/common.py | KOLANICH/ijson | 7728b1d9718461a68b2ff001a8513d83f6e88ca0 | [
"BSD-3-Clause"
] | null | null | null | ijson/common.py | KOLANICH/ijson | 7728b1d9718461a68b2ff001a8513d83f6e88ca0 | [
"BSD-3-Clause"
] | null | null | null | '''
Backend independent higher level interfaces, common exceptions.
'''
import decimal
class JSONError(Exception):
    '''
    Common base class for every error raised while parsing JSON.
    '''
class IncompleteJSONError(JSONError):
    '''
    Signals that the input stream ended before the expected data could be
    read.
    '''
def parse(basic_events):
    '''
    An iterator that annotates basic parser events with the dotted path of
    the element they belong to, yielding ``(prefix, event, value)`` tuples.

    The prefix is built from the map keys leading to the current element,
    using the pseudo-key ``item`` for array elements.  For example, parsing
    ``{"array": [1, 2]}`` yields the prefixes
    ``'' , '' , 'array', 'array.item', 'array.item', 'array', ''``.
    '''
    path = []
    for event, value in basic_events:
        if event == 'start_map':
            prefix = '.'.join(path)
            path.append(None)  # placeholder until the first key arrives
        elif event == 'map_key':
            # Keys are reported at the parent's prefix and then become the
            # path component for the value that follows.
            prefix = '.'.join(path[:-1])
            path[-1] = value
        elif event == 'start_array':
            prefix = '.'.join(path)
            path.append('item')
        elif event in ('end_map', 'end_array'):
            path.pop()
            prefix = '.'.join(path)
        else:  # scalar: null, boolean, number or string
            prefix = '.'.join(path)
        yield prefix, event, value
class ObjectBuilder(object):
    '''
    Consumes basic JSON parser events and incrementally assembles the
    corresponding native Python object, which is available at any time via
    the ``value`` attribute.

    >>> builder = ObjectBuilder()
    >>> for event, value in [('start_map', None), ('map_key', 'key'),
    ...                      ('string', u'value'), ('end_map', None)]:
    ...     builder.event(event, value)
    >>> builder.value == {'key': 'value'}
    True
    '''
    def __init__(self, map_type=None):
        self.map_type = map_type or dict

        def assign_root(obj):
            self.value = obj

        # Stack of one-argument callables; the top of the stack stores each
        # newly completed value into the container currently being built.
        self.containers = [assign_root]

    def event(self, event, value):
        '''Feed a single (event, value) pair into the builder.'''
        if event == 'start_map':
            new_map = self.map_type()
            self.containers[-1](new_map)
            # self.key is read at call time, i.e. the most recent map_key.
            self.containers.append(
                lambda item: new_map.__setitem__(self.key, item))
        elif event == 'map_key':
            self.key = value
        elif event == 'start_array':
            new_array = []
            self.containers[-1](new_array)
            self.containers.append(new_array.append)
        elif event in ('end_map', 'end_array'):
            self.containers.pop()
        else:
            self.containers[-1](value)
def items(prefixed_events, prefix, map_type=None):
    '''
    An iterator returning native Python objects constructed from the events
    under a given prefix.

    Scalars at `prefix` are yielded as-is; container events are fed into an
    ObjectBuilder until the matching end event, then the completed object
    is yielded.
    '''
    prefixed_events = iter(prefixed_events)
    try:
        while True:
            current, event, value = next(prefixed_events)
            if current == prefix:
                if event in ('start_map', 'start_array'):
                    builder = ObjectBuilder(map_type=map_type)
                    end_event = event.replace('start', 'end')
                    while (current, event) != (prefix, end_event):
                        builder.event(event, value)
                        current, event, value = next(prefixed_events)
                    # Drop the builder's internal closures before yielding so
                    # the finished value keeps no references to the builder.
                    del builder.containers[:]
                    yield builder.value
                else:
                    yield value
    except StopIteration:
        # Explicit next() + catch: an exhausted stream (even mid-object)
        # simply ends this generator instead of leaking StopIteration.
        pass
def number(str_value):
    '''
    Convert the string form of a JSON number into an ``int`` or a
    ``decimal.Decimal``.  Shared by the backends so numbers are represented
    consistently.
    '''
    # A fraction or an exponent marker means the value is not integral.
    if '.' in str_value or 'e' in str_value or 'E' in str_value:
        return decimal.Decimal(str_value)
    return int(str_value)
| 29.207317 | 82 | 0.551148 | import decimal
class JSONError(Exception):
    """Base exception for all JSON parsing errors."""
    pass
class IncompleteJSONError(JSONError):
    """Raised when the parser can't read expected data from a stream."""
    pass
def parse(basic_events):
    """Annotate basic ``(event, value)`` pairs with the dotted path of the
    element they belong to, yielding ``(prefix, event, value)`` tuples.
    Array elements get the pseudo path component ``item``.
    """
    path = []
    for event, value in basic_events:
        if event == 'map_key':
            # Keys are reported at the parent's prefix and then become the
            # path component for the value that follows.
            prefix = '.'.join(path[:-1])
            path[-1] = value
        elif event == 'start_map':
            prefix = '.'.join(path)
            path.append(None)  # placeholder until the first map_key arrives
        elif event == 'end_map':
            path.pop()
            prefix = '.'.join(path)
        elif event == 'start_array':
            prefix = '.'.join(path)
            path.append('item')
        elif event == 'end_array':
            path.pop()
            prefix = '.'.join(path)
        else:
            # Scalar value (null/boolean/number/string).
            prefix = '.'.join(path)
        yield prefix, event, value
class ObjectBuilder(object):
    """Incrementally builds a native Python object from basic parser events.

    Feed events through :meth:`event`; the (partially) built object is
    always available as ``self.value``.
    """
    def __init__(self, map_type=None):
        def initial_set(value):
            self.value = value
        # Stack of "insert here" callables; the top one receives each newly
        # completed container or scalar.
        self.containers = [initial_set]
        self.map_type = map_type or dict

    def event(self, event, value):
        """Consume one ``(event, value)`` pair from a basic JSON parser."""
        if event == 'map_key':
            self.key = value
        elif event == 'start_map':
            mappable = self.map_type()
            self.containers[-1](mappable)
            def setter(value):
                # Reads self.key at call time, i.e. the most recent map_key.
                mappable[self.key] = value
            self.containers.append(setter)
        elif event == 'start_array':
            array = []
            self.containers[-1](array)
            self.containers.append(array.append)
        elif event == 'end_array' or event == 'end_map':
            self.containers.pop()
        else:
            self.containers[-1](value)
def items(prefixed_events, prefix, map_type=None):
    """Yield native Python objects built from the events under `prefix`.

    Scalars at `prefix` are yielded as-is; containers are assembled with an
    ObjectBuilder until the matching end event arrives.
    """
    prefixed_events = iter(prefixed_events)
    try:
        while True:
            current, event, value = next(prefixed_events)
            if current == prefix:
                if event in ('start_map', 'start_array'):
                    builder = ObjectBuilder(map_type=map_type)
                    end_event = event.replace('start', 'end')
                    while (current, event) != (prefix, end_event):
                        builder.event(event, value)
                        current, event, value = next(prefixed_events)
                    # Drop the builder's closures so the yielded value keeps
                    # no references back to the builder.
                    del builder.containers[:]
                    yield builder.value
                else:
                    yield value
    except StopIteration:
        # Explicit next() + catch: stream exhaustion (even mid-object) just
        # ends this generator instead of leaking StopIteration.
        pass
def number(str_value):
    """Convert a JSON numeric literal to ``int`` or ``decimal.Decimal``."""
    # A decimal point or exponent marker means the value is not integral.
    if not ('.' in str_value or 'e' in str_value or 'E' in str_value):
        return int(str_value)
    return decimal.Decimal(str_value)
| true | true |
1c3d7315b75ad79df3daf8091300b9d4cc79a3bc | 8,389 | py | Python | sdk/python/pulumi_azure/netapp/account.py | pulumi-bot/pulumi-azure | 64d5c30a77b3f4de117068add359ef85293cef8e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/netapp/account.py | pulumi-bot/pulumi-azure | 64d5c30a77b3f4de117068add359ef85293cef8e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure/netapp/account.py | pulumi-bot/pulumi-azure | 64d5c30a77b3f4de117068add359ef85293cef8e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Account(pulumi.CustomResource):
    active_directory: pulumi.Output[dict]
    """
    A `active_directory` block as defined below.

      * `dns_servers` (`list`) - A list of DNS server IP addresses for the Active Directory domain. Only allows `IPv4` address.
      * `domain` (`str`) - The name of the Active Directory domain.
      * `organizationalUnit` (`str`) - The Organizational Unit (OU) within the Active Directory Domain.
      * `password` (`str`) - The password associated with the `username`.
      * `smbServerName` (`str`) - The NetBIOS name which should be used for the NetApp SMB Server, which will be registered as a computer account in the AD and used to mount volumes.
      * `username` (`str`) - The Username of Active Directory Domain Administrator.
    """
    location: pulumi.Output[str]
    """
    Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
    """
    name: pulumi.Output[str]
    """
    The name of the NetApp Account. Changing this forces a new resource to be created.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group where the NetApp Account should be created. Changing this forces a new resource to be created.
    """
    tags: pulumi.Output[dict]
    """
    A mapping of tags to assign to the resource.
    """
    def __init__(__self__, resource_name, opts=None, active_directory=None, location=None, name=None, resource_group_name=None, tags=None, __props__=None, __name__=None, __opts__=None):
        """
        Manages a NetApp Account.

        > **NOTE:** Azure allows only one active directory can be joined to a single subscription at a time for NetApp Account.

        ## NetApp Account Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.netapp.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            active_directory={
                "username": "aduser",
                "password": "aduserpwd",
                "smbServerName": "SMBSERVER",
                "dns_servers": ["1.2.3.4"],
                "domain": "westcentralus.com",
                "organizationalUnit": "OU=FirstLevel",
            })
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[dict] active_directory: A `active_directory` block; see the
               `active_directory` attribute documentation above for the supported keys.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the NetApp Account. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the NetApp Account should be created. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
        """
        if __name__ is not None:
            # Legacy alias for resource_name, kept for backwards compatibility.
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            # Legacy alias for opts, kept for backwards compatibility.
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['active_directory'] = active_directory
            __props__['location'] = location
            __props__['name'] = name
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['tags'] = tags
        super(Account, __self__).__init__(
            'azure:netapp/account:Account',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name, id, opts=None, active_directory=None, location=None, name=None, resource_group_name=None, tags=None):
        """
        Get an existing Account resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param str id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[dict] active_directory: A `active_directory` block; see the
               `active_directory` attribute documentation above for the supported keys.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the NetApp Account. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group where the NetApp Account should be created. Changing this forces a new resource to be created.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["active_directory"] = active_directory
        __props__["location"] = location
        __props__["name"] = name
        __props__["resource_group_name"] = resource_group_name
        __props__["tags"] = tags
        return Account(resource_name, opts=opts, __props__=__props__)

    def translate_output_property(self, prop):
        # Map provider-side property names to Python-side names.
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        # Map Python-side property names to provider-side names.
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 54.830065 | 200 | 0.677316 |
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Account(pulumi.CustomResource):
    """Manages an Azure NetApp Account (pulumi custom resource)."""
    # Active Directory join settings (dns_servers, domain, organizationalUnit,
    # password, smbServerName, username).
    active_directory: pulumi.Output[dict]
    # Azure location of the resource; changing it forces re-creation.
    location: pulumi.Output[str]
    # Name of the NetApp Account; changing it forces re-creation.
    name: pulumi.Output[str]
    # Resource group the account lives in; changing it forces re-creation.
    resource_group_name: pulumi.Output[str]
    # Tags to assign to the resource.
    tags: pulumi.Output[dict]
    def __init__(__self__, resource_name, opts=None, active_directory=None, location=None, name=None, resource_group_name=None, tags=None, __props__=None, __name__=None, __opts__=None):
        """Create a NetApp Account resource.

        `resource_group_name` is required; `__name__`/`__opts__` are
        deprecated aliases for `resource_name`/`opts`.
        """
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['active_directory'] = active_directory
            __props__['location'] = location
            __props__['name'] = name
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['tags'] = tags
        super(Account, __self__).__init__(
            'azure:netapp/account:Account',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name, id, opts=None, active_directory=None, location=None, name=None, resource_group_name=None, tags=None):
        """Look up an existing Account by provider `id`, with optional
        extra properties to qualify the lookup."""
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["active_directory"] = active_directory
        __props__["location"] = location
        __props__["name"] = name
        __props__["resource_group_name"] = resource_group_name
        __props__["tags"] = tags
        return Account(resource_name, opts=opts, __props__=__props__)
    def translate_output_property(self, prop):
        # Provider-side -> Python-side property names.
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Python-side -> provider-side property names.
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
1c3d7315ca9962eb12dd2e713d0ce503378537fe | 2,028 | py | Python | make_ilsvrc_dataset.py | roxanneluo/colorization-pytorch | 4b5cab85ea2f503f17d13241ae2b7ba54158ccac | [
"MIT"
] | null | null | null | make_ilsvrc_dataset.py | roxanneluo/colorization-pytorch | 4b5cab85ea2f503f17d13241ae2b7ba54158ccac | [
"MIT"
] | null | null | null | make_ilsvrc_dataset.py | roxanneluo/colorization-pytorch | 4b5cab85ea2f503f17d13241ae2b7ba54158ccac | [
"MIT"
] | null | null | null |
import os
import sys
from util import util
import numpy as np
import argparse

parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--in_path', type=str, default='/data/big/dataset/ILSVRC2012')
parser.add_argument('--out_path', type=str, default='./dataset/ilsvrc2012/')
opt = parser.parse_args()

orig_path = opt.in_path
print('Copying ILSVRC from...[%s]' % orig_path)


def _symlink_quiet(src, dst):
    """Create a symlink, tolerating failures (e.g. the link already exists).

    Replaces the previous ``os.system('ln -s ...')`` calls: no shell is
    involved (paths with spaces or shell metacharacters are now safe), and a
    failed link is reported but — like ``ln -s`` under ``os.system`` — does
    not abort the script.
    """
    try:
        os.symlink(src, dst)
    except OSError as err:
        print('skipping link %s -> %s (%s)' % (dst, src, err))


# Symlink a small subset of the training set (used for the initializer).
trn_small_path = os.path.join(opt.out_path, 'train_small')
util.mkdirs(opt.out_path)
util.mkdirs(trn_small_path)
# sorted() makes the 10-class subset deterministic; os.listdir order is
# filesystem-dependent.
train_subdirs = sorted(os.listdir(os.path.join(opt.in_path, 'train')))
for train_subdir in train_subdirs[:10]:
    os.symlink(os.path.join(opt.in_path, 'train', train_subdir),
               os.path.join(trn_small_path, train_subdir))
print('Making small training set in...[%s]' % trn_small_path)

# Symlink the whole training set.
trn_path = os.path.join(opt.out_path, 'train')
util.mkdirs(opt.out_path)
os.symlink(os.path.join(opt.in_path, 'train'), trn_path)
print('Making training set in...[%s]' % trn_path)

# Symlink the first 1000 ILSVRC12 val images as the colorization val set.
val_path = os.path.join(opt.out_path, 'val/imgs')
util.mkdirs(val_path)
print('Making validation set in...[%s]' % val_path)
for val_ind in range(1000):
    _symlink_quiet('%s/val/ILSVRC2012_val_%08d.JPEG' % (orig_path, val_ind + 1),
                   '%s/ILSVRC2012_val_%08d.JPEG' % (val_path, val_ind + 1))

# Symlink a fixed subset of the ILSVRC12 val set as the colorization test set.
test_path = os.path.join(opt.out_path, 'test/imgs')
util.mkdirs(test_path)
val_inds = np.load('./resources/ilsvrclin12_val_inds.npy')
print('Making test set in...[%s]' % test_path)
for val_ind in val_inds:
    _symlink_quiet('%s/val/ILSVRC2012_val_%08d.JPEG' % (orig_path, val_ind + 1),
                   '%s/ILSVRC2012_val_%08d.JPEG' % (test_path, val_ind + 1))
| 43.148936 | 121 | 0.769724 |
import os
import sys
from util import util
import numpy as np
import argparse

parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--in_path', type=str, default='/data/big/dataset/ILSVRC2012')
parser.add_argument('--out_path', type=str, default='./dataset/ilsvrc2012/')
opt = parser.parse_args()

orig_path = opt.in_path
print('Copying ILSVRC from...[%s]' % orig_path)
# Symlink a small subset of the training set (first 10 listed class dirs).
# NOTE(review): os.listdir order is filesystem-dependent, so the subset is
# not deterministic — consider sorting.
trn_small_path = os.path.join(opt.out_path, 'train_small')
util.mkdirs(opt.out_path)
util.mkdirs(trn_small_path)
train_subdirs = os.listdir(os.path.join(opt.in_path, 'train'))
for train_subdir in train_subdirs[:10]:
    os.symlink(os.path.join(opt.in_path, 'train', train_subdir), os.path.join(trn_small_path, train_subdir))
print('Making small training set in...[%s]' % trn_small_path)
# Symlink the whole training set.
trn_path = os.path.join(opt.out_path, 'train')
util.mkdirs(opt.out_path)
os.symlink(os.path.join(opt.in_path, 'train'), trn_path)
print('Making training set in...[%s]' % trn_path)
# Symlink the first 1000 ILSVRC12 val images as the colorization val set.
val_path = os.path.join(opt.out_path, 'val/imgs')
util.mkdirs(val_path)
print('Making validation set in...[%s]' % val_path)
for val_ind in range(1000):
    # NOTE(review): shelling out interpolates user paths into `ln -s`;
    # os.symlink would avoid the shell.  Failures (e.g. link exists) are
    # silently ignored by os.system.
    os.system('ln -s %s/val/ILSVRC2012_val_%08d.JPEG %s/ILSVRC2012_val_%08d.JPEG' % (orig_path, val_ind + 1, val_path, val_ind + 1))
# Symlink a fixed subset of the ILSVRC12 val set as the colorization test set.
test_path = os.path.join(opt.out_path, 'test/imgs')
util.mkdirs(test_path)
val_inds = np.load('./resources/ilsvrclin12_val_inds.npy')
print('Making test set in...[%s]' % test_path)
for val_ind in val_inds:
    os.system('ln -s %s/val/ILSVRC2012_val_%08d.JPEG %s/ILSVRC2012_val_%08d.JPEG' % (orig_path, val_ind + 1, test_path, val_ind + 1))
| true | true |
1c3d73eac179b57cd9334247c20800e0cf89eef9 | 4,873 | py | Python | stog/data/fields/label_field.py | sfillwo/stog | b02e2dbe8989078ccdc3df611d8b08b63d28fcae | [
"MIT"
] | 172 | 2019-11-19T05:52:36.000Z | 2022-03-26T04:58:24.000Z | stog/data/fields/label_field.py | sfillwo/stog | b02e2dbe8989078ccdc3df611d8b08b63d28fcae | [
"MIT"
] | 22 | 2019-07-13T09:11:57.000Z | 2021-11-17T05:30:36.000Z | stog/data/fields/label_field.py | sfillwo/stog | b02e2dbe8989078ccdc3df611d8b08b63d28fcae | [
"MIT"
] | 39 | 2019-06-26T11:37:51.000Z | 2022-03-07T13:30:26.000Z | from typing import Dict, Union, Set
import logging
from overrides import overrides
import torch
from stog.data.fields.field import Field
from stog.data.vocabulary import Vocabulary
from stog.utils.checks import ConfigurationError
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class LabelField(Field[torch.Tensor]):
"""
A ``LabelField`` is a categorical label of some kind, where the labels are either strings of
text or 0-indexed integers (if you wish to skip indexing by passing skip_indexing=True).
If the labels need indexing, we will use a :class:`Vocabulary` to convert the string labels
into integers.
This field will get converted into an integer index representing the class label.
Parameters
----------
label : ``Union[str, int]``
label_namespace : ``str``, optional (default="labels")
The namespace to use for converting label strings into integers. We map label strings to
integers for you (e.g., "entailment" and "contradiction" get converted to 0, 1, ...),
and this namespace tells the ``Vocabulary`` object which mapping from strings to integers
to use (so "entailment" as a label doesn't get the same integer id as "entailment" as a
word). If you have multiple different label fields in your data, you should make sure you
use different namespaces for each one, always using the suffix "labels" (e.g.,
"passage_labels" and "question_labels").
skip_indexing : ``bool``, optional (default=False)
If your labels are 0-indexed integers, you can pass in this flag, and we'll skip the indexing
step. If this is ``False`` and your labels are not strings, this throws a ``ConfigurationError``.
"""
# Most often, you probably don't want to have OOV/PAD tokens with a LabelField, so we warn you
# about it when you pick a namespace that will getting these tokens by default. It is
# possible, however, that you _do_ actually want OOV/PAD tokens with this Field. This class
# variable is used to make sure that we only log a single warning for this per namespace, and
# not every time you create one of these Field objects.
_already_warned_namespaces: Set[str] = set()
def __init__(self,
label: Union[str, int],
label_namespace: str = 'labels',
skip_indexing: bool = False) -> None:
self.label = label
self._label_namespace = label_namespace
self._label_id = None
self._maybe_warn_for_namespace(label_namespace)
if skip_indexing:
if not isinstance(label, int):
raise ConfigurationError("In order to skip indexing, your labels must be integers. "
"Found label = {}".format(label))
else:
self._label_id = label
else:
if not isinstance(label, str):
raise ConfigurationError("LabelFields must be passed a string label if skip_indexing=False. "
"Found label: {} with type: {}.".format(label, type(label)))
def _maybe_warn_for_namespace(self, label_namespace: str) -> None:
if not (self._label_namespace.endswith("labels") or self._label_namespace.endswith("tags")):
if label_namespace not in self._already_warned_namespaces:
logger.warning("Your label namespace was '%s'. We recommend you use a namespace "
"ending with 'labels' or 'tags', so we don't add UNK and PAD tokens by "
"default to your vocabulary. See documentation for "
"`non_padded_namespaces` parameter in Vocabulary.",
self._label_namespace)
self._already_warned_namespaces.add(label_namespace)
    @overrides
    def count_vocab_items(self, counter: Dict[str, Dict[str, int]]):
        # Only string labels that still need indexing contribute to vocab counts;
        # pre-indexed integers (skip_indexing=True) are excluded.
        if self._label_id is None:
            counter[self._label_namespace][self.label] += 1  # type: ignore
    @overrides
    def index(self, vocab: Vocabulary):
        # Resolve the string label to its integer id exactly once; labels passed
        # with skip_indexing=True were resolved in __init__ and are left alone.
        if self._label_id is None:
            self._label_id = vocab.get_token_index(self.label, self._label_namespace)  # type: ignore
    @overrides
    def get_padding_lengths(self) -> Dict[str, int]:  # pylint: disable=no-self-use
        # A label is a single scalar, so there is nothing to pad.
        return {}
@overrides
def as_tensor(self, padding_lengths: Dict[str, int]) -> torch.Tensor:
# pylint: disable=unused-argument,not-callable
tensor = torch.tensor(self._label_id, dtype=torch.long)
return tensor
    @overrides
    def empty_field(self):
        # Padding sentinel used by list fields; -1 is an out-of-vocabulary id.
        return LabelField(-1, self._label_namespace, skip_indexing=True)
def __str__(self) -> str:
return f"LabelField with label: {self.label} in namespace: '{self._label_namespace}'.'"
| 48.247525 | 109 | 0.654217 | from typing import Dict, Union, Set
import logging
from overrides import overrides
import torch
from stog.data.fields.field import Field
from stog.data.vocabulary import Vocabulary
from stog.utils.checks import ConfigurationError
logger = logging.getLogger(__name__)
class LabelField(Field[torch.Tensor]):
    """
    A categorical label, stored either as a string (indexed later through a
    ``Vocabulary``) or as a pre-indexed integer id (``skip_indexing=True``).
    """
    # about it when you pick a namespace that will get these tokens by default.  It is
    # possible, however, that you _do_ actually want OOV/PAD tokens with this Field. This class
    # variable is used to make sure that we only log a single warning for this per namespace, and
    # not every time you create one of these Field objects.
    _already_warned_namespaces: Set[str] = set()
    def __init__(self,
                 label: Union[str, int],
                 label_namespace: str = 'labels',
                 skip_indexing: bool = False) -> None:
        """Validate ``label``; ints are only legal together with ``skip_indexing=True``."""
        self.label = label
        self._label_namespace = label_namespace
        # Resolved integer id; None until index() runs (or label was already an int).
        self._label_id = None
        self._maybe_warn_for_namespace(label_namespace)
        if skip_indexing:
            if not isinstance(label, int):
                raise ConfigurationError("In order to skip indexing, your labels must be integers. "
                                         "Found label = {}".format(label))
            else:
                self._label_id = label
        else:
            if not isinstance(label, str):
                raise ConfigurationError("LabelFields must be passed a string label if skip_indexing=False. "
                                         "Found label: {} with type: {}.".format(label, type(label)))
    def _maybe_warn_for_namespace(self, label_namespace: str) -> None:
        """Log a one-time warning when the namespace will get default OOV/PAD tokens."""
        if not (self._label_namespace.endswith("labels") or self._label_namespace.endswith("tags")):
            if label_namespace not in self._already_warned_namespaces:
                logger.warning("Your label namespace was '%s'. We recommend you use a namespace "
                               "ending with 'labels' or 'tags', so we don't add UNK and PAD tokens by "
                               "default to your vocabulary. See documentation for "
                               "`non_padded_namespaces` parameter in Vocabulary.",
                               self._label_namespace)
                self._already_warned_namespaces.add(label_namespace)
    @overrides
    def count_vocab_items(self, counter: Dict[str, Dict[str, int]]):
        # Only labels that still need indexing contribute to vocabulary counts.
        if self._label_id is None:
            counter[self._label_namespace][self.label] += 1
    @overrides
    def index(self, vocab: Vocabulary):
        # Resolve the string label to an integer id exactly once.
        if self._label_id is None:
            self._label_id = vocab.get_token_index(self.label, self._label_namespace)
    @overrides
    def get_padding_lengths(self) -> Dict[str, int]:
        # A label is a single scalar; nothing to pad.
        return {}
    @overrides
    def as_tensor(self, padding_lengths: Dict[str, int]) -> torch.Tensor:
        # ``padding_lengths`` is unused: the label becomes a 0-dim long tensor.
        tensor = torch.tensor(self._label_id, dtype=torch.long)
        return tensor
    @overrides
    def empty_field(self):
        # Padding sentinel; -1 is an out-of-vocabulary id.
        return LabelField(-1, self._label_namespace, skip_indexing=True)
    def __str__(self) -> str:
        # NOTE(review): the literal ends with a stray apostrophe ("'.'");
        # kept byte-identical here, but it looks like a typo.
        return f"LabelField with label: {self.label} in namespace: '{self._label_namespace}'.'"
| true | true |
1c3d741be07b4e3e3b73ea3f2257fb3e49d46f45 | 3,065 | py | Python | utils/scripts/OOOlevelGen/src/levels/The_Bridge_Too_Far.py | fullscreennl/monkeyswipe | c56192e202674dd5ab18023f6cf14cf51e95fbd0 | [
"MIT"
] | null | null | null | utils/scripts/OOOlevelGen/src/levels/The_Bridge_Too_Far.py | fullscreennl/monkeyswipe | c56192e202674dd5ab18023f6cf14cf51e95fbd0 | [
"MIT"
] | null | null | null | utils/scripts/OOOlevelGen/src/levels/The_Bridge_Too_Far.py | fullscreennl/monkeyswipe | c56192e202674dd5ab18023f6cf14cf51e95fbd0 | [
"MIT"
] | null | null | null | import LevelBuilder
from sprites import *
def render(name,bg):
    """Build the 'The Bridge Too Far' level: a six-segment rope bridge over an enemy.

    ``name`` is the output plist basename, ``bg`` the background asset. Objects
    are registered in a fixed order on the builder; the trailing ``lb.render()``
    call (next line after this block) writes ``<name>.plist``.
    """
    lb = LevelBuilder.LevelBuilder(name+".plist",background=bg)
    # Bridge deck: dynamic beams Beam2..Beam6 spanning left to right.
    lb.addObject(Beam.BeamSprite(x=275, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('Beam4'))
    lb.addObject(Beam.BeamSprite(x=209, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('Beam3'))
    lb.addObject(Beam.BeamSprite(x=143, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Beam2'))
    lb.addObject(Beam.BeamSprite(x=342, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Beam5'))
    lb.addObject(Beam.BeamSprite(x=408, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.1,density=10 ).setName('Beam6'))
    lb.addObject(Hero.HeroSprite(x=95, y=243,width=32,height=32))
    # Distance joints chain neighbouring deck segments (offsets join beam ends).
    lb.addObject(Joints.DistanceJoint(body1='Beam1',body2='Beam2',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam2',body2='Beam3',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Friend.FriendSprite(x=27, y=221,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Friend1'))
    lb.addObject(Beam.BeamSprite(x=77, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.1,density=10 ).setName('Beam1'))
    lb.addObject(Friend.FriendSprite(x=459, y=219,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Friend2'))
    # Static, tilted support beams anchoring both ends of the bridge.
    lb.addObject(Beam.BeamSprite(x=69, y=290,width=126,height=14,angle='82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=414, y=290,width=126,height=14,angle='-82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=44, y=86,width=251,height=27,angle='82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=440, y=86,width=251,height=27,angle='-82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Joints.DistanceJoint(body1='Beam3',body2='Beam4',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam4',body2='Beam5',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam5',body2='Beam6',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    # Large enemy beneath the bridge, and the star pickup above the deck.
    lb.addObject(Enemy.EnemySprite(x=242, y=279,width=139,height=139,angle='0',restitution=0.5,static='false',friction=0.5,density=1 ).setName('Enemy'))
    lb.addObject(Star.StarSprite(x=384, y=243,width=32,height=32))
    # Friends are tied to the outermost deck segments.
    lb.addObject(Joints.DistanceJoint(body1='Friend1',body2='Beam1',damping='0.2',freq='10' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam6',body2='Friend2',damping='0.2',freq='10' , b1_Xoffset = '30' ))
lb.render() | 3,065 | 3,065 | 0.727243 | import LevelBuilder
from sprites import *
def render(name,bg):
    """Duplicate (comment-stripped dataset column) of the level builder above:
    registers the 'Bridge Too Far' sprites/joints on a LevelBuilder for
    ``<name>.plist``; the trailing ``lb.render()`` call follows this block."""
    lb = LevelBuilder.LevelBuilder(name+".plist",background=bg)
    lb.addObject(Beam.BeamSprite(x=275, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('Beam4'))
    lb.addObject(Beam.BeamSprite(x=209, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=20 ).setName('Beam3'))
    lb.addObject(Beam.BeamSprite(x=143, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Beam2'))
    lb.addObject(Beam.BeamSprite(x=342, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Beam5'))
    lb.addObject(Beam.BeamSprite(x=408, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.1,density=10 ).setName('Beam6'))
    lb.addObject(Hero.HeroSprite(x=95, y=243,width=32,height=32))
    lb.addObject(Joints.DistanceJoint(body1='Beam1',body2='Beam2',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam2',body2='Beam3',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Friend.FriendSprite(x=27, y=221,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Friend1'))
    lb.addObject(Beam.BeamSprite(x=77, y=218,width=60,height=10,angle='0',restitution=0.2,static='false',friction=0.1,density=10 ).setName('Beam1'))
    lb.addObject(Friend.FriendSprite(x=459, y=219,width=32,height=32,angle='0',restitution=0.2,static='false',friction=0.5,density=10 ).setName('Friend2'))
    lb.addObject(Beam.BeamSprite(x=69, y=290,width=126,height=14,angle='82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=414, y=290,width=126,height=14,angle='-82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=44, y=86,width=251,height=27,angle='82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Beam.BeamSprite(x=440, y=86,width=251,height=27,angle='-82',restitution=0.2,static='true',friction=0.5,density=20 ).setName('Beam'))
    lb.addObject(Joints.DistanceJoint(body1='Beam3',body2='Beam4',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam4',body2='Beam5',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam5',body2='Beam6',damping='0.2',freq='10' , b1_Xoffset = '30' , b2_Xoffset = '-30' ))
    lb.addObject(Enemy.EnemySprite(x=242, y=279,width=139,height=139,angle='0',restitution=0.5,static='false',friction=0.5,density=1 ).setName('Enemy'))
    lb.addObject(Star.StarSprite(x=384, y=243,width=32,height=32))
    lb.addObject(Joints.DistanceJoint(body1='Friend1',body2='Beam1',damping='0.2',freq='10' , b2_Xoffset = '-30' ))
    lb.addObject(Joints.DistanceJoint(body1='Beam6',body2='Friend2',damping='0.2',freq='10' , b1_Xoffset = '30' ))
lb.render() | true | true |
1c3d744e44ffe15ed720333243938b8167b30078 | 6,635 | py | Python | ImageNet-C/test.py | RobbieEarle/robustness | 2f4381900015bf7fcd9975d43b8104d2d14f8568 | [
"Apache-2.0"
] | null | null | null | ImageNet-C/test.py | RobbieEarle/robustness | 2f4381900015bf7fcd9975d43b8104d2d14f8568 | [
"Apache-2.0"
] | null | null | null | ImageNet-C/test.py | RobbieEarle/robustness | 2f4381900015bf7fcd9975d43b8104d2d14f8568 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import argparse
import os
import time
import torch
from torch.autograd import Variable as V
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision.datasets as dset
import torchvision.transforms as trn
import torchvision.models as models
import torch.utils.model_zoo as model_zoo
import numpy as np
import collections
# /////////////// Further Setup ///////////////
def auc(errs):
    """Area under the distortion-error curve (trapezoidal rule).

    Averages the trapezoids formed by consecutive error values; raises
    ZeroDivisionError for a single-element input, matching the original.
    """
    trapezoids = [(hi + lo) / 2 for lo, hi in zip(errs, errs[1:])]
    return sum(trapezoids) / (len(errs) - 1)
def show_performance(distortion_name,
                     net,
                     alexnet,
                     imagenet_clean_path,
                     imagenet_c_path,
                     mean, std,
                     batch_size):
    """Score ``net`` (ResNet18) and ``alexnet`` on one ImageNet-C distortion.

    Iterates over the severity levels (1-5) that exist on disk under
    ``imagenet_c_path/distortion_name``, accumulates per-severity top-1 error
    for both models, measures clean-validation error, and returns the three
    corruption metrics as numpy scalars:
    (unnormalized CE, AlexNet-normalized CE, relative CE).

    NOTE(review): ``clean_loader`` is read from module scope -- it must exist as
    a global before this is called (``eval_model`` now publishes it).
    ``imagenet_clean_path`` is accepted for interface compatibility but unused.
    """
    errs_resnet = []
    errs_alexnet = []
    n = 0
    with torch.no_grad():
        for severity in range(1, 6):
            curr_severity_path = os.path.join(imagenet_c_path, distortion_name, str(severity))
            if os.path.exists(curr_severity_path):
                n += 1
                distorted_dataset = dset.ImageFolder(
                    root=curr_severity_path,
                    transform=trn.Compose([trn.CenterCrop(224), trn.ToTensor(), trn.Normalize(mean, std)]))
                distorted_dataset_loader = torch.utils.data.DataLoader(
                    distorted_dataset, batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=True)
                correct_resnet = 0
                correct_alexnet = 0
                for batch_idx, (data, target) in enumerate(distorted_dataset_loader):
                    data = data.cuda()
                    output_resnet = net(data)
                    pred_resnet = output_resnet.data.max(1)[1]
                    correct_resnet += pred_resnet.eq(target.cuda()).sum()
                    output_alexnet = alexnet(data)
                    pred_alexnet = output_alexnet.data.max(1)[1]
                    correct_alexnet += pred_alexnet.eq(target.cuda()).sum()
                errs_resnet.append(1 - 1.*correct_resnet / len(distorted_dataset))
                errs_alexnet.append(1 - 1.*correct_alexnet / len(distorted_dataset))
        print('\t(n={}) Imagenet-c ResNet18 Errors: {}'.format(n, tuple(errs_resnet)), flush=True)
        print('\t(n={}) Imagenet-c AlexNet Errors: {}'.format(n, tuple(errs_alexnet)), flush=True)
        correct_resnet = 0
        correct_alexnet = 0
        for batch_idx, (data, target) in enumerate(clean_loader):
            data = data.cuda()
            output_resnet = net(data)
            pred_resnet = output_resnet.data.max(1)[1]
            correct_resnet += pred_resnet.eq(target.cuda()).sum()
            # Bug fix: the clean-set AlexNet pass previously ran ``net`` again,
            # so AlexNet's clean error silently duplicated ResNet18's.
            output_alexnet = alexnet(data)
            pred_alexnet = output_alexnet.data.max(1)[1]
            correct_alexnet += pred_alexnet.eq(target.cuda()).sum()
        clean_error_resnet = 1 - correct_resnet / len(clean_loader.dataset)
        clean_error_alexnet = 1 - correct_alexnet / len(clean_loader.dataset)
        print('\tImagenet Clean ResNet18 Errors: {}'.format(clean_error_resnet), flush=True)
        print('\tImagenet Clean AlexNet Errors: {}'.format(clean_error_alexnet), flush=True)
        # Bug fix: torch.mean/torch.sum reject Python lists; stack the
        # per-severity 0-dim tensors into 1-D tensors before reducing.
        errs_resnet_t = torch.stack(errs_resnet)
        errs_alexnet_t = torch.stack(errs_alexnet)
        ce_unnormalized = torch.mean(errs_resnet_t).detach().cpu().numpy()
        ce_normalized = (torch.sum(errs_resnet_t) / torch.sum(errs_alexnet_t)).detach().cpu().numpy()
        relative_ce = ((torch.sum(errs_resnet_t) - clean_error_resnet) / (torch.sum(errs_alexnet_t) - clean_error_alexnet)).detach().cpu().numpy()
    return ce_unnormalized, ce_normalized, relative_ce
# /////////////// End Further Setup ///////////////
# /////////////// Display Results ///////////////
def eval_model(net, batch_size=256, seed=0):
    """Run the full ImageNet-C evaluation protocol for ``net``.

    Seeds torch/numpy, moves ``net`` and a pretrained AlexNet baseline to CUDA,
    builds the clean ImageNet validation loader, then scores every distortion
    directory found under ``imagenet_c_path`` and prints the mCE /
    AlexNet-normalized mCE / relative mCE summaries.
    """
    # Bug fix: ``show_performance`` reads ``clean_loader`` from module scope,
    # but this function previously created it as a local, so every call crashed
    # with NameError.  Publish it as a module-level global instead.
    global clean_loader
    torch.manual_seed(seed)
    np.random.seed(seed)
    torch.cuda.manual_seed(seed)
    net.cuda()
    net.eval()
    alexnet = models.alexnet(pretrained=True)
    alexnet.cuda()
    alexnet.eval()
    # Fixed input sizes (224x224 center crops), so cuDNN autotuning is safe.
    cudnn.benchmark = True
    imagenet_clean_path = "/scratch/ssd002/datasets/imagenet/val"
    imagenet_c_path = "/scratch/hdd001/home/slowe/imagenet-c"
    mean = [0.485, 0.456, 0.406]
    std = [0.229, 0.224, 0.225]
    clean_loader = torch.utils.data.DataLoader(dset.ImageFolder(
        root=imagenet_clean_path,
        transform=trn.Compose([trn.Resize(256), trn.CenterCrop(224), trn.ToTensor(), trn.Normalize(mean, std)])),
        batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=True)
    print('\nUsing ImageNet data')
    distortions = [
        'gaussian_noise', 'shot_noise', 'impulse_noise',
        'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur',
        'snow', 'frost', 'fog', 'brightness',
        'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression',
        'speckle_noise', 'gaussian_blur', 'spatter', 'saturate'
    ]
    errors_ce_unnormalized = []
    errors_ce_normalized = []
    errors_relative_ce = []
    for distortion_name in distortions:
        curr_dist_path = os.path.join(imagenet_c_path, distortion_name)
        if os.path.exists(curr_dist_path):
            print('======== Distortion: {:15s}'.format(distortion_name), flush=True)
            ce_unnormalized, ce_normalized, relative_ce = show_performance(distortion_name,
                                                                           net,
                                                                           alexnet,
                                                                           imagenet_clean_path,
                                                                           imagenet_c_path,
                                                                           mean, std,
                                                                           batch_size)
            errors_ce_unnormalized.append(ce_unnormalized)
            errors_ce_normalized.append(ce_normalized)
            errors_relative_ce.append(relative_ce)
            print('\tCE (unnormalized) (%): {:.2f} | CE (normalized) (%): {:.2f} | Relative CE (%): {:.2f}\n'.format(
                100 * ce_unnormalized, 100 * ce_normalized, 100 * relative_ce), flush=True)
    print('\nmCE (unnormalized by AlexNet errors) (%): {:.2f}'.format(100 * np.mean(errors_ce_unnormalized)), flush=True)
    print('mCE (normalized by AlexNet errors) (%): {:.2f}'.format(100 * np.mean(errors_ce_normalized)), flush=True)
    print('Relative mCE (%): {:.2f}'.format(100 * np.mean(errors_relative_ce)), flush=True)
if __name__ == '__main__':
    # Score a torchvision ImageNet-pretrained ResNet-18 on ImageNet-C.
    net = models.resnet18(pretrained=True)
    eval_model(net)
| 41.993671 | 138 | 0.583723 |
import argparse
import os
import time
import torch
from torch.autograd import Variable as V
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision.datasets as dset
import torchvision.transforms as trn
import torchvision.models as models
import torch.utils.model_zoo as model_zoo
import numpy as np
import collections
def auc(errs):
    """Area under the distortion-error curve via the trapezoidal rule."""
    total = 0
    previous = None
    for err in errs:
        if previous is not None:
            total += (err + previous) / 2
        previous = err
    # Average over the (len - 1) trapezoids; mirrors the original's
    # ZeroDivisionError for single-element input.
    return total / (len(errs) - 1)
def show_performance(distortion_name,
                     net,
                     alexnet,
                     imagenet_clean_path,
                     imagenet_c_path,
                     mean, std,
                     batch_size):
    """Score both models on one ImageNet-C distortion and return
    (unnormalized CE, AlexNet-normalized CE, relative CE).

    NOTE(review): ``clean_loader`` is read from module scope and is never
    defined globally in this file -- calling this raises NameError as written.
    """
    errs_resnet = []
    errs_alexnet = []
    n = 0
    with torch.no_grad():
        for severity in range(1, 6):
            curr_severity_path = os.path.join(imagenet_c_path, distortion_name, str(severity))
            if os.path.exists(curr_severity_path):
                n += 1
                distorted_dataset = dset.ImageFolder(
                    root=curr_severity_path,
                    transform=trn.Compose([trn.CenterCrop(224), trn.ToTensor(), trn.Normalize(mean, std)]))
                distorted_dataset_loader = torch.utils.data.DataLoader(
                    distorted_dataset, batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=True)
                correct_resnet = 0
                correct_alexnet = 0
                for batch_idx, (data, target) in enumerate(distorted_dataset_loader):
                    data = data.cuda()
                    output_resnet = net(data)
                    pred_resnet = output_resnet.data.max(1)[1]
                    correct_resnet += pred_resnet.eq(target.cuda()).sum()
                    output_alexnet = alexnet(data)
                    pred_alexnet = output_alexnet.data.max(1)[1]
                    correct_alexnet += pred_alexnet.eq(target.cuda()).sum()
                errs_resnet.append(1 - 1.*correct_resnet / len(distorted_dataset))
                errs_alexnet.append(1 - 1.*correct_alexnet / len(distorted_dataset))
        print('\t(n={}) Imagenet-c ResNet18 Errors: {}'.format(n, tuple(errs_resnet)), flush=True)
        print('\t(n={}) Imagenet-c AlexNet Errors: {}'.format(n, tuple(errs_alexnet)), flush=True)
        correct_resnet = 0
        correct_alexnet = 0
        for batch_idx, (data, target) in enumerate(clean_loader):
            data = data.cuda()
            output_resnet = net(data)
            pred_resnet = output_resnet.data.max(1)[1]
            correct_resnet += pred_resnet.eq(target.cuda()).sum()
            # NOTE(review): this runs ``net`` again instead of ``alexnet`` --
            # AlexNet's clean error duplicates ResNet18's (copy-paste bug).
            output_alexnet = net(data)
            pred_alexnet = output_alexnet.data.max(1)[1]
            correct_alexnet += pred_alexnet.eq(target.cuda()).sum()
        clean_error_resnet = 1 - correct_resnet / len(clean_loader.dataset)
        clean_error_alexnet = 1 - correct_alexnet / len(clean_loader.dataset)
        print('\tImagenet Clean ResNet18 Errors: {}'.format(clean_error_resnet), flush=True)
        print('\tImagenet Clean AlexNet Errors: {}'.format(clean_error_alexnet), flush=True)
        # NOTE(review): torch.mean/torch.sum are applied to Python lists below;
        # this raises TypeError -- the lists likely need torch.stack first.
        ce_unnormalized = torch.mean(errs_resnet).detach().cpu().numpy()
        ce_normalized = (torch.sum(errs_resnet) / torch.sum(errs_alexnet)).detach().cpu().numpy()
        relative_ce = ((torch.sum(errs_resnet) - clean_error_resnet) / (torch.sum(errs_alexnet) - clean_error_alexnet)).detach().cpu().numpy()
    return ce_unnormalized, ce_normalized, relative_ce
def eval_model(net, batch_size=256, seed=0):
    """Full ImageNet-C evaluation: seeds RNGs, prepares ``net`` and an AlexNet
    baseline on CUDA, then scores every distortion directory and prints the
    mCE summaries.

    NOTE(review): ``clean_loader`` is created as a *local* here but read as a
    module-level global by ``show_performance`` -- that lookup fails (NameError).
    """
    torch.manual_seed(seed)
    np.random.seed(seed)
    torch.cuda.manual_seed(seed)
    net.cuda()
    net.eval()
    alexnet = models.alexnet(pretrained=True)
    alexnet.cuda()
    alexnet.eval()
    # Fixed 224x224 inputs, so cuDNN autotuning is safe.
    cudnn.benchmark = True
    imagenet_clean_path = "/scratch/ssd002/datasets/imagenet/val"
    imagenet_c_path = "/scratch/hdd001/home/slowe/imagenet-c"
    mean = [0.485, 0.456, 0.406]
    std = [0.229, 0.224, 0.225]
    clean_loader = torch.utils.data.DataLoader(dset.ImageFolder(
        root=imagenet_clean_path,
        transform=trn.Compose([trn.Resize(256), trn.CenterCrop(224), trn.ToTensor(), trn.Normalize(mean, std)])),
        batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=True)
    print('\nUsing ImageNet data')
    distortions = [
        'gaussian_noise', 'shot_noise', 'impulse_noise',
        'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur',
        'snow', 'frost', 'fog', 'brightness',
        'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression',
        'speckle_noise', 'gaussian_blur', 'spatter', 'saturate'
    ]
    errors_ce_unnormalized = []
    errors_ce_normalized = []
    errors_relative_ce = []
    for distortion_name in distortions:
        curr_dist_path = os.path.join(imagenet_c_path, distortion_name)
        if os.path.exists(curr_dist_path):
            print('======== Distortion: {:15s}'.format(distortion_name), flush=True)
            ce_unnormalized, ce_normalized, relative_ce = show_performance(distortion_name,
                                                                           net,
                                                                           alexnet,
                                                                           imagenet_clean_path,
                                                                           imagenet_c_path,
                                                                           mean, std,
                                                                           batch_size)
            errors_ce_unnormalized.append(ce_unnormalized)
            errors_ce_normalized.append(ce_normalized)
            errors_relative_ce.append(relative_ce)
            print('\tCE (unnormalized) (%): {:.2f} | CE (normalized) (%): {:.2f} | Relative CE (%): {:.2f}\n'.format(
                100 * ce_unnormalized, 100 * ce_normalized, 100 * relative_ce), flush=True)
    print('\nmCE (unnormalized by AlexNet errors) (%): {:.2f}'.format(100 * np.mean(errors_ce_unnormalized)), flush=True)
    print('mCE (normalized by AlexNet errors) (%): {:.2f}'.format(100 * np.mean(errors_ce_normalized)), flush=True)
    print('Relative mCE (%): {:.2f}'.format(100 * np.mean(errors_relative_ce)), flush=True)
net = models.resnet18(pretrained=True)
eval_model(net)
| true | true |
1c3d749751ed2e33faa4ff005513c4de17610e34 | 1,289 | py | Python | example/examplesite/urls.py | eduardocp/wagtail-geo-widget | df3ed88c2fbae8867402290645463fbdde75a2f0 | [
"MIT"
] | 105 | 2016-11-04T06:56:11.000Z | 2022-03-23T08:08:56.000Z | example/examplesite/urls.py | eduardocp/wagtail-geo-widget | df3ed88c2fbae8867402290645463fbdde75a2f0 | [
"MIT"
] | 82 | 2016-11-25T09:39:27.000Z | 2022-03-08T18:52:59.000Z | example/examplesite/urls.py | eduardocp/wagtail-geo-widget | df3ed88c2fbae8867402290645463fbdde75a2f0 | [
"MIT"
] | 26 | 2016-12-31T02:26:29.000Z | 2022-02-15T17:16:13.000Z | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from wagtail.admin import urls as wagtailadmin_urls
from wagtail.core import urls as wagtail_urls
from wagtail.documents import urls as wagtaildocs_urls
from search import views as search_views
urlpatterns = [
    url(r'^django-admin/', admin.site.urls),
    url(r'^admin/', include(wagtailadmin_urls)),
    url(r'^documents/', include(wagtaildocs_urls)),
    url(r'^search/$', search_views.search, name='search'),
    # For anything not caught by a more specific rule above, hand over to
    # Wagtail's page serving mechanism. This should be the last pattern in
    # the list:
    url(r'', include(wagtail_urls)),
    # Alternatively, if you want Wagtail pages to be served from a subpath
    # of your site, rather than the site root:
    # url(r'^pages/', include(wagtail_urls)),
]
if settings.DEBUG:
    from django.conf.urls.static import static
    from django.contrib.staticfiles.urls import staticfiles_urlpatterns
    # Serve static and media files from development server
    # (production should serve these through the web server / CDN instead).
    urlpatterns += staticfiles_urlpatterns()
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 33.051282 | 80 | 0.748642 | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from wagtail.admin import urls as wagtailadmin_urls
from wagtail.core import urls as wagtail_urls
from wagtail.documents import urls as wagtaildocs_urls
from search import views as search_views
urlpatterns = [
    url(r'^django-admin/', admin.site.urls),
    url(r'^admin/', include(wagtailadmin_urls)),
    url(r'^documents/', include(wagtaildocs_urls)),
    url(r'^search/$', search_views.search, name='search'),
    # Catch-all: anything not matched above goes to Wagtail's page serving
    # mechanism, so it must stay the last pattern in
    # the list:
    url(r'', include(wagtail_urls)),
    # Alternatively, if you want Wagtail pages to be served from a subpath
    # of your site, rather than the site root:
    # url(r'^pages/', include(wagtail_urls)),
]
if settings.DEBUG:
    from django.conf.urls.static import static
    from django.contrib.staticfiles.urls import staticfiles_urlpatterns
    # Serve static and media files from development server
    urlpatterns += staticfiles_urlpatterns()
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| true | true |
1c3d74b7556d9eb284b5e1f15b12be6e75477011 | 373 | py | Python | proximal/lin_ops/edge.py | kyleaj/ProxImaL | 2986b1ed40b58057822922522145bfbbdd2cf9de | [
"MIT"
] | 101 | 2016-07-24T00:33:12.000Z | 2022-03-23T23:51:58.000Z | proximal/lin_ops/edge.py | kyleaj/ProxImaL | 2986b1ed40b58057822922522145bfbbdd2cf9de | [
"MIT"
] | 57 | 2016-07-26T18:12:37.000Z | 2022-02-14T04:19:26.000Z | proximal/lin_ops/edge.py | kyleaj/ProxImaL | 2986b1ed40b58057822922522145bfbbdd2cf9de | [
"MIT"
] | 30 | 2016-07-26T22:51:59.000Z | 2021-01-15T14:45:42.000Z | import numpy as np
class Edge(object):
    """A directed connection between two linear operators.

    Holds the value buffer that flows from ``start`` to ``end`` plus an
    optional cached magnitude used when bounding operator norms.
    """
    def __init__(self, start, end, shape):
        self.start, self.end, self.shape = start, end, shape
        # Zero-initialized value buffer carried along the edge.
        self.data = np.zeros(self.shape)
        # Magnitude estimate for norm bounds; populated lazily elsewhere.
        self.mag = None
    @property
    def size(self):
        """Total number of entries in the edge's value buffer."""
        return np.prod(self.shape)
| 21.941176 | 51 | 0.571046 | import numpy as np
class Edge(object):
    """The edge between two linear operators (duplicate, comment-stripped copy)."""
    def __init__(self, start, end, shape):
        self.start = start
        self.end = end
        self.shape = shape
        # Zero-initialized buffer for the value flowing along this edge.
        self.data = np.zeros(self.shape)
        # Magnitude cache used for norm bounds; filled in lazily elsewhere.
        self.mag = None
    @property
    def size(self):
        # Total number of entries in the buffer.
        return np.prod(self.shape)
| true | true |
1c3d76d3c24826b5e3241ec4ccc5db15a25140b6 | 938 | py | Python | hris/apps/jobs/migrations/0001_initial.py | Minedomain/hris_backend | 90aab497c076c2d4ce4e05a441db0ee7a175df57 | [
"MIT"
] | null | null | null | hris/apps/jobs/migrations/0001_initial.py | Minedomain/hris_backend | 90aab497c076c2d4ce4e05a441db0ee7a175df57 | [
"MIT"
] | null | null | null | hris/apps/jobs/migrations/0001_initial.py | Minedomain/hris_backend | 90aab497c076c2d4ce4e05a441db0ee7a175df57 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.8 on 2021-06-01 10:37
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Initial migration: creates the ``Job`` table for the jobs app.

    Auto-generated by Django; do not hand-edit field definitions.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Public identifier, decoupled from the auto-increment pk.
                ('job_id', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('job_title', models.CharField(max_length=255)),
                ('duties', models.CharField(blank=True, default=None, max_length=255, null=True)),
                ('min_salary', models.DecimalField(blank=True, decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('max_salary', models.DecimalField(blank=True, decimal_places=2, default=0.0, max_digits=20, null=True)),
            ],
        ),
    ]
| 34.740741 | 121 | 0.613006 |
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Initial ``Job`` migration (duplicate, comment-stripped copy); generated by Django."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('job_id', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('job_title', models.CharField(max_length=255)),
                ('duties', models.CharField(blank=True, default=None, max_length=255, null=True)),
                ('min_salary', models.DecimalField(blank=True, decimal_places=2, default=0.0, max_digits=20, null=True)),
                ('max_salary', models.DecimalField(blank=True, decimal_places=2, default=0.0, max_digits=20, null=True)),
            ],
        ),
    ]
| true | true |
1c3d78b10f90882ad730e610478a9ccba770e350 | 1,524 | py | Python | data_txt.py | yuanCnD/Model-parameter-analysis | 9fabef4f434087a56e13aa28514fe1d4065ecc4d | [
"Apache-2.0"
] | 2 | 2018-02-02T13:41:20.000Z | 2018-02-09T15:27:10.000Z | data_txt.py | yuanCnD/Model-parameter-analysis | 9fabef4f434087a56e13aa28514fe1d4065ecc4d | [
"Apache-2.0"
] | null | null | null | data_txt.py | yuanCnD/Model-parameter-analysis | 9fabef4f434087a56e13aa28514fe1d4065ecc4d | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 yuanCnD.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import mxnet as mx
import datetime
starttime = datetime.datetime.now()
# Load pretrained (reduced) VGG16 weights; path must point at your checkpoint.
sym,args,aux=mx.model.load_checkpoint('/cd/to/your/params/file/vgg16_reduced',0)
names = args.keys()
index = dict()
for i,name in enumerate(names):
    if 'conv4_3_weight' in name:
        data = []
        layer_para = args[name]
        shape = layer_para.shape
        # One flattened kernel per (out, in) channel pair; the fixed 9 assumes
        # 3x3 kernels -- TODO confirm for this layer.
        single_filter = np.zeros((shape[0],shape[1],9))
        for j in range(shape[0]):
            for k in range(shape[1]):
                array = layer_para[j,k,:,:].asnumpy().flatten()
                # NOTE(review): this absolute-sum is never used afterwards and
                # the name shadows the built-in ``sum``.
                sum = np.sum(np.abs(array))
                single_filter[j,k,:] = array
        data.append(single_filter)
        index.update({name:data})
endtime = datetime.datetime.now()
# NOTE(review): Python 2 print statement; under Python 3 this parses as
# print(...) followed by ``.seconds`` on None and raises AttributeError.
print (endtime - starttime).seconds
import pickle
output = open('data-filter.pkl', 'wb')
pickle.dump(index, output)
output.close() | 35.44186 | 80 | 0.645013 |
import numpy as np
import mxnet as mx
import datetime
starttime = datetime.datetime.now()
# Duplicate (comment-stripped) copy of the filter-dump script above.
sym,args,aux=mx.model.load_checkpoint('/cd/to/your/params/file/vgg16_reduced',0)
names = args.keys()
index = dict()
for i,name in enumerate(names):
    if 'conv4_3_weight' in name:
        data = []
        layer_para = args[name]
        shape = layer_para.shape
        single_filter = np.zeros((shape[0],shape[1],9))
        for j in range(shape[0]):
            for k in range(shape[1]):
                array = layer_para[j,k,:,:].asnumpy().flatten()
                # NOTE(review): unused result; name shadows built-in ``sum``.
                sum = np.sum(np.abs(array))
                single_filter[j,k,:] = array
        data.append(single_filter)
        index.update({name:data})
endtime = datetime.datetime.now()
# NOTE(review): Python 2 print statement (AttributeError under Python 3).
print (endtime - starttime).seconds
import pickle
output = open('data-filter.pkl', 'wb')
pickle.dump(index, output)
output.close() | true | true |
1c3d795eb9c4a7caab5c4703be89f9dffe028b1b | 31 | py | Python | maya/app/general/pointOnPolyConstraint.py | arjun-namdeo/py_stubs | 605bb167e239978f5417f3f1fc1f5c12e2a243cc | [
"MIT"
] | 20 | 2019-09-20T00:30:22.000Z | 2021-12-26T06:56:16.000Z | mayaSDK/maya/app/general/pointOnPolyConstraint.py | minjiang999/vscode-mayapy | 7a21872f80b5b740fc653e79c3f9b5268e87b3c3 | [
"MIT"
] | 5 | 2019-12-29T15:19:03.000Z | 2022-03-29T16:54:19.000Z | mayaSDK/maya/app/general/pointOnPolyConstraint.py | minjiang999/vscode-mayapy | 7a21872f80b5b740fc653e79c3f9b5268e87b3c3 | [
"MIT"
] | 8 | 2019-09-23T05:46:44.000Z | 2022-01-11T14:42:14.000Z | def assembleCmd():
pass
| 5.166667 | 18 | 0.580645 | def assembleCmd():
pass
| true | true |
1c3d7998163e4e898f27af795609e5bf77690f19 | 443 | py | Python | tdrs-backend/tdpservice/core/test/test_models.py | amilash/TANF-app | 09ef2baa854e41250c5761fa66aa665e112a53cf | [
"CC0-1.0"
] | 18 | 2020-03-25T19:57:12.000Z | 2021-07-26T15:37:50.000Z | tdrs-backend/tdpservice/core/test/test_models.py | amilash/TANF-app | 09ef2baa854e41250c5761fa66aa665e112a53cf | [
"CC0-1.0"
] | 1,465 | 2020-07-22T21:16:53.000Z | 2022-03-31T16:04:22.000Z | tdrs-backend/tdpservice/core/test/test_models.py | amilash/TANF-app | 09ef2baa854e41250c5761fa66aa665e112a53cf | [
"CC0-1.0"
] | 15 | 2020-07-22T14:58:37.000Z | 2021-06-22T17:29:55.000Z | """Module for testing the core model."""
import pytest
from tdpservice.core.models import GlobalPermission
@pytest.mark.django_db
def test_manager_get_queryset():
"""Test the get queryset method returns a query."""
GlobalPermission.objects.create(
name="Can View User Data", codename="view_user_data"
)
global_permissions = GlobalPermission.objects.first()
assert global_permissions.name == "Can View User Data"
| 29.533333 | 60 | 0.742664 | import pytest
from tdpservice.core.models import GlobalPermission
@pytest.mark.django_db
def test_manager_get_queryset():
GlobalPermission.objects.create(
name="Can View User Data", codename="view_user_data"
)
global_permissions = GlobalPermission.objects.first()
assert global_permissions.name == "Can View User Data"
| true | true |
1c3d79f4f810684251297bbcb12b7c7e11301388 | 1,282 | py | Python | project/aliez/script/kancolle/profile/_P3P7N17513004864.py | coppelia517/stve | dd23cc83e7a516e3f2ea9984aabb0cb407a30647 | [
"MIT"
] | null | null | null | project/aliez/script/kancolle/profile/_P3P7N17513004864.py | coppelia517/stve | dd23cc83e7a516e3f2ea9984aabb0cb407a30647 | [
"MIT"
] | 9 | 2021-03-18T20:25:04.000Z | 2022-01-13T00:44:11.000Z | project/aliez/script/kancolle/profile/_P3P7N17513004864.py | coppelia517/stve | dd23cc83e7a516e3f2ea9984aabb0cb407a30647 | [
"MIT"
] | null | null | null | import os
import sys
PATH = os.path.abspath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not PATH in sys.path:
sys.path.insert(0, PATH)
import android_base
class _P3P7N17513004864(android_base.Android):
    """Device profile for a Huawei P10 lite (serial P3P7N17513004864).

    All values are string constants consumed by the android_base.Android
    framework; the *_X/*_Y/*_WIDTH/*_HEIGHT groups describe rectangular
    screen regions of game UI elements on this device.
    """
    # --- device identity ---
    SERIAL = "P3P7N17513004864"
    TMP_PICTURE = "%s_TMP.png" % SERIAL  # per-device temporary screenshot filename
    IP = ""    # NOTE(review): left empty — presumably unused for this device; confirm
    PORT = ""
    NAME = "Huawei P10 lite"
    # --- native screen resolution (landscape) ---
    WIDTH = "1920"
    HEIGHT = "1080"
    #MINICAP_WIDTH = "1920"
    #MINICAP_HEIGHT = "1080"
    # minicap capture resolution, downscaled from the native 1920x1080
    MINICAP_WIDTH = "1280"
    MINICAP_HEIGHT = "720"
    LOCATE = "H"   # "H" — horizontal/landscape orientation flag
    ROTATE = "90"  # rotation in degrees applied to captures
    # --- "exercises" UI region (x, y, width, height) ---
    EXERCISES_X = "1590"
    EXERCISES_Y = "390"
    EXERCISES_WIDTH = "180"
    EXERCISES_HEIGHT = "120"
    # --- formation-selection UI region ---
    FORMATION_X = "320"
    FORMATION_Y = "375"
    FORMATION_WIDTH = "445"
    FORMATION_HEIGHT = "115"
    # --- attack UI region ---
    ATTACK_X = "560"
    ATTACK_Y = "190"
    ATTACK_WIDTH = "245"
    ATTACK_HEIGHT= "76"
    # --- leveling UI region ---
    LEVELING_X = "550"
    LEVELING_Y = "560"
    LEVELING_WIDTH = "260"
    LEVELING_HEIGHT = "70"
    # --- battle UI region ---
    BATTLE_X = "330"
    BATTLE_Y = "280"
    BATTLE_WIDTH = "245"
    BATTLE_HEIGHT = "68"
    # --- docking UI region ---
    DOCKING_X = "800"
    DOCKING_Y = "228"
    DOCKING_WIDTH = "300"
    DOCKING_HEIGHT = "100"
if __name__ == "__main__":
    # Smoke test: look up one profile attribute by name.  getattr replaces the
    # original eval() of a %-built string — same result, no dynamic code
    # execution from string data.
    print(getattr(_P3P7N17513004864, "TMP_PICTURE"))
| 21.728814 | 84 | 0.590484 | import os
import sys
PATH = os.path.abspath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not PATH in sys.path:
sys.path.insert(0, PATH)
import android_base
class _P3P7N17513004864(android_base.Android):
SERIAL = "P3P7N17513004864"
TMP_PICTURE = "%s_TMP.png" % SERIAL
IP = ""
PORT = ""
NAME = "Huawei P10 lite"
WIDTH = "1920"
HEIGHT = "1080"
MINICAP_WIDTH = "1280"
MINICAP_HEIGHT = "720"
LOCATE = "H"
ROTATE = "90"
EXERCISES_X = "1590"
EXERCISES_Y = "390"
EXERCISES_WIDTH = "180"
EXERCISES_HEIGHT = "120"
FORMATION_X = "320"
FORMATION_Y = "375"
FORMATION_WIDTH = "445"
FORMATION_HEIGHT = "115"
ATTACK_X = "560"
ATTACK_Y = "190"
ATTACK_WIDTH = "245"
ATTACK_HEIGHT= "76"
LEVELING_X = "550"
LEVELING_Y = "560"
LEVELING_WIDTH = "260"
LEVELING_HEIGHT = "70"
BATTLE_X = "330"
BATTLE_Y = "280"
BATTLE_WIDTH = "245"
BATTLE_HEIGHT = "68"
DOCKING_X = "800"
DOCKING_Y = "228"
DOCKING_WIDTH = "300"
DOCKING_HEIGHT = "100"
if __name__ == "__main__":
print(eval("_P3P7N17513004864.%s" % "TMP_PICTURE"))
| true | true |
1c3d7c85ebf349d319a77f6dc2f7711b6a679bf3 | 8,021 | py | Python | bin/Python27/Lib/site-packages/scipy/stats/tests/test_kdeoth.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | null | null | null | bin/Python27/Lib/site-packages/scipy/stats/tests/test_kdeoth.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | null | null | null | bin/Python27/Lib/site-packages/scipy/stats/tests/test_kdeoth.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | [
"MIT"
] | 1 | 2020-08-08T12:44:48.000Z | 2020-08-08T12:44:48.000Z | from __future__ import division, print_function, absolute_import
from scipy import stats
import numpy as np
from numpy.testing import assert_almost_equal, assert_, assert_raises, \
assert_array_almost_equal, assert_array_almost_equal_nulp, run_module_suite
def test_kde_1d():
    """Compare a 1-D Gaussian KDE of normal samples against the fitted normal pdf."""
    np.random.seed(8765678)
    sample = np.random.randn(500)
    loc = sample.mean()
    scale = sample.std(ddof=1)

    # KDE built from the raw sample.
    kde = stats.gaussian_kde(sample)

    # Evaluate KDE and reference density on a common grid.
    grid = np.linspace(-7, 7, 501)
    dx = grid[1] - grid[0]
    estimated = kde.evaluate(grid)
    reference = stats.norm.pdf(grid, loc=loc, scale=scale)
    # Integrated squared error between KDE and the fitted normal is small.
    assert_(np.sum((estimated - reference)**2) * dx < 0.01)

    # Roughly half the probability mass lies on either side of the mean,
    # and integrate_box agrees with integrate_box_1d.
    upper = kde.integrate_box_1d(loc, np.inf)
    lower = kde.integrate_box_1d(-np.inf, loc)
    assert_almost_equal(upper, 0.5, decimal=1)
    assert_almost_equal(lower, 0.5, decimal=1)
    assert_almost_equal(kde.integrate_box(loc, np.inf), upper, decimal=13)
    assert_almost_equal(kde.integrate_box(-np.inf, loc), lower, decimal=13)

    # Analytic integration helpers vs. grid-sum approximations.
    assert_almost_equal(kde.integrate_kde(kde),
                        (estimated**2).sum() * dx, decimal=2)
    assert_almost_equal(kde.integrate_gaussian(loc, scale**2),
                        (estimated * reference).sum() * dx, decimal=2)
def test_kde_2d():
    """Compare a 2-D Gaussian KDE of multivariate-normal samples to the exact pdf."""
    #some basic tests comparing to normal distribution
    np.random.seed(8765678)
    n_basesample = 500
    mean = np.array([1.0, 3.0])
    covariance = np.array([[1.0, 2.0], [2.0, 6.0]])
    # Need transpose (shape (2, 500)) for kde
    xn = np.random.multivariate_normal(mean, covariance, size=n_basesample).T
    # get kde for original sample
    gkde = stats.gaussian_kde(xn)
    # evaluate the density function for the kde for some points
    x, y = np.mgrid[-7:7:500j, -7:7:500j]
    grid_coords = np.vstack([x.ravel(), y.ravel()])
    kdepdf = gkde.evaluate(grid_coords)
    kdepdf = kdepdf.reshape(500, 500)
    normpdf = stats.multivariate_normal.pdf(np.dstack([x, y]), mean=mean, cov=covariance)
    intervall = y.ravel()[1] - y.ravel()[0]
    # Integrated squared error over the 2-D grid should be small.
    assert_(np.sum((kdepdf - normpdf)**2) * (intervall**2) < 0.01)
    # Half the mass lies on either side of the mean's second coordinate
    # (effectively infinite integration box in the other directions).
    small = -1e100
    large = 1e100
    prob1 = gkde.integrate_box([small, mean[1]], [large, large])
    prob2 = gkde.integrate_box([small, small], [large, mean[1]])
    assert_almost_equal(prob1, 0.5, decimal=1)
    assert_almost_equal(prob2, 0.5, decimal=1)
    # Analytic integration helpers vs. grid-sum approximations.
    assert_almost_equal(gkde.integrate_kde(gkde),
                        (kdepdf**2).sum()*(intervall**2), decimal=2)
    assert_almost_equal(gkde.integrate_gaussian(mean, covariance),
                        (kdepdf*normpdf).sum()*(intervall**2), decimal=2)
def test_kde_bandwidth_method():
    """Default, callable and scalar bw_method forms must give identical densities."""
    def scotts_factor(kde_obj):
        """Same as default, just check that it works."""
        # Same formula as the built-in Scott factor; exercises the callable path.
        return np.power(kde_obj.n, -1./(kde_obj.d+4))

    np.random.seed(8765678)
    data = np.random.randn(50)

    kde_default = stats.gaussian_kde(data)
    kde_callable = stats.gaussian_kde(data, bw_method=scotts_factor)
    kde_scalar = stats.gaussian_kde(data, bw_method=kde_default.factor)

    grid = np.linspace(-7, 7, 51)
    baseline = kde_default.evaluate(grid)
    assert_almost_equal(baseline, kde_callable.evaluate(grid))
    assert_almost_equal(baseline, kde_scalar.evaluate(grid))

    # An unrecognized string must be rejected.
    assert_raises(ValueError, stats.gaussian_kde, data, bw_method='wrongstring')
# Subclasses that should stay working (extracted from various sources).
# Unfortunately the earlier design of gaussian_kde made it necessary for users
# to create these kinds of subclasses, or call _compute_covariance() directly.
class _kde_subclass1(stats.gaussian_kde):
    # Mimics legacy user code that bypassed gaussian_kde.__init__ entirely and
    # called _compute_covariance() directly.
    def __init__(self, dataset):
        self.dataset = np.atleast_2d(dataset)
        self.d, self.n = self.dataset.shape
        self.covariance_factor = self.scotts_factor
        self._compute_covariance()
class _kde_subclass2(stats.gaussian_kde):
    # Sets covariance_factor before delegating to the parent constructor.
    def __init__(self, dataset):
        self.covariance_factor = self.scotts_factor
        super(_kde_subclass2, self).__init__(dataset)
class _kde_subclass3(stats.gaussian_kde):
    # Supplies a fixed covariance matrix and overrides _compute_covariance.
    def __init__(self, dataset, covariance):
        self.covariance = covariance
        stats.gaussian_kde.__init__(self, dataset)
    def _compute_covariance(self):
        # Derive inverse covariance and normalization factor from the fixed
        # covariance instead of estimating it from the data.
        self.inv_cov = np.linalg.inv(self.covariance)
        self._norm_factor = np.sqrt(np.linalg.det(2*np.pi * self.covariance)) \
            * self.n
class _kde_subclass4(stats.gaussian_kde):
    # Overrides covariance_factor to half of Silverman's rule.
    def covariance_factor(self):
        return 0.5 * self.silverman_factor()
def test_gaussian_kde_subclassing():
    """Each legacy subclassing style must reproduce the base-class density."""
    x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
    xs = np.linspace(-10, 10, num=50)
    # gaussian_kde itself
    kde = stats.gaussian_kde(x1)
    ys = kde(xs)
    # subclass 1
    kde1 = _kde_subclass1(x1)
    y1 = kde1(xs)
    assert_array_almost_equal_nulp(ys, y1, nulp=10)
    # subclass 2
    kde2 = _kde_subclass2(x1)
    y2 = kde2(xs)
    assert_array_almost_equal_nulp(ys, y2, nulp=10)
    # subclass 3
    kde3 = _kde_subclass3(x1, kde.covariance)
    y3 = kde3(xs)
    assert_array_almost_equal_nulp(ys, y3, nulp=10)
    # subclass 4
    kde4 = _kde_subclass4(x1)
    y4 = kde4(x1)
    # Hard-coded reference values for subclass 4's halved-Silverman bandwidth.
    y_expected = [0.06292987, 0.06346938, 0.05860291, 0.08657652, 0.07904017]
    assert_array_almost_equal(y_expected, y4, decimal=6)
    # Not a subclass, but check for use of _compute_covariance()
    # NOTE: kde5 is an alias of kde, so this mutates the base instance in place.
    kde5 = kde
    kde5.covariance_factor = lambda: kde.factor
    kde5._compute_covariance()
    y5 = kde5(xs)
    assert_array_almost_equal_nulp(ys, y5, nulp=10)
def test_gaussian_kde_covariance_caching():
    """Resetting bw_method to 'scott' must fully undo a previous bandwidth change."""
    data = np.array([-7, -5, 1, 4, 5], dtype=float)
    grid = np.linspace(-10, 10, num=5)
    # Reference values from scipy 0.10, before some changes to gaussian_kde;
    # never compared with any external reference.
    expected = [0.02463386, 0.04689208, 0.05395444, 0.05337754, 0.01664475]

    # Change the bandwidth, then restore the default.
    kde = stats.gaussian_kde(data)
    kde.set_bandwidth(bw_method=0.5)
    kde.set_bandwidth(bw_method='scott')

    assert_array_almost_equal(expected, kde(grid), decimal=7)
def test_gaussian_kde_monkeypatch():
    """Monkeypatching covariance_factor must keep working.

    Ugly, but people may rely on this: see scipy pull request 123 and the
    linked ML thread "Width of the Gaussian in stats.kde".  If it becomes
    necessary to break this, that should be discussed on the mailing list.
    """
    data = np.array([-7, -5, 1, 4, 5], dtype=float)
    grid = np.linspace(-10, 10, num=50)

    # Legacy monkeypatched route to Silverman's rule.
    patched = stats.gaussian_kde(data)
    patched.covariance_factor = patched.silverman_factor
    patched._compute_covariance()

    # The supported, saner route.
    modern = stats.gaussian_kde(data, bw_method='silverman')

    assert_array_almost_equal_nulp(patched(grid), modern(grid), nulp=10)
def test_kde_integer_input():
    """Regression test for #1181: integer-dtype input must be accepted."""
    data = np.arange(5)
    kde = stats.gaussian_kde(data)
    expected = [0.13480721, 0.18222869, 0.19514935, 0.18222869, 0.13480721]
    assert_array_almost_equal(kde(data), expected, decimal=6)
def test_pdf_logpdf():
    """pdf() must match evaluate(), and logpdf() must match log(pdf())."""
    np.random.seed(1)
    sample = np.random.randn(50)
    kde = stats.gaussian_kde(sample)
    grid = np.linspace(-15, 12, 25)

    density = kde.evaluate(grid)
    assert_almost_equal(density, kde.pdf(grid), decimal=12)
    assert_almost_equal(np.log(density), kde.logpdf(grid), decimal=12)
if __name__ == "__main__":
    # Allow running this test module directly via the numpy.testing runner.
    run_module_suite()
| 33.282158 | 90 | 0.658272 | from __future__ import division, print_function, absolute_import
from scipy import stats
import numpy as np
from numpy.testing import assert_almost_equal, assert_, assert_raises, \
assert_array_almost_equal, assert_array_almost_equal_nulp, run_module_suite
def test_kde_1d():
np.random.seed(8765678)
n_basesample = 500
xn = np.random.randn(n_basesample)
xnmean = xn.mean()
xnstd = xn.std(ddof=1)
gkde = stats.gaussian_kde(xn)
xs = np.linspace(-7,7,501)
kdepdf = gkde.evaluate(xs)
normpdf = stats.norm.pdf(xs, loc=xnmean, scale=xnstd)
intervall = xs[1] - xs[0]
assert_(np.sum((kdepdf - normpdf)**2)*intervall < 0.01)
prob1 = gkde.integrate_box_1d(xnmean, np.inf)
prob2 = gkde.integrate_box_1d(-np.inf, xnmean)
assert_almost_equal(prob1, 0.5, decimal=1)
assert_almost_equal(prob2, 0.5, decimal=1)
assert_almost_equal(gkde.integrate_box(xnmean, np.inf), prob1, decimal=13)
assert_almost_equal(gkde.integrate_box(-np.inf, xnmean), prob2, decimal=13)
assert_almost_equal(gkde.integrate_kde(gkde),
(kdepdf**2).sum()*intervall, decimal=2)
assert_almost_equal(gkde.integrate_gaussian(xnmean, xnstd**2),
(kdepdf*normpdf).sum()*intervall, decimal=2)
def test_kde_2d():
np.random.seed(8765678)
n_basesample = 500
mean = np.array([1.0, 3.0])
covariance = np.array([[1.0, 2.0], [2.0, 6.0]])
xn = np.random.multivariate_normal(mean, covariance, size=n_basesample).T
gkde = stats.gaussian_kde(xn)
x, y = np.mgrid[-7:7:500j, -7:7:500j]
grid_coords = np.vstack([x.ravel(), y.ravel()])
kdepdf = gkde.evaluate(grid_coords)
kdepdf = kdepdf.reshape(500, 500)
normpdf = stats.multivariate_normal.pdf(np.dstack([x, y]), mean=mean, cov=covariance)
intervall = y.ravel()[1] - y.ravel()[0]
assert_(np.sum((kdepdf - normpdf)**2) * (intervall**2) < 0.01)
small = -1e100
large = 1e100
prob1 = gkde.integrate_box([small, mean[1]], [large, large])
prob2 = gkde.integrate_box([small, small], [large, mean[1]])
assert_almost_equal(prob1, 0.5, decimal=1)
assert_almost_equal(prob2, 0.5, decimal=1)
assert_almost_equal(gkde.integrate_kde(gkde),
(kdepdf**2).sum()*(intervall**2), decimal=2)
assert_almost_equal(gkde.integrate_gaussian(mean, covariance),
(kdepdf*normpdf).sum()*(intervall**2), decimal=2)
def test_kde_bandwidth_method():
def scotts_factor(kde_obj):
return np.power(kde_obj.n, -1./(kde_obj.d+4))
np.random.seed(8765678)
n_basesample = 50
xn = np.random.randn(n_basesample)
gkde = stats.gaussian_kde(xn)
gkde2 = stats.gaussian_kde(xn, bw_method=scotts_factor)
gkde3 = stats.gaussian_kde(xn, bw_method=gkde.factor)
xs = np.linspace(-7,7,51)
kdepdf = gkde.evaluate(xs)
kdepdf2 = gkde2.evaluate(xs)
assert_almost_equal(kdepdf, kdepdf2)
kdepdf3 = gkde3.evaluate(xs)
assert_almost_equal(kdepdf, kdepdf3)
assert_raises(ValueError, stats.gaussian_kde, xn, bw_method='wrongstring')
class _kde_subclass1(stats.gaussian_kde):
def __init__(self, dataset):
self.dataset = np.atleast_2d(dataset)
self.d, self.n = self.dataset.shape
self.covariance_factor = self.scotts_factor
self._compute_covariance()
class _kde_subclass2(stats.gaussian_kde):
def __init__(self, dataset):
self.covariance_factor = self.scotts_factor
super(_kde_subclass2, self).__init__(dataset)
class _kde_subclass3(stats.gaussian_kde):
def __init__(self, dataset, covariance):
self.covariance = covariance
stats.gaussian_kde.__init__(self, dataset)
def _compute_covariance(self):
self.inv_cov = np.linalg.inv(self.covariance)
self._norm_factor = np.sqrt(np.linalg.det(2*np.pi * self.covariance)) \
* self.n
class _kde_subclass4(stats.gaussian_kde):
def covariance_factor(self):
return 0.5 * self.silverman_factor()
def test_gaussian_kde_subclassing():
x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
xs = np.linspace(-10, 10, num=50)
kde = stats.gaussian_kde(x1)
ys = kde(xs)
kde1 = _kde_subclass1(x1)
y1 = kde1(xs)
assert_array_almost_equal_nulp(ys, y1, nulp=10)
kde2 = _kde_subclass2(x1)
y2 = kde2(xs)
assert_array_almost_equal_nulp(ys, y2, nulp=10)
kde3 = _kde_subclass3(x1, kde.covariance)
y3 = kde3(xs)
assert_array_almost_equal_nulp(ys, y3, nulp=10)
kde4 = _kde_subclass4(x1)
y4 = kde4(x1)
y_expected = [0.06292987, 0.06346938, 0.05860291, 0.08657652, 0.07904017]
assert_array_almost_equal(y_expected, y4, decimal=6)
kde5 = kde
kde5.covariance_factor = lambda: kde.factor
kde5._compute_covariance()
y5 = kde5(xs)
assert_array_almost_equal_nulp(ys, y5, nulp=10)
def test_gaussian_kde_covariance_caching():
x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
xs = np.linspace(-10, 10, num=5)
y_expected = [0.02463386, 0.04689208, 0.05395444, 0.05337754, 0.01664475]
kde = stats.gaussian_kde(x1)
kde.set_bandwidth(bw_method=0.5)
kde.set_bandwidth(bw_method='scott')
y2 = kde(xs)
assert_array_almost_equal(y_expected, y2, decimal=7)
def test_gaussian_kde_monkeypatch():
x1 = np.array([-7, -5, 1, 4, 5], dtype=float)
xs = np.linspace(-10, 10, num=50)
kde = stats.gaussian_kde(x1)
kde.covariance_factor = kde.silverman_factor
kde._compute_covariance()
y1 = kde(xs)
# The new saner version.
kde2 = stats.gaussian_kde(x1, bw_method='silverman')
y2 = kde2(xs)
assert_array_almost_equal_nulp(y1, y2, nulp=10)
def test_kde_integer_input():
x1 = np.arange(5)
kde = stats.gaussian_kde(x1)
y_expected = [0.13480721, 0.18222869, 0.19514935, 0.18222869, 0.13480721]
assert_array_almost_equal(kde(x1), y_expected, decimal=6)
def test_pdf_logpdf():
np.random.seed(1)
n_basesample = 50
xn = np.random.randn(n_basesample)
# Default
gkde = stats.gaussian_kde(xn)
xs = np.linspace(-15, 12, 25)
pdf = gkde.evaluate(xs)
pdf2 = gkde.pdf(xs)
assert_almost_equal(pdf, pdf2, decimal=12)
logpdf = np.log(pdf)
logpdf2 = gkde.logpdf(xs)
assert_almost_equal(logpdf, logpdf2, decimal=12)
if __name__ == "__main__":
run_module_suite()
| true | true |
1c3d7c8a12d2d18f3c9da293de6b538fccd030f0 | 7,892 | py | Python | nndet/evaluator/detection/hist.py | joeranbosma/nnDetection | 2ebbf1cdc8a8794c73e325f06fea50632c78ae8c | [
"BSD-3-Clause"
] | 242 | 2021-05-17T12:31:39.000Z | 2022-03-31T11:51:29.000Z | nndet/evaluator/detection/hist.py | joeranbosma/nnDetection | 2ebbf1cdc8a8794c73e325f06fea50632c78ae8c | [
"BSD-3-Clause"
] | 59 | 2021-06-02T07:32:10.000Z | 2022-03-31T18:45:52.000Z | nndet/evaluator/detection/hist.py | joeranbosma/nnDetection | 2ebbf1cdc8a8794c73e325f06fea50632c78ae8c | [
"BSD-3-Clause"
] | 38 | 2021-05-31T14:01:37.000Z | 2022-03-21T08:24:40.000Z | """
Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import numpy as np
from pathlib import Path
from loguru import logger
from typing import Sequence, List, Dict, Any, Tuple
import matplotlib.pyplot as plt
from nndet.evaluator import DetectionMetric
class PredictionHistogram(DetectionMetric):
    def __init__(self,
                 classes: Sequence[str], save_dir: Path,
                 iou_thresholds: Sequence[float] = (0.1, 0.5),
                 bins: int = 50):
        """
        Compute and save prediction-score histograms. (Note: this class does
        not provide any scalar metrics.)

        Args:
            classes: name of each class (index needs to correspond to predicted class indices!)
            save_dir: directory where histograms are saved to
            iou_thresholds: IoU thresholds for which histograms are plotted
            bins: number of bins of histogram
        """
        self.classes = classes
        self.save_dir = save_dir
        self.iou_thresholds = iou_thresholds
        self.bins = bins

    def get_iou_thresholds(self) -> Sequence[float]:
        """
        Return IoU thresholds needed for this metric

        Returns:
            Sequence[float]: IoU thresholds [M], M is the number of thresholds
        """
        return self.iou_thresholds

    def compute(self, results_list: List[Dict[int, Dict[str, np.ndarray]]]) -> Tuple[
            Dict[str, float], Dict[str, Dict[str, Any]]]:
        """
        Plot one class-independent histogram plus one histogram per class.
        For details see :meth:`plot_hist`.

        Args:
            results_list: matching results per image (list) per category (dict);
                see :meth:`plot_hist` for the inner structure.

        Returns:
            Two empty dicts (histograms are written to disk as a side effect).
        """
        self.plot_hist(results_list=results_list)
        for cls_idx, cls_str in enumerate(self.classes):
            # keep only images with results for this class and remap them to
            # key 0 so plot_hist treats single- and multi-class input uniformly
            # (the original repeated the `if cls_idx in r` filter twice)
            results_by_cls = [{0: r[cls_idx]} for r in results_list if cls_idx in r]
            self.plot_hist(results_by_cls, title_prefix=f"cl_{cls_str}_")
        return {}, {}

    def plot_hist(self, results_list: List[Dict[int, Dict[str, np.ndarray]]],
                  title_prefix: str = "") -> Tuple[
                      Dict[str, float], Dict[str, Dict[str, Any]]]:
        """
        Compute and save prediction histograms for multiple IoU values.

        Args:
            results_list (List[Dict[int, Dict[str, np.ndarray]]]): list with results per image (in list)
                per category (dict). Inner Dict contains multiple results obtained by :func:`box_matching_batch`.
                `dtMatches`: matched detections [T, G], where T = number of thresholds, G = number of ground truth
                `gtMatches`: matched ground truth boxes [T, D], where T = number of thresholds,
                    D = number of detections
                `dtScores`: prediction scores [D] detection scores
                `gtIgnore`: ground truth boxes which should be ignored [G] indicate whether ground truth
                    should be ignored
                `dtIgnore`: detections which should be ignored [T, D], indicate which detections should be ignored
            title_prefix: prefix for title of histogram plot

        Returns:
            Two empty dicts; the histogram plots are saved to
            :attr:`save_dir` as a side effect.
        """
        num_images = len(results_list)
        results = [_r for r in results_list for _r in r.values()]
        if len(results) == 0:
            # original message said "froc computation" (copy-paste from the
            # FROC metric); this metric computes histograms
            logger.warning("WARNING, no results found for histogram computation")
            return {}, {}

        # r['dtMatches'] [T, R], where R = sum(all detections)
        dt_matches = np.concatenate([r['dtMatches'] for r in results], axis=1)
        dt_ignores = np.concatenate([r['dtIgnore'] for r in results], axis=1)
        dt_scores = np.concatenate([r['dtScores'] for r in results])
        gt_ignore = np.concatenate([r['gtIgnore'] for r in results])
        self.check_number_of_iou(dt_matches, dt_ignores)

        num_gt = np.count_nonzero(gt_ignore == 0)  # number of ground truth boxes (non ignored)
        if num_gt == 0:
            logger.error("No ground truth found! Returning nothing.")
            return {}, {}

        for iou_idx, iou_val in enumerate(self.iou_thresholds):
            # drop detections flagged as ignored at this IoU threshold
            _scores = dt_scores[np.logical_not(dt_ignores[iou_idx])]
            assert len(_scores) == len(dt_matches[iou_idx])
            self.compute_histogram_one_iou(
                dt_matches[iou_idx], _scores, num_images, num_gt, iou_val, title_prefix)
        return {}, {}

    def compute_histogram_one_iou(self, dt_matches: np.ndarray, dt_scores: np.ndarray,
                                  num_images: int, num_gt: int, iou: float,
                                  title_prefix: str):
        """
        Plot the prediction histogram for a single IoU threshold.

        Args:
            dt_matches (np.ndarray): binary array indicating which bounding
                boxes have a large enough overlap with gt;
                [R] where R is the number of predictions
            dt_scores (np.ndarray): prediction score for each bounding box;
                [R] where R is the number of predictions
            num_images (int): number of images (currently unused; kept for
                signature stability)
            num_gt (int): number of ground truth bounding boxes
            iou: IoU value which is currently evaluated
            title_prefix: prefix for title of histogram plot
        """
        true_positives = np.sum(dt_matches)
        false_positives = np.sum(dt_matches == 0)
        false_negatives = num_gt - true_positives  # unmatched ground truth

        # append false negatives as "matched" detections with score 0 so they
        # appear at the left edge of the true-positive histogram
        _dt_matches = np.concatenate([dt_matches, [1] * int(false_negatives)])
        _dt_scores = np.concatenate([dt_scores, [0] * int(false_negatives)])

        plt.figure()
        plt.yscale('log')
        if 0 in dt_matches:
            plt.hist(_dt_scores[_dt_matches == 0], bins=self.bins, range=(0., 1.),
                     alpha=0.3, color='g', label='false pos.')
        if 1 in dt_matches:
            plt.hist(_dt_scores[_dt_matches == 1], bins=self.bins, range=(0., 1.),
                     alpha=0.3, color='b', label='true pos. (false neg. @ score=0)')
        plt.legend()
        title = title_prefix + (f"tp:{true_positives} fp:{false_positives} "
                                f"fn:{false_negatives} pos:{true_positives+false_negatives}")
        plt.title(title)
        plt.xlabel('confidence score')
        plt.ylabel('log n')

        if self.save_dir is not None:
            save_path = self.save_dir / (f"{title_prefix}pred_hist_IoU@{iou}".replace(".", "_") + ".png")
            logger.info(f"Saving {save_path}")
            plt.savefig(save_path)
        plt.close()
        return None
| 44.587571 | 118 | 0.61556 |
import time
import numpy as np
from pathlib import Path
from loguru import logger
from typing import Sequence, List, Dict, Any, Tuple
import matplotlib.pyplot as plt
from nndet.evaluator import DetectionMetric
class PredictionHistogram(DetectionMetric):
def __init__(self,
classes: Sequence[str], save_dir: Path,
iou_thresholds: Sequence[float] = (0.1, 0.5),
bins: int = 50):
self.classes = classes
self.save_dir = save_dir
self.iou_thresholds = iou_thresholds
self.bins = bins
def get_iou_thresholds(self) -> Sequence[float]:
return self.iou_thresholds
def compute(self, results_list: List[Dict[int, Dict[str, np.ndarray]]]) -> Tuple[
Dict[str, float], Dict[str, Dict[str, Any]]]:
self.plot_hist(results_list=results_list)
for cls_idx, cls_str in enumerate(self.classes):
results_by_cls = [{0: r[cls_idx]} for r in results_list if cls_idx in r if cls_idx in r]
self.plot_hist(results_by_cls, title_prefix=f"cl_{cls_str}_")
return {}, {}
def plot_hist(self, results_list: List[Dict[int, Dict[str, np.ndarray]]],
title_prefix: str = "") -> Tuple[
Dict[str, float], Dict[str, Dict[str, Any]]]:
num_images = len(results_list)
results = [_r for r in results_list for _r in r.values()]
if len(results) == 0:
logger.warning(f"WARNING, no results found for froc computation")
return {}, {}
dt_matches = np.concatenate([r['dtMatches'] for r in results], axis=1)
dt_ignores = np.concatenate([r['dtIgnore'] for r in results], axis=1)
dt_scores = np.concatenate([r['dtScores'] for r in results])
gt_ignore = np.concatenate([r['gtIgnore'] for r in results])
self.check_number_of_iou(dt_matches, dt_ignores)
num_gt = np.count_nonzero(gt_ignore == 0)
if num_gt == 0:
logger.error("No ground truth found! Returning nothing.")
return {}, {}
for iou_idx, iou_val in enumerate(self.iou_thresholds):
_scores = dt_scores[np.logical_not(dt_ignores[iou_idx])]
assert len(_scores) == len(dt_matches[iou_idx])
_ = self.compute_histogram_one_iou(\
dt_matches[iou_idx], _scores, num_images, num_gt, iou_val, title_prefix)
return {}, {}
def compute_histogram_one_iou(self, dt_matches: np.ndarray, dt_scores: np.ndarray,
num_images: int, num_gt: int, iou: float,
title_prefix: str):
num_matched = np.sum(dt_matches)
false_negatives = num_gt - num_matched
true_positives = np.sum(dt_matches)
false_positives = np.sum(dt_matches == 0)
_dt_matches = np.concatenate([dt_matches, [1] * int(false_negatives)])
_dt_scores = np.concatenate([dt_scores, [0] * int(false_negatives)])
plt.figure()
plt.yscale('log')
if 0 in dt_matches:
plt.hist(_dt_scores[_dt_matches == 0], bins=self.bins, range=(0., 1.),
alpha=0.3, color='g', label='false pos.')
if 1 in dt_matches:
plt.hist(_dt_scores[_dt_matches == 1], bins=self.bins, range=(0., 1.),
alpha=0.3, color='b', label='true pos. (false neg. @ score=0)')
plt.legend()
title = title_prefix + (f"tp:{true_positives} fp:{false_positives} "
f"fn:{false_negatives} pos:{true_positives+false_negatives}")
plt.title(title)
plt.xlabel('confidence score')
plt.ylabel('log n')
if self.save_dir is not None:
save_path = self.save_dir / (f"{title_prefix}pred_hist_IoU@{iou}".replace(".", "_") + ".png")
logger.info(f"Saving {save_path}")
plt.savefig(save_path)
plt.close()
return None
| true | true |
1c3d7dbe44cddf00d83f5274678a534170a18368 | 2,184 | py | Python | release/src-rt-6.x.4708/router/samba3/source4/scripting/python/samba/tests/core.py | zaion520/ATtomato | 4d48bb79f8d147f89a568cf18da9e0edc41f93fb | [
"FSFAP"
] | 2 | 2019-01-13T09:16:31.000Z | 2019-02-15T03:30:28.000Z | release/src-rt-6.x.4708/router/samba3/source4/scripting/python/samba/tests/core.py | zaion520/ATtomato | 4d48bb79f8d147f89a568cf18da9e0edc41f93fb | [
"FSFAP"
] | null | null | null | release/src-rt-6.x.4708/router/samba3/source4/scripting/python/samba/tests/core.py | zaion520/ATtomato | 4d48bb79f8d147f89a568cf18da9e0edc41f93fb | [
"FSFAP"
] | 2 | 2020-03-08T01:58:25.000Z | 2020-12-20T10:34:54.000Z | #!/usr/bin/env python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Samba Python tests."""
import ldb
import os
import samba
from samba.tests import TestCase, TestCaseInTempDir
class SubstituteVarTestCase(TestCase):
    """Tests for samba.substitute_var and samba.check_all_substituted.

    Uses assertEqual: the assertEquals alias is deprecated and was removed
    in Python 3.12.
    """

    def test_empty(self):
        self.assertEqual("", samba.substitute_var("", {}))

    def test_nothing(self):
        # Text without ${...} markers passes through unchanged.
        self.assertEqual("foo bar",
            samba.substitute_var("foo bar", {"bar": "bla"}))

    def test_replace(self):
        self.assertEqual("foo bla",
            samba.substitute_var("foo ${bar}", {"bar": "bla"}))

    def test_broken(self):
        # An unterminated ${... sequence is passed through unchanged.
        self.assertEqual("foo ${bdkjfhsdkfh sdkfh ",
            samba.substitute_var("foo ${bdkjfhsdkfh sdkfh ", {"bar": "bla"}))

    def test_unknown_var(self):
        # Variables without a substitution entry are left intact.
        self.assertEqual("foo ${bla} gsff",
            samba.substitute_var("foo ${bla} gsff", {"bar": "bla"}))

    def test_check_all_substituted(self):
        # No markers left: must not raise.
        samba.check_all_substituted("nothing to see here")
        # A leftover ${...} marker must raise.
        self.assertRaises(Exception, samba.check_all_substituted,
                          "Not subsituted: ${FOOBAR}")
class LdbExtensionTests(TestCaseInTempDir):
    """Tests for the samba.Ldb convenience extensions."""

    def test_searchone(self):
        path = self.tempdir + "/searchone.ldb"
        l = samba.Ldb(path)
        try:
            l.add({"dn": "foo=dc", "bar": "bla"})
            # assertEqual instead of the deprecated assertEquals alias
            # (removed in Python 3.12)
            self.assertEqual("bla",
                l.searchone(basedn=ldb.Dn(l, "foo=dc"), attribute="bar"))
        finally:
            # Drop the Ldb handle before unlinking so the file is released.
            del l
            os.unlink(path)
| 33.090909 | 77 | 0.649267 |
import ldb
import os
import samba
from samba.tests import TestCase, TestCaseInTempDir
class SubstituteVarTestCase(TestCase):
def test_empty(self):
self.assertEquals("", samba.substitute_var("", {}))
def test_nothing(self):
self.assertEquals("foo bar",
samba.substitute_var("foo bar", {"bar": "bla"}))
def test_replace(self):
self.assertEquals("foo bla",
samba.substitute_var("foo ${bar}", {"bar": "bla"}))
def test_broken(self):
self.assertEquals("foo ${bdkjfhsdkfh sdkfh ",
samba.substitute_var("foo ${bdkjfhsdkfh sdkfh ", {"bar": "bla"}))
def test_unknown_var(self):
self.assertEquals("foo ${bla} gsff",
samba.substitute_var("foo ${bla} gsff", {"bar": "bla"}))
def test_check_all_substituted(self):
samba.check_all_substituted("nothing to see here")
self.assertRaises(Exception, samba.check_all_substituted,
"Not subsituted: ${FOOBAR}")
class LdbExtensionTests(TestCaseInTempDir):
def test_searchone(self):
path = self.tempdir + "/searchone.ldb"
l = samba.Ldb(path)
try:
l.add({"dn": "foo=dc", "bar": "bla"})
self.assertEquals("bla",
l.searchone(basedn=ldb.Dn(l, "foo=dc"), attribute="bar"))
finally:
del l
os.unlink(path)
| true | true |
1c3d7e614d1a0716eb18cfab5bbae718b8334f24 | 1,557 | py | Python | grouper/fe/handlers/permissions_revoke_tag.py | bonniech3n/merou | 47d9de906686fd5b930a49299d3ffbcc0673ae8a | [
"Apache-2.0"
] | null | null | null | grouper/fe/handlers/permissions_revoke_tag.py | bonniech3n/merou | 47d9de906686fd5b930a49299d3ffbcc0673ae8a | [
"Apache-2.0"
] | null | null | null | grouper/fe/handlers/permissions_revoke_tag.py | bonniech3n/merou | 47d9de906686fd5b930a49299d3ffbcc0673ae8a | [
"Apache-2.0"
] | null | null | null | from grouper.constants import TAG_EDIT
from grouper.fe.util import GrouperHandler
from grouper.models.audit_log import AuditLog
from grouper.models.counter import Counter
from grouper.models.tag_permission_map import TagPermissionMap
from grouper.user_permissions import user_has_permission
class PermissionsRevokeTag(GrouperHandler):
    def get(self, name=None, mapping_id=None):
        """Render the confirmation page for revoking a tag's permission mapping."""
        mapping = TagPermissionMap.get(self.session, id=mapping_id)
        if not mapping:
            return self.notfound()

        # Only users with TAG_EDIT on this tag may revoke its permissions.
        if not user_has_permission(self.session, self.current_user, TAG_EDIT, mapping.tag.name):
            return self.forbidden()

        self.render("permission-revoke-tag.html", mapping=mapping)

    def post(self, name=None, mapping_id=None):
        """Delete the permission mapping, bump the update counter and audit-log it."""
        mapping = TagPermissionMap.get(self.session, id=mapping_id)
        if not mapping:
            return self.notfound()

        if not user_has_permission(self.session, self.current_user, TAG_EDIT, mapping.tag.name):
            return self.forbidden()

        # Capture everything we still need BEFORE deleting: after
        # delete() + commit() the mapping instance is expired and its row is
        # gone, so reading mapping.argument afterwards (as the original code
        # did) can fail or return nothing on refresh.
        permission = mapping.permission
        tag = mapping.tag
        argument = mapping.argument

        mapping.delete(self.session)
        Counter.incr(self.session, "updates")
        self.session.commit()

        AuditLog.log(
            self.session,
            self.current_user.id,
            "revoke_tag_permission",
            "Revoked permission with argument: {}".format(argument),
            on_tag_id=tag.id,
            on_permission_id=permission.id,
        )

        return self.redirect("/tags/{}?refresh=yes".format(tag.name))
| 33.847826 | 96 | 0.679512 | from grouper.constants import TAG_EDIT
from grouper.fe.util import GrouperHandler
from grouper.models.audit_log import AuditLog
from grouper.models.counter import Counter
from grouper.models.tag_permission_map import TagPermissionMap
from grouper.user_permissions import user_has_permission
class PermissionsRevokeTag(GrouperHandler):
def get(self, name=None, mapping_id=None):
mapping = TagPermissionMap.get(self.session, id=mapping_id)
if not mapping:
return self.notfound()
if not user_has_permission(self.session, self.current_user, TAG_EDIT, mapping.tag.name):
return self.forbidden()
self.render("permission-revoke-tag.html", mapping=mapping)
def post(self, name=None, mapping_id=None):
mapping = TagPermissionMap.get(self.session, id=mapping_id)
if not mapping:
return self.notfound()
if not user_has_permission(self.session, self.current_user, TAG_EDIT, mapping.tag.name):
return self.forbidden()
permission = mapping.permission
tag = mapping.tag
mapping.delete(self.session)
Counter.incr(self.session, "updates")
self.session.commit()
AuditLog.log(
self.session,
self.current_user.id,
"revoke_tag_permission",
"Revoked permission with argument: {}".format(mapping.argument),
on_tag_id=tag.id,
on_permission_id=permission.id,
)
return self.redirect("/tags/{}?refresh=yes".format(tag.name))
| true | true |
1c3d7f223c71439d9fe961ae326d8157074e19b2 | 6,719 | py | Python | workflows/cloudify_system_workflows/tests/test_plugins.py | ilan-WS/cloudify-manager | 510d8a277c848db351f38fc5b264806b2cb36d0b | [
"Apache-2.0"
] | 124 | 2015-01-22T22:28:37.000Z | 2022-02-26T23:12:06.000Z | workflows/cloudify_system_workflows/tests/test_plugins.py | cloudify-cosmo/cloudify-manager | 4a3f44ceb49d449bc5ebc8766b1c7b9c174ff972 | [
"Apache-2.0"
] | 345 | 2015-01-08T15:49:40.000Z | 2022-03-29T08:33:00.000Z | workflows/cloudify_system_workflows/tests/test_plugins.py | ilan-WS/cloudify-manager | 510d8a277c848db351f38fc5b264806b2cb36d0b | [
"Apache-2.0"
] | 77 | 2015-01-07T14:04:35.000Z | 2022-03-07T22:46:00.000Z | ########
# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import unittest
from mock import patch, MagicMock, PropertyMock, call
from cloudify.models_states import ExecutionState
from cloudify_system_workflows.plugins import (update as update_func,
install as install_func,
uninstall as uninstall_func)
class TestPlugins(unittest.TestCase):
    """Unit tests for the plugin ``install``/``uninstall`` workflows."""

    def setUp(self):
        """Patch ``_operate_on_plugin`` and the REST client factory."""
        operate_on_plugin_patcher = patch(
            'cloudify_system_workflows.plugins._operate_on_plugin')
        self.mock_operate_on_plugin = operate_on_plugin_patcher.start()
        # Echo the positional call arguments back as the "result" so the
        # tests below can assert on what the workflow returned.
        self.mock_operate_on_plugin.side_effect = lambda *_: _
        self.addCleanup(operate_on_plugin_patcher.stop)

        rest_client_patcher = patch(
            'cloudify_system_workflows.plugins.get_rest_client')
        self.mock_rest_client = MagicMock()
        rest_client_patcher.start().return_value = self.mock_rest_client
        self.addCleanup(rest_client_patcher.stop)

    def test_uninstall_returns_execution_result(self):
        """``uninstall`` forwards its args plus the 'uninstall' action."""
        return_value = list(uninstall_func(None, {}, ignores_this=None))
        desired_call_args = [None, {}, 'uninstall']
        self.mock_operate_on_plugin.assert_called_once_with(*desired_call_args)
        self.assertListEqual(desired_call_args, return_value)

    def test_install_returns_execution_result(self):
        """``install`` forwards its args plus the 'install' action."""
        return_value = list(install_func(None, {}, ignores_this=None))
        desired_call_args = [None, {}, 'install']
        self.mock_operate_on_plugin.assert_called_once_with(*desired_call_args)
        self.assertListEqual(desired_call_args, return_value)

    def test_install_deletes_plugin_upon_failure(self):
        """A failed install force-deletes the plugin via the REST client."""
        class _Exception(Exception):
            pass

        def raise_custom_exception(*_):
            raise _Exception()

        plugin = {'id': 'some_id'}
        self.mock_operate_on_plugin.side_effect = raise_custom_exception
        mock_ctx = MagicMock()
        with self.assertRaises(_Exception):
            install_func(mock_ctx, plugin, ignores_this=None)
        desired_operate_on_call_args = [mock_ctx, plugin, 'install']
        self.mock_operate_on_plugin.assert_called_once_with(
            *desired_operate_on_call_args)
        self.mock_rest_client.plugins.delete.assert_called_once_with(
            plugin_id=plugin['id'], force=True)
class TestPluginsUpdate(unittest.TestCase):
    """Unit tests for the plugins ``update`` workflow."""

    def setUp(self):
        """Patch ``wait_for`` and wire mock deployment-update REST calls."""
        wait_for_patcher = patch('cloudify_system_workflows.plugins.wait_for')
        wait_for_patcher.start()
        self.addCleanup(wait_for_patcher.stop)

        get_rest_client_patcher = patch('cloudify_system_workflows.plugins'
                                        '.get_rest_client')
        get_rest_client_mock = get_rest_client_patcher.start()
        self.addCleanup(get_rest_client_patcher.stop)
        self.mock_rest_client = MagicMock()
        get_rest_client_mock.return_value = self.mock_rest_client
        # Deployment updates return an object whose execution_id mirrors
        # the deployment_id, so executions can be matched back to inputs.
        self.deployment_update_mock = MagicMock(
            side_effect=self._update_with_existing_blueprint_mock)
        self.mock_rest_client.deployment_updates \
            .update_with_existing_blueprint \
            .side_effect = self.deployment_update_mock

    @staticmethod
    def _update_with_existing_blueprint_mock(deployment_id, *_, **__):
        """Return a fake update result carrying the deployment id."""
        return PropertyMock(execution_id=deployment_id)

    def test_plugins_update_continues_when_one_deployment_update_fails(self):
        """One failed/cancelled deployment update must not stop the rest."""
        def get_execution_mock(execution_id):
            """
            :return: a mock of an execution object where its status is
            TERMINATED if the execution shouldn't fail, and FAILED/CANCELLED
            if it should.
            """
            if execution_id == failed_execution_id:
                return PropertyMock(
                    status=execution_status['curr_exec_status'])
            return PropertyMock(status=ExecutionState.TERMINATED)

        def _assert_update_func():
            # Run the update and verify every deployment id was updated,
            # including those after the failing one.
            update_func(MagicMock(),
                        'my_update_id', None, dep_ids, False, False, True)
            should_call_these = [call(deployment_id=i,
                                      blueprint_id=None,
                                      skip_install=True,
                                      skip_uninstall=True,
                                      skip_reinstall=True,
                                      force=False,
                                      auto_correct_types=False,
                                      reevaluate_active_statuses=True)
                                 for i in dep_ids]
            self.deployment_update_mock.assert_has_calls(should_call_these)

        execution_status = {'curr_exec_status': ExecutionState.FAILED}
        dep_ids = list(range(5))
        failed_execution_id = 3
        self.mock_rest_client.executions.get \
            .side_effect = get_execution_mock
        _assert_update_func()
        # Repeat with a CANCELLED (rather than FAILED) execution.
        execution_status['curr_exec_status'] = ExecutionState.CANCELLED
        _assert_update_func()

    def test_doesnt_stop_updating(self):
        """All deployments update and the plugins update is finalized."""
        finalize_update_mock = MagicMock()
        dep_ids = list(range(5))
        self.mock_rest_client.plugins_update.finalize_plugins_update \
            = finalize_update_mock
        self.mock_rest_client.executions.get \
            .return_value = PropertyMock(status=ExecutionState.TERMINATED)
        update_func(MagicMock(),
                    '12345678', None, dep_ids, False, False, False)
        should_call_these = [call(deployment_id=i,
                                  blueprint_id=None,
                                  skip_install=True,
                                  skip_uninstall=True,
                                  skip_reinstall=True,
                                  force=False,
                                  auto_correct_types=False,
                                  reevaluate_active_statuses=False)
                             for i in range(len(dep_ids))]
        self.deployment_update_mock.assert_has_calls(should_call_these)
        finalize_update_mock.assert_called_with('12345678')
| 44.496689 | 79 | 0.641316 | st
from mock import patch, MagicMock, PropertyMock, call
from cloudify.models_states import ExecutionState
from cloudify_system_workflows.plugins import (update as update_func,
install as install_func,
uninstall as uninstall_func)
class TestPlugins(unittest.TestCase):
def setUp(self):
operate_on_plugin_patcher = patch(
'cloudify_system_workflows.plugins._operate_on_plugin')
self.mock_operate_on_plugin = operate_on_plugin_patcher.start()
self.mock_operate_on_plugin.side_effect = lambda *_: _
self.addCleanup(operate_on_plugin_patcher.stop)
rest_client_patcher = patch(
'cloudify_system_workflows.plugins.get_rest_client')
self.mock_rest_client = MagicMock()
rest_client_patcher.start().return_value = self.mock_rest_client
self.addCleanup(rest_client_patcher.stop)
def test_uninstall_returns_execution_result(self):
return_value = list(uninstall_func(None, {}, ignores_this=None))
desired_call_args = [None, {}, 'uninstall']
self.mock_operate_on_plugin.assert_called_once_with(*desired_call_args)
self.assertListEqual(desired_call_args, return_value)
def test_install_returns_execution_result(self):
return_value = list(install_func(None, {}, ignores_this=None))
desired_call_args = [None, {}, 'install']
self.mock_operate_on_plugin.assert_called_once_with(*desired_call_args)
self.assertListEqual(desired_call_args, return_value)
def test_install_deletes_plugin_upon_failure(self):
class _Exception(Exception):
pass
def raise_custom_exception(*_):
raise _Exception()
plugin = {'id': 'some_id'}
self.mock_operate_on_plugin.side_effect = raise_custom_exception
mock_ctx = MagicMock()
with self.assertRaises(_Exception):
install_func(mock_ctx, plugin, ignores_this=None)
desired_operate_on_call_args = [mock_ctx, plugin, 'install']
self.mock_operate_on_plugin.assert_called_once_with(
*desired_operate_on_call_args)
self.mock_rest_client.plugins.delete.assert_called_once_with(
plugin_id=plugin['id'], force=True)
class TestPluginsUpdate(unittest.TestCase):
def setUp(self):
wait_for_patcher = patch('cloudify_system_workflows.plugins.wait_for')
wait_for_patcher.start()
self.addCleanup(wait_for_patcher.stop)
get_rest_client_patcher = patch('cloudify_system_workflows.plugins'
'.get_rest_client')
get_rest_client_mock = get_rest_client_patcher.start()
self.addCleanup(get_rest_client_patcher.stop)
self.mock_rest_client = MagicMock()
get_rest_client_mock.return_value = self.mock_rest_client
self.deployment_update_mock = MagicMock(
side_effect=self._update_with_existing_blueprint_mock)
self.mock_rest_client.deployment_updates \
.update_with_existing_blueprint \
.side_effect = self.deployment_update_mock
@staticmethod
def _update_with_existing_blueprint_mock(deployment_id, *_, **__):
return PropertyMock(execution_id=deployment_id)
def test_plugins_update_continues_when_one_deployment_update_fails(self):
def get_execution_mock(execution_id):
if execution_id == failed_execution_id:
return PropertyMock(
status=execution_status['curr_exec_status'])
return PropertyMock(status=ExecutionState.TERMINATED)
def _assert_update_func():
update_func(MagicMock(),
'my_update_id', None, dep_ids, False, False, True)
should_call_these = [call(deployment_id=i,
blueprint_id=None,
skip_install=True,
skip_uninstall=True,
skip_reinstall=True,
force=False,
auto_correct_types=False,
reevaluate_active_statuses=True)
for i in dep_ids]
self.deployment_update_mock.assert_has_calls(should_call_these)
execution_status = {'curr_exec_status': ExecutionState.FAILED}
dep_ids = list(range(5))
failed_execution_id = 3
self.mock_rest_client.executions.get \
.side_effect = get_execution_mock
_assert_update_func()
execution_status['curr_exec_status'] = ExecutionState.CANCELLED
_assert_update_func()
def test_doesnt_stop_updating(self):
finalize_update_mock = MagicMock()
dep_ids = list(range(5))
self.mock_rest_client.plugins_update.finalize_plugins_update \
= finalize_update_mock
self.mock_rest_client.executions.get \
.return_value = PropertyMock(status=ExecutionState.TERMINATED)
update_func(MagicMock(),
'12345678', None, dep_ids, False, False, False)
should_call_these = [call(deployment_id=i,
blueprint_id=None,
skip_install=True,
skip_uninstall=True,
skip_reinstall=True,
force=False,
auto_correct_types=False,
reevaluate_active_statuses=False)
for i in range(len(dep_ids))]
self.deployment_update_mock.assert_has_calls(should_call_these)
finalize_update_mock.assert_called_with('12345678')
| true | true |
1c3d805d80507f403a3e34b2877be6333830ac44 | 1,560 | py | Python | twitoff/twitter.py | Frank-Howd/Twitoff-23 | 949b5a12f0f6f7bb8c9eb03e9948fd42a339e271 | [
"MIT"
] | null | null | null | twitoff/twitter.py | Frank-Howd/Twitoff-23 | 949b5a12f0f6f7bb8c9eb03e9948fd42a339e271 | [
"MIT"
] | null | null | null | twitoff/twitter.py | Frank-Howd/Twitoff-23 | 949b5a12f0f6f7bb8c9eb03e9948fd42a339e271 | [
"MIT"
] | null | null | null | """Handles connection to Twitter API using Tweepy"""
from os import getenv
import tweepy
import spacy
from .models import DB, Tweet, User
TWITTER_API_KEY = getenv("TWITTER_API_KEY")
TWITTER_API_KEY_SECRET = getenv("TWITTER_API_KEY_SECRET")
TWITTER_AUTH = tweepy.OAuthHandler(TWITTER_API_KEY, TWITTER_API_KEY_SECRET)
TWITTER = tweepy.API(TWITTER_AUTH)
nlp = spacy.load("my_model/")
def vectorize_tweet(tweet_text):
    """Return the spaCy embedding vector for the given tweet text."""
    doc = nlp(tweet_text)
    return doc.vector
def add_or_update_user(username):
    """Fetch a Twitter user's timeline and store new tweets in the DB.

    Creates the ``User`` row if it does not exist, pulls up to 200
    original (non-reply, non-retweet) tweets newer than the last one
    stored, and saves each tweet with its spaCy embedding.

    Raises whatever the Twitter API or DB layer raised, after rolling the
    session back so it remains usable for subsequent calls.
    """
    try:
        twitter_user = TWITTER.get_user(username)
        db_user = (User.query.get(twitter_user.id)) or User(
            id=twitter_user.id, name=username)
        DB.session.add(db_user)

        # since_id restricts the timeline to tweets newer than the most
        # recent one already stored, so reruns fetch only new tweets.
        tweets = twitter_user.timeline(
            count=200, exclude_replies=True, include_rts=False,
            tweet_mode="extended", since_id=db_user.newest_tweet_id
        )

        if tweets:
            # timeline() returns newest-first, so tweets[0] is the newest.
            db_user.newest_tweet_id = tweets[0].id

        for tweet in tweets:
            tweet_embeddings = vectorize_tweet(tweet.full_text)
            db_tweet = Tweet(
                id=tweet.id, text=tweet.full_text[:300], vect=tweet_embeddings)
            db_user.tweets.append(db_tweet)
            DB.session.add(db_tweet)

    except Exception as e:
        # Discard the partial transaction so the session is not left in a
        # broken state, then re-raise with the original traceback.
        DB.session.rollback()
        print("Error Processing {}: {}".format(username, e))
        raise
    else:
        DB.session.commit()
def update_all_users():
    """Refresh stored tweets for every user currently in the database."""
    usernames = [existing_user.name for existing_user in User.query.all()]
    for username in usernames:
        add_or_update_user(username)
| 27.857143 | 79 | 0.667949 |
from os import getenv
import tweepy
import spacy
from .models import DB, Tweet, User
TWITTER_API_KEY = getenv("TWITTER_API_KEY")
TWITTER_API_KEY_SECRET = getenv("TWITTER_API_KEY_SECRET")
TWITTER_AUTH = tweepy.OAuthHandler(TWITTER_API_KEY, TWITTER_API_KEY_SECRET)
TWITTER = tweepy.API(TWITTER_AUTH)
nlp = spacy.load("my_model/")
def vectorize_tweet(tweet_text):
return nlp(tweet_text).vector
def add_or_update_user(username):
try:
twitter_user = TWITTER.get_user(username)
db_user = (User.query.get(twitter_user.id)) or User(
id=twitter_user.id, name=username)
DB.session.add(db_user)
tweets = twitter_user.timeline(
count=200, exclude_replies=True, include_rts=False,
tweet_mode="extended", since_id=db_user.newest_tweet_id
)
if tweets:
db_user.newest_tweet_id = tweets[0].id
for tweet in tweets:
tweet_embeddings = vectorize_tweet(tweet.full_text)
db_tweet = Tweet(
id=tweet.id, text=tweet.full_text[:300], vect=tweet_embeddings)
db_user.tweets.append(db_tweet)
DB.session.add(db_tweet)
except Exception as e:
print("Error Processing {}: {}".format(username, e))
raise e
else:
DB.session.commit()
def update_all_users():
for user in User.query.all():
add_or_update_user(user.name)
| true | true |
1c3d82367f6c3f67004cf7191d6a9c1057e8095a | 8,073 | py | Python | ptah/tests/test_populate.py | timgates42/ptah | 47594cef8e80397a545bdc9e166eafcac94c72d6 | [
"BSD-3-Clause"
] | 13 | 2015-03-18T16:06:50.000Z | 2021-04-27T19:14:35.000Z | ptah/tests/test_populate.py | timgates42/ptah | 47594cef8e80397a545bdc9e166eafcac94c72d6 | [
"BSD-3-Clause"
] | null | null | null | ptah/tests/test_populate.py | timgates42/ptah | 47594cef8e80397a545bdc9e166eafcac94c72d6 | [
"BSD-3-Clause"
] | 6 | 2015-01-07T11:17:32.000Z | 2020-04-02T11:35:03.000Z | import sqlalchemy as sqla
import ptah
from ptah.populate import POPULATE_ID, Populate
from pyramid import testing
from pyramid.exceptions import ConfigurationConflictError
class TestPopulateDirective(ptah.PtahTestCase):
    """Tests for registering populate steps via decorator and directive."""

    # Defer ptah initialization/commit so each test controls it explicitly.
    _init_ptah = False
    _auto_commit = False

    def test_step_registration(self):
        """A ``@ptah.populate`` step lands in the POPULATE_ID storage."""
        import ptah

        @ptah.populate('step', title='Step', requires=['test-dep'])
        def step(registry):
            """ """

        self.init_ptah()

        data = ptah.get_cfg_storage(POPULATE_ID)
        self.assertIn('step', data)
        self.assertIs(data['step']['factory'], step)
        self.assertEqual(data['step']['title'], 'Step')
        self.assertTrue(data['step']['active'])
        self.assertEqual(data['step']['requires'], ['test-dep'])

    def test_step_pyramid_registration(self):
        """Steps can also be registered with the pyramid config directive."""
        def step(registry):
            """ """

        config = testing.setUp()
        config.include('ptah')
        config.ptah_populate_step('step', factory=step,
                                  title='Test', active=False)
        config.commit()

        data = config.get_cfg_storage(POPULATE_ID)
        self.assertIn('step', data)
        self.assertIs(data['step']['factory'], step)
        self.assertEqual(data['step']['title'], 'Test')
        self.assertFalse(data['step']['active'])
        self.assertEqual(data['step']['requires'], ())

    def test_step_registration_conflicts(self):
        """Two registrations under the same name raise a config conflict."""
        import ptah

        @ptah.populate('step')
        @ptah.populate('step')
        def step(registry):
            """ """

        self.assertRaises(ConfigurationConflictError, self.init_ptah)
class TestPyramidDrective(ptah.PtahTestCase):
    """Tests for executing populate steps via ``config.ptah_populate``."""

    def test_directive_execute(self):
        """``ptah_populate`` runs active steps and skips inactive ones."""
        data = [False, False]

        def step1(registry):
            data[0] = True

        def step2(registry):  # pragma: no cover
            data[0] = True

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        self.config.ptah_populate()
        self.assertTrue(data[0])
        self.assertFalse(data[1])

    def test_directive_execute_populate_mode(self):
        """No steps run while the global ``ptah.POPULATE`` flag is set."""
        data = [False]

        def step(registry):  # pragma: no cover
            data[0] = True

        self.config.ptah_populate_step(
            'custom-step', title='Custom step',
            active=True, factory=step)

        import ptah
        ptah.POPULATE = True
        self.config.ptah_populate()
        ptah.POPULATE = False

        self.assertFalse(data[0])
class TestListSteps(ptah.PtahTestCase):
    """Tests for ``Populate.list_steps`` filtering, ordering and errors."""

    def test_list_simple(self):
        """Only active steps are listed by default."""
        def step1(registry):
            """ """
        def step2(registry):
            """ """

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        steps = Populate(self.registry).list_steps()
        steps = dict((s['name'], s) for s in steps)

        self.assertIn('custom-step1', steps)
        self.assertNotIn('custom-step2', steps)
        self.assertEqual(steps['custom-step1']['factory'], step1)

    def test_list_all(self):
        """``all=True`` includes inactive steps as well."""
        def step1(registry):
            """ """
        def step2(registry):
            """ """

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        steps = Populate(self.registry).list_steps(all=True)
        steps = dict((s['name'], s) for s in steps)

        self.assertIn('custom-step1', steps)
        self.assertIn('custom-step2', steps)
        self.assertEqual(steps['custom-step1']['factory'], step1)
        self.assertEqual(steps['custom-step2']['factory'], step2)

    def test_list_explicit(self):
        """An explicit name list selects exactly those steps (even inactive)."""
        def step1(registry):
            """ """
        def step2(registry):
            """ """

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        steps = Populate(self.registry).list_steps(('custom-step2',))
        steps = dict((s['name'], s) for s in steps)

        self.assertNotIn('custom-step1', steps)
        self.assertIn('custom-step2', steps)

    def test_list_requires_inactive(self):
        """Inactive steps are pulled in when another step requires them."""
        def step1(registry):
            """ """
        def step2(registry):
            """ """

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, requires=('custom-step2',), factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        steps = Populate(self.registry).list_steps()
        d_steps = dict((s['name'], s) for s in steps)
        self.assertIn('custom-step1', d_steps)
        self.assertIn('custom-step2', d_steps)

    def test_list_requires_order(self):
        """A required step is ordered before the step that requires it."""
        def step1(registry):
            """ """
        def step2(registry):
            """ """

        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1',
            active=True, requires=('custom-step2',), factory=step1)
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2',
            active=False, factory=step2)

        steps = Populate(self.registry).list_steps()
        l_steps = [s['name'] for s in steps]
        self.assertTrue(l_steps.index('custom-step2') <
                        l_steps.index('custom-step1'))

    def test_list_once(self):
        """A step required by several others appears only once."""
        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1', requires=('custom-step2',))
        self.config.ptah_populate_step(
            'custom-step2', title='Custom step 2')
        self.config.ptah_populate_step(
            'custom-step3', title='Custom step 3', requires=('custom-step2',))

        steps = Populate(self.registry).list_steps()

        count = 0
        for step in steps:
            if step['name'] == 'custom-step2':
                count += 1

        self.assertEqual(count, 1)

    def test_list_unknown(self):
        """Requesting an unregistered step name raises RuntimeError."""
        self.assertRaises(
            RuntimeError,
            Populate(self.registry).list_steps, ('unknown',))

    def test_list_unknown_dependency(self):
        """A step requiring an unregistered step raises RuntimeError."""
        self.config.ptah_populate_step(
            'custom-step1', title='Custom step 1', requires=('unknown',))

        self.assertRaises(
            RuntimeError, Populate(self.registry).list_steps)
class TestCreateDbSchema(ptah.PtahTestCase):
    """Tests for ``ptah.populate.create_db_schema``."""

    def test_event(self):
        """Schema creation fires the BeforeCreateDbSchema event."""
        from ptah.populate import create_db_schema

        data = [False]

        def event_handler(ev):
            data[0] = True

        self.registry.registerHandler(
            event_handler, (ptah.events.BeforeCreateDbSchema,))

        create_db_schema(self.registry)
        self.assertTrue(data[0])

    def test_skip_tables(self):
        """Tables listed in ``db_skip_tables`` are not created."""
        from ptah.populate import create_db_schema

        base = ptah.get_base()

        class test_populate_TestTable(base):
            __tablename__ = 'test_populate_TestTable'
            id = sqla.Column('id', sqla.Integer, primary_key=True)

        cfg = ptah.get_settings(ptah.CFG_ID_PTAH)
        cfg['db_skip_tables'] = ('test_populate_TestTable',)
        create_db_schema(self.registry)
        self.assertFalse(
            base.metadata.tables['test_populate_TestTable'].exists())

        # Clearing the skip list lets the table be created normally.
        cfg['db_skip_tables'] = ()
        create_db_schema(self.registry)
        self.assertTrue(
            base.metadata.tables['test_populate_TestTable'].exists())
| 29.789668 | 78 | 0.592097 | import sqlalchemy as sqla
import ptah
from ptah.populate import POPULATE_ID, Populate
from pyramid import testing
from pyramid.exceptions import ConfigurationConflictError
class TestPopulateDirective(ptah.PtahTestCase):
_init_ptah = False
_auto_commit = False
def test_step_registration(self):
import ptah
@ptah.populate('step', title='Step', requires=['test-dep'])
def step(registry):
self.init_ptah()
data = ptah.get_cfg_storage(POPULATE_ID)
self.assertIn('step', data)
self.assertIs(data['step']['factory'], step)
self.assertEqual(data['step']['title'], 'Step')
self.assertTrue(data['step']['active'])
self.assertEqual(data['step']['requires'], ['test-dep'])
def test_step_pyramid_registration(self):
def step(registry):
config = testing.setUp()
config.include('ptah')
config.ptah_populate_step('step', factory=step,
title='Test', active=False)
config.commit()
data = config.get_cfg_storage(POPULATE_ID)
self.assertIn('step', data)
self.assertIs(data['step']['factory'], step)
self.assertEqual(data['step']['title'], 'Test')
self.assertFalse(data['step']['active'])
self.assertEqual(data['step']['requires'], ())
def test_step_registration_conflicts(self):
import ptah
@ptah.populate('step')
@ptah.populate('step')
def step(registry):
self.assertRaises(ConfigurationConflictError, self.init_ptah)
class TestPyramidDrective(ptah.PtahTestCase):
def test_directive_execute(self):
data = [False, False]
def step1(registry):
data[0] = True
def step2(registry):
data[0] = True
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
self.config.ptah_populate()
self.assertTrue(data[0])
self.assertFalse(data[1])
def test_directive_execute_populate_mode(self):
data = [False]
def step(registry):
data[0] = True
self.config.ptah_populate_step(
'custom-step', title='Custom step',
active=True, factory=step)
import ptah
ptah.POPULATE = True
self.config.ptah_populate()
ptah.POPULATE = False
self.assertFalse(data[0])
class TestListSteps(ptah.PtahTestCase):
def test_list_simple(self):
def step1(registry):
def step2(registry):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
steps = Populate(self.registry).list_steps()
steps = dict((s['name'], s) for s in steps)
self.assertIn('custom-step1', steps)
self.assertNotIn('custom-step2', steps)
self.assertEqual(steps['custom-step1']['factory'], step1)
def test_list_all(self):
def step1(registry):
def step2(registry):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
steps = Populate(self.registry).list_steps(all=True)
steps = dict((s['name'], s) for s in steps)
self.assertIn('custom-step1', steps)
self.assertIn('custom-step2', steps)
self.assertEqual(steps['custom-step1']['factory'], step1)
self.assertEqual(steps['custom-step2']['factory'], step2)
def test_list_explicit(self):
def step1(registry):
def step2(registry):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
steps = Populate(self.registry).list_steps(('custom-step2',))
steps = dict((s['name'], s) for s in steps)
self.assertNotIn('custom-step1', steps)
self.assertIn('custom-step2', steps)
def test_list_requires_inactive(self):
def step1(registry):
def step2(registry):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, requires=('custom-step2',), factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
steps = Populate(self.registry).list_steps()
d_steps = dict((s['name'], s) for s in steps)
self.assertIn('custom-step1', d_steps)
self.assertIn('custom-step2', d_steps)
def test_list_requires_order(self):
def step1(registry):
def step2(registry):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1',
active=True, requires=('custom-step2',), factory=step1)
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2',
active=False, factory=step2)
steps = Populate(self.registry).list_steps()
l_steps = [s['name'] for s in steps]
self.assertTrue(l_steps.index('custom-step2') <
l_steps.index('custom-step1'))
def test_list_once(self):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1', requires=('custom-step2',))
self.config.ptah_populate_step(
'custom-step2', title='Custom step 2')
self.config.ptah_populate_step(
'custom-step3', title='Custom step 3', requires=('custom-step2',))
steps = Populate(self.registry).list_steps()
count = 0
for step in steps:
if step['name'] == 'custom-step2':
count += 1
self.assertEqual(count, 1)
def test_list_unknown(self):
self.assertRaises(
RuntimeError,
Populate(self.registry).list_steps, ('unknown',))
def test_list_unknown_dependency(self):
self.config.ptah_populate_step(
'custom-step1', title='Custom step 1', requires=('unknown',))
self.assertRaises(
RuntimeError, Populate(self.registry).list_steps)
class TestCreateDbSchema(ptah.PtahTestCase):
def test_event(self):
from ptah.populate import create_db_schema
data = [False]
def event_handler(ev):
data[0] = True
self.registry.registerHandler(
event_handler, (ptah.events.BeforeCreateDbSchema,))
create_db_schema(self.registry)
self.assertTrue(data[0])
def test_skip_tables(self):
from ptah.populate import create_db_schema
base = ptah.get_base()
class test_populate_TestTable(base):
__tablename__ = 'test_populate_TestTable'
id = sqla.Column('id', sqla.Integer, primary_key=True)
cfg = ptah.get_settings(ptah.CFG_ID_PTAH)
cfg['db_skip_tables'] = ('test_populate_TestTable',)
create_db_schema(self.registry)
self.assertFalse(
base.metadata.tables['test_populate_TestTable'].exists())
cfg['db_skip_tables'] = ()
create_db_schema(self.registry)
self.assertTrue(
base.metadata.tables['test_populate_TestTable'].exists())
| true | true |
1c3d8314c2faf4b2e2cd15a963ee6e6935375dd9 | 269 | py | Python | vehicle_management/vehicle_management/doctype/vehicle_expense_items/vehicle_expense_items.py | muirawachanga/vehicle | 1025f99ecab14d27485584f540e9630bfcb50c0f | [
"MIT"
] | null | null | null | vehicle_management/vehicle_management/doctype/vehicle_expense_items/vehicle_expense_items.py | muirawachanga/vehicle | 1025f99ecab14d27485584f540e9630bfcb50c0f | [
"MIT"
] | null | null | null | vehicle_management/vehicle_management/doctype/vehicle_expense_items/vehicle_expense_items.py | muirawachanga/vehicle | 1025f99ecab14d27485584f540e9630bfcb50c0f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Bituls Company Limited and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class VehicleExpenseItems(Document):
	"""Frappe DocType controller for vehicle expense line items.

	No custom server-side behavior; fields are defined in the DocType JSON.
	"""
	pass
| 22.416667 | 61 | 0.773234 |
from __future__ import unicode_literals
from frappe.model.document import Document
class VehicleExpenseItems(Document):
pass
| true | true |
1c3d832791a5a60515210a73d39ca02d72dd4a8a | 11,151 | py | Python | main.py | weiyl-chenjie/barcode-print-280B | 85c14dd86051ebdce12eec092d9e85ee35aa2147 | [
"Apache-2.0"
] | null | null | null | main.py | weiyl-chenjie/barcode-print-280B | 85c14dd86051ebdce12eec092d9e85ee35aa2147 | [
"Apache-2.0"
] | null | null | null | main.py | weiyl-chenjie/barcode-print-280B | 85c14dd86051ebdce12eec092d9e85ee35aa2147 | [
"Apache-2.0"
] | null | null | null | # Author:尉玉林(Mr.Wei)
# Create Date:2019/10/22
# Edition:V1.0.0
# Python自带库
import sys
import sqlite3
from time import sleep
# 第三方库
from PySide2.QtWidgets import QMainWindow, QApplication, QMessageBox, QSplashScreen, QLabel
from PySide2.QtCore import QThread, Signal, Qt
from PySide2.QtGui import QPixmap, QFont
from PIL import ImageGrab
import pytesseract
import keyboard
import pyautogui as pag
# 自己的包
from UI2PY.MainWindow import Ui_MainWindow
from mycode.HslCommunication import SiemensPLCS, SiemensS7Net
from mycode.config import Config
from mycode.sato import ComThread
class MyWindow(QMainWindow):
    def __init__(self):
        """Build the UI, wire the PLC-polling thread and initialise state."""
        super(MyWindow, self).__init__()
        self.Ui_MainWindow = Ui_MainWindow()
        self.Ui_MainWindow.setupUi(self)
        self._thread = MyThread()
        # Every poll tick triggers key_reading on the GUI side.
        self._thread.signal.connect(self.key_reading)
        # Whether the key is fully inserted
        self.key_is_ready = False
        # Key sensor state from the previous poll
        self.key_last_status = False
        # Whether the pin (marble) machine is in position
        self.marble_machine_is_ready = False
        # Pin machine state from the previous poll
        self.marble_machine_last_status = False
        # Product currently in production
        self.product = ''
        # Default PLC address; overwritten from config in setup().
        self.siemens = SiemensS7Net(SiemensPLCS.S1200, '192.168.0.1')
        self.conf = Config()
        self.com = ComThread()
        self.setup()
    def setup(self):
        """Load config, open the serial port, connect the PLC, start polling.

        On any failure (no serial port, port open failure, PLC connection
        failure) a critical message box is shown, the status label turns
        red, and the polling thread is NOT started.
        """
        ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
        # NOTE(review): get_project() is defined outside this view;
        # presumably returns (something, current product name) — confirm.
        _, self.product = self.get_project()
        self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
        self.siemens = SiemensS7Net(SiemensPLCS.S1200, ip_plc)
        # Status text: "Initialising, please wait..." on yellow background.
        self.Ui_MainWindow.label_status.setText('正在初始化,请稍候...')
        self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
        QApplication.processEvents()
        if self.com.check_com():  # a serial port exists: open the configured one
            if self.com.open_com():  # serial port opened successfully
                if self.siemens.ConnectServer().IsSuccess:  # PLC connected
                    # "Initialisation complete"
                    self.Ui_MainWindow.label_status.setText('初始化完成')
                    self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
                    self._thread.start()
                else:  # PLC connection failed
                    QMessageBox.critical(self, '错误', 'PLC连接失败!')
                    self.Ui_MainWindow.label_status.setText('PLC连接失败!')
                    self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
            else:  # opening the serial port failed
                QMessageBox.critical(self, '错误!', '串口打开失败!')
                self.Ui_MainWindow.label_status.setText('串口打开失败!')
                self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
        else:  # no serial port found at all
            QMessageBox.critical(self, '错误!', '未发现串口!')
            self.Ui_MainWindow.label_status.setText('未发现串口!')
            self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
    # Slot functions
    def change_ip_plc(self):
        """Persist the PLC IP typed into the line edit to the config file.

        Stops the polling thread first.  NOTE(review): the thread is not
        restarted and ``self.siemens`` keeps the old address until
        ``test_connect_plc``/``setup`` runs again — confirm this is the
        intended workflow.
        """
        self._thread.__del__()
        self._thread.quit()
        ip_plc = self.Ui_MainWindow.lineEdit_IP_PLC.text()
        self.conf.update_config(product='config', section='plc', name='ip', value=ip_plc)
def test_connect_plc(self):
self._thread.__del__()
self._thread.quit()
ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
self.siemens = SiemensS7Net(SiemensPLCS.S1200, ip_plc)
self.Ui_MainWindow.label_status.setText('正在连接PLC...')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
QApplication.processEvents()
if self.siemens.ConnectServer().IsSuccess: # 如果连接成功
QMessageBox.about(self, 'PLC连接', 'PLC连接成功!')
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
else:
QMessageBox.about(self, 'PLC连接', 'PLC连接失败!')
self.Ui_MainWindow.label_status.setText('PLC连接失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
def key_reading(self):
# self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
# QApplication.processEvents()
key_is_ready = self.siemens.ReadBool('I4.5').Content
if key_is_ready: # 如果钥匙到位(I4.5)
if self.key_last_status: # 如果之前钥匙已经到位,不做任何处理
pass
else: # 如果之前钥匙未到位,则将标志位置为True
self.key_is_ready = True
else: # 如果钥匙未到位,则将标志位置为False
self.key_is_ready = False
# 将本次的钥匙是否到位传感器的状态作为下一次状态的上一状态
self.key_last_status = key_is_ready
marble_is_ready = self.siemens.ReadBool('M3.4').Content
if marble_is_ready: # 如果弹子机就位(M3.4)
if self.marble_machine_last_status: # 如果之前弹子机已经就位,不做任何处理
pass
else: # 如果之前弹子机未就位,则将标志位置为True
self.marble_machine_is_ready = True
else: # 如果弹子机未就位,则将标志位置为False
self.marble_machine_is_ready = False
# 将本次的弹子机是否到位传感器的状态作为下一次状态的上一状态
self.marble_machine_last_status = marble_is_ready
# print(marble_is_ready)
# print(self.key_is_ready, self.marble_machine_is_ready)
if self.key_is_ready and self.marble_machine_is_ready: # 如果钥匙和弹子机都到位,则读取钥匙号
self.Ui_MainWindow.label_status.setText('正在读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 255, 127);")
QApplication.processEvents()
self.key_is_ready = False
self.marble_machine_is_ready = False
self.key_read()
def manual_key_read(self):
self.Ui_MainWindow.label_status.setText('正在读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 255, 127);")
QApplication.processEvents()
self.key_read()
def start(self):
ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
self.Ui_MainWindow.label_status.setText('正在连接PLC...')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
QApplication.processEvents()
if self.siemens.ConnectServer().IsSuccess: # 如果连接成功
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
self._thread.working = True
self._thread.start()
else:
self.Ui_MainWindow.label_status.setText('PLC连接失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
def pause(self):
self._thread.__del__()
self._thread.quit()
# 选择图片的识别区域
def select_capture_region(self):
self._thread.__del__()
self._thread.quit()
conf = Config()
if keyboard.wait(hotkey='ctrl+alt') == None:
x1, y1 = pag.position()
print(pag.position())
if keyboard.wait(hotkey='ctrl+alt') == None:
x2, y2 = pag.position()
print(pag.position())
pos_new = str((x1, y1, x2, y2))
conf.update_config(product='config', section='image_read_area', name='position', value=pos_new)
def show_capture(self):
self._thread.__del__()
self._thread.quit()
img, project = self.get_project()
self.Ui_MainWindow.lineEdit_product.setText(project)
project = pytesseract.image_to_string(img)
self.Ui_MainWindow.lineEdit_product.setText(project)
img.show()
# 功能函数
# 获取弹子号
def key_read(self):
_, self.product = self.get_project()
keyid = self.get_keyid()
res, keycode = self.get_keycode(keyid)
if res: # 如果正确获取钥匙号
self.barcode_print(self.product, keycode)
else:
keycode = '----'
self.Ui_MainWindow.label_status.setText('未正确获取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 0, 0);")
self.Ui_MainWindow.lineEdit.setText(keycode)
def get_keyid(self):
keyid = ''
read = self.siemens.Read('DB3.60', 32) # 读取8个弹子号格式:弹子号为11111111,则读取为00010001000100010001000100010001
if read.IsSuccess: # 如果读取成功
keys = read.Content[3::4] # 获取弹子号:从第4个位置开始,每隔4个数读取数据,得到8个弹子号
for key in keys:
keyid += str(key)
else:
keyid = 'XXXXXXXX'
return keyid
# 获取钥匙号
def get_keycode(self, keyid):
try:
with sqlite3.connect('keyid.db') as conn:
c = conn.cursor()
rows = c.execute("SELECT keycode FROM '%s' WHERE keyid='%s'" % (self.product, keyid)).fetchall()
keycode = rows[0][0]
return True, keycode
except Exception as e:
self.Ui_MainWindow.label_status.setText('get_keycode:%s' % str(e))
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
QMessageBox.critical(self, '错误', str(e))
return False, '----'
def get_project(self):
conf = Config()
pos = eval(conf.read_config(product='config', section="image_read_area", name="position"))
img = ImageGrab.grab(pos)
# img.show()
# img.save("picture2string.jpg")
project = pytesseract.image_to_string(img)
self.Ui_MainWindow.lineEdit_product.setText(project)
# print(project)
return img, project.upper()
# 条码打印
def barcode_print(self, product, keycode):
if product == '280B' or product == '281B' or product == '0开头':
barcode_bytes = keycode.encode("utf-8") # 转换为字节格式
send_data = b'\x1bA\x1bN\x1bR\x1bR\x1bH070\x1bV01282\x1bL0202\x1bS\x1bB103080*' + barcode_bytes + b'*\x1bH0200\x1bV01369\x1bL0202\x1bS' + barcode_bytes + b'\x1bQ1\x1bZ'
self.com.send_data(send_data)
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
class MyThread(QThread):
signal = Signal()
def __init__(self):
super(MyThread, self).__init__()
self.working = True # 工作状态标志量
def __del__(self):
self.working = False # 工作状态标志量
def run(self):
# 进行线程任务
while self.working:
sleep(0.1)
self.signal.emit() # 发射信号
if __name__ == '__main__':
# 创建一个应用程序对象
app = QApplication(sys.argv)
splash = QSplashScreen(QPixmap("resource/images/loading.png"))
splash.showMessage("加载中,请稍后...", Qt.AlignHCenter | Qt.AlignBottom, Qt.cyan)
splash.setFont(QFont("华文楷体", 10, QFont.Bold))
splash.show() # 显示启动界面
QApplication.processEvents() # 处理主进程事件
# 创建控件(容器)
window = MyWindow()
# 设置标题
# window.setWindowTitle('title')
# window.load_data(splash) # 加载数据
# 显示窗口
window.show()
splash.finish(window) # 隐藏启动界面
# 进入消息循环
sys.exit(app.exec_())
| 38.584775 | 180 | 0.63241 |
import sys
import sqlite3
from time import sleep
from PySide2.QtWidgets import QMainWindow, QApplication, QMessageBox, QSplashScreen, QLabel
from PySide2.QtCore import QThread, Signal, Qt
from PySide2.QtGui import QPixmap, QFont
from PIL import ImageGrab
import pytesseract
import keyboard
import pyautogui as pag
from UI2PY.MainWindow import Ui_MainWindow
from mycode.HslCommunication import SiemensPLCS, SiemensS7Net
from mycode.config import Config
from mycode.sato import ComThread
class MyWindow(QMainWindow):
def __init__(self):
super(MyWindow, self).__init__()
self.Ui_MainWindow = Ui_MainWindow()
self.Ui_MainWindow.setupUi(self)
self._thread = MyThread()
self._thread.signal.connect(self.key_reading)
self.key_is_ready = False
self.key_last_status = False
self.marble_machine_is_ready = False
self.marble_machine_last_status = False
self.product = ''
self.siemens = SiemensS7Net(SiemensPLCS.S1200, '192.168.0.1')
self.conf = Config()
self.com = ComThread()
self.setup()
def setup(self):
ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
_, self.product = self.get_project()
self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
self.siemens = SiemensS7Net(SiemensPLCS.S1200, ip_plc)
self.Ui_MainWindow.label_status.setText('正在初始化,请稍候...')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
QApplication.processEvents()
if self.com.check_com():
if self.com.open_com():
if self.siemens.ConnectServer().IsSuccess:
self.Ui_MainWindow.label_status.setText('初始化完成')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
self._thread.start()
else:
QMessageBox.critical(self, '错误', 'PLC连接失败!')
self.Ui_MainWindow.label_status.setText('PLC连接失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
else:
QMessageBox.critical(self, '错误!', '串口打开失败!')
self.Ui_MainWindow.label_status.setText('串口打开失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
else:
QMessageBox.critical(self, '错误!', '未发现串口!')
self.Ui_MainWindow.label_status.setText('未发现串口!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
def change_ip_plc(self):
self._thread.__del__()
self._thread.quit()
ip_plc = self.Ui_MainWindow.lineEdit_IP_PLC.text()
self.conf.update_config(product='config', section='plc', name='ip', value=ip_plc)
def test_connect_plc(self):
self._thread.__del__()
self._thread.quit()
ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
self.siemens = SiemensS7Net(SiemensPLCS.S1200, ip_plc)
self.Ui_MainWindow.label_status.setText('正在连接PLC...')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
QApplication.processEvents()
if self.siemens.ConnectServer().IsSuccess:
QMessageBox.about(self, 'PLC连接', 'PLC连接成功!')
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
else:
QMessageBox.about(self, 'PLC连接', 'PLC连接失败!')
self.Ui_MainWindow.label_status.setText('PLC连接失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
def key_reading(self):
key_is_ready = self.siemens.ReadBool('I4.5').Content
if key_is_ready:
if self.key_last_status:
pass
else:
self.key_is_ready = True
else:
self.key_is_ready = False
self.key_last_status = key_is_ready
marble_is_ready = self.siemens.ReadBool('M3.4').Content
if marble_is_ready:
if self.marble_machine_last_status:
pass
else:
self.marble_machine_is_ready = True
else:
self.marble_machine_is_ready = False
self.marble_machine_last_status = marble_is_ready
if self.key_is_ready and self.marble_machine_is_ready:
self.Ui_MainWindow.label_status.setText('正在读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 255, 127);")
QApplication.processEvents()
self.key_is_ready = False
self.marble_machine_is_ready = False
self.key_read()
def manual_key_read(self):
self.Ui_MainWindow.label_status.setText('正在读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 255, 127);")
QApplication.processEvents()
self.key_read()
def start(self):
ip_plc = self.conf.read_config(product='config', section='plc', name='ip')
self.Ui_MainWindow.lineEdit_IP_PLC.setText(ip_plc)
self.Ui_MainWindow.label_status.setText('正在连接PLC...')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
QApplication.processEvents()
if self.siemens.ConnectServer().IsSuccess:
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
self._thread.working = True
self._thread.start()
else:
self.Ui_MainWindow.label_status.setText('PLC连接失败!')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
def pause(self):
self._thread.__del__()
self._thread.quit()
def select_capture_region(self):
self._thread.__del__()
self._thread.quit()
conf = Config()
if keyboard.wait(hotkey='ctrl+alt') == None:
x1, y1 = pag.position()
print(pag.position())
if keyboard.wait(hotkey='ctrl+alt') == None:
x2, y2 = pag.position()
print(pag.position())
pos_new = str((x1, y1, x2, y2))
conf.update_config(product='config', section='image_read_area', name='position', value=pos_new)
def show_capture(self):
self._thread.__del__()
self._thread.quit()
img, project = self.get_project()
self.Ui_MainWindow.lineEdit_product.setText(project)
project = pytesseract.image_to_string(img)
self.Ui_MainWindow.lineEdit_product.setText(project)
img.show()
def key_read(self):
_, self.product = self.get_project()
keyid = self.get_keyid()
res, keycode = self.get_keycode(keyid)
if res:
self.barcode_print(self.product, keycode)
else:
keycode = '----'
self.Ui_MainWindow.label_status.setText('未正确获取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet("background-color: rgb(255, 0, 0);")
self.Ui_MainWindow.lineEdit.setText(keycode)
def get_keyid(self):
keyid = ''
read = self.siemens.Read('DB3.60', 32)
if read.IsSuccess:
keys = read.Content[3::4]
for key in keys:
keyid += str(key)
else:
keyid = 'XXXXXXXX'
return keyid
def get_keycode(self, keyid):
try:
with sqlite3.connect('keyid.db') as conn:
c = conn.cursor()
rows = c.execute("SELECT keycode FROM '%s' WHERE keyid='%s'" % (self.product, keyid)).fetchall()
keycode = rows[0][0]
return True, keycode
except Exception as e:
self.Ui_MainWindow.label_status.setText('get_keycode:%s' % str(e))
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 0, 0);')
QMessageBox.critical(self, '错误', str(e))
return False, '----'
def get_project(self):
conf = Config()
pos = eval(conf.read_config(product='config', section="image_read_area", name="position"))
img = ImageGrab.grab(pos)
project = pytesseract.image_to_string(img)
self.Ui_MainWindow.lineEdit_product.setText(project)
return img, project.upper()
def barcode_print(self, product, keycode):
if product == '280B' or product == '281B' or product == '0开头':
barcode_bytes = keycode.encode("utf-8")
send_data = b'\x1bA\x1bN\x1bR\x1bR\x1bH070\x1bV01282\x1bL0202\x1bS\x1bB103080*' + barcode_bytes + b'*\x1bH0200\x1bV01369\x1bL0202\x1bS' + barcode_bytes + b'\x1bQ1\x1bZ'
self.com.send_data(send_data)
self.Ui_MainWindow.label_status.setText('等待读取钥匙号')
self.Ui_MainWindow.label_status.setStyleSheet('background-color: rgb(255, 255, 127);')
class MyThread(QThread):
signal = Signal()
def __init__(self):
super(MyThread, self).__init__()
self.working = True
def __del__(self):
self.working = False
def run(self):
while self.working:
sleep(0.1)
self.signal.emit()
if __name__ == '__main__':
app = QApplication(sys.argv)
splash = QSplashScreen(QPixmap("resource/images/loading.png"))
splash.showMessage("加载中,请稍后...", Qt.AlignHCenter | Qt.AlignBottom, Qt.cyan)
splash.setFont(QFont("华文楷体", 10, QFont.Bold))
splash.show()
QApplication.processEvents()
window = MyWindow()
window.show()
splash.finish(window)
sys.exit(app.exec_())
| true | true |
1c3d838e42e82c7034ab84145b1cfa1942ba8076 | 3,808 | py | Python | kivy/core/video/video_gstplayer.py | Ian-Foote/kivy | a9303f4ab733e612b17c6f8d0d9d082d4d6de9ba | [
"MIT"
] | 1 | 2016-03-08T19:07:50.000Z | 2016-03-08T19:07:50.000Z | kivy/core/video/video_gstplayer.py | Ian-Foote/kivy | a9303f4ab733e612b17c6f8d0d9d082d4d6de9ba | [
"MIT"
] | null | null | null | kivy/core/video/video_gstplayer.py | Ian-Foote/kivy | a9303f4ab733e612b17c6f8d0d9d082d4d6de9ba | [
"MIT"
] | null | null | null | '''
Video Gstplayer
===============
.. versionadded:: 1.8.0
Implementation of a VideoBase with Kivy :class:`~kivy.lib.gstplayer.GstPlayer`
This player is the prefered player, using Gstreamer 1.0, working on both Python
2 and 3.
'''
from kivy.lib.gstplayer import GstPlayer, get_gst_version
from kivy.graphics.texture import Texture
from kivy.core.video import VideoBase
from kivy.logger import Logger
from kivy.clock import Clock
from kivy.compat import PY2
from threading import Lock
from functools import partial
from os.path import realpath
from weakref import ref
if PY2:
from urllib import pathname2url
else:
from urllib.request import pathname2url
Logger.info('VideoGstplayer: Using Gstreamer {}'.format(
'.'.join(map(str, get_gst_version()))))
def _on_gstplayer_buffer(video, width, height, data):
video = video()
# if we still receive the video but no more player, remove it.
if not video:
return
with video._buffer_lock:
video._buffer = (width, height, data)
def _on_gstplayer_message(mtype, message):
if mtype == 'error':
Logger.error('VideoGstplayer: {}'.format(message))
elif mtype == 'warning':
Logger.warning('VideoGstplayer: {}'.format(message))
elif mtype == 'info':
Logger.info('VideoGstplayer: {}'.format(message))
class VideoGstplayer(VideoBase):
def __init__(self, **kwargs):
self.player = None
self._buffer = None
self._buffer_lock = Lock()
super(VideoGstplayer, self).__init__(**kwargs)
def _on_gst_eos_sync(self):
Clock.schedule_once(self._do_eos, 0)
def load(self):
Logger.debug('VideoGstplayer: Load <{}>'.format(self._filename))
uri = self._get_uri()
wk_self = ref(self)
self.player_callback = partial(_on_gstplayer_buffer, wk_self)
self.player = GstPlayer(uri, self.player_callback,
self._on_gst_eos_sync, _on_gstplayer_message)
self.player.load()
def unload(self):
if self.player:
self.player.unload()
self.player = None
with self._buffer_lock:
self._buffer = None
self._texture = None
def stop(self):
super(VideoGstplayer, self).stop()
self.player.stop()
def pause(self):
super(VideoGstplayer, self).pause()
self.player.pause()
def play(self):
super(VideoGstplayer, self).play()
self.player.set_volume(self.volume)
self.player.play()
def seek(self, percent):
self.player.seek(percent)
def _get_position(self):
return self.player.get_position()
def _get_duration(self):
return self.player.get_duration()
def _get_volume(self):
return self._volume
def _set_volume(self, value):
self._volume = value
if self.player:
self.player.set_volume(self._volume)
def _update(self, dt):
buf = None
with self._buffer_lock:
buf = self._buffer
self._buffer = None
if buf is not None:
self._update_texture(buf)
self.dispatch('on_frame')
def _update_texture(self, buf):
width, height, data = buf
# texture is not allocated yet, create it first
if not self._texture:
self._texture = Texture.create(size=(width, height),
colorfmt='rgb')
self._texture.flip_vertical()
self.dispatch('on_load')
self._texture.blit_buffer(data, size=(width, height), colorfmt='rgb')
def _get_uri(self):
uri = self.filename
if not uri:
return
if not '://' in uri:
uri = 'file:' + pathname2url(realpath(uri))
return uri
| 28 | 79 | 0.625263 |
from kivy.lib.gstplayer import GstPlayer, get_gst_version
from kivy.graphics.texture import Texture
from kivy.core.video import VideoBase
from kivy.logger import Logger
from kivy.clock import Clock
from kivy.compat import PY2
from threading import Lock
from functools import partial
from os.path import realpath
from weakref import ref
if PY2:
from urllib import pathname2url
else:
from urllib.request import pathname2url
Logger.info('VideoGstplayer: Using Gstreamer {}'.format(
'.'.join(map(str, get_gst_version()))))
def _on_gstplayer_buffer(video, width, height, data):
video = video()
if not video:
return
with video._buffer_lock:
video._buffer = (width, height, data)
def _on_gstplayer_message(mtype, message):
if mtype == 'error':
Logger.error('VideoGstplayer: {}'.format(message))
elif mtype == 'warning':
Logger.warning('VideoGstplayer: {}'.format(message))
elif mtype == 'info':
Logger.info('VideoGstplayer: {}'.format(message))
class VideoGstplayer(VideoBase):
def __init__(self, **kwargs):
self.player = None
self._buffer = None
self._buffer_lock = Lock()
super(VideoGstplayer, self).__init__(**kwargs)
def _on_gst_eos_sync(self):
Clock.schedule_once(self._do_eos, 0)
def load(self):
Logger.debug('VideoGstplayer: Load <{}>'.format(self._filename))
uri = self._get_uri()
wk_self = ref(self)
self.player_callback = partial(_on_gstplayer_buffer, wk_self)
self.player = GstPlayer(uri, self.player_callback,
self._on_gst_eos_sync, _on_gstplayer_message)
self.player.load()
def unload(self):
if self.player:
self.player.unload()
self.player = None
with self._buffer_lock:
self._buffer = None
self._texture = None
def stop(self):
super(VideoGstplayer, self).stop()
self.player.stop()
def pause(self):
super(VideoGstplayer, self).pause()
self.player.pause()
def play(self):
super(VideoGstplayer, self).play()
self.player.set_volume(self.volume)
self.player.play()
def seek(self, percent):
self.player.seek(percent)
def _get_position(self):
return self.player.get_position()
def _get_duration(self):
return self.player.get_duration()
def _get_volume(self):
return self._volume
def _set_volume(self, value):
self._volume = value
if self.player:
self.player.set_volume(self._volume)
def _update(self, dt):
buf = None
with self._buffer_lock:
buf = self._buffer
self._buffer = None
if buf is not None:
self._update_texture(buf)
self.dispatch('on_frame')
def _update_texture(self, buf):
width, height, data = buf
if not self._texture:
self._texture = Texture.create(size=(width, height),
colorfmt='rgb')
self._texture.flip_vertical()
self.dispatch('on_load')
self._texture.blit_buffer(data, size=(width, height), colorfmt='rgb')
def _get_uri(self):
uri = self.filename
if not uri:
return
if not '://' in uri:
uri = 'file:' + pathname2url(realpath(uri))
return uri
| true | true |
1c3d843715613e680fdfa4abbc33e8da7ab514cd | 457 | py | Python | multidomain_sentiment/models/__init__.py | koreyou/multidomain-sentiment | 766afdb839483c38a141c8d1d60c6faa75bd2684 | [
"CC0-1.0"
] | null | null | null | multidomain_sentiment/models/__init__.py | koreyou/multidomain-sentiment | 766afdb839483c38a141c8d1d60c6faa75bd2684 | [
"CC0-1.0"
] | null | null | null | multidomain_sentiment/models/__init__.py | koreyou/multidomain-sentiment | 766afdb839483c38a141c8d1d60c6faa75bd2684 | [
"CC0-1.0"
] | null | null | null |
from multidomain_sentiment.models.classifier import MultiDomainClassifier
from multidomain_sentiment.models.cnn import CNNEncoder
from multidomain_sentiment.models.cnn import MultiDomainCNNPredictor
from multidomain_sentiment.models.cnn import create_cnn_predictor
from multidomain_sentiment.models.rnn import MultiDomainRNNPredictor
from multidomain_sentiment.models.rnn import RNNEncoder
from multidomain_sentiment.models.rnn import create_rnn_predictor
| 50.777778 | 73 | 0.905908 |
from multidomain_sentiment.models.classifier import MultiDomainClassifier
from multidomain_sentiment.models.cnn import CNNEncoder
from multidomain_sentiment.models.cnn import MultiDomainCNNPredictor
from multidomain_sentiment.models.cnn import create_cnn_predictor
from multidomain_sentiment.models.rnn import MultiDomainRNNPredictor
from multidomain_sentiment.models.rnn import RNNEncoder
from multidomain_sentiment.models.rnn import create_rnn_predictor
| true | true |
1c3d849d07d63b0c52cc0119b718f71185620b0f | 2,029 | py | Python | middleware/models/service.py | protagora/device_network_protocol | 2f514c786d0c1268c5d97b8d76d57f35e54df5f2 | [
"MIT"
] | null | null | null | middleware/models/service.py | protagora/device_network_protocol | 2f514c786d0c1268c5d97b8d76d57f35e54df5f2 | [
"MIT"
] | null | null | null | middleware/models/service.py | protagora/device_network_protocol | 2f514c786d0c1268c5d97b8d76d57f35e54df5f2 | [
"MIT"
] | null | null | null | from dna.middleware.models.manager import *
from dna.middleware.endpoint.server import ManagedHandler
from dna.middleware.endpoint.server import Server
class Service(object):
def __init__(self, host=None, port=None):
self.__manager = None
self.__server = None
self.__host = None
self.__port = None
if host is not None:
self.host = host
if port is not None:
self.port = port
def run(self):
def print_handler(payload=None):
print(payload)
return True
# # # managed handler
# @todo: replace using local managed handler with provided handler
# @todo: if no default handler is provided as a parameter to call
demo_handler = ManagedHandler()
demo_manager = Manager()
demo_manager.add(entity='resource', _id=257, handler=print_handler)
demo_server = Server(host=self.host, port=self.port)
demo_server.run(handler=demo_handler, manager=demo_manager)
@property
def manager(self):
return self.__manager
@manager.setter
def manager(self, manager):
try:
assert isinstance(manager, Manager)
except (BaseException, ):
raise Exception("Manager instance expected, {} encountered".format(type(manager)))
self.__manager = manager
manager.service = self
@property
def server(self):
return self.__server
@server.setter
def server(self, server):
try:
assert isinstance(server, Server)
except (BaseException, ):
raise Exception("Server instance expected, {} encountered".format(type(server)))
self.__server = server
server.service = self
@property
def host(self):
return self.__host
@host.setter
def host(self, host):
self.__host = host
@property
def port(self):
return self.__port
@port.setter
def port(self, port):
self.__port = port
| 27.053333 | 94 | 0.620996 | from dna.middleware.models.manager import *
from dna.middleware.endpoint.server import ManagedHandler
from dna.middleware.endpoint.server import Server
class Service(object):
def __init__(self, host=None, port=None):
self.__manager = None
self.__server = None
self.__host = None
self.__port = None
if host is not None:
self.host = host
if port is not None:
self.port = port
def run(self):
def print_handler(payload=None):
print(payload)
return True
dler = ManagedHandler()
demo_manager = Manager()
demo_manager.add(entity='resource', _id=257, handler=print_handler)
demo_server = Server(host=self.host, port=self.port)
demo_server.run(handler=demo_handler, manager=demo_manager)
@property
def manager(self):
return self.__manager
@manager.setter
def manager(self, manager):
try:
assert isinstance(manager, Manager)
except (BaseException, ):
raise Exception("Manager instance expected, {} encountered".format(type(manager)))
self.__manager = manager
manager.service = self
@property
def server(self):
return self.__server
@server.setter
def server(self, server):
try:
assert isinstance(server, Server)
except (BaseException, ):
raise Exception("Server instance expected, {} encountered".format(type(server)))
self.__server = server
server.service = self
@property
def host(self):
return self.__host
@host.setter
def host(self, host):
self.__host = host
@property
def port(self):
return self.__port
@port.setter
def port(self, port):
self.__port = port
| true | true |
1c3d851cd9c71be09e0a6c57c8a32fceaf1783d2 | 12,793 | py | Python | structuretimers/admin.py | buahaha/aa-structuretimers | fbc2752d442795c0803aa419a58ebd1cfd33c66d | [
"MIT"
] | null | null | null | structuretimers/admin.py | buahaha/aa-structuretimers | fbc2752d442795c0803aa419a58ebd1cfd33c66d | [
"MIT"
] | null | null | null | structuretimers/admin.py | buahaha/aa-structuretimers | fbc2752d442795c0803aa419a58ebd1cfd33c66d | [
"MIT"
] | null | null | null | from typing import Any, Dict, Optional
from django import forms
from django.contrib import admin
from django.core.exceptions import ValidationError
from django.db.models.functions import Lower
from django.utils.safestring import mark_safe
from django.utils.timezone import now
from allianceauth.eveonline.models import EveAllianceInfo, EveCorporationInfo
from . import tasks
from .models import (
DiscordWebhook,
NotificationRule,
ScheduledNotification,
StagingSystem,
Timer,
)
@admin.register(DiscordWebhook)
class DiscordWebhookAdmin(admin.ModelAdmin):
    """Admin page for Discord webhooks incl. queue management actions."""

    list_display = ("name", "is_enabled", "_messages_in_queue")
    list_filter = ("is_enabled",)
    ordering = ("name",)

    def _messages_in_queue(self, obj):
        """Number of messages currently waiting in this webhook's queue."""
        return obj.queue_size()

    actions = ["send_test_message", "purge_messages"]

    def purge_messages(self, request, queryset):
        """Admin action: delete all queued messages of the selected webhooks."""
        webhook_count = 0
        # Bugfix naming: this counts purged webhook messages, not killmails
        # (variable was a copy-paste leftover from another app).
        messages_deleted = 0
        for webhook in queryset:
            messages_deleted += webhook.clear_queue()
            webhook_count += 1
        self.message_user(
            request,
            f"Purged queued messages for {webhook_count} webhooks, "
            f"deleting a total of {messages_deleted} messages.",
        )

    purge_messages.short_description = "Purge queued messages of selected webhooks"

    def send_test_message(self, request, queryset):
        """Admin action: asynchronously send a test message to each selected
        webhook. The requesting user is notified about the result.
        """
        webhook_count = 0
        for webhook in queryset:
            tasks.send_test_message_to_webhook.delay(webhook.pk, request.user.pk)
            webhook_count += 1
        self.message_user(
            request,
            f"Initiated sending of {webhook_count} test messages to "
            f"selected webhooks. You will receive a notification with the result.",
        )

    send_test_message.short_description = "Send test message to selected webhooks"
def field_nice_display(name: str) -> str:
    """Turn a snake_case field name into a human friendly label."""
    words = name.split("_")
    return " ".join(words).capitalize()
class NotificationRuleAdminForm(forms.ModelForm):
    """ModelForm for notification rules with cross-field consistency checks."""

    def clean(self) -> Dict[str, Any]:
        """Validate that no option appears in both a require and its matching
        exclude field, and that a scheduled time is set when the trigger
        requires one.
        """
        cleaned_data = super().clean()

        def type_display(option):
            return NotificationRule.get_multiselect_display(
                option, Timer.Type.choices
            )

        def objective_display(option):
            return NotificationRule.get_multiselect_display(
                option, Timer.Objective.choices
            )

        def visibility_display(option):
            return NotificationRule.get_multiselect_display(
                option, Timer.Visibility.choices
            )

        # (require field, exclude field, display function or None for default)
        pair_checks = (
            ("require_timer_types", "exclude_timer_types", type_display),
            ("require_objectives", "exclude_objectives", objective_display),
            ("require_visibility", "exclude_visibility", visibility_display),
            ("require_corporations", "exclude_corporations", None),
            ("require_alliances", "exclude_alliances", None),
        )
        for require_field, exclude_field, display_func in pair_checks:
            if display_func is None:
                self._validate_not_same_options_chosen(
                    cleaned_data, require_field, exclude_field
                )
            else:
                self._validate_not_same_options_chosen(
                    cleaned_data, require_field, exclude_field, display_func
                )

        trigger = cleaned_data["trigger"]
        if (
            trigger == NotificationRule.Trigger.SCHEDULED_TIME_REACHED
            and cleaned_data["scheduled_time"] is None
        ):
            raise ValidationError(
                {
                    "scheduled_time": (
                        "You need to specify scheduled time for "
                        "the `Scheduled time reached` trigger"
                    )
                }
            )
        # A scheduled time makes no sense for the "new timer" trigger.
        if trigger == NotificationRule.Trigger.NEW_TIMER_CREATED:
            cleaned_data["scheduled_time"] = None
        return cleaned_data

    @staticmethod
    def _validate_not_same_options_chosen(
        cleaned_data, field_name_1, field_name_2, display_func=lambda x: x
    ) -> None:
        """Raise ValidationError if the two multiselect fields share options."""
        overlap = set(cleaned_data[field_name_1]) & set(cleaned_data[field_name_2])
        if not overlap:
            return
        overlap_text = ", ".join(str(display_func(option)) for option in overlap)
        raise ValidationError(
            f"Can not choose same options for {field_nice_display(field_name_1)} "
            f"& {field_nice_display(field_name_2)}: {overlap_text}"
        )
@admin.register(NotificationRule)
class NotificationRuleAdmin(admin.ModelAdmin):
form = NotificationRuleAdminForm
list_display = (
"id",
"is_enabled",
"trigger",
"_time",
"webhook",
"ping_type",
"_timer_clauses",
)
list_filter = ("is_enabled", "trigger")
ordering = ("id",)
def _time(self, obj) -> Optional[str]:
if obj.scheduled_time is None:
return None
else:
return obj.get_scheduled_time_display()
_time.admin_order_field = "scheduled time"
def _timer_clauses(self, obj) -> list:
clauses = list()
for field, func, choices in [
("require_timer_types", self._add_to_clauses_1, Timer.Type.choices),
("exclude_timer_types", self._add_to_clauses_1, Timer.Type.choices),
("require_objectives", self._add_to_clauses_1, Timer.Objective.choices),
("exclude_objectives", self._add_to_clauses_1, Timer.Objective.choices),
("require_visibility", self._add_to_clauses_1, Timer.Visibility.choices),
("exclude_visibility", self._add_to_clauses_1, Timer.Visibility.choices),
("require_corporations", self._add_to_clauses_2, None),
("exclude_corporations", self._add_to_clauses_2, None),
("require_alliances", self._add_to_clauses_2, None),
("exclude_alliances", self._add_to_clauses_2, None),
("is_important", self._add_to_clauses_3, None),
("is_opsec", self._add_to_clauses_3, None),
]:
func(clauses, obj, field, choices)
return mark_safe("<br>".join(clauses)) if clauses else None
def _add_to_clauses_1(self, clauses, obj, field, choices):
if getattr(obj, field):
text = ", ".join(
map(
str,
[
NotificationRule.get_multiselect_display(x, choices)
for x in getattr(obj, field)
],
)
)
self._append_field_to_clauses(clauses, field, text)
def _add_to_clauses_2(self, clauses, obj, field, choices=None):
if getattr(obj, field).count() > 0:
text = ", ".join(map(str, getattr(obj, field).all()))
self._append_field_to_clauses(clauses, field, text)
def _add_to_clauses_3(self, clauses, obj, field, choices=None):
if getattr(obj, field) != NotificationRule.Clause.ANY:
text = getattr(obj, f"get_{field}_display")()
self._append_field_to_clauses(clauses, field, text)
def _append_field_to_clauses(self, clauses, field, text):
clauses.append(f"{field_nice_display(field)} = {text}")
actions = ["enable_rule", "disable_rule"]
    def enable_rule(self, request, queryset):
        """Admin action: enable all selected notification rules."""
        queryset.update(is_enabled=True)
        self.message_user(request, f"Enabled {queryset.count()} notification rules.")
    enable_rule.short_description = "Enable selected notification rules"
    def disable_rule(self, request, queryset):
        """Admin action: disable all selected notification rules."""
        queryset.update(is_enabled=False)
        self.message_user(request, f"Disabled {queryset.count()} notification rules.")
    disable_rule.short_description = "Disable selected notification rules"
filter_horizontal = (
"require_alliances",
"exclude_alliances",
"require_corporations",
"exclude_corporations",
)
fieldsets = (
(
None,
{
"fields": (
"trigger",
"scheduled_time",
"webhook",
"ping_type",
"is_enabled",
)
},
),
(
"Timer clauses",
{
"classes": ("collapse",),
"fields": (
"require_timer_types",
"exclude_timer_types",
"require_objectives",
"exclude_objectives",
"require_corporations",
"exclude_corporations",
"require_alliances",
"exclude_alliances",
"require_visibility",
"exclude_visibility",
"is_important",
"is_opsec",
),
},
),
)
    def formfield_for_manytomany(self, db_field, request, **kwargs):
        """Sort the alliance/corporation M2M pickers alphabetically (case-insensitive)."""
        if db_field.name in {"require_alliances", "exclude_alliances"}:
            kwargs["queryset"] = EveAllianceInfo.objects.order_by(
                Lower("alliance_name")
            )
        elif db_field.name in {"require_corporations", "exclude_corporations"}:
            kwargs["queryset"] = EveCorporationInfo.objects.order_by(
                Lower("corporation_name")
            )
        return super().formfield_for_manytomany(db_field, request, **kwargs)
@admin.register(ScheduledNotification)
class ScheduledNotificationAdmin(admin.ModelAdmin):
    """Read-only admin view of upcoming (future) scheduled notifications."""
    list_select_related = ("timer", "notification_rule")
    list_display = ("notification_date", "timer", "notification_rule", "celery_task_id")
    list_filter = ("notification_rule",)
    def get_queryset(self, request):
        """Show only notifications that are still in the future, soonest first."""
        qs = super().get_queryset(request)
        return qs.filter(notification_date__gt=now()).order_by("notification_date")
    def has_add_permission(self, request):
        # Entries are created by the app's tasks, never by hand.
        return False
    def has_change_permission(self, request, obj=None):
        # Read-only: editing could desync the stored celery task.
        return False
# @admin.register(Timer)
# class TimerAdmin(admin.ModelAdmin):
# list_select_related = ("eve_solar_system", "structure_type", "user")
# list_filter = (
# "timer_type",
# ("eve_solar_system", admin.RelatedOnlyFieldListFilter),
# ("structure_type", admin.RelatedOnlyFieldListFilter),
# "objective",
# "owner_name",
# ("user", admin.RelatedOnlyFieldListFilter),
# "is_opsec",
# )
# ordering = ("-date",)
# autocomplete_fields = ["eve_solar_system", "structure_type"]
# """
# def _scheduled_notifications(self, obj):
# return sorted(
# [
# x["notification_date"].strftime(DATETIME_FORMAT)
# for x in ScheduledNotification.objects.filter(
# timer=obj, notification_date__gt=now()
# ).values("notification_date", "notification_rule_id")
# ]
# )
# """
# actions = ["send_test_notification"]
# def send_test_notification(self, request, queryset):
# for timer in queryset:
# for webhook in DiscordWebhook.objects.filter(is_enabled=True):
# timer.send_notification(
# webhook=webhook,
# content=f"Test notification sent by **{request.user}**",
# )
# self.message_user(
# request, f"Initiated sending test notification for timer: {timer}"
# )
# for webhook in DiscordWebhook.objects.filter(is_enabled=True):
# tasks.send_messages_for_webhook.delay(webhook.pk)
# send_test_notification.short_description = (
# "Send test notification for selected timers to all enabled webhooks"
# )
@admin.register(StagingSystem)
class StagingSystemAdmin(admin.ModelAdmin):
    """Admin page for staging systems, incl. an action to recalc their timers."""
    list_display = ("eve_solar_system", "_region", "is_main")
    list_select_related = (
        "eve_solar_system",
        "eve_solar_system__eve_constellation__eve_region",
    )
    autocomplete_fields = ["eve_solar_system"]
    ordering = ("eve_solar_system__name",)
    def _region(self, obj) -> str:
        """List column: name of the region the staging system sits in."""
        return obj.eve_solar_system.eve_constellation.eve_region.name
    _region.admin_order_field = "eve_solar_system__eve_constellation__eve_region"
    actions = ["_recalc_timers"]
    def _recalc_timers(self, request, queryset):
        """Admin action: queue a forced timer recalculation per selected system."""
        for obj in queryset:
            tasks.calc_staging_system.delay(obj.pk, force_update=True)
            self.message_user(
                request, f"{obj}: Started to update timers for staging system..."
            )
    _recalc_timers.short_description = "Recalc timers for selected staging system"
| 34.575676 | 88 | 0.60197 | from typing import Any, Dict, Optional
from django import forms
from django.contrib import admin
from django.core.exceptions import ValidationError
from django.db.models.functions import Lower
from django.utils.safestring import mark_safe
from django.utils.timezone import now
from allianceauth.eveonline.models import EveAllianceInfo, EveCorporationInfo
from . import tasks
from .models import (
DiscordWebhook,
NotificationRule,
ScheduledNotification,
StagingSystem,
Timer,
)
@admin.register(DiscordWebhook)
class DiscordWebhookAdmin(admin.ModelAdmin):
    """Admin page for Discord webhooks: queue inspection, purge and test actions."""

    list_display = ("name", "is_enabled", "_messages_in_queue")
    list_filter = ("is_enabled",)
    ordering = ("name",)
    actions = ["send_test_message", "purge_messages"]

    def _messages_in_queue(self, obj):
        """List column: number of messages currently queued for this webhook."""
        return obj.queue_size()

    def purge_messages(self, request, queryset):
        """Admin action: drop every queued message of the selected webhooks."""
        webhook_count = 0
        message_count = 0
        for hook in queryset:
            message_count += hook.clear_queue()
            webhook_count += 1
        self.message_user(
            request,
            f"Purged queued messages for {webhook_count} webhooks, "
            f"deleting a total of {message_count} messages.",
        )

    purge_messages.short_description = "Purge queued messages of selected webhooks"

    def send_test_message(self, request, queryset):
        """Admin action: queue one test-message task per selected webhook."""
        webhook_count = 0
        for hook in queryset:
            tasks.send_test_message_to_webhook.delay(hook.pk, request.user.pk)
            webhook_count += 1
        self.message_user(
            request,
            f"Initiated sending of {webhook_count} test messages to "
            f"selected webhooks. You will receive a notification with the result.",
        )

    send_test_message.short_description = "Send test message to selected webhooks"
def field_nice_display(name: str) -> str:
    """Turn a snake_case field name into a sentence-style label.

    e.g. "require_timer_types" -> "Require timer types".
    """
    words = name.split("_")
    return " ".join(words).capitalize()
class NotificationRuleAdminForm(forms.ModelForm):
    """Admin form for NotificationRule with cross-field validation.

    Rejects rules whose require_* and exclude_* filters select the same
    option, and enforces trigger/scheduled_time consistency.
    """
    def clean(self) -> Dict[str, Any]:
        """Validate require/exclude pairs and the trigger configuration."""
        cleaned_data = super().clean()
        self._validate_not_same_options_chosen(
            cleaned_data,
            "require_timer_types",
            "exclude_timer_types",
            lambda x: NotificationRule.get_multiselect_display(x, Timer.Type.choices),
        )
        self._validate_not_same_options_chosen(
            cleaned_data,
            "require_objectives",
            "exclude_objectives",
            lambda x: NotificationRule.get_multiselect_display(
                x, Timer.Objective.choices
            ),
        )
        self._validate_not_same_options_chosen(
            cleaned_data,
            "require_visibility",
            "exclude_visibility",
            lambda x: NotificationRule.get_multiselect_display(
                x, Timer.Visibility.choices
            ),
        )
        # M2M pairs: the related objects str() is already a good label,
        # so no display_func is needed.
        self._validate_not_same_options_chosen(
            cleaned_data,
            "require_corporations",
            "exclude_corporations",
        )
        self._validate_not_same_options_chosen(
            cleaned_data,
            "require_alliances",
            "exclude_alliances",
        )
        # A time-based trigger is meaningless without a scheduled time.
        if (
            cleaned_data["trigger"] == NotificationRule.Trigger.SCHEDULED_TIME_REACHED
            and cleaned_data["scheduled_time"] is None
        ):
            raise ValidationError(
                {
                    "scheduled_time": (
                        "You need to specify scheduled time for "
                        "the `Scheduled time reached` trigger"
                    )
                }
            )
        # Creation-triggered rules ignore scheduled_time; normalize it away.
        if cleaned_data["trigger"] == NotificationRule.Trigger.NEW_TIMER_CREATED:
            cleaned_data["scheduled_time"] = None
        return cleaned_data
    @staticmethod
    def _validate_not_same_options_chosen(
        cleaned_data, field_name_1, field_name_2, display_func=lambda x: x
    ) -> None:
        """Raise ValidationError if the two fields share any selected option.

        display_func maps a raw option value to its human-readable label
        for the error message.
        """
        same_options = set(cleaned_data[field_name_1]).intersection(
            set(cleaned_data[field_name_2])
        )
        if same_options:
            same_options_text = ", ".join(
                map(
                    str,
                    [display_func(x) for x in same_options],
                )
            )
            raise ValidationError(
                f"Can not choose same options for {field_nice_display(field_name_1)} "
                f"& {field_nice_display(field_name_2)}: {same_options_text}"
            )
@admin.register(NotificationRule)
class NotificationRuleAdmin(admin.ModelAdmin):
    """Admin page for notification rules with a summary of each rule's clauses."""

    form = NotificationRuleAdminForm
    list_display = (
        "id",
        "is_enabled",
        "trigger",
        "_time",
        "webhook",
        "ping_type",
        "_timer_clauses",
    )
    list_filter = ("is_enabled", "trigger")
    ordering = ("id",)

    def _time(self, obj) -> Optional[str]:
        """List column: human-readable scheduled time of the rule, or None."""
        if obj.scheduled_time is None:
            return None
        return obj.get_scheduled_time_display()

    # BUG FIX: must be the real model field name ("scheduled_time"); the
    # previous value "scheduled time" made Django raise FieldError on sort.
    _time.admin_order_field = "scheduled_time"

    def _timer_clauses(self, obj) -> Optional[str]:
        """List column: HTML summary (one line per clause) of the rule's filters."""
        clauses = list()
        # (field name, formatter, choices for label lookup) triples; the
        # formatters append to `clauses` in place.
        for field, func, choices in [
            ("require_timer_types", self._add_to_clauses_1, Timer.Type.choices),
            ("exclude_timer_types", self._add_to_clauses_1, Timer.Type.choices),
            ("require_objectives", self._add_to_clauses_1, Timer.Objective.choices),
            ("exclude_objectives", self._add_to_clauses_1, Timer.Objective.choices),
            ("require_visibility", self._add_to_clauses_1, Timer.Visibility.choices),
            ("exclude_visibility", self._add_to_clauses_1, Timer.Visibility.choices),
            ("require_corporations", self._add_to_clauses_2, None),
            ("exclude_corporations", self._add_to_clauses_2, None),
            ("require_alliances", self._add_to_clauses_2, None),
            ("exclude_alliances", self._add_to_clauses_2, None),
            ("is_important", self._add_to_clauses_3, None),
            ("is_opsec", self._add_to_clauses_3, None),
        ]:
            func(clauses, obj, field, choices)
        # mark_safe: clause text comes from model fields / choice labels.
        return mark_safe("<br>".join(clauses)) if clauses else None

    def _add_to_clauses_1(self, clauses, obj, field, choices):
        """Append a clause for a multi-select choice field, if any value is set."""
        if getattr(obj, field):
            text = ", ".join(
                str(NotificationRule.get_multiselect_display(x, choices))
                for x in getattr(obj, field)
            )
            self._append_field_to_clauses(clauses, field, text)

    def _add_to_clauses_2(self, clauses, obj, field, choices=None):
        """Append a clause for an M2M relation field, if it has any entries."""
        if getattr(obj, field).count() > 0:
            text = ", ".join(map(str, getattr(obj, field).all()))
            self._append_field_to_clauses(clauses, field, text)

    def _add_to_clauses_3(self, clauses, obj, field, choices=None):
        """Append a clause for a tri-state (Clause) field unless it is ANY."""
        if getattr(obj, field) != NotificationRule.Clause.ANY:
            text = getattr(obj, f"get_{field}_display")()
            self._append_field_to_clauses(clauses, field, text)

    def _append_field_to_clauses(self, clauses, field, text):
        """Append one formatted "Field name = value(s)" clause to `clauses`."""
        clauses.append(f"{field_nice_display(field)} = {text}")

    actions = ["enable_rule", "disable_rule"]

    def enable_rule(self, request, queryset):
        """Admin action: enable all selected notification rules."""
        queryset.update(is_enabled=True)
        self.message_user(request, f"Enabled {queryset.count()} notification rules.")

    enable_rule.short_description = "Enable selected notification rules"

    def disable_rule(self, request, queryset):
        """Admin action: disable all selected notification rules."""
        queryset.update(is_enabled=False)
        self.message_user(request, f"Disabled {queryset.count()} notification rules.")

    disable_rule.short_description = "Disable selected notification rules"

    filter_horizontal = (
        "require_alliances",
        "exclude_alliances",
        "require_corporations",
        "exclude_corporations",
    )
    fieldsets = (
        (
            None,
            {
                "fields": (
                    "trigger",
                    "scheduled_time",
                    "webhook",
                    "ping_type",
                    "is_enabled",
                )
            },
        ),
        (
            "Timer clauses",
            {
                "classes": ("collapse",),
                "fields": (
                    "require_timer_types",
                    "exclude_timer_types",
                    "require_objectives",
                    "exclude_objectives",
                    "require_corporations",
                    "exclude_corporations",
                    "require_alliances",
                    "exclude_alliances",
                    "require_visibility",
                    "exclude_visibility",
                    "is_important",
                    "is_opsec",
                ),
            },
        ),
    )

    def formfield_for_manytomany(self, db_field, request, **kwargs):
        """Sort the alliance/corporation M2M pickers alphabetically (case-insensitive)."""
        if db_field.name in {"require_alliances", "exclude_alliances"}:
            kwargs["queryset"] = EveAllianceInfo.objects.order_by(
                Lower("alliance_name")
            )
        elif db_field.name in {"require_corporations", "exclude_corporations"}:
            kwargs["queryset"] = EveCorporationInfo.objects.order_by(
                Lower("corporation_name")
            )
        return super().formfield_for_manytomany(db_field, request, **kwargs)
@admin.register(ScheduledNotification)
class ScheduledNotificationAdmin(admin.ModelAdmin):
    """Read-only admin view of upcoming (future) scheduled notifications."""
    list_select_related = ("timer", "notification_rule")
    list_display = ("notification_date", "timer", "notification_rule", "celery_task_id")
    list_filter = ("notification_rule",)
    def get_queryset(self, request):
        """Show only notifications that are still in the future, soonest first."""
        qs = super().get_queryset(request)
        return qs.filter(notification_date__gt=now()).order_by("notification_date")
    def has_add_permission(self, request):
        # Entries are created by the app's tasks, never by hand.
        return False
    def has_change_permission(self, request, obj=None):
        # Read-only: editing could desync the stored celery task.
        return False
# def _scheduled_notifications(self, obj):
# return sorted(
# [
# x["notification_date"].strftime(DATETIME_FORMAT)
# for x in ScheduledNotification.objects.filter(
# timer=obj, notification_date__gt=now()
# ).values("notification_date", "notification_rule_id")
# ]
# )
# """
@admin.register(StagingSystem)
class StagingSystemAdmin(admin.ModelAdmin):
    """Admin page for staging systems, incl. an action to recalc their timers."""
    list_display = ("eve_solar_system", "_region", "is_main")
    list_select_related = (
        "eve_solar_system",
        "eve_solar_system__eve_constellation__eve_region",
    )
    autocomplete_fields = ["eve_solar_system"]
    ordering = ("eve_solar_system__name",)
    def _region(self, obj) -> str:
        """List column: name of the region the staging system sits in."""
        return obj.eve_solar_system.eve_constellation.eve_region.name
    _region.admin_order_field = "eve_solar_system__eve_constellation__eve_region"
    actions = ["_recalc_timers"]
    def _recalc_timers(self, request, queryset):
        """Admin action: queue a forced timer recalculation per selected system."""
        for obj in queryset:
            tasks.calc_staging_system.delay(obj.pk, force_update=True)
            self.message_user(
                request, f"{obj}: Started to update timers for staging system..."
            )
    _recalc_timers.short_description = "Recalc timers for selected staging system"
| true | true |
1c3d8549aff24f6a3c86faef3f9cac68eda59f26 | 931 | py | Python | setup.py | rajbhutoria/homework-csci046 | 97090aef7311b2669a626d98e7f8a5210d0f57a0 | [
"MIT"
] | null | null | null | setup.py | rajbhutoria/homework-csci046 | 97090aef7311b2669a626d98e7f8a5210d0f57a0 | [
"MIT"
] | null | null | null | setup.py | rajbhutoria/homework-csci046 | 97090aef7311b2669a626d98e7f8a5210d0f57a0 | [
"MIT"
] | 1 | 2021-04-19T08:05:56.000Z | 2021-04-19T08:05:56.000Z | import pathlib
from setuptools import setup, find_packages

# The directory containing this file
HERE = pathlib.Path(__file__).parent

# The text of the README file; explicit encoding so the build does not
# depend on the platform's locale default.
README = (HERE / "README.md").read_text(encoding="utf-8")

# This call to setup() does all the work
setup(
    name="cmc_csci046_Bhutoria",
    version="1.0.0",
    description="A collection of data structures and classes by R. Bhutoria in CMC CSCI046 SP21",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/rajbhutoria/homework-csci046",
    author="Raj Bhutoria",
    author_email="rbhutoria22@cmc.edu",
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
    ],
    # BUG FIX: ("tests") is just the string "tests", not a tuple -
    # find_packages then treats each character as a pattern. The trailing
    # comma makes it a proper one-element tuple.
    packages=find_packages(exclude=("tests",)),
    include_package_data=True,
    install_requires=["pytest", "hypothesis"],
)
| 31.033333 | 97 | 0.692803 | import pathlib
from setuptools import setup, find_packages

# Directory containing this file, used to locate the README.
HERE = pathlib.Path(__file__).parent
# Explicit encoding so the build does not depend on the locale default.
README = (HERE / "README.md").read_text(encoding="utf-8")

setup(
    name="cmc_csci046_Bhutoria",
    version="1.0.0",
    description="A collection of data structures and classes by R. Bhutoria in CMC CSCI046 SP21",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/rajbhutoria/homework-csci046",
    author="Raj Bhutoria",
    author_email="rbhutoria22@cmc.edu",
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
    ],
    # BUG FIX: ("tests") is just the string "tests", not a tuple - the
    # trailing comma makes it a proper one-element exclude tuple.
    packages=find_packages(exclude=("tests",)),
    include_package_data=True,
    install_requires=["pytest", "hypothesis"],
)
| true | true |
1c3d859ae28e80d0d58e9dbc5ecfff154142ab2d | 7,491 | py | Python | tests/unit/network/GenericNetworkTest.py | halotudio/openPNM-copy2 | d400ec65e9421256a531f6d22a38255b002d5dcb | [
"MIT"
] | 1 | 2021-05-01T11:10:43.000Z | 2021-05-01T11:10:43.000Z | tests/unit/network/GenericNetworkTest.py | halotudio/openPNM-copy2 | d400ec65e9421256a531f6d22a38255b002d5dcb | [
"MIT"
] | 2 | 2020-06-26T19:58:23.000Z | 2021-12-14T07:16:41.000Z | tests/unit/network/GenericNetworkTest.py | halotudio/openPNM-copy2 | d400ec65e9421256a531f6d22a38255b002d5dcb | [
"MIT"
] | null | null | null | import numpy as np
import openpnm as op
class GenericNetworkTest:
def setup_class(self):
self.net = op.network.Cubic(shape=[10, 10, 10])
def teardown_class(self):
ws = op.Workspace()
ws.clear()
def test_find_connected_pores_numeric_not_flattend(self):
a = self.net.find_connected_pores(throats=[0, 1])
assert np.all(a.flatten() == [0, 1, 1, 2])
def test_find_connected_pores_numeric_flattend(self):
a = self.net.find_connected_pores(throats=[0, 1], flatten=True)
assert np.all(a == [0, 1, 2])
def test_find_connected_pores_boolean_flattend(self):
Tind = np.zeros((self.net.Nt,), dtype=bool)
Tind[[0, 1]] = True
a = self.net.find_connected_pores(throats=Tind, flatten=True)
assert np.all(a == [0, 1, 2])
def test_find_connected_pores_empty_flattend(self):
a = self.net.find_connected_pores(throats=[], flatten=True)
assert np.shape(a) == (0, )
def test_find_neighbor_pores_numeric(self):
a = self.net.find_neighbor_pores(pores=[])
assert np.size(a) == 0
def test_find_neighbor_pores_boolean(self):
Pind = np.zeros((self.net.Np,), dtype=bool)
Pind[[0, 1]] = True
a = self.net.find_neighbor_pores(pores=Pind)
assert np.all(a == [2, 10, 11, 100, 101])
def test_find_neighbor_pores_numeric_union(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='union')
assert np.all(a == [1, 3, 10, 12, 100, 102])
def test_find_neighbor_pores_numeric_intersection(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='xnor')
assert np.all(a == [1])
def test_find_neighbor_pores_numeric_exclusive_or(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='exclusive_or')
assert np.all(a == [3, 10, 12, 100, 102])
def test_find_neighbor_pores_numeric_union_include_input(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='or',
include_input=True)
assert np.all(a == [1, 3, 10, 12, 100, 102])
a = self.net.find_neighbor_pores(pores=[0, 1], mode='or',
include_input=True)
assert np.all(a == [0, 1, 2, 10, 11, 100, 101])
def test_find_neighbor_pores_numeric_intersection_include_input(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='and',
include_input=True)
assert np.all(a == [1])
a = self.net.find_neighbor_pores(pores=[0, 1], mode='and',
include_input=True)
assert np.all(a == [])
def test_find_neighbor_pores_numeric_intersection_exclude_input(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='and',
include_input=False)
assert np.all(a == [1])
def test_find_neighbor_pores_numeric_exclusive_or_include_input(self):
a = self.net.find_neighbor_pores(pores=[0, 2], mode='exclusive_or',
include_input=True)
assert np.all(a == [3, 10, 12, 100, 102])
a = self.net.find_neighbor_pores(pores=[0, 1], mode='exclusive_or',
include_input=True)
assert np.all(a == [0, 1, 2, 10, 11, 100, 101])
def test_find_neighbor_throats_on_pores_wo_throats(self):
net = op.network.Cubic(shape=[10, 10, 1])
ts = net.find_neighbor_throats(pores=net.Ps[-1])
op.topotools.trim(net, throats=ts)
ts2 = net.find_neighbor_throats(pores=99)
assert ts2.size == 0
def test_find_neighbor_throats_empty(self):
a = self.net.find_neighbor_throats(pores=[])
assert np.size(a) == 0
def test_find_neighbor_throats_boolean(self):
Pind = np.zeros((self.net.Np,), dtype=bool)
Pind[[0, 1]] = True
a = self.net.find_neighbor_throats(pores=Pind)
assert np.all(a == [0, 1, 900, 901, 1800, 1801])
def test_find_neighbor_throats_numeric_union(self):
a = self.net.find_neighbor_throats(pores=[0, 2], mode='union')
assert np.all(a == [0, 1, 2, 900, 902, 1800, 1802])
def test_find_neighbor_throats_numeric_intersection(self):
a = self.net.find_neighbor_throats(pores=[0, 2], mode='xnor')
assert np.size(a) == 0
def test_find_neighbor_throats_numeric_exclusive_or(self):
a = self.net.find_neighbor_throats(pores=[0, 2],
mode='exclusive_or')
assert np.all(a == [0, 1, 2, 900, 902, 1800, 1802])
def test_num_neighbors_empty(self):
a = self.net.num_neighbors(pores=[])
assert np.size(a) == 0
def test_num_neighbors_pores_flattened(self):
a = self.net.num_neighbors(pores=0, flatten=True)
assert a == 3
assert isinstance(a, int)
a = self.net.num_neighbors(pores=[0, 2], flatten=True)
assert a == 6
assert isinstance(a, int)
def test_num_neighbors_pores_with_modes(self):
a = self.net.num_neighbors(pores=[0, 2], mode='union', flatten=True)
assert a == 6
a = self.net.num_neighbors(pores=[0, 2], mode='xnor',
flatten=True)
assert a == 1
a = self.net.num_neighbors(pores=[0, 2], mode='exclusive_or',
flatten=True)
assert a == 5
def test_num_neighbors_pores_not_flattened(self):
a = self.net.num_neighbors(pores=[0, 2], flatten=False)
assert np.all(a == [3, 4])
a = self.net.num_neighbors(pores=0, flatten=False)
assert np.all(a == [3])
assert isinstance(a, np.ndarray)
def test_find_nearby_pores_distance_1(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=1, flatten=False,
include_input=True)
b = self.net.find_neighbor_pores(pores=[0, 1], flatten=False,
include_input=True)
assert np.all([np.all(a[i] == b[i]) for i in range(0, len(a))])
def test_find_nearby_pores_distance_2(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=2)
assert np.all([np.size(a[i]) for i in [0, 1]] == [9, 13])
def test_find_nearby_pores_distance_0(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=1e-9, flatten=False)
assert np.shape(a) == (2, 0)
a = self.net.find_nearby_pores(pores=[0, 1], r=1e-9, flatten=True)
assert a.shape == (0,)
def test_find_nearby_pores_distance_1_flattened(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=1, flatten=True)
b = self.net.find_neighbor_pores(pores=[0, 1])
assert np.all(a == b)
def test_find_nearby_pores_distance_2_flattened(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=2, flatten=True)
assert np.size(a) == 15
def test_find_nearby_pores_distance_2_flattened_include_input(self):
a = self.net.find_nearby_pores(pores=[0, 1], r=2,
flatten=True, include_input=True)
assert np.size(a) == 17
assert np.all(np.in1d([0, 1], a))
if __name__ == '__main__':
    # Ad-hoc runner: execute every test_* method without pytest.
    t = GenericNetworkTest()
    t.setup_class()
    self = t
    for name in t.__dir__():
        if name.startswith('test'):
            print('running test: ' + name)
            getattr(t, name)()
| 40.934426 | 76 | 0.595248 | import numpy as np
import openpnm as op
class GenericNetworkTest:
    """Tests for GenericNetwork topology queries on a 10x10x10 cubic lattice.

    All expected index/count values below are properties of the cubic
    network built in setup_class.
    """
    def setup_class(self):
        # Shared fixture: a 1000-pore cubic network.
        self.net = op.network.Cubic(shape=[10, 10, 10])
    def teardown_class(self):
        # Drop all projects so test runs do not leak into each other.
        ws = op.Workspace()
        ws.clear()
    def test_find_connected_pores_numeric_not_flattend(self):
        a = self.net.find_connected_pores(throats=[0, 1])
        assert np.all(a.flatten() == [0, 1, 1, 2])
    def test_find_connected_pores_numeric_flattend(self):
        a = self.net.find_connected_pores(throats=[0, 1], flatten=True)
        assert np.all(a == [0, 1, 2])
    def test_find_connected_pores_boolean_flattend(self):
        Tind = np.zeros((self.net.Nt,), dtype=bool)
        Tind[[0, 1]] = True
        a = self.net.find_connected_pores(throats=Tind, flatten=True)
        assert np.all(a == [0, 1, 2])
    def test_find_connected_pores_empty_flattend(self):
        a = self.net.find_connected_pores(throats=[], flatten=True)
        assert np.shape(a) == (0, )
    def test_find_neighbor_pores_numeric(self):
        a = self.net.find_neighbor_pores(pores=[])
        assert np.size(a) == 0
    def test_find_neighbor_pores_boolean(self):
        Pind = np.zeros((self.net.Np,), dtype=bool)
        Pind[[0, 1]] = True
        a = self.net.find_neighbor_pores(pores=Pind)
        assert np.all(a == [2, 10, 11, 100, 101])
    def test_find_neighbor_pores_numeric_union(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='union')
        assert np.all(a == [1, 3, 10, 12, 100, 102])
    def test_find_neighbor_pores_numeric_intersection(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='xnor')
        assert np.all(a == [1])
    def test_find_neighbor_pores_numeric_exclusive_or(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='exclusive_or')
        assert np.all(a == [3, 10, 12, 100, 102])
    def test_find_neighbor_pores_numeric_union_include_input(self):
        # include_input only re-adds inputs that are themselves neighbors.
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='or',
                                         include_input=True)
        assert np.all(a == [1, 3, 10, 12, 100, 102])
        a = self.net.find_neighbor_pores(pores=[0, 1], mode='or',
                                         include_input=True)
        assert np.all(a == [0, 1, 2, 10, 11, 100, 101])
    def test_find_neighbor_pores_numeric_intersection_include_input(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='and',
                                         include_input=True)
        assert np.all(a == [1])
        a = self.net.find_neighbor_pores(pores=[0, 1], mode='and',
                                         include_input=True)
        assert np.all(a == [])
    def test_find_neighbor_pores_numeric_intersection_exclude_input(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='and',
                                         include_input=False)
        assert np.all(a == [1])
    def test_find_neighbor_pores_numeric_exclusive_or_include_input(self):
        a = self.net.find_neighbor_pores(pores=[0, 2], mode='exclusive_or',
                                         include_input=True)
        assert np.all(a == [3, 10, 12, 100, 102])
        a = self.net.find_neighbor_pores(pores=[0, 1], mode='exclusive_or',
                                         include_input=True)
        assert np.all(a == [0, 1, 2, 10, 11, 100, 101])
    def test_find_neighbor_throats_on_pores_wo_throats(self):
        # A pore whose throats were trimmed must report no neighbors.
        net = op.network.Cubic(shape=[10, 10, 1])
        ts = net.find_neighbor_throats(pores=net.Ps[-1])
        op.topotools.trim(net, throats=ts)
        ts2 = net.find_neighbor_throats(pores=99)
        assert ts2.size == 0
    def test_find_neighbor_throats_empty(self):
        a = self.net.find_neighbor_throats(pores=[])
        assert np.size(a) == 0
    def test_find_neighbor_throats_boolean(self):
        Pind = np.zeros((self.net.Np,), dtype=bool)
        Pind[[0, 1]] = True
        a = self.net.find_neighbor_throats(pores=Pind)
        assert np.all(a == [0, 1, 900, 901, 1800, 1801])
    def test_find_neighbor_throats_numeric_union(self):
        a = self.net.find_neighbor_throats(pores=[0, 2], mode='union')
        assert np.all(a == [0, 1, 2, 900, 902, 1800, 1802])
    def test_find_neighbor_throats_numeric_intersection(self):
        a = self.net.find_neighbor_throats(pores=[0, 2], mode='xnor')
        assert np.size(a) == 0
    def test_find_neighbor_throats_numeric_exclusive_or(self):
        a = self.net.find_neighbor_throats(pores=[0, 2],
                                           mode='exclusive_or')
        assert np.all(a == [0, 1, 2, 900, 902, 1800, 1802])
    def test_num_neighbors_empty(self):
        a = self.net.num_neighbors(pores=[])
        assert np.size(a) == 0
    def test_num_neighbors_pores_flattened(self):
        # Flattened counts are returned as plain Python ints.
        a = self.net.num_neighbors(pores=0, flatten=True)
        assert a == 3
        assert isinstance(a, int)
        a = self.net.num_neighbors(pores=[0, 2], flatten=True)
        assert a == 6
        assert isinstance(a, int)
    def test_num_neighbors_pores_with_modes(self):
        a = self.net.num_neighbors(pores=[0, 2], mode='union', flatten=True)
        assert a == 6
        a = self.net.num_neighbors(pores=[0, 2], mode='xnor',
                                   flatten=True)
        assert a == 1
        a = self.net.num_neighbors(pores=[0, 2], mode='exclusive_or',
                                   flatten=True)
        assert a == 5
    def test_num_neighbors_pores_not_flattened(self):
        a = self.net.num_neighbors(pores=[0, 2], flatten=False)
        assert np.all(a == [3, 4])
        a = self.net.num_neighbors(pores=0, flatten=False)
        assert np.all(a == [3])
        assert isinstance(a, np.ndarray)
    def test_find_nearby_pores_distance_1(self):
        # At r=1 on a unit-spaced cubic net, "nearby" equals topological
        # neighbors.
        a = self.net.find_nearby_pores(pores=[0, 1], r=1, flatten=False,
                                       include_input=True)
        b = self.net.find_neighbor_pores(pores=[0, 1], flatten=False,
                                         include_input=True)
        assert np.all([np.all(a[i] == b[i]) for i in range(0, len(a))])
    def test_find_nearby_pores_distance_2(self):
        a = self.net.find_nearby_pores(pores=[0, 1], r=2)
        assert np.all([np.size(a[i]) for i in [0, 1]] == [9, 13])
    def test_find_nearby_pores_distance_0(self):
        a = self.net.find_nearby_pores(pores=[0, 1], r=1e-9, flatten=False)
        assert np.shape(a) == (2, 0)
        a = self.net.find_nearby_pores(pores=[0, 1], r=1e-9, flatten=True)
        assert a.shape == (0,)
    def test_find_nearby_pores_distance_1_flattened(self):
        a = self.net.find_nearby_pores(pores=[0, 1], r=1, flatten=True)
        b = self.net.find_neighbor_pores(pores=[0, 1])
        assert np.all(a == b)
    def test_find_nearby_pores_distance_2_flattened(self):
        a = self.net.find_nearby_pores(pores=[0, 1], r=2, flatten=True)
        assert np.size(a) == 15
    def test_find_nearby_pores_distance_2_flattened_include_input(self):
        a = self.net.find_nearby_pores(pores=[0, 1], r=2,
                                       flatten=True, include_input=True)
        assert np.size(a) == 17
        assert np.all(np.in1d([0, 1], a))
if __name__ == '__main__':
    # Ad-hoc runner: execute every test_* method without pytest.
    t = GenericNetworkTest()
    t.setup_class()
    self = t
    for name in t.__dir__():
        if name.startswith('test'):
            print('running test: ' + name)
            getattr(t, name)()
| true | true |
1c3d85ca8eb7bd996d2442a399dae87a1eb08406 | 22,304 | py | Python | testing/unit/test_schemas.py | nanome-ai/nanome | 7777a782168fe4e68f58c42f01cff9e66f3675aa | [
"MIT"
] | 1 | 2020-04-10T09:47:54.000Z | 2020-04-10T09:47:54.000Z | testing/unit/test_schemas.py | nanome-ai/nanome | 7777a782168fe4e68f58c42f01cff9e66f3675aa | [
"MIT"
] | 10 | 2019-05-30T18:29:10.000Z | 2020-02-15T02:16:42.000Z | testing/unit/test_schemas.py | nanome-ai/nanome | 7777a782168fe4e68f58c42f01cff9e66f3675aa | [
"MIT"
] | 2 | 2020-02-04T02:56:21.000Z | 2020-04-25T20:05:16.000Z | import json
import os
import sys
import tempfile
import unittest
from nanome.api import structure, ui, shapes, streams
from nanome.util import Vector3, enums, Color
if sys.version_info.major >= 3:
from unittest.mock import MagicMock, patch
else:
# Python 2.7 way of getting magicmock. Requires pip install mock
from mock import MagicMock, patch
# Schemas requirements are optional, so don't run tests if they are not installed.
reqs_installed = True
try:
from nanome.api import schemas
except ModuleNotFoundError:
reqs_installed = False
test_assets = os.path.join(os.getcwd(), "testing/test_assets")
workspace_json = os.path.join(test_assets, "serialized_data/benzene_workspace.json")
pdb_file = os.path.join(test_assets, "pdb/1tyl.pdb")
conformer_pdb = os.path.join(test_assets, "pdb/thrombine_conformer.pdb")
smina_menu_json = os.path.join(test_assets, "test_menu_smina.json")
test_menu_json = os.path.join(test_assets, "test_menu.json")
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class StructureSchemaTestCase(unittest.TestCase):
    """Round-trip (dump/load) tests for the nanome structure schemas."""
    def test_load_workspace(self):
        """Deserialize a workspace from JSON."""
        with open(workspace_json, 'r') as f:
            workspace_data = json.load(f)
            workspace = schemas.WorkspaceSchema().load(workspace_data)
            self.assertTrue(isinstance(workspace, structure.Workspace))
    def test_dump_complex(self):
        """Serialize a complex into JSON."""
        comp = structure.Complex.io.from_pdb(path=pdb_file)
        self.assertTrue(isinstance(comp, structure.Complex))
        comp_json = schemas.ComplexSchema().dump(comp)
        self.assertTrue(isinstance(comp_json, dict))
    def test_load_conformer_complex(self):
        """Conformer count must survive a dump/load round trip."""
        comp = structure.Complex.io.from_pdb(path=conformer_pdb)
        mol = next(comp.molecules)
        conformer_count = mol.conformer_count
        self.assertEqual(conformer_count, 5)
        comp_json = schemas.ComplexSchema().dump(comp)
        loaded_comp = schemas.ComplexSchema().load(comp_json)
        loaded_mol = next(loaded_comp.molecules)
        self.assertEqual(loaded_mol.conformer_count, 5)
    def test_structure_schema_dump(self):
        """Make sure StructureSchema can parse the correct structure type."""
        with open(workspace_json, 'r') as f:
            workspace_data = json.load(f)
        workspace = schemas.WorkspaceSchema().load(workspace_data)
        comp = workspace.complexes[0]
        mol = next(comp.molecules)
        chain = next(comp.chains)
        residue = next(comp.residues)
        atom = next(comp.atoms)
        bond = next(comp.bonds)
        # One generic schema must round-trip every structure level.
        schema = schemas.StructureSchema()
        comp_data = schema.dump(comp)
        self.assertTrue(isinstance(comp_data, dict))
        reloaded_comp = schema.load(comp_data)
        self.assertTrue(isinstance(reloaded_comp, structure.Complex))
        mol_data = schema.dump(mol)
        self.assertTrue(isinstance(mol_data, dict))
        reloaded_mol = schema.load(mol_data)
        self.assertTrue(isinstance(reloaded_mol, structure.Molecule))
        chain_data = schema.dump(chain)
        self.assertTrue(isinstance(chain_data, dict))
        reloaded_chain = schema.load(chain_data)
        self.assertTrue(isinstance(reloaded_chain, structure.Chain))
        residue_data = schema.dump(residue)
        self.assertTrue(isinstance(residue_data, dict))
        reloaded_residue = schema.load(residue_data)
        self.assertTrue(isinstance(reloaded_residue, structure.Residue))
        bond_data = schema.dump(bond)
        self.assertTrue(isinstance(bond_data, dict))
        reloaded_bond = schema.load(bond_data)
        self.assertTrue(isinstance(reloaded_bond, structure.Bond))
        atom_data = schema.dump(atom)
        self.assertTrue(isinstance(atom_data, dict))
        reloaded_atom = schema.load(atom_data)
        self.assertTrue(isinstance(reloaded_atom, structure.Atom))
    def test_structure_schema_load(self):
        """Make sure StructureSchema can parse the correct structure type."""
        with open(workspace_json, 'r') as f:
            workspace_data = json.load(f)
        struct_schema = schemas.StructureSchema()
        comp_data = workspace_data['complexes'][0]
        comp = struct_schema.load(comp_data)
        self.assertTrue(isinstance(comp, structure.Complex))
        mol_data = comp_data['molecules'][0]
        mol = struct_schema.load(mol_data)
        self.assertTrue(isinstance(mol, structure.Molecule))
        chain_data = mol_data['chains'][0]
        chain = struct_schema.load(chain_data)
        self.assertTrue(isinstance(chain, structure.Chain))
        residue_data = chain_data['residues'][0]
        residue = struct_schema.load(residue_data)
        self.assertTrue(isinstance(residue, structure.Residue))
        bond_data = residue_data['bonds'][0]
        bond = struct_schema.load(bond_data)
        self.assertTrue(isinstance(bond, structure.Bond))
        atom_data = residue_data['atoms'][0]
        atom = struct_schema.load(atom_data)
        self.assertTrue(isinstance(atom, structure.Atom))
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class UISchemaTestCase(unittest.TestCase):
    """Tests for the UI schemas (MenuSchema and nested Button fields).

    Loads reference menus from JSON fixtures and verifies that schema
    load/dump round-trips agree with ``ui.Menu.io.from_json``.
    """
    def test_load_menu(self):
        """Ensure loading menu with serializers equivalent to Menu.io.from_json."""
        reference_menu = ui.Menu.io.from_json(path=smina_menu_json)
        with open(smina_menu_json, 'r') as f:
            menu_dict = json.load(f)
        menu = schemas.MenuSchema().load(menu_dict)
        self.assertTrue(isinstance(menu, ui.Menu))
        self.assertTrue(isinstance(menu.root, ui.LayoutNode))
        # Make sure menu can be serialized back to json.
        test_export = tempfile.NamedTemporaryFile(mode='w+', delete=False)
        menu.io.to_json(test_export.name)
        with open(test_export.name, 'r') as f:
            exported_data = json.loads(f.read())
        self.assertTrue(isinstance(exported_data, dict))
        test_export.close()
        # Make sure all content in reference menu was loaded
        menu_content_types = [
            content.__class__ for content in menu.get_all_content()]
        self.assertTrue(menu_content_types)
        reference_menu_content_types = [content.__class__ for content in reference_menu.get_all_content()]
        self.assertEqual(menu_content_types, reference_menu_content_types)
        # Check that Button values match the reference menu
        reference_menu_btn = next(content for content in reference_menu.get_all_content() if isinstance(content, ui.Button))
        menu_btn = next(content for content in menu.get_all_content() if isinstance(content, ui.Button))
        # ButtonText fields
        self.assertEqual(menu_btn.text.value.idle, reference_menu_btn.text.value.idle)
        self.assertEqual(menu_btn.text.value.highlighted, reference_menu_btn.text.value.highlighted)
        self.assertEqual(menu_btn.text.value.selected, reference_menu_btn.text.value.selected)
        self.assertEqual(menu_btn.text.value.unusable, reference_menu_btn.text.value.unusable)
        self.assertEqual(menu_btn.text.bold.idle, reference_menu_btn.text.bold.idle)
        self.assertEqual(menu_btn.text.bold.highlighted, reference_menu_btn.text.bold.highlighted)
        self.assertEqual(menu_btn.text.bold.selected, reference_menu_btn.text.bold.selected)
        self.assertEqual(menu_btn.text.bold.unusable, reference_menu_btn.text.bold.unusable)
        self.assertEqual(menu_btn.text.color.idle.hex, reference_menu_btn.text.color.idle.hex)
        self.assertEqual(menu_btn.text.color.highlighted.hex, reference_menu_btn.text.color.highlighted.hex)
        self.assertEqual(menu_btn.text.color.selected.hex, reference_menu_btn.text.color.selected.hex)
        self.assertEqual(menu_btn.text.color.unusable.hex, reference_menu_btn.text.color.unusable.hex)
        # Test outline values
        self.assertEqual(menu_btn.outline.color.idle.hex, reference_menu_btn.outline.color.idle.hex)
        self.assertEqual(menu_btn.outline.color.highlighted.hex, reference_menu_btn.outline.color.highlighted.hex)
        self.assertEqual(menu_btn.outline.color.selected.hex, reference_menu_btn.outline.color.selected.hex)
        self.assertEqual(menu_btn.outline.color.unusable.hex, reference_menu_btn.outline.color.unusable.hex)
        self.assertEqual(menu_btn.outline.size.idle, reference_menu_btn.outline.size.idle)
        self.assertEqual(menu_btn.outline.size.highlighted, reference_menu_btn.outline.size.highlighted)
        self.assertEqual(menu_btn.outline.size.selected, reference_menu_btn.outline.size.selected)
        self.assertEqual(menu_btn.outline.size.unusable, reference_menu_btn.outline.size.unusable)
        # assert mesh colors
        self.assertEqual(menu_btn.mesh.active, reference_menu_btn.mesh.active)
        self.assertEqual(menu_btn.mesh.color.idle.hex, reference_menu_btn.mesh.color.idle.hex)
        self.assertEqual(menu_btn.mesh.color.highlighted.hex, reference_menu_btn.mesh.color.highlighted.hex)
        self.assertEqual(menu_btn.mesh.color.selected.hex, reference_menu_btn.mesh.color.selected.hex)
        self.assertEqual(menu_btn.mesh.color.unusable.hex, reference_menu_btn.mesh.color.unusable.hex)
        # tooltip
        self.assertEqual(menu_btn.tooltip.title, reference_menu_btn.tooltip.title)
        self.assertEqual(menu_btn.tooltip.content, reference_menu_btn.tooltip.content)
        self.assertEqual(menu_btn.tooltip.bounds, reference_menu_btn.tooltip.bounds)
        self.assertEqual(menu_btn.tooltip.positioning_target, reference_menu_btn.tooltip.positioning_target)
        self.assertEqual(menu_btn.tooltip.positioning_origin, reference_menu_btn.tooltip.positioning_origin)
        # icons
        self.assertEqual(menu_btn.icon.active, reference_menu_btn.icon.active)
        self.assertEqual(menu_btn.icon.color.idle.hex, reference_menu_btn.icon.color.idle.hex)
        self.assertEqual(menu_btn.icon.color.highlighted.hex, reference_menu_btn.icon.color.highlighted.hex)
        self.assertEqual(menu_btn.icon.color.selected.hex, reference_menu_btn.icon.color.selected.hex)
        self.assertEqual(menu_btn.icon.color.unusable.hex, reference_menu_btn.icon.color.unusable.hex)
        self.assertEqual(menu_btn.icon.value.idle, reference_menu_btn.icon.value.idle)
        self.assertEqual(menu_btn.icon.value.highlighted, reference_menu_btn.icon.value.highlighted)
        self.assertEqual(menu_btn.icon.value.selected, reference_menu_btn.icon.value.selected)
        self.assertEqual(menu_btn.icon.value.unusable, reference_menu_btn.icon.value.unusable)
    def test_button_dump(self):
        """Test that all the nested values of a Button are dumped correctly."""
        menu = ui.Menu.io.from_json(test_menu_json)
        menu_dump = schemas.MenuSchema().dump(menu)
        menu_btn = next(
            content for content in menu.get_all_content()
            if isinstance(content, ui.Button))
        # The fixture places the button two levels below the effective root.
        btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
        # Test outline data
        self.assertEqual(round(btn_data['outline_size_idle'], 2), round(menu_btn.outline.size.idle, 2))
        self.assertEqual(round(btn_data['outline_size_selected'], 2), round(menu_btn.outline.size.selected, 2))
        self.assertEqual(round(btn_data['outline_size_highlighted'], 2), round(menu_btn.outline.size.highlighted, 2))
        self.assertEqual(round(btn_data['outline_size_selected_highlighted'], 2), round(menu_btn.outline.size.selected_highlighted, 2))
        self.assertEqual(round(btn_data['outline_size_unusable'], 2), round(menu_btn.outline.size.unusable, 2))
        # ButtonText data
        self.assertEqual(btn_data['text_value_idle'], menu_btn.text.value.idle)
        self.assertEqual(btn_data['text_value_highlighted'], menu_btn.text.value.highlighted)
        self.assertEqual(btn_data['text_value_selected'], menu_btn.text.value.selected)
        self.assertEqual(btn_data['text_value_selected_highlighted'], menu_btn.text.value.selected_highlighted)
        self.assertEqual(btn_data['text_value_unusable'], menu_btn.text.value.unusable)
        self.assertEqual(btn_data['text_bold_idle'], menu_btn.text.bold.idle)
        self.assertEqual(btn_data['text_bold_highlighted'], menu_btn.text.bold.highlighted)
        self.assertEqual(btn_data['text_bold_selected'], menu_btn.text.bold.selected)
        self.assertEqual(btn_data['text_bold_unusable'], menu_btn.text.bold.unusable)
        self.assertEqual(btn_data['text_color_idle'], menu_btn.text.color.idle._color)
        self.assertEqual(btn_data['text_color_highlighted'], menu_btn.text.color.highlighted._color)
        self.assertEqual(btn_data['text_color_selected'], menu_btn.text.color.selected._color)
        self.assertEqual(btn_data['text_color_unusable'], menu_btn.text.color.unusable._color)
        self.assertEqual(btn_data['text_min_size'], menu_btn.text.min_size)
        self.assertEqual(btn_data['text_max_size'], menu_btn.text.max_size)
        self.assertEqual(btn_data['text_size'], menu_btn.text_size)
        self.assertEqual(btn_data['text_underlined'], menu_btn.text_underlined)
        self.assertEqual(btn_data['text_vertical_align'], menu_btn.text_vertical_align)
        self.assertEqual(btn_data['text_horizontal_align'], menu_btn.text_horizontal_align)
        self.assertEqual(btn_data['text_ellipsis'], menu_btn.text.ellipsis)
        self.assertEqual(btn_data['text_padding_top'], menu_btn.text_padding_top)
        self.assertEqual(btn_data['text_padding_bottom'], menu_btn.text_padding_bottom)
        self.assertEqual(btn_data['text_padding_left'], menu_btn.text_padding_left)
        self.assertEqual(btn_data['text_padding_right'], menu_btn.text_padding_right)
        self.assertEqual(btn_data['text_line_spacing'], menu_btn.text.line_spacing)
        # Icons
        self.assertEqual(btn_data['icon_active'], menu_btn.icon.active)
        self.assertEqual(btn_data['icon_color_idle'], menu_btn.icon.color.idle._color)
        self.assertEqual(btn_data['icon_color_highlighted'], menu_btn.icon.color.highlighted._color)
        self.assertEqual(btn_data['icon_color_selected'], menu_btn.icon.color.selected._color)
        self.assertEqual(btn_data['icon_color_unusable'], menu_btn.icon.color.unusable._color)
        self.assertEqual(btn_data['icon_value_idle'], menu_btn.icon.value.idle)
        self.assertEqual(btn_data['icon_value_highlighted'], menu_btn.icon.value.highlighted)
        self.assertEqual(btn_data['icon_value_selected'], menu_btn.icon.value.selected)
        self.assertEqual(btn_data['icon_value_unusable'], menu_btn.icon.value.unusable)
        # Meshes
        self.assertEqual(btn_data['mesh_active'], menu_btn.mesh.active)
        self.assertEqual(btn_data['mesh_enabled_idle'], menu_btn.mesh.enabled.idle)
        self.assertEqual(btn_data['mesh_enabled_selected'], menu_btn.mesh.enabled.selected)
        self.assertEqual(btn_data['mesh_enabled_highlighted'], menu_btn.mesh.enabled.highlighted)
        self.assertEqual(btn_data['mesh_enabled_selected_highlighted'], menu_btn.mesh.enabled.selected_highlighted)
        self.assertEqual(btn_data['mesh_enabled_unusable'], menu_btn.mesh.enabled.unusable)
        self.assertEqual(btn_data['mesh_color_idle'], menu_btn.mesh.color.idle._color)
        self.assertEqual(btn_data['mesh_color_selected'], menu_btn.mesh.color.selected._color)
        self.assertEqual(btn_data['mesh_color_highlighted'], menu_btn.mesh.color.highlighted._color)
        self.assertEqual(btn_data['mesh_color_selected_highlighted'], menu_btn.mesh.color.selected_highlighted._color)
        self.assertEqual(btn_data['mesh_color_unusable'], menu_btn.mesh.color.unusable._color)
        # Tooltips
        self.assertEqual(btn_data['tooltip_title'], menu_btn.tooltip.title)
        self.assertEqual(btn_data['tooltip_content'], menu_btn.tooltip.content)
        self.assertEqual(round(btn_data['tooltip_bounds']['x'], 2), round(menu_btn.tooltip.bounds.x, 2))
        self.assertEqual(round(btn_data['tooltip_bounds']['y'], 2), round(menu_btn.tooltip.bounds.y, 2))
        self.assertEqual(round(btn_data['tooltip_bounds']['z'], 2), round(menu_btn.tooltip.bounds.z, 2))
        self.assertEqual(btn_data['tooltip_positioning_target'], menu_btn.tooltip.positioning_target)
        self.assertEqual(btn_data['tooltip_positioning_origin'], menu_btn.tooltip.positioning_origin)
    def test_dump_menu_idempotent(self):
        """Ensure that dumping menu from serializers returns same input json."""
        # load -> dump -> load -> dump; the two dumps must be identical.
        with open(smina_menu_json, 'r') as f:
            input_dict = json.load(f)
        menu = schemas.MenuSchema().load(input_dict)
        menu_dump = schemas.MenuSchema().dump(menu)
        second_menu = schemas.MenuSchema().load(menu_dump)
        second_menu_dump = schemas.MenuSchema().dump(second_menu)
        self.assertEqual(menu_dump, second_menu_dump)
    def test_btn_switch_fields(self):
        """Test btn switch values that are not included in StackStudio exports."""
        with open(test_menu_json, 'r') as f:
            input_dict = json.load(f)
        menu = schemas.MenuSchema().load(input_dict)
        menu_btn = next(
            content for content in menu.get_all_content()
            if isinstance(content, ui.Button))
        # Set switch fields manually, then confirm they appear in the dump.
        menu_btn.switch.active = True
        menu_btn.switch.on_color = Color.Red()
        menu_btn.switch.off_color = Color.Blue()
        menu_dump = schemas.MenuSchema().dump(menu)
        btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
        self.assertEqual(btn_data.get('switch_active'), menu_btn.switch.active)
        self.assertEqual(btn_data.get('switch_on_color'), menu_btn.switch.on_color._color)
        self.assertEqual(btn_data.get('switch_off_color'), menu_btn.switch.off_color._color)
    def test_btn_icon_value_fields(self):
        """Test icon values that are not included in StackStudio exports, but we actually want."""
        with open(test_menu_json, 'r') as f:
            input_dict = json.load(f)
        menu = schemas.MenuSchema().load(input_dict)
        menu_btn = next(
            content for content in menu.get_all_content()
            if isinstance(content, ui.Button))
        # Set every icon state to the same path, then confirm the dump has them.
        menu_btn.icon.value.set_all("/path/to/icon.png")
        menu_dump = schemas.MenuSchema().dump(menu)
        btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
        self.assertEqual(btn_data.get('icon_value_idle'), menu_btn.icon.value.idle)
        self.assertEqual(btn_data.get('icon_value_selected'), menu_btn.icon.value.selected)
        self.assertEqual(btn_data.get('icon_value_highlighted'), menu_btn.icon.value.highlighted)
        self.assertEqual(btn_data.get('icon_value_selected_highlighted'), menu_btn.icon.value.selected_highlighted)
        self.assertEqual(btn_data.get('icon_value_unusable'), menu_btn.icon.value.unusable)
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class ShapeSchemaTestCase(unittest.TestCase):
    """Serialization tests for the shape schemas (Sphere, Label, Mesh)."""

    def test_dump_sphere(self):
        """Sphere fields and anchor offsets survive a schema dump."""
        radius = 5
        color = Color.Blue()
        sphere1_position = Vector3(25, 100, 50)
        # Serialize sphere anchored to point in Workspace
        sphere1 = shapes.Sphere()
        sphere1.radius = radius
        sphere1.color = color
        anchor1 = sphere1.anchors[0]
        # Bug fix: the original used `==` (a no-op comparison) where an
        # assignment was intended, so the anchor type was never actually set.
        anchor1.anchor_type = enums.ShapeAnchorType.Workspace
        anchor1.local_offset = sphere1_position
        schema = schemas.SphereSchema()
        sphere1_dict = schema.dump(sphere1)
        self.assertEqual(sphere1_dict['radius'], radius)
        self.assertEqual(sphere1_dict['color'], list(color.rgba))
        anchor_dict = sphere1_dict['anchors'][0]
        # Bug fix: this was also a bare `==` comparison with no effect;
        # assert that the anchor type assigned above is still in place.
        self.assertEqual(anchor1.anchor_type, enums.ShapeAnchorType.Workspace)
        self.assertEqual(
            anchor_dict['local_offset'],
            list(sphere1_position.unpack()))

    def test_dump_label(self):
        """A Label's text is preserved by LabelSchema.dump."""
        # Lets add a label that's centered on the line.
        label = shapes.Label()
        label.text = 'Label'
        # (Removed an unused `anchor = label.anchors[0]` assignment that was
        # immediately rebound by the loop below.)
        for anchor in label.anchors:
            anchor.viewer_offset = Vector3(0, 0, -.1)
        label_dict = schemas.LabelSchema().dump(label)
        self.assertEqual(label_dict['text'], label.text)

    def test_dump_mesh(self):
        """Mesh geometry arrays are dumped verbatim by MeshSchema."""
        mesh = shapes.Mesh()
        # Create a cube
        mesh.vertices = [
            0.0, 20.0, 20.0, 0.0, 0.0, 20.0, 20.0, 0.0, 20.0, 20.0, 20.0, 20.0,
            0.0, 20.0, 0.0, 0.0, 0.0, 0.0, 20.0, 0.0, 0.0, 20.0, 20.0, 0.0]
        mesh.normals = [
            -0.408, 0.408, 0.817, -0.667, -0.667, 0.333, 0.408, -0.408, 0.817,
            0.667, 0.667, 0.333, -0.667, 0.667, -0.333, -0.408, -0.408, -0.817,
            0.667, -0.667, -0.333, 0.408, 0.408, -0.817]
        mesh.triangles = [
            0, 1, 2, 0, 2, 3, 7, 6, 5, 7, 5, 4, 3, 2, 6, 3, 6, 7, 4, 0, 3, 4, 3, 7, 4, 5, 1,
            4, 1, 0, 1, 5, 6, 1, 6, 2]
        mesh.anchors[0].anchor_type = enums.ShapeAnchorType.Workspace
        mesh.anchors[0].position = Vector3(0, 0, 0)
        mesh.color = Color(255, 255, 255, 255)
        mesh.colors = [
            1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0,
            0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0,
            0.0, 0.0, 1.0, 1.0]
        mesh_dict = schemas.MeshSchema().dump(mesh)
        self.assertEqual(mesh_dict['vertices'], mesh.vertices)
        self.assertEqual(mesh_dict['normals'], mesh.normals)
        self.assertEqual(mesh_dict['triangles'], mesh.triangles)
        self.assertEqual(mesh_dict['colors'], mesh.colors)
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class StreamSchemaTestCase(unittest.TestCase):
    """Serialization tests for StreamSchema."""

    def test_stream_dump(self):
        """Dumping a Stream exposes its id, data type and direction."""
        expected_id = 5
        expected_type = enums.StreamDataType.string
        expected_direction = enums.StreamDirection.writing
        # The network handle is irrelevant to serialization, so mock it out.
        stream = streams.Stream(
            MagicMock(), expected_id, expected_type, expected_direction)
        dumped = schemas.StreamSchema().dump(stream)
        self.assertEqual(dumped['id'], expected_id)
        self.assertEqual(dumped['data_type'], expected_type.value)
        self.assertEqual(dumped['direction'], expected_direction.value)
| 56.040201 | 135 | 0.708348 | import json
import os
import sys
import tempfile
import unittest
from nanome.api import structure, ui, shapes, streams
from nanome.util import Vector3, enums, Color
if sys.version_info.major >= 3:
from unittest.mock import MagicMock, patch
else:
from mock import MagicMock, patch
reqs_installed = True
try:
from nanome.api import schemas
except ModuleNotFoundError:
reqs_installed = False
test_assets = os.path.join(os.getcwd(), "testing/test_assets")
workspace_json = os.path.join(test_assets, "serialized_data/benzene_workspace.json")
pdb_file = os.path.join(test_assets, "pdb/1tyl.pdb")
conformer_pdb = os.path.join(test_assets, "pdb/thrombine_conformer.pdb")
smina_menu_json = os.path.join(test_assets, "test_menu_smina.json")
test_menu_json = os.path.join(test_assets, "test_menu.json")
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class StructureSchemaTestCase(unittest.TestCase):
def test_load_workspace(self):
with open(workspace_json, 'r') as f:
workspace_data = json.load(f)
workspace = schemas.WorkspaceSchema().load(workspace_data)
self.assertTrue(isinstance(workspace, structure.Workspace))
def test_dump_complex(self):
# Serialize a complex into JSON.
comp = structure.Complex.io.from_pdb(path=pdb_file)
self.assertTrue(isinstance(comp, structure.Complex))
comp_json = schemas.ComplexSchema().dump(comp)
self.assertTrue(isinstance(comp_json, dict))
def test_load_conformer_complex(self):
comp = structure.Complex.io.from_pdb(path=conformer_pdb)
mol = next(comp.molecules)
conformer_count = mol.conformer_count
self.assertEqual(conformer_count, 5)
comp_json = schemas.ComplexSchema().dump(comp)
loaded_comp = schemas.ComplexSchema().load(comp_json)
loaded_mol = next(loaded_comp.molecules)
self.assertEqual(loaded_mol.conformer_count, 5)
def test_structure_schema_dump(self):
with open(workspace_json, 'r') as f:
workspace_data = json.load(f)
workspace = schemas.WorkspaceSchema().load(workspace_data)
comp = workspace.complexes[0]
mol = next(comp.molecules)
chain = next(comp.chains)
residue = next(comp.residues)
atom = next(comp.atoms)
bond = next(comp.bonds)
schema = schemas.StructureSchema()
comp_data = schema.dump(comp)
self.assertTrue(isinstance(comp_data, dict))
reloaded_comp = schema.load(comp_data)
self.assertTrue(isinstance(reloaded_comp, structure.Complex))
mol_data = schema.dump(mol)
self.assertTrue(isinstance(mol_data, dict))
reloaded_mol = schema.load(mol_data)
self.assertTrue(isinstance(reloaded_mol, structure.Molecule))
chain_data = schema.dump(chain)
self.assertTrue(isinstance(chain_data, dict))
reloaded_chain = schema.load(chain_data)
self.assertTrue(isinstance(reloaded_chain, structure.Chain))
residue_data = schema.dump(residue)
self.assertTrue(isinstance(residue_data, dict))
reloaded_residue = schema.load(residue_data)
self.assertTrue(isinstance(reloaded_residue, structure.Residue))
bond_data = schema.dump(bond)
self.assertTrue(isinstance(bond_data, dict))
reloaded_bond = schema.load(bond_data)
self.assertTrue(isinstance(reloaded_bond, structure.Bond))
atom_data = schema.dump(atom)
self.assertTrue(isinstance(atom_data, dict))
reloaded_atom = schema.load(atom_data)
self.assertTrue(isinstance(reloaded_atom, structure.Atom))
def test_structure_schema_load(self):
with open(workspace_json, 'r') as f:
workspace_data = json.load(f)
struct_schema = schemas.StructureSchema()
comp_data = workspace_data['complexes'][0]
comp = struct_schema.load(comp_data)
self.assertTrue(isinstance(comp, structure.Complex))
mol_data = comp_data['molecules'][0]
mol = struct_schema.load(mol_data)
self.assertTrue(isinstance(mol, structure.Molecule))
chain_data = mol_data['chains'][0]
chain = struct_schema.load(chain_data)
self.assertTrue(isinstance(chain, structure.Chain))
residue_data = chain_data['residues'][0]
residue = struct_schema.load(residue_data)
self.assertTrue(isinstance(residue, structure.Residue))
bond_data = residue_data['bonds'][0]
bond = struct_schema.load(bond_data)
self.assertTrue(isinstance(bond, structure.Bond))
atom_data = residue_data['atoms'][0]
atom = struct_schema.load(atom_data)
self.assertTrue(isinstance(atom, structure.Atom))
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class UISchemaTestCase(unittest.TestCase):
def test_load_menu(self):
reference_menu = ui.Menu.io.from_json(path=smina_menu_json)
with open(smina_menu_json, 'r') as f:
menu_dict = json.load(f)
menu = schemas.MenuSchema().load(menu_dict)
self.assertTrue(isinstance(menu, ui.Menu))
self.assertTrue(isinstance(menu.root, ui.LayoutNode))
# Make sure menu can be serialized back to json.
test_export = tempfile.NamedTemporaryFile(mode='w+', delete=False)
menu.io.to_json(test_export.name)
with open(test_export.name, 'r') as f:
exported_data = json.loads(f.read())
self.assertTrue(isinstance(exported_data, dict))
test_export.close()
# Make sure all content in reference menu was loaded
menu_content_types = [
content.__class__ for content in menu.get_all_content()]
self.assertTrue(menu_content_types)
reference_menu_content_types = [content.__class__ for content in reference_menu.get_all_content()]
self.assertEqual(menu_content_types, reference_menu_content_types)
# Check that Button values match the reference menu
reference_menu_btn = next(content for content in reference_menu.get_all_content() if isinstance(content, ui.Button))
menu_btn = next(content for content in menu.get_all_content() if isinstance(content, ui.Button))
# ButtonText fields
self.assertEqual(menu_btn.text.value.idle, reference_menu_btn.text.value.idle)
self.assertEqual(menu_btn.text.value.highlighted, reference_menu_btn.text.value.highlighted)
self.assertEqual(menu_btn.text.value.selected, reference_menu_btn.text.value.selected)
self.assertEqual(menu_btn.text.value.unusable, reference_menu_btn.text.value.unusable)
self.assertEqual(menu_btn.text.bold.idle, reference_menu_btn.text.bold.idle)
self.assertEqual(menu_btn.text.bold.highlighted, reference_menu_btn.text.bold.highlighted)
self.assertEqual(menu_btn.text.bold.selected, reference_menu_btn.text.bold.selected)
self.assertEqual(menu_btn.text.bold.unusable, reference_menu_btn.text.bold.unusable)
self.assertEqual(menu_btn.text.color.idle.hex, reference_menu_btn.text.color.idle.hex)
self.assertEqual(menu_btn.text.color.highlighted.hex, reference_menu_btn.text.color.highlighted.hex)
self.assertEqual(menu_btn.text.color.selected.hex, reference_menu_btn.text.color.selected.hex)
self.assertEqual(menu_btn.text.color.unusable.hex, reference_menu_btn.text.color.unusable.hex)
# Test outline values
self.assertEqual(menu_btn.outline.color.idle.hex, reference_menu_btn.outline.color.idle.hex)
self.assertEqual(menu_btn.outline.color.highlighted.hex, reference_menu_btn.outline.color.highlighted.hex)
self.assertEqual(menu_btn.outline.color.selected.hex, reference_menu_btn.outline.color.selected.hex)
self.assertEqual(menu_btn.outline.color.unusable.hex, reference_menu_btn.outline.color.unusable.hex)
self.assertEqual(menu_btn.outline.size.idle, reference_menu_btn.outline.size.idle)
self.assertEqual(menu_btn.outline.size.highlighted, reference_menu_btn.outline.size.highlighted)
self.assertEqual(menu_btn.outline.size.selected, reference_menu_btn.outline.size.selected)
self.assertEqual(menu_btn.outline.size.unusable, reference_menu_btn.outline.size.unusable)
# assert mesh colors
self.assertEqual(menu_btn.mesh.active, reference_menu_btn.mesh.active)
self.assertEqual(menu_btn.mesh.color.idle.hex, reference_menu_btn.mesh.color.idle.hex)
self.assertEqual(menu_btn.mesh.color.highlighted.hex, reference_menu_btn.mesh.color.highlighted.hex)
self.assertEqual(menu_btn.mesh.color.selected.hex, reference_menu_btn.mesh.color.selected.hex)
self.assertEqual(menu_btn.mesh.color.unusable.hex, reference_menu_btn.mesh.color.unusable.hex)
# tooltip
self.assertEqual(menu_btn.tooltip.title, reference_menu_btn.tooltip.title)
self.assertEqual(menu_btn.tooltip.content, reference_menu_btn.tooltip.content)
self.assertEqual(menu_btn.tooltip.bounds, reference_menu_btn.tooltip.bounds)
self.assertEqual(menu_btn.tooltip.positioning_target, reference_menu_btn.tooltip.positioning_target)
self.assertEqual(menu_btn.tooltip.positioning_origin, reference_menu_btn.tooltip.positioning_origin)
# icons
self.assertEqual(menu_btn.icon.active, reference_menu_btn.icon.active)
self.assertEqual(menu_btn.icon.color.idle.hex, reference_menu_btn.icon.color.idle.hex)
self.assertEqual(menu_btn.icon.color.highlighted.hex, reference_menu_btn.icon.color.highlighted.hex)
self.assertEqual(menu_btn.icon.color.selected.hex, reference_menu_btn.icon.color.selected.hex)
self.assertEqual(menu_btn.icon.color.unusable.hex, reference_menu_btn.icon.color.unusable.hex)
self.assertEqual(menu_btn.icon.value.idle, reference_menu_btn.icon.value.idle)
self.assertEqual(menu_btn.icon.value.highlighted, reference_menu_btn.icon.value.highlighted)
self.assertEqual(menu_btn.icon.value.selected, reference_menu_btn.icon.value.selected)
self.assertEqual(menu_btn.icon.value.unusable, reference_menu_btn.icon.value.unusable)
def test_button_dump(self):
menu = ui.Menu.io.from_json(test_menu_json)
menu_dump = schemas.MenuSchema().dump(menu)
menu_btn = next(
content for content in menu.get_all_content()
if isinstance(content, ui.Button))
btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
# Test outline data
self.assertEqual(round(btn_data['outline_size_idle'], 2), round(menu_btn.outline.size.idle, 2))
self.assertEqual(round(btn_data['outline_size_selected'], 2), round(menu_btn.outline.size.selected, 2))
self.assertEqual(round(btn_data['outline_size_highlighted'], 2), round(menu_btn.outline.size.highlighted, 2))
self.assertEqual(round(btn_data['outline_size_selected_highlighted'], 2), round(menu_btn.outline.size.selected_highlighted, 2))
self.assertEqual(round(btn_data['outline_size_unusable'], 2), round(menu_btn.outline.size.unusable, 2))
# ButtonText data
self.assertEqual(btn_data['text_value_idle'], menu_btn.text.value.idle)
self.assertEqual(btn_data['text_value_highlighted'], menu_btn.text.value.highlighted)
self.assertEqual(btn_data['text_value_selected'], menu_btn.text.value.selected)
self.assertEqual(btn_data['text_value_selected_highlighted'], menu_btn.text.value.selected_highlighted)
self.assertEqual(btn_data['text_value_unusable'], menu_btn.text.value.unusable)
self.assertEqual(btn_data['text_bold_idle'], menu_btn.text.bold.idle)
self.assertEqual(btn_data['text_bold_highlighted'], menu_btn.text.bold.highlighted)
self.assertEqual(btn_data['text_bold_selected'], menu_btn.text.bold.selected)
self.assertEqual(btn_data['text_bold_unusable'], menu_btn.text.bold.unusable)
self.assertEqual(btn_data['text_color_idle'], menu_btn.text.color.idle._color)
self.assertEqual(btn_data['text_color_highlighted'], menu_btn.text.color.highlighted._color)
self.assertEqual(btn_data['text_color_selected'], menu_btn.text.color.selected._color)
self.assertEqual(btn_data['text_color_unusable'], menu_btn.text.color.unusable._color)
self.assertEqual(btn_data['text_min_size'], menu_btn.text.min_size)
self.assertEqual(btn_data['text_max_size'], menu_btn.text.max_size)
self.assertEqual(btn_data['text_size'], menu_btn.text_size)
self.assertEqual(btn_data['text_underlined'], menu_btn.text_underlined)
self.assertEqual(btn_data['text_vertical_align'], menu_btn.text_vertical_align)
self.assertEqual(btn_data['text_horizontal_align'], menu_btn.text_horizontal_align)
self.assertEqual(btn_data['text_ellipsis'], menu_btn.text.ellipsis)
self.assertEqual(btn_data['text_padding_top'], menu_btn.text_padding_top)
self.assertEqual(btn_data['text_padding_bottom'], menu_btn.text_padding_bottom)
self.assertEqual(btn_data['text_padding_left'], menu_btn.text_padding_left)
self.assertEqual(btn_data['text_padding_right'], menu_btn.text_padding_right)
self.assertEqual(btn_data['text_line_spacing'], menu_btn.text.line_spacing)
# Icons
self.assertEqual(btn_data['icon_active'], menu_btn.icon.active)
self.assertEqual(btn_data['icon_color_idle'], menu_btn.icon.color.idle._color)
self.assertEqual(btn_data['icon_color_highlighted'], menu_btn.icon.color.highlighted._color)
self.assertEqual(btn_data['icon_color_selected'], menu_btn.icon.color.selected._color)
self.assertEqual(btn_data['icon_color_unusable'], menu_btn.icon.color.unusable._color)
self.assertEqual(btn_data['icon_value_idle'], menu_btn.icon.value.idle)
self.assertEqual(btn_data['icon_value_highlighted'], menu_btn.icon.value.highlighted)
self.assertEqual(btn_data['icon_value_selected'], menu_btn.icon.value.selected)
self.assertEqual(btn_data['icon_value_unusable'], menu_btn.icon.value.unusable)
# Meshes
self.assertEqual(btn_data['mesh_active'], menu_btn.mesh.active)
self.assertEqual(btn_data['mesh_enabled_idle'], menu_btn.mesh.enabled.idle)
self.assertEqual(btn_data['mesh_enabled_selected'], menu_btn.mesh.enabled.selected)
self.assertEqual(btn_data['mesh_enabled_highlighted'], menu_btn.mesh.enabled.highlighted)
self.assertEqual(btn_data['mesh_enabled_selected_highlighted'], menu_btn.mesh.enabled.selected_highlighted)
self.assertEqual(btn_data['mesh_enabled_unusable'], menu_btn.mesh.enabled.unusable)
self.assertEqual(btn_data['mesh_color_idle'], menu_btn.mesh.color.idle._color)
self.assertEqual(btn_data['mesh_color_selected'], menu_btn.mesh.color.selected._color)
self.assertEqual(btn_data['mesh_color_highlighted'], menu_btn.mesh.color.highlighted._color)
self.assertEqual(btn_data['mesh_color_selected_highlighted'], menu_btn.mesh.color.selected_highlighted._color)
self.assertEqual(btn_data['mesh_color_unusable'], menu_btn.mesh.color.unusable._color)
# Tooltips
self.assertEqual(btn_data['tooltip_title'], menu_btn.tooltip.title)
self.assertEqual(btn_data['tooltip_content'], menu_btn.tooltip.content)
self.assertEqual(round(btn_data['tooltip_bounds']['x'], 2), round(menu_btn.tooltip.bounds.x, 2))
self.assertEqual(round(btn_data['tooltip_bounds']['y'], 2), round(menu_btn.tooltip.bounds.y, 2))
self.assertEqual(round(btn_data['tooltip_bounds']['z'], 2), round(menu_btn.tooltip.bounds.z, 2))
self.assertEqual(btn_data['tooltip_positioning_target'], menu_btn.tooltip.positioning_target)
self.assertEqual(btn_data['tooltip_positioning_origin'], menu_btn.tooltip.positioning_origin)
def test_dump_menu_idempotent(self):
with open(smina_menu_json, 'r') as f:
input_dict = json.load(f)
menu = schemas.MenuSchema().load(input_dict)
menu_dump = schemas.MenuSchema().dump(menu)
second_menu = schemas.MenuSchema().load(menu_dump)
second_menu_dump = schemas.MenuSchema().dump(second_menu)
self.assertEqual(menu_dump, second_menu_dump)
def test_btn_switch_fields(self):
with open(test_menu_json, 'r') as f:
input_dict = json.load(f)
menu = schemas.MenuSchema().load(input_dict)
menu_btn = next(
content for content in menu.get_all_content()
if isinstance(content, ui.Button))
menu_btn.switch.active = True
menu_btn.switch.on_color = Color.Red()
menu_btn.switch.off_color = Color.Blue()
menu_dump = schemas.MenuSchema().dump(menu)
btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
self.assertEqual(btn_data.get('switch_active'), menu_btn.switch.active)
self.assertEqual(btn_data.get('switch_on_color'), menu_btn.switch.on_color._color)
self.assertEqual(btn_data.get('switch_off_color'), menu_btn.switch.off_color._color)
def test_btn_icon_value_fields(self):
with open(test_menu_json, 'r') as f:
input_dict = json.load(f)
menu = schemas.MenuSchema().load(input_dict)
menu_btn = next(
content for content in menu.get_all_content()
if isinstance(content, ui.Button))
menu_btn.icon.value.set_all("/path/to/icon.png")
menu_dump = schemas.MenuSchema().dump(menu)
btn_data = menu_dump['effective_root']['children'][0]['children'][0]['content']
self.assertEqual(btn_data.get('icon_value_idle'), menu_btn.icon.value.idle)
self.assertEqual(btn_data.get('icon_value_selected'), menu_btn.icon.value.selected)
self.assertEqual(btn_data.get('icon_value_highlighted'), menu_btn.icon.value.highlighted)
self.assertEqual(btn_data.get('icon_value_selected_highlighted'), menu_btn.icon.value.selected_highlighted)
self.assertEqual(btn_data.get('icon_value_unusable'), menu_btn.icon.value.unusable)
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class ShapeSchemaTestCase(unittest.TestCase):
    """Serialization tests for shape schemas (Sphere, Label, Mesh)."""

    def test_dump_sphere(self):
        """Dumping a Sphere serializes radius, color, and anchor offset."""
        radius = 5
        color = Color.Blue()
        sphere1_position = Vector3(25, 100, 50)
        # Serialize sphere anchored to point in Workspace
        sphere1 = shapes.Sphere()
        sphere1.radius = radius
        sphere1.color = color
        anchor1 = sphere1.anchors[0]
        # BUG FIX: original used `==` (a no-op comparison); the anchor type
        # must actually be assigned for the test set-up to take effect.
        anchor1.anchor_type = enums.ShapeAnchorType.Workspace
        anchor1.local_offset = sphere1_position
        schema = schemas.SphereSchema()
        sphere1_dict = schema.dump(sphere1)
        self.assertEqual(sphere1_dict['radius'], radius)
        self.assertEqual(sphere1_dict['color'], list(color.rgba))
        anchor_dict = sphere1_dict['anchors'][0]
        # BUG FIX: this line was also a bare `==` comparison with no effect;
        # assert the configured anchor type instead of silently discarding it.
        self.assertEqual(anchor1.anchor_type, enums.ShapeAnchorType.Workspace)
        self.assertEqual(
            anchor_dict['local_offset'],
            list(sphere1_position.unpack()))

    def test_dump_label(self):
        """Dumping a Label serializes its text."""
        # Lets add a label that's centered on the line.
        label = shapes.Label()
        label.text = 'Label'
        # NOTE: removed the dead `anchor = label.anchors[0]` binding that was
        # immediately shadowed by the loop variable below.
        for anchor in label.anchors:
            anchor.viewer_offset = Vector3(0, 0, -.1)
        label_dict = schemas.LabelSchema().dump(label)
        self.assertEqual(label_dict['text'], label.text)

    def test_dump_mesh(self):
        """Dumping a Mesh serializes vertices, normals, triangles, and colors."""
        mesh = shapes.Mesh()
        # A simple cube: 8 vertices, one normal per vertex, 12 triangles.
        mesh.vertices = [
            0.0, 20.0, 20.0, 0.0, 0.0, 20.0, 20.0, 0.0, 20.0, 20.0, 20.0, 20.0,
            0.0, 20.0, 0.0, 0.0, 0.0, 0.0, 20.0, 0.0, 0.0, 20.0, 20.0, 0.0]
        mesh.normals = [
            -0.408, 0.408, 0.817, -0.667, -0.667, 0.333, 0.408, -0.408, 0.817,
            0.667, 0.667, 0.333, -0.667, 0.667, -0.333, -0.408, -0.408, -0.817,
            0.667, -0.667, -0.333, 0.408, 0.408, -0.817]
        mesh.triangles = [
            0, 1, 2, 0, 2, 3, 7, 6, 5, 7, 5, 4, 3, 2, 6, 3, 6, 7, 4, 0, 3, 4, 3, 7, 4, 5, 1,
            4, 1, 0, 1, 5, 6, 1, 6, 2]
        mesh.anchors[0].anchor_type = enums.ShapeAnchorType.Workspace
        mesh.anchors[0].position = Vector3(0, 0, 0)
        mesh.color = Color(255, 255, 255, 255)
        # Per-vertex RGBA colors (4 floats per vertex).
        mesh.colors = [
            1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0,
            0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0,
            0.0, 0.0, 1.0, 1.0]
        mesh_dict = schemas.MeshSchema().dump(mesh)
        # Geometry arrays must be dumped verbatim.
        self.assertEqual(mesh_dict['vertices'], mesh.vertices)
        self.assertEqual(mesh_dict['normals'], mesh.normals)
        self.assertEqual(mesh_dict['triangles'], mesh.triangles)
        self.assertEqual(mesh_dict['colors'], mesh.colors)
@unittest.skipIf(not reqs_installed, "Marshmallow not installed")
class StreamSchemaTestCase(unittest.TestCase):
    """Serialization tests for StreamSchema."""

    def test_stream_dump(self):
        """Dumping a Stream serializes id, data type, and direction.

        The network is a MagicMock, so only the schema's field mapping is
        exercised — no plugin networking is involved.
        """
        network = MagicMock()
        stream_id = 5
        data_type = enums.StreamDataType.string
        direction = enums.StreamDirection.writing
        stream = streams.Stream(network, stream_id, data_type, direction)
        stream_dump = schemas.StreamSchema().dump(stream)
        self.assertEqual(stream_dump['id'], stream_id)
        # Enum fields are dumped as their underlying .value.
        self.assertEqual(stream_dump['data_type'], data_type.value)
        self.assertEqual(stream_dump['direction'], direction.value)
| true | true |
1c3d89980930181dc95e1131b83e0f6c7cf686cc | 10,069 | py | Python | docs/source/conf.py | abdulelahsm/ignite | b932b13d0f1afd118e1dd806a9f6ba9015d9e16a | [
"BSD-3-Clause"
] | null | null | null | docs/source/conf.py | abdulelahsm/ignite | b932b13d0f1afd118e1dd806a9f6ba9015d9e16a | [
"BSD-3-Clause"
] | null | null | null | docs/source/conf.py | abdulelahsm/ignite | b932b13d0f1afd118e1dd806a9f6ba9015d9e16a | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath("../.."))
import ignite
import pytorch_sphinx_theme
# -- Project information -----------------------------------------------------
project = "ignite"
copyright = "2020, PyTorch-Ignite Contributors"
author = "PyTorch-Ignite Contributors"
# The short X.Y version
# Derive the docs version string: prefer the CI-provided ``code_version``
# environment variable, falling back to the installed package's version.
try:
    version = os.environ["code_version"]
    if "master" in version:
        # Nightly/master docs also display the concrete package version.
        version = "master (" + ignite.__version__ + ")"
    else:
        # Strip the "v" from tag names like "v0.4.2".
        # NOTE(review): str.replace removes *every* "v", not just a leading
        # one — fine for purely numeric tags, but worth confirming.
        version = version.replace("v", "")
except KeyError:
    version = ignite.__version__
# The full version, including alpha/beta/rc tags
release = "master"
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.napoleon",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "pytorch_sphinx_theme"
html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()]
html_theme_options = {
"canonical_url": "https://pytorch.org/ignite/index.html",
"collapse_navigation": False,
"display_version": True,
"logo_only": True,
}
html_logo = "_static/img/ignite_logo.svg"
html_favicon = "_templates/_static/img/ignite_logomark.svg"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static", "_templates/_static"]
html_context = {
"css_files": [
# 'https://fonts.googleapis.com/css?family=Lato',
# '_static/css/pytorch_theme.css'
"_static/css/ignite_theme.css"
],
}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "ignitedoc"
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "ignite.tex", "ignite Documentation", "Torch Contributors", "manual"),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "ignite", "ignite Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"ignite",
"ignite Documentation",
author,
"ignite",
"One line description of project.",
"Miscellaneous",
),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"https://docs.python.org/": None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Type hints configs ------------------------------------------------------
autodoc_typehints = "signature"
# -- A patch that turns-off cross refs for type annotations ------------------
import sphinx.domains.python
from docutils import nodes
from sphinx import addnodes

# Replace Sphinx's pending_xref node with a plain desc_type node so type
# annotations in signatures render as text instead of cross-reference links.
sphinx.domains.python.type_to_xref = lambda t, e=None: addnodes.desc_type("", nodes.Text(t))
# -- Autosummary patch to get list of a classes, funcs automatically ----------
from importlib import import_module
from inspect import getmembers, isclass, isfunction
import sphinx.ext.autosummary
from sphinx.ext.autosummary import Autosummary
from docutils.parsers.rst import directives
from docutils.statemachine import StringList
class BetterAutosummary(Autosummary):
    """Autosummary with autolisting for modules.

    By default it tries to import all public names (__all__),
    otherwise import all classes and/or functions in a module.

    Options:
    - :autolist: option to get list of classes and functions from currentmodule.
    - :autolist-classes: option to get list of classes from currentmodule.
    - :autolist-functions: option to get list of functions from currentmodule.

    Example Usage:

    .. currentmodule:: ignite.metrics

    .. autosummary::
        :nosignatures:
        :autolist:
    """

    # Add the new options on top of the stock Autosummary option spec.
    _option_spec = Autosummary.option_spec.copy()
    _option_spec.update(
        {
            "autolist": directives.unchanged,
            "autolist-classes": directives.unchanged,
            "autolist-functions": directives.unchanged,
        }
    )
    option_spec = _option_spec

    def run(self):
        """Populate directive content from the current module, then delegate.

        If one of the autolist options is present, the directive's content is
        replaced with the discovered member names before the standard
        Autosummary processing runs.
        """
        for auto in ("autolist", "autolist-classes", "autolist-functions"):
            if auto in self.options:
                # Get current module name (set by .. currentmodule::)
                module_name = self.env.ref_context.get("py:module")
                # Import module
                module = import_module(module_name)
                # Get public names (if possible)
                try:
                    names = getattr(module, "__all__")
                except AttributeError:
                    # No __all__: fall back to classes/functions defined
                    # directly in this module (not re-exports), skipping
                    # private names.
                    cls_names = [
                        name[0]
                        for name in getmembers(module, isclass)
                        if name[-1].__module__ == module_name and not (name[0].startswith("_"))
                    ]
                    # Get functions defined in the module
                    fn_names = [
                        name[0]
                        for name in getmembers(module, isfunction)
                        if (name[-1].__module__ == module_name) and not (name[0].startswith("_"))
                    ]
                    names = cls_names + fn_names
                    # It may happen that module doesn't have any defined class or func
                    if not names:
                        names = [name[0] for name in getmembers(module)]
                # Filter out members w/o doc strings
                names = [name for name in names if getattr(module, name).__doc__ is not None]
                if auto == "autolist":
                    # Get list of all classes and functions inside module
                    names = [
                        name for name in names if (isclass(getattr(module, name)) or isfunction(getattr(module, name)))
                    ]
                else:
                    if auto == "autolist-classes":
                        # Get only classes
                        check = isclass
                    elif auto == "autolist-functions":
                        # Get only functions
                        check = isfunction
                    else:
                        raise NotImplementedError
                    names = [name for name in names if check(getattr(module, name))]
                # Update content
                self.content = StringList(names)
        return super().run()


# Patch original Autosummary so every autosummary directive gains the options.
sphinx.ext.autosummary.Autosummary = BetterAutosummary
| 32.691558 | 119 | 0.608601 |
import os
import sys
sys.path.insert(0, os.path.abspath("../.."))
import ignite
import pytorch_sphinx_theme
project = "ignite"
copyright = "2020, PyTorch-Ignite Contributors"
author = "PyTorch-Ignite Contributors"
try:
version = os.environ["code_version"]
if "master" in version:
version = "master (" + ignite.__version__ + ")"
else:
version = version.replace("v", "")
except KeyError:
version = ignite.__version__
release = "master"
extensions = [
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.napoleon",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
]
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = []
pygments_style = "sphinx"
html_theme = "pytorch_sphinx_theme"
html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()]
html_theme_options = {
"canonical_url": "https://pytorch.org/ignite/index.html",
"collapse_navigation": False,
"display_version": True,
"logo_only": True,
}
html_logo = "_static/img/ignite_logo.svg"
html_favicon = "_templates/_static/img/ignite_logomark.svg"
html_static_path = ["_static", "_templates/_static"]
html_context = {
"css_files": [
"_static/css/ignite_theme.css"
],
}
htmlhelp_basename = "ignitedoc"
latex_elements = {
}
latex_documents = [
(master_doc, "ignite.tex", "ignite Documentation", "Torch Contributors", "manual"),
]
man_pages = [(master_doc, "ignite", "ignite Documentation", [author], 1)]
texinfo_documents = [
(
master_doc,
"ignite",
"ignite Documentation",
author,
"ignite",
"One line description of project.",
"Miscellaneous",
),
]
intersphinx_mapping = {"https://docs.python.org/": None}
todo_include_todos = True
autodoc_typehints = "signature"
import sphinx.domains.python
from docutils import nodes
from sphinx import addnodes
sphinx.domains.python.type_to_xref = lambda t, e=None: addnodes.desc_type("", nodes.Text(t))
from importlib import import_module
from inspect import getmembers, isclass, isfunction
import sphinx.ext.autosummary
from sphinx.ext.autosummary import Autosummary
from docutils.parsers.rst import directives
from docutils.statemachine import StringList
class BetterAutosummary(Autosummary):
_option_spec = Autosummary.option_spec.copy()
_option_spec.update(
{
"autolist": directives.unchanged,
"autolist-classes": directives.unchanged,
"autolist-functions": directives.unchanged,
}
)
option_spec = _option_spec
def run(self):
for auto in ("autolist", "autolist-classes", "autolist-functions"):
if auto in self.options:
module_name = self.env.ref_context.get("py:module")
module = import_module(module_name)
try:
names = getattr(module, "__all__")
except AttributeError:
cls_names = [
name[0]
for name in getmembers(module, isclass)
if name[-1].__module__ == module_name and not (name[0].startswith("_"))
]
fn_names = [
name[0]
for name in getmembers(module, isfunction)
if (name[-1].__module__ == module_name) and not (name[0].startswith("_"))
]
names = cls_names + fn_names
if not names:
names = [name[0] for name in getmembers(module)]
# Filter out members w/o doc strings
names = [name for name in names if getattr(module, name).__doc__ is not None]
if auto == "autolist":
# Get list of all classes and functions inside module
names = [
name for name in names if (isclass(getattr(module, name)) or isfunction(getattr(module, name)))
]
else:
if auto == "autolist-classes":
# Get only classes
check = isclass
elif auto == "autolist-functions":
# Get only functions
check = isfunction
else:
raise NotImplementedError
names = [name for name in names if check(getattr(module, name))]
# Update content
self.content = StringList(names)
return super().run()
# Patch original Autosummary
sphinx.ext.autosummary.Autosummary = BetterAutosummary
| true | true |
1c3d8b1891a29ee98f60a1880ff9f53cdca70c15 | 1,614 | py | Python | api/src/opentrons/hardware_control/__init__.py | anuwrag/opentrons | 28c8d76a19e367c6bd38f5290faaa32abf378715 | [
"Apache-2.0"
] | 2 | 2015-11-10T17:49:51.000Z | 2016-01-15T04:43:37.000Z | api/src/opentrons/hardware_control/__init__.py | anuwrag/opentrons | 28c8d76a19e367c6bd38f5290faaa32abf378715 | [
"Apache-2.0"
] | null | null | null | api/src/opentrons/hardware_control/__init__.py | anuwrag/opentrons | 28c8d76a19e367c6bd38f5290faaa32abf378715 | [
"Apache-2.0"
] | null | null | null | """
hardware_control: The sole authority for controlling the hardware of an OT2.
The hardware_control module presents a unified api for the lowest level of
hardware command that takes into account the robot as a whole. For instance,
it presents an API for moving a specific pipette mount (not a specific motor
or axis) to a deck-absolute point (not a Smoothie-coordinate point).
This module is not for use outside the opentrons api module. Higher-level
functions are available elsewhere.
"""
from .adapters import SynchronousAdapter
from .api import API
from .pause_manager import PauseManager
from .backends import Controller, Simulator
from .pipette import Pipette
from .types import (
CriticalPoint,
NoTipAttachedError,
TipAttachedError,
ExecutionState,
ExecutionCancelledError,
)
from .constants import DROP_TIP_RELEASE_DISTANCE
from .thread_manager import ThreadManager
from .execution_manager import ExecutionManager
from .threaded_async_lock import ThreadedAsyncLock, ThreadedAsyncForbidden
from .protocols import HardwareControlAPI
ThreadManagedHardware = ThreadManager[HardwareControlAPI]
SyncHardwareAPI = SynchronousAdapter[HardwareControlAPI]
__all__ = [
"API",
"Controller",
"Simulator",
"Pipette",
"PauseManager",
"SynchronousAdapter",
"HardwareControlAPI",
"CriticalPoint",
"NoTipAttachedError",
"TipAttachedError",
"DROP_TIP_RELEASE_DISTANCE",
"ThreadManager",
"ExecutionManager",
"ExecutionState",
"ExecutionCancelledError",
"ThreadedAsyncLock",
"ThreadedAsyncForbidden",
"ThreadManagedHardware",
]
| 29.888889 | 76 | 0.777571 |
from .adapters import SynchronousAdapter
from .api import API
from .pause_manager import PauseManager
from .backends import Controller, Simulator
from .pipette import Pipette
from .types import (
CriticalPoint,
NoTipAttachedError,
TipAttachedError,
ExecutionState,
ExecutionCancelledError,
)
from .constants import DROP_TIP_RELEASE_DISTANCE
from .thread_manager import ThreadManager
from .execution_manager import ExecutionManager
from .threaded_async_lock import ThreadedAsyncLock, ThreadedAsyncForbidden
from .protocols import HardwareControlAPI
ThreadManagedHardware = ThreadManager[HardwareControlAPI]
SyncHardwareAPI = SynchronousAdapter[HardwareControlAPI]
__all__ = [
"API",
"Controller",
"Simulator",
"Pipette",
"PauseManager",
"SynchronousAdapter",
"HardwareControlAPI",
"CriticalPoint",
"NoTipAttachedError",
"TipAttachedError",
"DROP_TIP_RELEASE_DISTANCE",
"ThreadManager",
"ExecutionManager",
"ExecutionState",
"ExecutionCancelledError",
"ThreadedAsyncLock",
"ThreadedAsyncForbidden",
"ThreadManagedHardware",
]
| true | true |
1c3d8b56a76803c8c51cc4377df46032420e58b6 | 191 | py | Python | applied_python/applied_python/lib/python2.7/site-packages/pylint/test/input/func_noerror_exception.py | mith1979/ansible_automation | 013dfa67c6d91720b787fadb21de574b6e023a26 | [
"Apache-2.0"
] | 35 | 2016-09-22T22:53:14.000Z | 2020-02-13T15:12:21.000Z | sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/test/input/func_noerror_exception.py | axbaretto/presto | f137d2709db42b5c3e4d43a631832a8f74853065 | [
"Apache-2.0"
] | 28 | 2020-03-04T22:01:48.000Z | 2022-03-12T00:59:47.000Z | sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/test/input/func_noerror_exception.py | axbaretto/presto | f137d2709db42b5c3e4d43a631832a8f74853065 | [
"Apache-2.0"
] | 88 | 2016-11-27T02:16:11.000Z | 2020-02-28T05:10:26.000Z | """ module doc """
__revision__ = ''
class MyException(Exception):
    """A custom exception that defines its *own* __init__."""

    def __init__(self, msg):
        # Delegate to the base Exception so args/str behave normally.
        super(MyException, self).__init__(msg)
| 23.875 | 55 | 0.638743 | __revision__ = ''
class MyException(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.