id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
5093531 | <filename>window/classic_setup_win.py<gh_stars>0
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'classic_setup_win_template.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Setup(object):
    """Auto-generated (pyuic5) UI definition for the classic setup window.

    Generated from 'classic_setup_win_template.ui'; per the file header, this
    class should be regenerated rather than hand-edited.
    """
    def setupUi(self, Setup):
        """Build and lay out all widgets on the given top-level widget ``Setup``."""
        Setup.setObjectName("Setup")
        Setup.resize(1214, 579)
        # Fixed size policy: the window is not user-resizable.
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Setup.sizePolicy().hasHeightForWidth())
        Setup.setSizePolicy(sizePolicy)
        # "Save setup" push button (bottom right).
        self.save_btn = QtWidgets.QPushButton(Setup)
        self.save_btn.setGeometry(QtCore.QRect(1010, 510, 181, 51))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift SemiBold")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.save_btn.setFont(font)
        self.save_btn.setObjectName("save_btn")
        # Centered label below the frame area (text set in retranslateUi).
        self.frame_width_label = QtWidgets.QLabel(Setup)
        self.frame_width_label.setGeometry(QtCore.QRect(300, 520, 301, 41))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift SemiBold")
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.frame_width_label.setFont(font)
        self.frame_width_label.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.frame_width_label.setAlignment(QtCore.Qt.AlignCenter)
        self.frame_width_label.setObjectName("frame_width_label")
        # Container + horizontal layout hosting the main image/frame label.
        self.horizontalLayoutWidget = QtWidgets.QWidget(Setup)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 881, 491))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.frame_layout = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.frame_layout.setContentsMargins(0, 0, 0, 0)
        self.frame_layout.setObjectName("frame_layout")
        # Main display label; SplitHCursor hints at horizontal drag interaction.
        self.frame = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.frame.setEnabled(True)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
        self.frame.setSizePolicy(sizePolicy)
        self.frame.setCursor(QtGui.QCursor(QtCore.Qt.SplitHCursor))
        self.frame.setText("")
        self.frame.setAlignment(QtCore.Qt.AlignCenter)
        self.frame.setObjectName("frame")
        self.frame_layout.addWidget(self.frame)
        # Boxed frame (top right) holding the slit-position controls.
        self.frame_2 = QtWidgets.QFrame(Setup)
        self.frame_2.setGeometry(QtCore.QRect(900, 10, 301, 211))
        self.frame_2.setFrameShape(QtWidgets.QFrame.Box)
        self.frame_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.frame_2.setLineWidth(2)
        self.frame_2.setObjectName("frame_2")
        # Numeric entry for the slit position (0..10000, no decimals).
        self.slit_pos_text_field = QtWidgets.QDoubleSpinBox(self.frame_2)
        self.slit_pos_text_field.setGeometry(QtCore.QRect(160, 20, 121, 51))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift SemiBold")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.slit_pos_text_field.setFont(font)
        self.slit_pos_text_field.setSuffix("")
        self.slit_pos_text_field.setDecimals(0)
        self.slit_pos_text_field.setMaximum(10000.0)
        self.slit_pos_text_field.setObjectName("slit_pos_text_field")
        # Caption label next to the spin box.
        self.slit_pos_text = QtWidgets.QLabel(self.frame_2)
        self.slit_pos_text.setGeometry(QtCore.QRect(20, 20, 151, 51))
        font = QtGui.QFont()
        font.setFamily("Bahnschrift SemiBold")
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.slit_pos_text.setFont(font)
        self.slit_pos_text.setObjectName("slit_pos_text")
        self.retranslateUi(Setup)
        QtCore.QMetaObject.connectSlotsByName(Setup)
    def retranslateUi(self, Setup):
        """Assign all translatable UI strings via Qt's translation machinery."""
        _translate = QtCore.QCoreApplication.translate
        Setup.setWindowTitle(_translate("Setup", "Form"))
        self.save_btn.setText(_translate("Setup", " Save setup"))
        self.frame_width_label.setText(_translate("Setup", "<< >>"))
        self.slit_pos_text.setText(_translate("Setup", "Slit position:"))
| StarcoderdataPython |
6660097 | import logging
import math
import random
import sys
from typing import Callable, Set
import numpy as np
import pytest
from leaker.api import DataSink, RandomRangeDatabase, InputDocument, RangeDatabase, BaseRangeDatabase, \
PermutedBetaRandomRangeDatabase, BTRangeDatabase, ABTRangeDatabase, Selectivity
from leaker.attack import SubgraphVL, VolAn, SelVolAn, SubgraphID, Countv2, LMPrid, LMPrank, ApproxValue, \
LMPaux, ApproxOrder, GJWbasic, GJWspurious, GJWmissing, GLMP18, LMPappRec, Arrorder, GJWpartial, \
RangeCountBaselineAttack, Apa
from leaker.attack.kkno import GeneralizedKKNO
from leaker.attack.query_space import MissingBoundedRangeQuerySpace, ShortRangeQuerySpace, \
ValueCenteredRangeQuerySpace, PermutedBetaRangeQuerySpace, PartialQuerySpace, UniformRangeQuerySpace, \
BoundedRangeQuerySpace
from leaker.evaluation import KeywordAttackEvaluator, RangeAttackEvaluator, EvaluationCase, DatasetSampler, \
QuerySelector
from leaker.evaluation.errors import MAError, MaxASymError, MaxABucketError, CountSError, CountAError, \
CountPartialVolume, MSError, SetCountAError, OrderedMAError
from leaker.extension import VolumeExtension
from leaker.preprocessing import Preprocessor, Filter, Sink
from leaker.preprocessing.data import DirectoryEnumerator, RelativeFile, FileLoader, FileToDocument, \
PlainFileParser
from leaker.whoosh_interface import WhooshBackend, WhooshWriter
# Module-level logging setup: mirror log records to stdout and to a UTF-8 file.
f = logging.Formatter(fmt='{asctime} {levelname:8.8} {process} --- [{threadName:12.12}] {name:32.32}: {message}',
                      style='{')
console = logging.StreamHandler(sys.stdout)
console.setFormatter(f)
# 'w' mode truncates the previous run's log file.
file = logging.FileHandler('test_laa_eval.log', 'w', 'utf-8')
file.setFormatter(f)
log = logging.getLogger(__name__)
logging.basicConfig(handlers=[console, file], level=logging.INFO)
def init_rngs(seed):
    """Seed both the stdlib and the NumPy global RNGs so test runs are reproducible."""
    for seeder in (random.seed, np.random.seed):
        seeder(seed)
class EvaluatorTestSink(DataSink):
    """A DataSink that forwards each offered data point to a verification callback.

    The callback receives ``(series_id, kdr, rr, n)`` where ``n`` is a running
    counter of data points seen so far.  Note: the previous annotation declared
    a 5-parameter callback, but the callback has always been invoked with these
    4 arguments; the annotation is corrected here to match the actual call.
    """
    __n: int  # number of data points offered so far
    __cb: Callable[[str, float, float, int], None]

    def __init__(self, callback: Callable[[str, float, float, int], None]):
        self.__n = 0
        self.__cb = callback

    def register_series(self, series_id: str, user_ids: int = 1) -> None:
        # No bookkeeping needed for the test sink.
        pass

    def offer_data(self, series_id: str, user_id: int, kdr: float, rr: float) -> None:
        # user_id is part of the DataSink interface but deliberately ignored here.
        self.__cb(series_id, kdr, rr, self.__n)
        self.__n += 1

    def flush(self) -> None:
        pass
def test_indexing():
    """Index the random_words corpus with Whoosh and verify the indexed keyword
    set equals the words in the preprocessed reference file."""
    random_words = DirectoryEnumerator("data_sources/random_words")
    rw_filter: Filter[RelativeFile, InputDocument] = FileLoader(PlainFileParser()) | FileToDocument()
    rw_sink: Sink[InputDocument] = WhooshWriter("random_words")
    preprocessor = Preprocessor(random_words, [rw_filter > rw_sink])
    preprocessor.run()
    backend = WhooshBackend()
    data = backend.load_dataset('random_words')
    keywords: Set[str] = set()
    # Plain string literal (was a placeholder-free f-string); 'fh' avoids
    # shadowing the module-level logging Formatter named 'f'.
    with open("random_words_processed.txt", "r") as fh:
        for line in fh:
            keywords.update(line.split())
    assert keywords == data.keywords()
def test_keyword_attack():
    """Run keyword attacks (VolAn, SelVolAn, Subgraph*, Countv2) on the
    random_words index and check recovery rates against loose lower bounds."""
    init_rngs(1)
    backend = WhooshBackend()
    if not backend.has('random_words'):
        # Build the Whoosh index on first use.
        test_indexing()
    random_words = backend.load_dataset('random_words').extend_with(VolumeExtension)
    query_space = PartialQuerySpace
    space_size = 500
    query_size = 150
    sel = Selectivity.Independent
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        # Minimum acceptable recovery rate (rr) per known-data rate (kdr).
        golden_dict = {0.25: 0.001, 0.5: 0.01, 0.75: 0.025, 1: 0.05}
        if series_id != "Countv2":
            # NOTE(review): Countv2 appears exempt from the threshold check;
            # confirm the original indentation placed the assert in this branch.
            print(f"Got series_id {series_id}, kdr {kdr}, rr {rr}")
            assert (rr >= golden_dict[kdr])
    verifier = EvaluatorTestSink(verif_cb)
    run = KeywordAttackEvaluator(evaluation_case=EvaluationCase(attacks=[VolAn, SelVolAn,
                                                                         SubgraphID.definition(epsilon=13),
                                                                         SubgraphVL.definition(epsilon=7), Countv2],
                                                                dataset=random_words, runs=1),
                                 dataset_sampler=DatasetSampler(kdr_samples=[0.25, 0.5, 0.75, 1.0], reuse=True,
                                                                monotonic=False),
                                 query_selector=QuerySelector(query_space=query_space,
                                                              selectivity=sel,
                                                              query_space_size=space_size, queries=query_size,
                                                              allow_repetition=False),
                                 sinks=verifier,
                                 parallelism=8)
    run.run()
def test_full_range_attacks():
    """Full-reconstruction range attacks on a dense DB must achieve zero error."""
    init_rngs(1)
    # density=1: every value in [1, 16] occurs, enabling exact reconstruction.
    db = RandomRangeDatabase("test", 1, 16, density=1)
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        # rr is the mean absolute error here (MAError); exact recovery expected.
        assert rr == 0
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[LMPrank, LMPrid,
                                                                       LMPappRec.definition(return_mid_point=False,
                                                                                            error=.0625)],
                                                              dataset=db,
                                                              runs=1,
                                                              error=MAError),
                               range_queries=UniformRangeQuerySpace(db, 10 ** 4, allow_repetition=True,
                                                                    allow_empty=True),
                               query_counts=[10 ** 4],
                               sinks=verifier,
                               parallelism=8)
    run.run()
def test_approx_range_attacks():
    """Approximate range attacks (GeneralizedKKNO, ApproxValue, LMP-aux): the
    normalized error at each query count must fall within known (lo, hi) bands."""
    init_rngs(1)
    db_kkno = RandomRangeDatabase("test", 0, 1000, density=0.4, allow_repetition=True)
    # query count -> (min, max) acceptable normalized error per attack.
    golden_dict_kkno = {100: (0.05, 0.08), 1000: (0.01, 0.02), 10000: (0.009, 0.02)}
    golden_dict_avalue = {100: (0.05, 0.4), 500: (0.01, 0.12), 1000: (0.0009, 0.09)}
    golden_dict_lmpapprox = {100: (0.0000001, 0.1), 1000: (0, 0.01), 10000: (0, 0.001)}
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        # kdr carries the query count for range evaluations; rr is the error.
        if series_id == "GeneralizedKKNO":
            assert golden_dict_kkno[kdr][0] <= rr <= golden_dict_kkno[kdr][1]
        if series_id == "ApproxValue":
            assert golden_dict_avalue[kdr][0] <= rr <= golden_dict_avalue[kdr][1]
        if series_id == "LMP-aux":
            assert golden_dict_lmpapprox[kdr][0] <= rr <= golden_dict_lmpapprox[kdr][1]
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GeneralizedKKNO],
                                                              dataset=db_kkno,
                                                              runs=1,
                                                              error=MAError),
                               range_queries=UniformRangeQuerySpace(db_kkno, 10 ** 6, allow_repetition=True,
                                                                    allow_empty=True),
                               query_counts=[100, 1000, 10 ** 4], normalize=True,
                               sinks=verifier,
                               parallelism=8)
    run.run()
    db_avalue = RandomRangeDatabase("test", min_val=1, max_val=1000, length=1000, allow_repetition=True)
    eval = RangeAttackEvaluator(EvaluationCase(ApproxValue, db_avalue, 1, error=MaxASymError),
                                UniformRangeQuerySpace(db_avalue, 10 ** 5, allow_repetition=True, allow_empty=True),
                                [100, 500, 1000],
                                verifier, normalize=True, parallelism=8)
    eval.run()
    db_lmpapprox = RandomRangeDatabase("test", 1, 100, length=80, allow_repetition=True)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[LMPaux],
                                                              dataset=db_lmpapprox,
                                                              runs=1,
                                                              error=MAError),
                               range_queries=UniformRangeQuerySpace(db_lmpapprox, 10 ** 6, allow_repetition=True,
                                                                    allow_empty=True),
                               query_counts=[100, 1000, 10 ** 4], normalize=True,
                               sinks=verifier,
                               parallelism=8)
    run.run()
def test_approx_order_attack():
    """ApproxOrder attack: bucket error must fall within the bands of GLMP19 Fig. 3."""
    init_rngs(1)
    db = RandomRangeDatabase("test", min_val=1, max_val=1000, length=1000, allow_repetition=True)
    golden_dict = {100: (0.01, 0.35), 500: (0, 0.02), 1000: (0, 0.01)}  # Fig. 3 of GLMP19
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        # kdr carries the query count; rr is the max bucket error.
        assert golden_dict[kdr][0] <= rr <= golden_dict[kdr][1]
    verifier = EvaluatorTestSink(verif_cb)
    eval = RangeAttackEvaluator(EvaluationCase(ApproxOrder.definition(bucket_error_rec=True), db, 1,
                                               error=MaxABucketError),
                                UniformRangeQuerySpace(db, 10 ** 5, allow_repetition=True, allow_empty=True),
                                [100, 500, 1000],
                                verifier, normalize=True, parallelism=8)
    eval.run()
def test_range_attack_arr_uniform():
    """Arr-order attack under uniform range queries: mean absolute error stays in (1, 25)."""
    init_rngs(1)
    db = RandomRangeDatabase("test", 1, 10 ** 3, density=0.4, allow_repetition=False)
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert 1 < rr < 25
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[Arrorder],
                                                              dataset=db,
                                                              runs=1,
                                                              error=MAError),
                               range_queries=UniformRangeQuerySpace(db, 10 ** 4, allow_repetition=False,
                                                                    allow_empty=False),
                               query_counts=[10 ** 4],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
def test_range_attack_arr_shortranges():
    """Arr-order attack under short range queries: mean squared error stays in (1, 125)."""
    init_rngs(1)
    db = RandomRangeDatabase("test", 1, 10 ** 3, density=0.8, allow_repetition=False)
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert 1 < rr < 125
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[Arrorder],
                                                              dataset=db,
                                                              runs=1,
                                                              error=MSError),
                               range_queries=ShortRangeQuerySpace(db, 10 ** 4, allow_repetition=False,
                                                                  allow_empty=False),
                               query_counts=[10 ** 4],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
def test_range_attack_arr_valuecentered():
    """Arr-order attack under value-centered queries: MSE stays in (1, 5000)."""
    init_rngs(1)
    db = RandomRangeDatabase("test", 1, 10 ** 3, density=0.8, allow_repetition=False)
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert 1 < rr < 5000
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[Arrorder],
                                                              dataset=db,
                                                              runs=1,
                                                              error=MSError),
                               range_queries=ValueCenteredRangeQuerySpace(db, 5 * 10 ** 4, allow_repetition=False,
                                                                          allow_empty=False),
                               query_counts=[5 * 10 ** 4],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
# Takes about 20 seconds
def test_basic_range_counts():
    """Count-reconstruction attacks (GJW*, GLMP18) on fixed histograms must
    reconstruct counts exactly (symmetric count error ~ 0)."""
    init_rngs(1)
    # Value histogram: v[i] occurrences of value i+1.
    v = [5060, 13300, 7080, 4360, 3310, 2280, 1870, 1750, 1570, 1320, 1400, 1350, 1410, 1140, 1400, 1020, 1310, 1440,
         1220]
    db = RangeDatabase("test", [i + 1 for i, val in enumerate(v) for _ in range(val)])
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert abs(rr) == pytest.approx(0)
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GJWbasic, GJWspurious],
                                                              dataset=db,
                                                              runs=1,
                                                              error=CountSError),
                               range_queries=BoundedRangeQuerySpace(db, allow_empty=False, allow_repetition=False),
                               query_counts=[-1],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GLMP18],
                                                              dataset=db,
                                                              runs=1,
                                                              error=CountSError),
                               range_queries=UniformRangeQuerySpace(db, allow_empty=False, allow_repetition=False),
                               query_counts=[-1],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
    # Smaller histogram for the missing-queries variant of GJW.
    v = [1344, 9635, 13377, 17011, 17731, 19053, 21016]
    db = RangeDatabase("test", [i + 1 for i, val in enumerate(v) for _ in range(val)])
    bound = 6
    k = 2
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GJWmissing.definition(bound=bound, k=k)],
                                                              dataset=db,
                                                              runs=1,
                                                              error=CountSError),
                               range_queries=MissingBoundedRangeQuerySpace(db, allow_empty=False,
                                                                           allow_repetition=False, bound=bound, k=k),
                               query_counts=[-1],
                               sinks=verifier,
                               parallelism=8, normalize=False)
    run.run()
# Takes about 3 minutes and tests the case when preprocessing does not find a solution and clique-finding is employed
def test_glmp18_cliques():
    """GLMP18 fallback path: histograms chosen so preprocessing fails and
    clique-finding (networkx / graph-tool backends) is exercised."""
    init_rngs(1)
    v_networkx = [3, 6, 2, 3, 3, 4, 2, 3, 2]
    v_graphtool = [20, 4040, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 780, 10, 10, 20, 30, 10, 10, 20, 10, 10,
                   10, 20, 10, 30, 20, 10, 20]
    def verif_cb_networkx(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert 0 <= rr < .25
    def verif_cb_graphtool(series_id: str, kdr: float, rr: float, n: int) -> None:
        assert 0 <= rr < 60
    for v, verif_cb in [(v_networkx, verif_cb_networkx), (v_graphtool, verif_cb_graphtool)]:
        db = RangeDatabase("test", [i + 1 for i, val in enumerate(v) for _ in range(val)])
        verifier = EvaluatorTestSink(verif_cb)
        run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GLMP18],
                                                                  dataset=db,
                                                                  runs=1,
                                                                  error=SetCountAError),
                                   range_queries=UniformRangeQuerySpace(db, allow_empty=False, allow_repetition=False),
                                   query_counts=[-1],
                                   sinks=verifier,
                                   parallelism=8, normalize=False)
        run.run()
@pytest.mark.skip()
def test_gjwpartial():
    """GJWpartial smoke run (skipped by default); only prints the partial-volume error."""
    init_rngs(1)
    db = RandomRangeDatabase("test", 1, 30, density=10, allow_repetition=True)
    def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
        # No assertion: exploratory output only.
        print(rr)
    verifier = EvaluatorTestSink(verif_cb)
    run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[GJWpartial],
                                                              dataset=db,
                                                              runs=1,
                                                              error=CountPartialVolume),
                               range_queries=BoundedRangeQuerySpace(db, allow_repetition=False,
                                                                    allow_empty=False),
                               query_counts=[-1],
                               sinks=verifier,
                               parallelism=1, normalize=False)
    run.run()
def test_regular_schemes():
    """Verify the canonical-query counts of Base/ABT/BT range schemes against
    their closed-form formulas, then smoke-run the baseline attack on each."""
    big_n = 2**10
    init_rngs(1)
    vals = RandomRangeDatabase("test", 1, big_n, density=.5, allow_repetition=True).get_numerical_values()
    db1 = BaseRangeDatabase("test", values=vals)
    # Base scheme: sum over dyadic levels of (N - 2^i + 1).
    assert db1.num_canonical_queries() == sum(big_n - 2 ** i + 1 for i in range(int(math.log2(big_n)) + 1))
    db2 = ABTRangeDatabase("test", values=vals)
    # ABT scheme closed form.
    assert db2.num_canonical_queries() == 2*(2*big_n - 1) - math.log2(big_n) - big_n
    db3 = BTRangeDatabase("test", values=vals)
    # BT scheme: number of nodes in a full binary tree over N leaves.
    assert db3.num_canonical_queries() == 2*big_n - 1
    for db in [db1, db2, db3]:
        def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
            # Smoke test only: no error bound asserted.
            pass
        verifier = EvaluatorTestSink(verif_cb)
        run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[RangeCountBaselineAttack],
                                                                  dataset=db,
                                                                  runs=1,
                                                                  error=CountAError),
                                   range_queries=UniformRangeQuerySpace(db, allow_empty=True, allow_repetition=True),
                                   query_counts=[-1],
                                   sinks=verifier,
                                   parallelism=1, normalize=False)
        run.run()
def test_range_attack_apa():
    """APA attack on ABT/BT schemes over a permuted-beta database: ordered
    mean absolute error must stay in (1, 40)."""
    init_rngs(1)
    db = PermutedBetaRandomRangeDatabase("test", 1, 2**10, .1)
    values = db.get_numerical_values()
    for db in [ABTRangeDatabase("test", values=values), BTRangeDatabase("test", values=values)]:
        def verif_cb(series_id: str, kdr: float, rr: float, n: int) -> None:
            print(f"rr: {rr}")
            assert 1 < rr < 40
        verifier = EvaluatorTestSink(verif_cb)
        run = RangeAttackEvaluator(evaluation_case=EvaluationCase(attacks=[Apa.definition(m=3)],
                                                                  dataset=db,
                                                                  runs=1,
                                                                  error=OrderedMAError),
                                   range_queries=PermutedBetaRangeQuerySpace(db, 10 ** 4, allow_repetition=True,
                                                                             allow_empty=True, alpha=1, beta=5),
                                   query_counts=[3072],
                                   sinks=verifier,
                                   parallelism=1, normalize=False)
        run.run()
def test_big_q_calculation():
    """Check the canonical-query counts of the ABT, BT and Base range schemes
    on a fixed permuted-beta database."""
    source_db = PermutedBetaRandomRangeDatabase("test", 1, 2**10, .05)
    vals = source_db.get_numerical_values()
    checks = [
        (ABTRangeDatabase("test0", values=vals), 3060),
        (BTRangeDatabase("test1", values=vals), 2047),
        (BaseRangeDatabase("test2", values=vals), 9228),
    ]
    for scheme_db, expected_count in checks:
        assert scheme_db.num_canonical_queries() == expected_count
| StarcoderdataPython |
1892161 | <reponame>D-Mbithi/Real-Python-Course-Solutions
from reportlab.pdfgen import canvas
# Minimal ReportLab example: write "hello world" at (250, 500) into hello.pdf.
c = canvas.Canvas("hello.pdf")
c.drawString(250, 500, "hello world")
c.save()
| StarcoderdataPython |
3352338 | # -*- coding: utf-8 -*-
"""Top-level package for FlowPing."""
__author__ = """<NAME>"""
__email__ = '<EMAIL>'
__version__ = '0.1.0'
| StarcoderdataPython |
1615638 | <reponame>remarkablerocket/changelog-cli
__version__ = "0.7.0"
| StarcoderdataPython |
5135410 | <filename>desugar/__init__.py
"""Re-implement the parts of Python that allow removing its syntactic sugar."""
__version__ = "0"
| StarcoderdataPython |
6683655 | #-*- coding: utf8
from __future__ import print_function, division
from pyksc import ksc
import myio
import numpy as np
def cluster(T, num_clust=5):
    """Cluster the rows of the time-series matrix ``T`` with the KSC algorithm.

    Parameters
    ----------
    T : ndarray of shape (row, time series length)
        The time series to cluster; a tiny epsilon is added to every entry
        before clustering (presumably to avoid all-zero rows -- confirm).
    num_clust : int
        Number of clusters to create.

    Returns
    -------
    The cluster centroids and the per-row cluster assignments from
    ``ksc.inc_ksc``.
    """
    shifted = np.asarray(T + 1e-20, order='C').copy()
    centroids, assignments, _, _ = ksc.inc_ksc(shifted, num_clust)
    return centroids, assignments
if __name__ == '__main__':
    # Cluster the first 12 time steps of the 48h visit series for several
    # cluster counts and dump centroids/assignments to text files.
    T_train_visits = myio.read_48h_timeseries('visits').values[:, :12]
    for num_clust in [10, 30, 50, 70, 90, 110]:
        cents_visits, assign_visits = cluster(T_train_visits, num_clust)
        np.savetxt('cents_visits_%d.dat' % num_clust, cents_visits)
        np.savetxt('assign_visits_%d.dat' % num_clust, assign_visits)
| StarcoderdataPython |
1846853 | # Lint as: python
#
# Authors: Vittorio | Francesco
# Location: Turin, Biella, Ivrea
#
# This file is based on the work of Francisco Dorr - PROBA-V-3DWDSR (https://github.com/frandorr/PROBA-V-3DWDSR)
"""Training class and some functions for training RAMS"""
import tensorflow as tf
from tensorflow.keras.utils import Progbar
from tensorflow.keras.metrics import Mean
import os
def random_flip(lr_img, hr_img, hr_img_mask):
    """Data augmentation: with probability 0.5, flip LR image, HR image and
    HR mask horizontally (all three together)."""
    coin = tf.random.uniform(shape=(), maxval=1)

    def _unchanged():
        return lr_img, hr_img, hr_img_mask

    def _flipped():
        return (tf.image.flip_left_right(lr_img),
                tf.image.flip_left_right(hr_img),
                tf.image.flip_left_right(hr_img_mask))

    return tf.cond(coin < 0.5, _unchanged, _flipped)
def random_rotate(lr_img, hr_img, hr_img_mask):
    """Data augmentation: rotate LR image, HR image and HR mask by the same
    random multiple of 90 degrees."""
    quarter_turns = tf.random.uniform(shape=(), maxval=4, dtype=tf.int32)
    return tuple(tf.image.rot90(img, quarter_turns)
                 for img in (lr_img, hr_img, hr_img_mask))
class Trainer(object):
    """
    Train a network and manage weights loading and saving
    ...
    Attributes
    ----------
    model: obj
        model to be trained
    band: string
        band to train with
    image_hr_size: int
        size of the HR image
    name_net: string
        name of the network
    loss: obj
        loss function
    metric: obj
        metric function
    optimizer: obj
        optimizer of the training
    checkpoint_dir: string
        weights path
    log_dir: string
        logs path
    Methods
    -------
    restore()
        Restore a previous version found in 'checkpoint_dir' path
    fit(self, x=None, y=None, batch_size=None, buffer_size=512, epochs=100,
        verbose=1, evaluate_every=100, val_steps=100,
        validation_data=None, shuffle=True, initial_epoch=0, save_best_only=True,
        data_aug = False)
        Train the network with the configuration passed to the function
    train_step(self, lr, hr, mask)
        A single training step
    test_step(self, lr, hr, mask)
        A single testing step
    """
    def __init__(self,
                 model, band, image_hr_size, name_net,
                 loss, metric,
                 optimizer,
                 checkpoint_dir='./checkpoint', log_dir='logs'):
        self.now = None
        self.band = band
        self.name_net = name_net
        self.loss = loss
        self.image_hr_size = image_hr_size
        self.metric = metric
        self.log_dir = log_dir
        # Running means for the four tracked quantities.
        self.train_loss = Mean(name='train_loss')
        self.train_psnr = Mean(name='train_psnr')
        self.test_loss = Mean(name='test_loss')
        self.test_psnr = Mean(name='test_psnr')
        # Checkpoint tracks the global step, best PSNR, optimizer and model.
        self.checkpoint = tf.train.Checkpoint(step=tf.Variable(0),
                                              psnr=tf.Variable(1.0),
                                              optimizer=optimizer,
                                              model=model)
        self.checkpoint_manager = tf.train.CheckpointManager(checkpoint=self.checkpoint,
                                                             directory=checkpoint_dir,
                                                             max_to_keep=3)
        self.restore()

    def restore(self):
        """Restore model/optimizer state from the latest checkpoint, if any."""
        if self.checkpoint_manager.latest_checkpoint:
            self.checkpoint.restore(self.checkpoint_manager.latest_checkpoint)
            print(
                f'Model restored from checkpoint at step {self.checkpoint.step.numpy()}.')

    @property
    def model(self):
        return self.checkpoint.model

    def fit(self, x=None, y=None, batch_size=None, buffer_size=512, epochs=100,
            verbose=1, evaluate_every=100, val_steps=100,
            validation_data=None, shuffle=True, initial_epoch=0, save_best_only=True,
            data_aug = False):
        """Train for `epochs` epochs, evaluating every `evaluate_every` steps and
        checkpointing when validation PSNR improves (if `save_best_only`)."""
        ds_len = x.shape[0]
        # Create dataset from slices
        train_ds = tf.data.Dataset.from_tensor_slices(
            (x, *y)).shuffle(buffer_size,
                             reshuffle_each_iteration=True).batch(batch_size).prefetch(tf.data.experimental.AUTOTUNE)
        if data_aug:
            # BUG FIX: Dataset.map returns a NEW dataset; the previous code
            # discarded the result, so augmentation was silently never applied.
            train_ds = train_ds.map(random_rotate, num_parallel_calls=tf.data.experimental.AUTOTUNE)
            train_ds = train_ds.map(random_flip, num_parallel_calls=tf.data.experimental.AUTOTUNE)
        val_ds = tf.data.Dataset.from_tensor_slices(
            (validation_data[0], *validation_data[1])).shuffle(buffer_size).batch(batch_size).prefetch(tf.data.experimental.AUTOTUNE).take(val_steps)
        # Tensorboard logger
        writer_train = tf.summary.create_file_writer(os.path.join(self.log_dir, f'train_{self.band}_{self.name_net}'))
        writer_test = tf.summary.create_file_writer(os.path.join(self.log_dir, f'test_{self.band}_{self.name_net}'))
        global_step = tf.cast(self.checkpoint.step, tf.int64)
        total_steps = tf.cast(ds_len/batch_size, tf.int64)
        # Step within the current epoch, resumed from the checkpoint's step.
        step = tf.cast(self.checkpoint.step, tf.int64) % total_steps
        for epoch in range(epochs - initial_epoch):
            # Iterate over the batches of the dataset.
            print("\nEpoch {}/{}".format(epoch + 1 + initial_epoch, epochs))
            pb_i = Progbar(ds_len, stateful_metrics=['Loss', 'PSNR', 'Val Loss', 'Val PSNR'])
            for x_batch_train, y_batch_train, y_mask_batch_train in train_ds:
                if (total_steps - step) == 0:
                    # Epoch boundary: reset the per-epoch training metrics.
                    step = tf.cast(self.checkpoint.step, tf.int64) % total_steps
                    # Reset metrics
                    self.train_loss.reset_states()
                    self.train_psnr.reset_states()
                step += 1
                global_step += 1
                self.train_step(x_batch_train, y_batch_train, y_mask_batch_train)
                self.checkpoint.step.assign_add(1)
                with writer_train.as_default():
                    tf.summary.scalar(
                        'PSNR', self.train_psnr.result(), step=global_step)
                    tf.summary.scalar(
                        'Loss', self.train_loss.result(), step=global_step)
                if step != 0 and (step % evaluate_every) == 0:
                    # Reset states for test
                    self.test_loss.reset_states()
                    self.test_psnr.reset_states()
                    for x_batch_val, y_batch_val, y_mask_batch_val in val_ds:
                        self.test_step(x_batch_val, y_batch_val, y_mask_batch_val)
                    with writer_test.as_default():
                        tf.summary.scalar(
                            'Loss', self.test_loss.result(), step=global_step)
                        tf.summary.scalar(
                            'PSNR', self.test_psnr.result(), step=global_step)
                    writer_train.flush()
                    writer_test.flush()
                    if save_best_only and (self.test_psnr.result() <= self.checkpoint.psnr):
                        # skip saving checkpoint, no PSNR improvement.
                        # NOTE(review): this `continue` also skips the progress-bar
                        # update below -- confirm that is intended.
                        continue
                    # NOTE(review): plain assignment replaces the tf.Variable with a
                    # tensor; `.assign(...)` would keep `psnr` checkpointable -- confirm.
                    self.checkpoint.psnr = self.test_psnr.result()
                    self.checkpoint_manager.save()
                values = [('Loss', self.train_loss.result()), ('PSNR', self.train_psnr.result()),
                          ('Val Loss', self.test_loss.result()), ('Val PSNR', self.test_psnr.result())]
                pb_i.add(batch_size, values=values)

    @tf.function
    def train_step(self, lr, hr, mask):
        """One optimization step on a single batch (lr -> sr vs hr under mask)."""
        lr = tf.cast(lr, tf.float32)
        with tf.GradientTape() as tape:
            sr = self.checkpoint.model(lr, training=True)
            loss = self.loss(hr, sr, mask, self.image_hr_size)
        gradients = tape.gradient(
            loss, self.checkpoint.model.trainable_variables)
        self.checkpoint.optimizer.apply_gradients(
            zip(gradients, self.checkpoint.model.trainable_variables))
        metric = self.metric(hr, sr, mask)
        self.train_loss(loss)
        self.train_psnr(metric)

    @tf.function
    def test_step(self, lr, hr, mask):
        """One evaluation step on a single batch; updates test loss/PSNR means."""
        lr = tf.cast(lr, tf.float32)
        sr = self.checkpoint.model(lr, training=False)
        t_loss = self.loss(hr, sr, mask, self.image_hr_size)
        t_metric = self.metric(hr, sr, mask)
        self.test_loss(t_loss)
        self.test_psnr(t_metric)
| StarcoderdataPython |
270470 | <reponame>ttbrunner/blackbox_starting_points
import numpy as np
def find_img_centroid(img, min_mass_threshold=0.):
    """Finds the centroid of a grayscale image (center of mass for a saliency map).

    Pixels with mass below `min_mass_threshold` are ignored. Returns the
    centroid as a rounded uint8 [y, x] pair, or [-1, -1] if the total mass is
    (near) zero. Vectorized with NumPy (the old per-pixel Python loop was
    O(H*W) in interpreter time).
    """
    assert len(img.shape) == 2
    img = np.asarray(img)
    # Boolean mask of pixels that contribute to the center of mass.
    mask = img >= min_mass_threshold
    masses = img[mask]
    mass_sum = float(masses.sum())
    # Empty image.
    if mass_sum < 1e-5:
        return np.array([-1, -1])
    coords = np.argwhere(mask)  # shape (k, 2), rows are [y, x]
    centr = (masses[:, np.newaxis] * coords).sum(axis=0) / mass_sum
    centr = np.uint8(np.round(centr))
    return centr
def line_search_to_boundary(bb_model, x_orig, x_start, label, is_targeted):
    """
    Binary search along a line between start and original image in order to find the decision boundary.
    :param bb_model: The (black-box) model.
    :param x_orig: The original image to attack.
    :param x_start: The starting image (which fulfills the adversarial criterion)
    :param is_targeted: true if the attack is targeted.
    :param label: the target label if targeted, or the correct label if untargeted.
    :return: A point next to the decision boundary (but still adversarial)
    """
    stop_dist = 0.5  # Stop once the bracket is shorter than this (L2 in [0,1]-scaled space)
    adv_end = np.float32(x_start)    # always satisfies the adversarial criterion
    clean_end = np.float32(x_orig)   # always fails it
    n_queries = 0
    gap = clean_end - adv_end
    while np.linalg.norm(gap / 255.) > stop_dist:
        n_queries += 1
        midpoint = adv_end + 0.5 * gap
        is_adv = (np.argmax(bb_model.predictions(midpoint)) == label) == is_targeted
        if is_adv:
            adv_end = midpoint
        else:
            clean_end = midpoint
        gap = clean_end - adv_end
    print("Found decision boundary after {} queries.".format(n_queries))
    return adv_end
def find_closest_img(bb_model, X_orig, X_targets, label, is_targeted):
    """
    From a list of potential starting images, finds the closest to the original.
    Before returning, this method makes sure that the image fulfills the adversarial condition (is actually classified as the target label).
    :param bb_model: The (black-box) model.
    :param X_orig: The original image to attack.
    :param X_targets: List of images that fulfill the adversarial criterion (i.e. target class in the targeted case)
    :param is_targeted: true if the attack is targeted.
    :param label: the target label if targeted, or the correct label if untargeted.
    :return: the closest image (in L2 distance) to the original that also fulfills the adversarial condition.
    """
    # Distances are computed on [0,1]-scaled pixels (assumes uint8-range input -- confirm).
    X_orig_normed = np.float32(X_orig) / 255.
    dists = np.empty(len(X_targets), dtype=np.float32)
    for i in range(len(X_targets)):
        d_l2 = np.linalg.norm((np.float32(X_targets[i, ...]) / 255. - X_orig_normed))
        dists[i] = d_l2
    # Try candidates from closest to farthest until one is classified as desired.
    indices = np.argsort(dists)
    for index in indices:
        X_target = X_targets[index]
        pred_clsid = np.argmax(bb_model.predictions(X_target))
        if (pred_clsid == label) == is_targeted:
            print("Found an image of the target class, d_l2={:.3f}.".format(dists[index]))
            return X_target
        # Candidate rejected by the model; try the next-closest one.
        print("Image of target class is wrongly classified by model, skipping.")
    raise ValueError("Could not find an image of the target class that was correctly classified by the model!")
def sample_hypersphere(n_samples, sample_shape, radius, sample_gen=None, seed=None):
    """
    Uniformly sample the surface of a L2-hypersphere.
    Uniform picking: create a n-dimensional normal distribution and then normalize it to the desired radius.
    See http://mathworld.wolfram.com/HyperspherePointPicking.html
    :param n_samples: number of image samples to generate.
    :param sample_shape: shape of a single image sample.
    :param radius: radius(=eps) of the hypersphere.
    :param sample_gen: If provided, retrieves random numbers from this generator.
    :param seed: seed for the random generator. Cannot be used with the sample generator.
    :return: Batch of image samples, shape: (n_samples,) + sample_shape
    """
    flat_dim = np.prod(sample_shape)
    if sample_gen is None:
        if seed is not None:
            np.random.seed(seed)
        gauss = np.random.normal(size=(n_samples, flat_dim))
    else:
        assert seed is None, "Can't provide individual seeds if using the multi-threaded generator."
        assert sample_shape == sample_gen.shape
        # Pull pre-calculated normal samples from the generator, one row at a time.
        gauss = np.empty(shape=(n_samples, flat_dim), dtype=np.float32)
        for row in range(n_samples):
            gauss[row] = sample_gen.get_normal().reshape(-1)
    # Normalize each row to length `radius`, then restore the sample shape.
    row_norms = np.linalg.norm(gauss, ord=2, axis=1)
    scaled = (gauss / row_norms[:, np.newaxis]) * radius
    return np.reshape(scaled, (n_samples,) + sample_shape)
| StarcoderdataPython |
6645178 | <filename>examples_source/2D_simulation(macro_amorphous)/plot_1_I=2.5.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Czjzek distribution, ²⁷Al (I=5/2) 3QMAS
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
²⁷Al (I=5/2) 3QMAS simulation of amorphous material.
"""
# %%
# In this section, we illustrate the simulation of a quadrupolar MQMAS spectrum arising
# from a distribution of the electric field gradient (EFG) tensors from amorphous
# material. We proceed by employing the Czjzek distribution model.
# sphinx_gallery_thumbnail_number = 2
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import multivariate_normal
from mrsimulator import Simulator
from mrsimulator.methods import ThreeQ_VAS
from mrsimulator.models import CzjzekDistribution
from mrsimulator.utils.collection import single_site_system_generator
# %%
# Generate probability distribution
# ---------------------------------
# The range of isotropic chemical shifts, the quadrupolar coupling constant, and
# asymmetry parameters used in generating a 3D grid.
iso_r = np.arange(101) / 1.5 + 30 # in ppm
Cq_r = np.arange(100) / 4 # in MHz
eta_r = np.arange(10) / 9
# The 3D mesh grid over which the distribution amplitudes are evaluated.
iso, Cq, eta = np.meshgrid(iso_r, Cq_r, eta_r, indexing="ij")
# The 2D amplitude grid of Cq and eta is sampled from the Czjzek distribution model.
# (The returned Cq_dist and e_dist coordinate grids are not used below.)
Cq_dist, e_dist, amp = CzjzekDistribution(sigma=1).pdf(pos=[Cq_r, eta_r])
# The 1D amplitude grid of isotropic chemical shifts is sampled from a Gaussian model.
iso_amp = multivariate_normal(mean=58, cov=[4]).pdf(iso_r)
# The 3D amplitude grid is generated as an uncorrelated distribution of the above two
# distribution, which is the product of the two distributions.
# NOTE(review): the repeat/reshape assumes `amp` is indexed (eta, Cq); after the
# final transpose the pdf axes are (iso, Cq, eta), matching the meshgrid above.
# Confirm against CzjzekDistribution.pdf's documented output layout.
pdf = np.repeat(amp, iso_r.size).reshape(eta_r.size, Cq_r.size, iso_r.size)
pdf *= iso_amp
pdf = pdf.T
# %%
# The two-dimensional projections from this three-dimensional distribution are shown
# below.
_, ax = plt.subplots(1, 3, figsize=(9, 3))
# isotropic shift v.s. quadrupolar coupling constant
ax[0].contourf(Cq_r, iso_r, pdf.sum(axis=2))
ax[0].set_xlabel("Cq / MHz")
ax[0].set_ylabel("isotropic chemical shift / ppm")
# isotropic shift v.s. quadrupolar asymmetry
ax[1].contourf(eta_r, iso_r, pdf.sum(axis=1))
ax[1].set_xlabel(r"quadrupolar asymmetry, $\eta$")
ax[1].set_ylabel("isotropic chemical shift / ppm")
# quadrupolar coupling constant v.s. quadrupolar asymmetry
ax[2].contourf(eta_r, Cq_r, pdf.sum(axis=0))
ax[2].set_xlabel(r"quadrupolar asymmetry, $\eta$")
ax[2].set_ylabel("Cq / MHz")
plt.tight_layout()
plt.show()
# %%
# Simulation setup
# ----------------
# Let's create the site and spin system objects from these parameters. Use the
# :func:`~mrsimulator.utils.collection.single_site_system_generator` utility function to
# generate single-site spin systems.
spin_systems = single_site_system_generator(
    isotope="27Al",
    isotropic_chemical_shift=iso,
    quadrupolar={"Cq": Cq * 1e6, "eta": eta}, # Cq in Hz
    abundance=pdf,
)
len(spin_systems)
# %%
# Simulate a :math:`^{27}\text{Al}` 3Q-MAS spectrum by using the `ThreeQ_MAS` method.
mqvas = ThreeQ_VAS(
    channels=["27Al"],
    spectral_dimensions=[
        {
            "count": 512,
            "spectral_width": 26718.475776, # in Hz
            "reference_offset": -4174.76184, # in Hz
            "label": "Isotropic dimension",
        },
        {
            "count": 512,
            "spectral_width": 2e4, # in Hz
            "reference_offset": 2e3, # in Hz
            "label": "MAS dimension",
        },
    ],
)
# %%
# Create the simulator object, add the spin systems and method, and run the simulation.
sim = Simulator()
sim.spin_systems = spin_systems # add the spin systems
sim.methods = [mqvas] # add the method
# Restrict the computation to a single spinning sideband.
sim.config.number_of_sidebands = 1
sim.run()
data = sim.methods[0].simulation
# %%
# The plot of the corresponding spectrum.
plt.figure(figsize=(4.25, 3.0))
ax = plt.subplot(projection="csdm")
cb = ax.imshow(data / data.max(), cmap="gist_ncar_r", aspect="auto")
plt.colorbar(cb)
# Descending limits flip both axes (conventional NMR orientation).
ax.set_ylim(-20, -50)
ax.set_xlim(80, 20)
plt.tight_layout()
plt.show()
| StarcoderdataPython |
45865 | <reponame>pythonran/easy_server
from view_core import View
from easyserver import easyResponse
import json
class Index(View):
    """Example view that echoes the request body back as a JSON payload."""
    def get(self, request):
        """Handle GET: log the request and return its body plus a test marker.

        :param request: framework request object; only ``request.body`` is read.
        :return: easyResponse wrapping the JSON-serialized payload.
        """
        # Fixed: `print request` is Python 2-only statement syntax; the
        # function-call form behaves identically on Python 2 and 3.
        print(request)
        data = {
            "body": request.body,
            "option": "test"
        }
        return easyResponse(json.dumps(data))
| StarcoderdataPython |
4887823 | <gh_stars>1-10
#!/usr/bin/env python3
import subprocess
import json
import os
BASEDIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
def tokei(paths, *, exclude=()):
    """Run `tokei` over *paths* (resolved relative to BASEDIR) and return its parsed JSON.

    :param paths: iterable of repository-relative paths to count.
    :param exclude: iterable of patterns passed to tokei via ``-e``.
        (Default changed from ``[]`` to ``()``: never use a mutable default.)
    :raises subprocess.CalledProcessError: if tokei exits non-zero; its stderr
        is printed first to aid debugging.
    """
    args = []
    for pattern in exclude:
        args.extend(["-e", pattern])
    args.extend(os.path.join(BASEDIR, p) for p in paths)
    try:
        proc = subprocess.run(
            ["tokei", "-o", "json", *args],
            capture_output=True,
            encoding="utf8",
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(e.stderr)
        # Bare `raise` re-raises with the original traceback intact.
        raise
    return json.loads(proc.stdout)
# print("component,lines")
# Maps component name -> counted lines of code; populated by do_loc() below.
data = {}
def do_loc(component, language, paths, *, exclude=()):
    """Count lines of *language* code in *paths* and record the total under *component*.

    Results are accumulated in the module-level ``data`` dict. For C, header
    lines are added to the implementation count.

    :param exclude: patterns forwarded to tokei (default changed from ``[]``
        to ``()``: never use a mutable default argument).
    """
    results = tokei(paths, exclude=exclude)
    count = results[language]["code"]
    if language == 'C':
        # NOTE(review): assumes tokei reports C headers under the key
        # 'CHeader' -- verify against the tokei version in use.
        count += results.get('CHeader', {'code': 0})['code']
    data[component] = count
# Count each verified JIT port: C implementation, Racket (DSL) implementation,
# its specification/invariants, and the per-ISA interpreter.
# riscv
do_loc(
    "rv32 Racket impl",
    "Racket",
    ["racket/rv32/bpf_jit_comp32.rkt", "racket/lib/riscv-common.rkt",],
)
do_loc(
    "rv32 Racket invariants",
    "Racket",
    [
        "racket/lib/spec/riscv.rkt",
        "racket/rv32/spec.rkt",
        "racket/rv32/synthesis.rkt",
    ],
)
do_loc(
    "rv32 C implementation",
    "C",
    [
        "arch/riscv/net/bpf_jit_core.c",
        "arch/riscv/net/bpf_jit.h",
        "arch/riscv/net/bpf_jit_comp32.c",
    ],
)
do_loc(
    "rv64 Racket impl",
    "Racket",
    ["racket/rv64/bpf_jit_comp64.rkt", "racket/lib/riscv-common.rkt",],
)
do_loc(
    "rv64 Racket invariants",
    "Racket",
    [
        "racket/lib/spec/riscv.rkt",
        "racket/rv64/spec.rkt",
        "racket/rv64/synthesis.rkt",
    ],
)
do_loc(
    "rv64 C implementation",
    "C",
    [
        "arch/riscv/net/bpf_jit_core.c",
        "arch/riscv/net/bpf_jit.h",
        "arch/riscv/net/bpf_jit_comp64.c",
    ],
)
do_loc(
    "riscv interpreter", "Racket", ["serval/serval/riscv/"],
)
# arm
do_loc("arm32 Racket impl", "Racket", ["racket/arm32/bpf_jit_comp.rkt",])
do_loc("arm32 Racket invariants", "Racket", ["racket/arm32/spec.rkt",])
do_loc(
    "arm32 C implementation", "C", ["arch/arm/"],
)
do_loc(
    "arm32 interpreter", "Racket", ["serval/serval/arm32/", "serval/serval/arm32.rkt",]
)
do_loc(
    "arm64 Racket impl",
    "Racket",
    [
        "racket/arm64/bpf_jit_comp.rkt",
        "racket/arm64/bpf_jit.rkt",
        "racket/arm64/insn.rkt",
    ],
)
do_loc("arm64 Racket invariants", "Racket", ["racket/arm64/spec.rkt",])
do_loc(
    "arm64 C implementation", "C", ["arch/arm64/net/"],
)
do_loc(
    "arm64 interpreter", "Racket", ["serval/serval/arm64/", "serval/serval/arm64.rkt",]
)
# x86
do_loc(
    "x86_32 Racket impl", "Racket", ["racket/x86_32/bpf_jit_comp32.rkt"],
)
do_loc(
    "x86_32 Racket invariants", "Racket", ["racket/x86_32/spec.rkt"],
)
do_loc("x86_32 C implementation", "C", ["arch/x86/net/bpf_jit_comp32.c"])
do_loc(
    "x86_64 Racket impl", "Racket", ["racket/x86_64/bpf_jit_comp.rkt"],
)
do_loc(
    "x86_64 Racket invariants", "Racket", ["racket/x86_64/spec.rkt"],
)
do_loc("x86_64 C implementation", "C", ["arch/x86/net/bpf_jit_comp.c"])
do_loc("x86 interpreter", "Racket", ["serval/serval/x86/", "serval/serval/x86.rkt"])
# Lib
do_loc(
    "Jitterbug library",
    "Racket",
    [
        "racket/lib/bvaxiom.rkt",
        "racket/lib/extraction/",
        "racket/lib/env.rkt",
        "racket/lib/hybrid-memory.rkt",
        "racket/lib/linux.rkt",
        "racket/lib/spec/bpf.rkt",
        "racket/lib/spec/prologue.rkt",
        "racket/lib/spec/proof.rkt",
        "racket/lib/spec/per-insn.rkt",
        "racket/lib/spec/epilogue.rkt",
        "racket/lib/spec/tail-call.rkt",
        "racket/lib/tests.rkt",
        "serval/serval/lib/bvarith.rkt",
        "serval/serval/lib/core.rkt",
        "serval/serval/lib/debug.rkt",
        "serval/serval/lib/memmgr.rkt",
        "serval/serval/lib/memory/manager.rkt",
        "serval/serval/lib/solver.rkt",
        "serval/serval/lib/unittest.rkt",
    ],
)
do_loc(
    "Racket BPF interpreter",
    "Racket",
    ["serval/serval/bpf.rkt"],
)
do_loc(
    "Lean proofs",
    "Lean",
    ["lean/src/"],
)
# Emit a human-readable summary plus a LaTeX (booktabs) code-size table.
print('### Generic components')
print('Jitterbug library = {:,} loc'.format(data['Jitterbug library']))
print('BPF interpreter = {:,} loc'.format(data['Racket BPF interpreter']))
print('Lean proofs = {:,} loc'.format(data['Lean proofs']))
print('\n### Code size table')
print('\\begin{tabular}{lrrrr}')
print('\\toprule')
# Fixed: the tabular declares 5 columns (arch + 4 counts) but the header row
# had only 4 cells, shifting every label one column left. Lead with an empty
# cell so the labels align with the numeric columns below.
print(' & JIT impl. (C) & JIT impl. (DSL) & Spec. & Interp. \\\\')
print('\\midrule')
print('riscv32 & {:,} & {:,} & {:,} & {:,} \\\\'.format(
    data['rv32 C implementation'],
    data['rv32 Racket impl'],
    data['rv32 Racket invariants'],
    data['riscv interpreter'],
))
# The '"' ditto mark repeats the interpreter count from the row above
# (rv64 shares the riscv interpreter).
print('riscv64 & {:,} & {:,} & {:,} & \" \\\\'.format(
    data['rv64 C implementation'],
    data['rv64 Racket impl'],
    data['rv64 Racket invariants'],
))
print('arm32 & {:,} & {:,} & {:,} & {:,} \\\\'.format(
    data['arm32 C implementation'],
    data['arm32 Racket impl'],
    data['arm32 Racket invariants'],
    data['arm32 interpreter'],
))
print('arm64 & {:,} & {:,} & {:,} & {:,} \\\\'.format(
    data['arm64 C implementation'],
    data['arm64 Racket impl'],
    data['arm64 Racket invariants'],
    data['arm64 interpreter'],
))
print('x86-32 & {:,} & {:,} & {:,} & {:,} \\\\'.format(
    data['x86_32 C implementation'],
    data['x86_32 Racket impl'],
    data['x86_32 Racket invariants'],
    data['x86 interpreter'],
))
print('x86-64 & {:,} & {:,} & {:,} & " \\\\'.format(
    data['x86_64 C implementation'],
    data['x86_64 Racket impl'],
    data['x86_64 Racket invariants'],
))
print('\\bottomrule')
print('\\end{tabular}')
| StarcoderdataPython |
5147789 | from django.db import models
from django.utils.translation import gettext_lazy as _
from core.models import BaseAbstractModel
from core.utils import IBANValidator
from payments.managers import BankAccountQuerySet
class Bank(BaseAbstractModel):
    """
    Bank model

    Holds a bank's display name. BankAccount references it through a
    PROTECT-ed foreign key, so a bank with accounts cannot be deleted.
    """
    # Human-readable bank name.
    name = models.CharField(max_length=100, verbose_name=_("Bank Name"))
    class Meta:
        verbose_name = _("Bank")
        verbose_name_plural = _("Banks")
    def __str__(self):
        return self.name
class BankAccount(BaseAbstractModel):
    """
    Bank Account model

    An IBAN-identified account at a Bank; the `iban` field carries an
    IBANValidator and the model uses a custom queryset manager.
    """
    # Shared validator instance referenced by the `iban` field below.
    iban_validator = IBANValidator()
    # PROTECT: a bank cannot be deleted while accounts reference it.
    bank = models.ForeignKey(Bank, verbose_name=_("Bank Name"), on_delete=models.PROTECT)
    name = models.CharField(max_length=100, verbose_name=_("Bank Account Name"))
    iban = models.CharField(max_length=100, verbose_name=_("IBAN"), validators=[iban_validator])
    objects = BankAccountQuerySet.as_manager()
    class Meta:
        verbose_name = _("Bank Account")
        verbose_name_plural = _("Bank Accounts")
    def __str__(self):
        return f"{self.name} - {self.iban}"
| StarcoderdataPython |
4962468 | <gh_stars>0
# Copyright 2021 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import tensorflow as tf
import model_compression_toolkit as mct
from model_compression_toolkit.common.substitutions.scale_equalization import fixed_second_moment_after_relu, \
fixed_mean_after_relu
from model_compression_toolkit.keras.default_framework_info import DEFAULT_KERAS_INFO
from tests.keras_tests.fw_hw_model_keras import get_16bit_fw_hw_model
from tests.keras_tests.feature_networks_tests.base_keras_feature_test import BaseKerasFeatureNetworkTest
keras = tf.keras
layers = keras.layers
def gamma_init():
    """Return a RandomNormal initializer (mean 10, stddev 1), used below for BatchNorm gamma."""
    initializer = tf.keras.initializers.RandomNormal(mean=10.0, stddev=1.0, seed=None)
    return initializer
"""
This test checks the Channel Scale Equalization feature.
"""
class ScaleEqualizationTest(BaseKerasFeatureNetworkTest):
    """Feature test for activation channel scale-equalization.

    Builds first_op2d -> BatchNorm (gamma ~ N(10, 1)) -> activation
    [-> ZeroPadding2D] -> second_op2d, quantizes it with
    activation_channel_equalization enabled, and checks in `compare` that the
    per-channel scale folded into the first layer's weights (alpha) is undone
    in the second layer's weights (beta) and matches the expected factor.
    """
    # NOTE(review): the default `act_node` layer instance is created once at
    # class-definition time and shared across all instances using the default.
    def __init__(self, unit_test, first_op2d, second_op2d, act_node=layers.ReLU(), zero_pad=False):
        self.first_op2d = first_op2d
        self.act_node = act_node
        self.second_op2d = second_op2d
        self.zero_pad = zero_pad
        super().__init__(unit_test,
                         input_shape=(16, 16, 3))
    def get_fw_hw_model(self):
        # 16-bit framework/hardware model (see get_16bit_fw_hw_model).
        return get_16bit_fw_hw_model("scale_equalization_bound_test")
    def get_quantization_config(self):
        # activation_channel_equalization=True is the feature under test.
        return mct.QuantizationConfig(mct.QuantizationErrorMethod.MSE, mct.QuantizationErrorMethod.MSE,
                                      relu_bound_to_power_of_2=False, weights_bias_correction=False,
                                      weights_per_channel_threshold=True, activation_channel_equalization=True)
    def create_networks(self):
        # first_op2d -> BN(gamma~N(10,1)) -> activation [-> ZeroPadding] -> second_op2d
        inputs = layers.Input(shape=self.get_input_shapes()[0][1:])
        x = self.first_op2d(inputs)
        x = layers.BatchNormalization(gamma_initializer=gamma_init())(x)
        x = self.act_node(x)
        if self.zero_pad:
            x = layers.ZeroPadding2D()(x)
        outputs = self.second_op2d(x)
        return keras.Model(inputs=inputs, outputs=outputs)
    def compare(self, quantized_model, float_model, input_x=None, quantization_info=None):
        # NOTE(review): layer-index offsets below assume a fixed layout of the
        # quantized vs. float models (extra quantize wrappers, +1 for zero-pad,
        # +1 when the first op is Dense/Conv2DTranspose) -- confirm if the
        # model-export layout changes.
        q_first_linear_op_index = 2
        q_second_linear_op_index = 5 + int(self.zero_pad) + int(
            isinstance(self.first_op2d, (layers.Dense, layers.Conv2DTranspose)))
        f_first_linear_op_index = 1
        f_second_linear_op_index = 4 + int(self.zero_pad)
        quantized_model_layer1_weight = quantized_model.layers[q_first_linear_op_index].weights[0]
        quantized_model_layer2_weight = quantized_model.layers[q_second_linear_op_index].weights[0]
        float_model_layer1_weight = float_model.layers[f_first_linear_op_index].weights[0]
        float_model_layer2_weight = float_model.layers[f_second_linear_op_index].weights[0]
        gamma = np.abs(float_model.layers[f_first_linear_op_index + 1].gamma)
        bn_beta = float_model.layers[f_first_linear_op_index + 1].beta
        # Closed-form post-ReLU moments give the expected equalization factor,
        # clipped to at most 1.0 per channel.
        fixed_second_moment_vector = fixed_second_moment_after_relu(bn_beta, gamma)
        fixed_mean_vector = fixed_mean_after_relu(bn_beta, gamma)
        fixed_std_vector = np.sqrt(fixed_second_moment_vector - np.power(fixed_mean_vector, 2))
        scale_factor = 1.0 / fixed_std_vector
        scale_factor = np.minimum(scale_factor, 1.0)
        # Undo the folded BN gamma to isolate the equalization scaling.
        quantized_model_layer1_weight_without_bn_fold = quantized_model_layer1_weight / gamma
        if (type(quantized_model.layers[q_first_linear_op_index]) == layers.DepthwiseConv2D) \
                or (type(quantized_model.layers[q_second_linear_op_index]) == layers.DepthwiseConv2D):
            # Depthwise kernels have no clean output-channel axis here; compare means.
            alpha = np.mean(quantized_model_layer1_weight_without_bn_fold / float_model_layer1_weight)
            beta = np.mean(float_model_layer2_weight / quantized_model_layer2_weight)
            scale_factor = np.mean(scale_factor)
        else:
            # Reduce over all axes except the (framework-defined) channel axis.
            first_layer_chn_dim = DEFAULT_KERAS_INFO.kernel_channels_mapping.get(
                type(quantized_model.layers[q_first_linear_op_index]))[0]
            second_layer_chn_dim = DEFAULT_KERAS_INFO.kernel_channels_mapping.get(
                type(quantized_model.layers[q_second_linear_op_index]))[1]
            first_layer_axes = tuple(np.delete(np.arange(quantized_model_layer1_weight.numpy().ndim),
                                               first_layer_chn_dim))
            second_layer_axes = tuple(np.delete(np.arange(quantized_model_layer2_weight.numpy().ndim),
                                                second_layer_chn_dim))
            alpha = np.mean(quantized_model_layer1_weight_without_bn_fold / float_model_layer1_weight,
                            axis=first_layer_axes)
            beta = np.mean(float_model_layer2_weight / quantized_model_layer2_weight, axis=second_layer_axes)
        # Scaling applied to layer 1 must be inverted on layer 2, be <= 1, and
        # match the analytically expected factor.
        self.unit_test.assertTrue(np.allclose(alpha, beta, atol=1e-1))
        self.unit_test.assertTrue((np.isclose(alpha, 1.0, atol=1e-1) + np.less(alpha, 1.0)).all())
        self.unit_test.assertTrue(np.allclose(alpha, scale_factor, atol=1e-1))
| StarcoderdataPython |
3341855 | <reponame>knuu/competitive-programming<filename>atcoder/abc/abc030_b.py
# Angle between the hands of a clock: read hour N and minute M, print the
# smaller angle between the two hands in degrees.
hours, minutes = map(int, input().split())
minute_frac = minutes / 60                      # minute hand, fraction of a full turn
hour_frac = (hours % 12 + minute_frac) / 12     # hour hand, fraction of a full turn
angle = 360 * abs(minute_frac - hour_frac)
print('{:.12}'.format(min(angle, 360 - angle)))
| StarcoderdataPython |
6569439 | import json
import subprocess
import pytest
from cli.autocomplete import ac_table
from cli.export import api_to_dict
from jina.checker import NetworkChecker
from jina.jaml import JAML
from jina.parsers import set_pod_parser, set_pea_parser
from jina.parsers.ping import set_ping_parser
from jina.peapods import Pea
def test_export_api(tmpdir):
    """The API map must serialize cleanly to both YAML and JSON files."""
    with open(tmpdir / 'test.yml', 'w', encoding='utf8') as yaml_file:
        JAML.dump(api_to_dict(), yaml_file)
    with open(tmpdir / 'test.json', 'w', encoding='utf8') as json_file:
        json.dump(api_to_dict(), json_file)
def test_main_cli():
    """The bare `jina` entry point must exit with status 0 (check_call raises otherwise)."""
    subprocess.check_call(['jina'])
@pytest.mark.parametrize('cli', ac_table['commands'])
def test_all_cli(cli):
    """Every registered subcommand must support `--help` without error."""
    subprocess.check_call(['jina', cli, '--help'])
def test_parse_env_map():
    """`--env` key=value pairs collect into a dict; numeric values become ints."""
    parsed = set_pod_parser().parse_args(
        ['--env', 'key1=value1', '--env', 'key2=value2'])
    assert parsed.env == {'key1': 'value1', 'key2': 'value2'}
    # multiple pairs following a single --env flag are also accepted
    parsed = set_pod_parser().parse_args(
        ['--env', 'key1=value1', 'key2=value2', 'key3=3'])
    assert parsed.env == {'key1': 'value1', 'key2': 'value2', 'key3': 3}
def test_ping():
    """Pinging a live Pea must exit 0; an unreachable address must exit 1."""
    a1 = set_pea_parser().parse_args([])
    a2 = set_ping_parser().parse_args(['0.0.0.0', str(a1.port_ctrl), '--print-response'])
    a3 = set_ping_parser().parse_args(['0.0.0.1', str(a1.port_ctrl), '--timeout', '1000'])
    # NetworkChecker reports its result by raising SystemExit.
    with pytest.raises(SystemExit) as cm:
        with Pea(a1):
            NetworkChecker(a2)
    assert cm.value.code == 0
    # test with bad address
    with pytest.raises(SystemExit) as cm:
        with Pea(a1):
            NetworkChecker(a3)
    assert cm.value.code == 1
| StarcoderdataPython |
8023876 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Tue Oct 10 00:42:20 2017 by generateDS.py version 2.28b.
# Python 2.7.12 (default, Nov 19 2016, 06:48:10) [GCC 5.4.0 20160609]
#
# Command line options:
# ('--no-process-includes', '')
# ('-o', 'esociallib/v2_04/evtAltContratual.py')
#
# Command line arguments:
# schemas/v2_04/evtAltContratual.xsd
#
# Command line:
# /usr/local/bin/generateDS --no-process-includes -o "esociallib/v2_04/evtAltContratual.py" schemas/v2_04/evtAltContratual.xsd
#
# Current working directory (os.getcwd()):
# esociallib
#
import sys
import re as re_
import base64
import datetime as datetime_
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# Flag for simple-type validation in generated code.
# NOTE(review): not consulted anywhere within this excerpt.
Validate_simpletypes_ = True
# Unified base string type for isinstance checks: basestring on Python 2, str on 3.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* into an element tree.

    Prefers lxml's comment-ignoring ETCompatXMLParser; falls back to the
    plain XMLParser when that class is unavailable (e.g. xml.etree).
    """
    if parser is None:
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            # xml.etree does not provide ETCompatXMLParser
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
# Optional user-supplied namespace-prefix table (see the comment block above).
try:
    from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
    # No user table available: export no namespace-prefix definitions.
    GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError:
    class GeneratedsSuper(object):
        """Fallback base class for generated element classes.

        Supplies the gds_format_*/gds_validate_* value helpers, timezone-aware
        date/time parsing/formatting, and small structural utilities used by
        generated export code. A project can override it by providing a
        `generatedssuper` module (imported above).
        """
        # Matches a trailing +HH:MM / -HH:MM timezone offset (at most 14:00).
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            # Minimal fixed-offset tzinfo implementation.
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        # --- simple-value formatters and validators ---------------------
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            # NOTE(review): b64encode returns bytes on Python 3 -- confirm
            # callers expect that rather than str.
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return values
        def gds_format_float(self, input_data, input_name=''):
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(
                        node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return values
        # --- date/time handling -----------------------------------------
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (time_parts[0], micro_seconds, )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                # plain datetime.date has no tzinfo
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat is a list of lists of strings/patterns. We should:
            # - AND the outer elements
            # - OR the inner elements
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    if re_.search(patterns2, target) is not None:
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_str_lower(self, instring):
            return instring.lower()
        # --- structural helpers -----------------------------------------
        def get_path_(self, node):
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # NOTE(review): node.getparent() is an lxml API; the xml.etree
            # fallback parser does not provide it.
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            # Fixed: dict.iteritems() exists only on Python 2 and crashed
            # here under Python 3; items() works on both versions.
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            if sys.version_info.major == 2:
                return instring.encode(ExternalEncoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            if type(self) != type(other):
                return False
            return self.__dict__ == other.__dict__
        def __ne__(self, other):
            return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.'''
    # Generated subclass modules name overrides by appending 'Sub'.
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'  # encoding used by gds_encode under Python 2
Tag_pattern_ = re_.compile(r'({.*})?(.*)')  # optional '{namespace}' prefix + tag name
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)  # CDATA sections, possibly multi-line
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write one indentation unit per *level* to *outfile*; no-op unless pretty-printing."""
    if not pretty_print:
        return
    for _ in range(level):
        outfile.write('    ')
def quote_xml(inStr):
    "Escape markup chars, but do not modify CDATA sections."
    if not inStr:
        return ''
    text = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    pieces = []
    cursor = 0
    # Escape only the stretches between CDATA sections; copy CDATA verbatim.
    for match in CDATA_pattern_.finditer(text):
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(match.group(0))
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Escape the XML special characters in *inStr* (text outside CDATA).

    Fixed: the replacements had degraded into identity no-ops such as
    ``replace('&', '&')``; restore the standard entities. ``&`` is escaped
    first so the later replacements are not double-escaped.
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* for use as an XML attribute value and wrap it in quotes.

    Escapes ``&``, ``<`` and ``>``, then picks a quote style: double quotes
    normally, single quotes when the value contains double quotes, and
    ``&quot;`` entities when it contains both quote characters.

    Fixed: the escape replacements had been corrupted into identity no-ops
    (and the ``&quot;`` replacement into invalid syntax); restore them.
    """
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', '&quot;')
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return *inStr* wrapped as a Python string literal, choosing a quote
    style that avoids escaping where possible (triple quotes for newlines)."""
    text = inStr
    if "'" not in text:
        if '\n' not in text:
            return "'%s'" % text
        return "'''%s'''" % text
    # Contains single quotes: switch to double-quoted forms.
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Concatenate *node*'s leading text with the tail text of each child."""
    parts = [node.text if node.text is not None else '']
    parts.extend(child.tail for child in node if child.tail is not None)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Return the value of attribute *attr_name* on *node*, or None.

    A 'prefix:name' attribute is resolved through ``node.nsmap`` (an lxml
    API) into the Clark-notation '{namespace}name' key.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, name = parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            return attrs.get('{%s}%s' % (namespace, name, ))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when XML parsing or validation fails."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError, appending the element tag and source line to *msg*.

    ``node.sourceline`` is an lxml attribute; xml.etree nodes do not carry it.
    """
    detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    raise GDSParseError(detail)
class MixedContainer:
    """Holder for one piece of mixed XML content (text interleaved with
    child elements).  Each instance records a category (text, simple
    value, or complex element), a content type, the tag name and the
    value, and knows how to serialize that piece as XML text, as an
    lxml/ElementTree node, or as a Python literal.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is ignored; the
        # stored content type is returned (generated-code quirk).
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        """Write this piece of mixed content to *outfile* as XML text,
        dispatching on the stored category."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            # Delegate to the contained complex object's own export.
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        """Write a simple-typed value as ``<name>value</name>``, using a
        format code appropriate to the stored content type."""
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        """Append this piece of mixed content to an existing etree
        *element*: text goes into ``text``/``tail`` slots, simple and
        complex values become subelements."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                # Text after a child belongs in that child's tail;
                # otherwise it is the element's leading text.
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        """Return the stored simple value formatted as text, per the
        stored content type."""
        # NOTE(review): for TypeNone/TypeText no branch binds `text`,
        # which would raise UnboundLocalError — presumably those
        # categories never reach this method; confirm before relying
        # on it.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write this piece of mixed content as a Python constructor
        expression (``model_.MixedContainer(...)``)."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record describing one member (child element or
    attribute) of a generated binding class: its name, XML schema data
    type, container flag, optionality, child attributes and choice
    group."""

    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.optional = optional
        self.child_attrs = child_attrs
        self.choice = choice

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        # The data type exactly as stored (possibly a chain/list).
        return self.data_type

    def get_data_type(self):
        """Return the effective data type: the last link of a type
        chain, or 'xs:string' when the chain is empty."""
        if isinstance(self.data_type, list):
            return self.data_type[-1] if self.data_type else 'xs:string'
        return self.data_type

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container

    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs

    def get_child_attrs(self):
        return self.child_attrs

    def set_choice(self, choice):
        self.choice = choice

    def get_choice(self):
        return self.choice

    def set_optional(self, optional):
        self.optional = optional

    def get_optional(self):
        return self.optional
def _cast(typ, value):
    """Apply converter *typ* to *value*; a None converter or a None
    value passes through unchanged."""
    passthrough = typ is None or value is None
    return value if passthrough else typ(value)
#
# Data representation classes.
#
class eSocial(GeneratedsSuper):
    """Root element of the eSocial event message.

    Holds the ``evtAltContratual`` event payload and an XML digital
    signature (``ds:Signature``) child.
    """
    subclass = None
    superclass = None
    def __init__(self, evtAltContratual=None, Signature=None):
        self.original_tagname_ = None
        self.evtAltContratual = evtAltContratual
        self.Signature = Signature
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, eSocial)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if eSocial.subclass:
            return eSocial.subclass(*args_, **kwargs_)
        else:
            return eSocial(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_evtAltContratual(self): return self.evtAltContratual
    def set_evtAltContratual(self, evtAltContratual): self.evtAltContratual = evtAltContratual
    def get_Signature(self): return self.Signature
    def set_Signature(self, Signature): self.Signature = Signature
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.evtAltContratual is not None or
            self.Signature is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='eSocial', namespacedef_=' xmlns:ds="http://www.w3.org/2000/09/xmldsig#" ', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('eSocial')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='eSocial')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='eSocial', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # Self-closing tag when there is nothing to nest.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='eSocial'):
        # eSocial carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='eSocial', fromsubclass_=False, pretty_print=True):
        """Write the child elements (event payload and signature)."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.evtAltContratual is not None:
            self.evtAltContratual.export(outfile, level, namespace_, name_='evtAltContratual', pretty_print=pretty_print)
        if self.Signature is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sSignature>%s</%sSignature>%s' % ('ds:', self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), 'ds:', eol_))
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child node to the matching member."""
        if nodeName_ == 'evtAltContratual':
            obj_ = evtAltContratual.factory()
            obj_.build(child_)
            self.evtAltContratual = obj_
            obj_.original_tagname_ = 'evtAltContratual'
        elif nodeName_ == 'Signature':
            Signature_ = child_.text
            Signature_ = self.gds_validate_string(Signature_, node, 'Signature')
            self.Signature = Signature_
# end class eSocial
class evtAltContratual(GeneratedsSuper):
    """Contract alteration event ("Evento Alteração Contratual").

    Carries an ``Id`` attribute plus child groups identifying the
    event, the employer, the employment relationship, and the
    contractual alteration itself.
    """
    subclass = None
    superclass = None
    def __init__(self, Id=None, ideEvento=None, ideEmpregador=None, ideVinculo=None, altContratual=None):
        self.original_tagname_ = None
        self.Id = _cast(None, Id)
        self.ideEvento = ideEvento
        self.ideEmpregador = ideEmpregador
        self.ideVinculo = ideVinculo
        self.altContratual = altContratual
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, evtAltContratual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if evtAltContratual.subclass:
            return evtAltContratual.subclass(*args_, **kwargs_)
        else:
            return evtAltContratual(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ideEvento(self): return self.ideEvento
    def set_ideEvento(self, ideEvento): self.ideEvento = ideEvento
    def get_ideEmpregador(self): return self.ideEmpregador
    def set_ideEmpregador(self, ideEmpregador): self.ideEmpregador = ideEmpregador
    def get_ideVinculo(self): return self.ideVinculo
    def set_ideVinculo(self, ideVinculo): self.ideVinculo = ideVinculo
    def get_altContratual(self): return self.altContratual
    def set_altContratual(self, altContratual): self.altContratual = altContratual
    def get_Id(self): return self.Id
    def set_Id(self, Id): self.Id = Id
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.ideEvento is not None or
            self.ideEmpregador is not None or
            self.ideVinculo is not None or
            self.altContratual is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='evtAltContratual', namespacedef_='', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('evtAltContratual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='evtAltContratual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='evtAltContratual', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='evtAltContratual'):
        """Write the Id attribute, once."""
        if self.Id is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), ))
    def exportChildren(self, outfile, level, namespace_='', name_='evtAltContratual', fromsubclass_=False, pretty_print=True):
        """Write the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.ideEvento is not None:
            self.ideEvento.export(outfile, level, namespace_, name_='ideEvento', pretty_print=pretty_print)
        if self.ideEmpregador is not None:
            self.ideEmpregador.export(outfile, level, namespace_, name_='ideEmpregador', pretty_print=pretty_print)
        if self.ideVinculo is not None:
            self.ideVinculo.export(outfile, level, namespace_, name_='ideVinculo', pretty_print=pretty_print)
        if self.altContratual is not None:
            self.altContratual.export(outfile, level, namespace_, name_='altContratual', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the Id attribute from the XML node."""
        value = find_attr_value_('Id', node)
        if value is not None and 'Id' not in already_processed:
            already_processed.add('Id')
            self.Id = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child node to the matching member class."""
        if nodeName_ == 'ideEvento':
            obj_ = TIdeEveTrab.factory()
            obj_.build(child_)
            self.ideEvento = obj_
            obj_.original_tagname_ = 'ideEvento'
        elif nodeName_ == 'ideEmpregador':
            obj_ = TEmpregador.factory()
            obj_.build(child_)
            self.ideEmpregador = obj_
            obj_.original_tagname_ = 'ideEmpregador'
        elif nodeName_ == 'ideVinculo':
            obj_ = TIdeVinculoNisObrig.factory()
            obj_.build(child_)
            self.ideVinculo = obj_
            obj_.original_tagname_ = 'ideVinculo'
        elif nodeName_ == 'altContratual':
            obj_ = altContratual.factory()
            obj_.build(child_)
            self.altContratual = obj_
            obj_.original_tagname_ = 'altContratual'
# end class evtAltContratual
class altContratual(GeneratedsSuper):
    """Employment contract information ("Informações do Contrato de
    Trabalho"): alteration date, effective date, description, and the
    vinculo / labor-regime / contract child groups.
    """
    subclass = None
    superclass = None
    def __init__(self, dtAlteracao=None, dtEf=None, dscAlt=None, vinculo=None, infoRegimeTrab=None, infoContrato=None):
        self.original_tagname_ = None
        # Date members accept either a date object or an ISO
        # 'YYYY-MM-DD' string, which is parsed here.
        if isinstance(dtAlteracao, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtAlteracao, '%Y-%m-%d').date()
        else:
            initvalue_ = dtAlteracao
        self.dtAlteracao = initvalue_
        if isinstance(dtEf, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtEf, '%Y-%m-%d').date()
        else:
            initvalue_ = dtEf
        self.dtEf = initvalue_
        self.dscAlt = dscAlt
        self.vinculo = vinculo
        self.infoRegimeTrab = infoRegimeTrab
        self.infoContrato = infoContrato
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, altContratual)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if altContratual.subclass:
            return altContratual.subclass(*args_, **kwargs_)
        else:
            return altContratual(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_dtAlteracao(self): return self.dtAlteracao
    def set_dtAlteracao(self, dtAlteracao): self.dtAlteracao = dtAlteracao
    def get_dtEf(self): return self.dtEf
    def set_dtEf(self, dtEf): self.dtEf = dtEf
    def get_dscAlt(self): return self.dscAlt
    def set_dscAlt(self, dscAlt): self.dscAlt = dscAlt
    def get_vinculo(self): return self.vinculo
    def set_vinculo(self, vinculo): self.vinculo = vinculo
    def get_infoRegimeTrab(self): return self.infoRegimeTrab
    def set_infoRegimeTrab(self, infoRegimeTrab): self.infoRegimeTrab = infoRegimeTrab
    def get_infoContrato(self): return self.infoContrato
    def set_infoContrato(self, infoContrato): self.infoContrato = infoContrato
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.dtAlteracao is not None or
            self.dtEf is not None or
            self.dscAlt is not None or
            self.vinculo is not None or
            self.infoRegimeTrab is not None or
            self.infoContrato is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='altContratual', namespacedef_='', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('altContratual')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='altContratual')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='altContratual', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='altContratual'):
        # altContratual carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='altContratual', fromsubclass_=False, pretty_print=True):
        """Write the child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.dtAlteracao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtAlteracao>%s</%sdtAlteracao>%s' % (namespace_, self.gds_format_date(self.dtAlteracao, input_name='dtAlteracao'), namespace_, eol_))
        if self.dtEf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtEf>%s</%sdtEf>%s' % (namespace_, self.gds_format_date(self.dtEf, input_name='dtEf'), namespace_, eol_))
        if self.dscAlt is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscAlt>%s</%sdscAlt>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscAlt), input_name='dscAlt')), namespace_, eol_))
        if self.vinculo is not None:
            self.vinculo.export(outfile, level, namespace_, name_='vinculo', pretty_print=pretty_print)
        if self.infoRegimeTrab is not None:
            self.infoRegimeTrab.export(outfile, level, namespace_, name_='infoRegimeTrab', pretty_print=pretty_print)
        if self.infoContrato is not None:
            self.infoContrato.export(outfile, level, namespace_, name_='infoContrato', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child node to the matching member."""
        if nodeName_ == 'dtAlteracao':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtAlteracao = dval_
        elif nodeName_ == 'dtEf':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtEf = dval_
        elif nodeName_ == 'dscAlt':
            dscAlt_ = child_.text
            dscAlt_ = self.gds_validate_string(dscAlt_, node, 'dscAlt')
            self.dscAlt = dscAlt_
        elif nodeName_ == 'vinculo':
            obj_ = vinculo.factory()
            obj_.build(child_)
            self.vinculo = obj_
            obj_.original_tagname_ = 'vinculo'
        elif nodeName_ == 'infoRegimeTrab':
            obj_ = infoRegimeTrab.factory()
            obj_.build(child_)
            self.infoRegimeTrab = obj_
            obj_.original_tagname_ = 'infoRegimeTrab'
        elif nodeName_ == 'infoContrato':
            obj_ = infoContrato.factory()
            obj_.build(child_)
            self.infoContrato = obj_
            obj_.original_tagname_ = 'infoContrato'
# end class altContratual
class dtAlteracao(GeneratedsSuper):
    """Generated placeholder for the ``dtAlteracao`` simple element.

    Carries no attributes or children; ``hasContent_`` is always
    False, so export always emits a self-closing tag.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtAlteracao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtAlteracao.subclass:
            return dtAlteracao.subclass(*args_, **kwargs_)
        else:
            return dtAlteracao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Generated empty condition: the tuple below is always falsy.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='dtAlteracao', namespacedef_='', pretty_print=True):
        """Serialize this (empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtAlteracao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtAlteracao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtAlteracao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtAlteracao'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtAlteracao', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtAlteracao
class dtEf(GeneratedsSuper):
    """Generated placeholder for the ``dtEf`` simple element.

    Carries no attributes or children; ``hasContent_`` is always
    False, so export always emits a self-closing tag.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dtEf)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dtEf.subclass:
            return dtEf.subclass(*args_, **kwargs_)
        else:
            return dtEf(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Generated empty condition: the tuple below is always falsy.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='dtEf', namespacedef_='', pretty_print=True):
        """Serialize this (empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtEf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtEf')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtEf', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtEf'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtEf', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtEf
class dscAlt(GeneratedsSuper):
    """Generated placeholder for the ``dscAlt`` simple element.

    Carries no attributes or children; ``hasContent_`` is always
    False, so export always emits a self-closing tag.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dscAlt)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dscAlt.subclass:
            return dscAlt.subclass(*args_, **kwargs_)
        else:
            return dscAlt(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Generated empty condition: the tuple below is always falsy.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='dscAlt', namespacedef_='', pretty_print=True):
        """Serialize this (empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscAlt')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscAlt')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscAlt', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscAlt'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dscAlt', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscAlt
class vinculo(GeneratedsSuper):
    """Group of employment-relationship information ("Grupo de
    informações do vínculo trabalhista").  Holds the integer
    ``tpRegPrev`` (social-security regime type) child element.
    """
    subclass = None
    superclass = None
    def __init__(self, tpRegPrev=None):
        self.original_tagname_ = None
        self.tpRegPrev = tpRegPrev
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, vinculo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if vinculo.subclass:
            return vinculo.subclass(*args_, **kwargs_)
        else:
            return vinculo(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpRegPrev(self): return self.tpRegPrev
    def set_tpRegPrev(self, tpRegPrev): self.tpRegPrev = tpRegPrev
    def hasContent_(self):
        """Return True when the tpRegPrev child is set."""
        if (
            self.tpRegPrev is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='vinculo', namespacedef_='', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vinculo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='vinculo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='vinculo', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='vinculo'):
        # vinculo carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='vinculo', fromsubclass_=False, pretty_print=True):
        """Write the tpRegPrev child as an integer element."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpRegPrev is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpRegPrev>%s</%stpRegPrev>%s' % (namespace_, self.gds_format_integer(self.tpRegPrev, input_name='tpRegPrev'), namespace_, eol_))
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse the tpRegPrev child; non-integer text raises
        GDSParseError via raise_parse_error."""
        if nodeName_ == 'tpRegPrev':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpRegPrev')
            self.tpRegPrev = ival_
# end class vinculo
class tpRegPrev(GeneratedsSuper):
    """Generated placeholder for the ``tpRegPrev`` simple element.

    Carries no attributes or children; ``hasContent_`` is always
    False, so export always emits a self-closing tag.
    """
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpRegPrev)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpRegPrev.subclass:
            return tpRegPrev.subclass(*args_, **kwargs_)
        else:
            return tpRegPrev(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Generated empty condition: the tuple below is always falsy.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='tpRegPrev', namespacedef_='', pretty_print=True):
        """Serialize this (empty) element as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRegPrev')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRegPrev')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpRegPrev', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpRegPrev'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpRegPrev', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpRegPrev
class infoRegimeTrab(GeneratedsSuper):
    """Labor regime information ("Informações do regime trabalhista").

    Holds either the CLT-worker group (``infoCeletista``) or the
    statutory-worker group (``infoEstatutario``).
    """
    subclass = None
    superclass = None
    def __init__(self, infoCeletista=None, infoEstatutario=None):
        self.original_tagname_ = None
        self.infoCeletista = infoCeletista
        self.infoEstatutario = infoEstatutario
    def factory(*args_, **kwargs_):
        # Honor an externally registered subclass, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoRegimeTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if infoRegimeTrab.subclass:
            return infoRegimeTrab.subclass(*args_, **kwargs_)
        else:
            return infoRegimeTrab(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_infoCeletista(self): return self.infoCeletista
    def set_infoCeletista(self, infoCeletista): self.infoCeletista = infoCeletista
    def get_infoEstatutario(self): return self.infoEstatutario
    def set_infoEstatutario(self, infoEstatutario): self.infoEstatutario = infoEstatutario
    def hasContent_(self):
        """Return True when at least one child element is set."""
        if (
            self.infoCeletista is not None or
            self.infoEstatutario is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='infoRegimeTrab', namespacedef_='', pretty_print=True):
        """Serialize this element (and children) as XML to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoRegimeTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name this object was parsed from.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoRegimeTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoRegimeTrab', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoRegimeTrab'):
        # infoRegimeTrab carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoRegimeTrab', fromsubclass_=False, pretty_print=True):
        """Write whichever regime child group is set."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.infoCeletista is not None:
            self.infoCeletista.export(outfile, level, namespace_, name_='infoCeletista', pretty_print=pretty_print)
        if self.infoEstatutario is not None:
            self.infoEstatutario.export(outfile, level, namespace_, name_='infoEstatutario', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child node to the matching member class."""
        if nodeName_ == 'infoCeletista':
            obj_ = infoCeletista.factory()
            obj_.build(child_)
            self.infoCeletista = obj_
            obj_.original_tagname_ = 'infoCeletista'
        elif nodeName_ == 'infoEstatutario':
            obj_ = infoEstatutario.factory()
            obj_.build(child_)
            self.infoEstatutario = obj_
            obj_.original_tagname_ = 'infoEstatutario'
# end class infoRegimeTrab
class infoCeletista(GeneratedsSuper):
    """Informacoes de Trabalhador Celetista (CLT-regime worker information).

    generateDS-style XML binding: holds the child elements of the
    ``infoCeletista`` tag and serializes (``export``) / parses (``build``)
    itself.
    """
    subclass = None
    superclass = None
    def __init__(self, tpRegJor=None, natAtividade=None, dtBase=None, cnpjSindCategProf=None, trabTemp=None, aprend=None):
        self.original_tagname_ = None
        self.tpRegJor = tpRegJor
        self.natAtividade = natAtividade
        self.dtBase = dtBase
        self.cnpjSindCategProf = cnpjSindCategProf
        self.trabTemp = trabTemp
        self.aprend = aprend
    def factory(*args_, **kwargs_):
        # Honour runtime subclass substitution before falling back to this class.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, infoCeletista)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = infoCeletista.subclass or infoCeletista
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpRegJor(self):
        return self.tpRegJor
    def set_tpRegJor(self, tpRegJor):
        self.tpRegJor = tpRegJor
    def get_natAtividade(self):
        return self.natAtividade
    def set_natAtividade(self, natAtividade):
        self.natAtividade = natAtividade
    def get_dtBase(self):
        return self.dtBase
    def set_dtBase(self, dtBase):
        self.dtBase = dtBase
    def get_cnpjSindCategProf(self):
        return self.cnpjSindCategProf
    def set_cnpjSindCategProf(self, cnpjSindCategProf):
        self.cnpjSindCategProf = cnpjSindCategProf
    def get_trabTemp(self):
        return self.trabTemp
    def set_trabTemp(self, trabTemp):
        self.trabTemp = trabTemp
    def get_aprend(self):
        return self.aprend
    def set_aprend(self, aprend):
        self.aprend = aprend
    def hasContent_(self):
        # True when at least one child element is populated.
        members = (
            self.tpRegJor,
            self.natAtividade,
            self.dtBase,
            self.cnpjSindCategProf,
            self.trabTemp,
            self.aprend,
        )
        return any(member is not None for member in members)
    def export(self, outfile, level, namespace_='', name_='infoCeletista', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoCeletista')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoCeletista')
        if not self.hasContent_():
            # Empty element: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='infoCeletista', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoCeletista'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoCeletista', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpRegJor is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpRegJor>%s</%stpRegJor>%s' % (namespace_, self.gds_format_integer(self.tpRegJor, input_name='tpRegJor'), namespace_, eol_))
        if self.natAtividade is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snatAtividade>%s</%snatAtividade>%s' % (namespace_, self.gds_format_integer(self.natAtividade, input_name='natAtividade'), namespace_, eol_))
        if self.dtBase is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtBase>%s</%sdtBase>%s' % (namespace_, self.gds_format_integer(self.dtBase, input_name='dtBase'), namespace_, eol_))
        if self.cnpjSindCategProf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scnpjSindCategProf>%s</%scnpjSindCategProf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cnpjSindCategProf), input_name='cnpjSindCategProf')), namespace_, eol_))
        # Complex children delegate to their own export().
        if self.trabTemp is not None:
            self.trabTemp.export(outfile, level, namespace_, name_='trabTemp', pretty_print=pretty_print)
        if self.aprend is not None:
            self.aprend.export(outfile, level, namespace_, name_='aprend', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree *node* and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Local tag name without any namespace prefix.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpRegJor':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpRegJor = self.gds_validate_integer(value_, node, 'tpRegJor')
        elif nodeName_ == 'natAtividade':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.natAtividade = self.gds_validate_integer(value_, node, 'natAtividade')
        elif nodeName_ == 'dtBase':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.dtBase = self.gds_validate_integer(value_, node, 'dtBase')
        elif nodeName_ == 'cnpjSindCategProf':
            self.cnpjSindCategProf = self.gds_validate_string(child_.text, node, 'cnpjSindCategProf')
        elif nodeName_ == 'trabTemp':
            obj_ = trabTemp.factory()
            obj_.build(child_)
            self.trabTemp = obj_
            obj_.original_tagname_ = 'trabTemp'
        elif nodeName_ == 'aprend':
            obj_ = aprend.factory()
            obj_.build(child_)
            self.aprend = obj_
            obj_.original_tagname_ = 'aprend'
# end class infoCeletista
class tpRegJor(GeneratedsSuper):
    """Binding for the simple ``tpRegJor`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, tpRegJor)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = tpRegJor.subclass or tpRegJor
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpRegJor', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRegJor')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpRegJor')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpRegJor', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpRegJor'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpRegJor', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpRegJor
class natAtividade(GeneratedsSuper):
    """Binding for the simple ``natAtividade`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, natAtividade)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = natAtividade.subclass or natAtividade
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='natAtividade', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('natAtividade')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='natAtividade')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='natAtividade', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='natAtividade'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='natAtividade', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class natAtividade
class dtBase(GeneratedsSuper):
    """Binding for the simple ``dtBase`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, dtBase)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = dtBase.subclass or dtBase
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='dtBase', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtBase')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtBase')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='dtBase', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtBase'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dtBase', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtBase
class cnpjSindCategProf(GeneratedsSuper):
    """Binding for the simple ``cnpjSindCategProf`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, cnpjSindCategProf)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = cnpjSindCategProf.subclass or cnpjSindCategProf
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='cnpjSindCategProf', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cnpjSindCategProf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cnpjSindCategProf')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='cnpjSindCategProf', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cnpjSindCategProf'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cnpjSindCategProf', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cnpjSindCategProf
class trabTemp(GeneratedsSuper):
    """Dados sobre trabalho temporario (temporary-work data).

    Required when extending a temporary worker's contract; carries the
    single child element ``justProrr``.
    """
    subclass = None
    superclass = None
    def __init__(self, justProrr=None):
        self.original_tagname_ = None
        self.justProrr = justProrr
    def factory(*args_, **kwargs_):
        # Let a registered subclass module supply a replacement class.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, trabTemp)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = trabTemp.subclass or trabTemp
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_justProrr(self):
        return self.justProrr
    def set_justProrr(self, justProrr):
        self.justProrr = justProrr
    def hasContent_(self):
        # justProrr is the only child element.
        return self.justProrr is not None
    def export(self, outfile, level, namespace_='', name_='trabTemp', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('trabTemp')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='trabTemp')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='trabTemp', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='trabTemp'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='trabTemp', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.justProrr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sjustProrr>%s</%sjustProrr>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.justProrr), input_name='justProrr')), namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Local tag name without any namespace prefix.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'justProrr':
            self.justProrr = self.gds_validate_string(child_.text, node, 'justProrr')
# end class trabTemp
class justProrr(GeneratedsSuper):
    """Binding for the simple ``justProrr`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, justProrr)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = justProrr.subclass or justProrr
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='justProrr', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('justProrr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='justProrr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='justProrr', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='justProrr'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='justProrr', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class justProrr
class aprend(GeneratedsSuper):
    """Identification of the employer contracting an apprentice.

    Required when the apprentice is hired through a non-profit educational
    entity focused on adolescent assistance and professional education
    (CLT art. 430, II) or through a sports entity affiliated with the
    national or state/district/municipal sports systems (CLT art. 430, III).
    Children: ``tpInsc`` (registration type, integer) and ``nrInsc``
    (registration number, string).
    """
    subclass = None
    superclass = None
    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc
        self.nrInsc = nrInsc
    def factory(*args_, **kwargs_):
        # Let a registered subclass module supply a replacement class.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, aprend)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = aprend.subclass or aprend
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpInsc(self):
        return self.tpInsc
    def set_tpInsc(self, tpInsc):
        self.tpInsc = tpInsc
    def get_nrInsc(self):
        return self.nrInsc
    def set_nrInsc(self, nrInsc):
        self.nrInsc = nrInsc
    def hasContent_(self):
        # True when either child element is populated.
        return self.tpInsc is not None or self.nrInsc is not None
    def export(self, outfile, level, namespace_='', name_='aprend', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('aprend')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='aprend')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='aprend', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='aprend'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='aprend', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Local tag name without any namespace prefix.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpInsc':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpInsc = self.gds_validate_integer(value_, node, 'tpInsc')
        elif nodeName_ == 'nrInsc':
            self.nrInsc = self.gds_validate_string(child_.text, node, 'nrInsc')
# end class aprend
class tpInsc(GeneratedsSuper):
    """Binding for the simple ``tpInsc`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, tpInsc)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = tpInsc.subclass or tpInsc
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpInsc', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpInsc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpInsc', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpInsc'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpInsc', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpInsc
class nrInsc(GeneratedsSuper):
    """Binding for the simple ``nrInsc`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, nrInsc)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = nrInsc.subclass or nrInsc
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='nrInsc', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrInsc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrInsc')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='nrInsc', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrInsc'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nrInsc', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrInsc
class infoEstatutario(GeneratedsSuper):
    """Informacoes de Trabalhador Estatutario (statutory-regime worker info).

    Carries the single child element ``tpPlanRP`` (integer).
    """
    subclass = None
    superclass = None
    def __init__(self, tpPlanRP=None):
        self.original_tagname_ = None
        self.tpPlanRP = tpPlanRP
    def factory(*args_, **kwargs_):
        # Let a registered subclass module supply a replacement class.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, infoEstatutario)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = infoEstatutario.subclass or infoEstatutario
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpPlanRP(self):
        return self.tpPlanRP
    def set_tpPlanRP(self, tpPlanRP):
        self.tpPlanRP = tpPlanRP
    def hasContent_(self):
        # tpPlanRP is the only child element.
        return self.tpPlanRP is not None
    def export(self, outfile, level, namespace_='', name_='infoEstatutario', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoEstatutario')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoEstatutario')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='infoEstatutario', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoEstatutario'):
        # No XML attributes are defined for this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoEstatutario', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpPlanRP is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpPlanRP>%s</%stpPlanRP>%s' % (namespace_, self.gds_format_integer(self.tpPlanRP, input_name='tpPlanRP'), namespace_, eol_))
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Local tag name without any namespace prefix.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpPlanRP':
            text_ = child_.text
            try:
                value_ = int(text_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpPlanRP = self.gds_validate_integer(value_, node, 'tpPlanRP')
# end class infoEstatutario
class tpPlanRP(GeneratedsSuper):
    """Binding for the simple ``tpPlanRP`` element (declares no attributes or children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None
    def factory(*args_, **kwargs_):
        # Honour subclass substitution registered via the subclass-module hook.
        if CurrentSubclassModule_ is not None:
            replacement = getSubclassFromModule_(
                CurrentSubclassModule_, tpPlanRP)
            if replacement is not None:
                return replacement(*args_, **kwargs_)
        target = tpPlanRP.subclass or tpPlanRP
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # This binding declares no members, so it never has content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpPlanRP', namespacedef_='', pretty_print=True):
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpPlanRP')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpPlanRP')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='tpPlanRP', pretty_print=pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpPlanRP'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpPlanRP', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpPlanRP
class infoContrato(GeneratedsSuper):
"""Informações do Contrato de Trabalho"""
subclass = None
superclass = None
    def __init__(self, codCargo=None, codFuncao=None, codCateg=None, codCarreira=None, dtIngrCarr=None, remuneracao=None, duracao=None, localTrabalho=None, horContratual=None, filiacaoSindical=None, alvaraJudicial=None, observacoes=None, servPubl=None):
        """Initialize the labour-contract binding.

        *dtIngrCarr* may be an ISO ``YYYY-MM-DD`` string (parsed to a date
        here) or an already-built date object.  *filiacaoSindical* and
        *observacoes* are repeatable elements and default to fresh empty
        lists per instance.
        """
        self.original_tagname_ = None
        self.codCargo = codCargo
        self.codFuncao = codFuncao
        self.codCateg = codCateg
        self.codCarreira = codCarreira
        # Accept either an ISO date string or a date-like object.
        if isinstance(dtIngrCarr, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtIngrCarr, '%Y-%m-%d').date()
        else:
            initvalue_ = dtIngrCarr
        self.dtIngrCarr = initvalue_
        self.remuneracao = remuneracao
        self.duracao = duracao
        self.localTrabalho = localTrabalho
        self.horContratual = horContratual
        # List members get a fresh list per instance (no shared mutable default).
        if filiacaoSindical is None:
            self.filiacaoSindical = []
        else:
            self.filiacaoSindical = filiacaoSindical
        self.alvaraJudicial = alvaraJudicial
        if observacoes is None:
            self.observacoes = []
        else:
            self.observacoes = observacoes
        self.servPubl = servPubl
    def factory(*args_, **kwargs_):
        # Prefer a user-registered subclass (generateDS subclass-module hook),
        # then the class-level `subclass` override, then this class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, infoContrato)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if infoContrato.subclass:
            return infoContrato.subclass(*args_, **kwargs_)
        else:
            return infoContrato(*args_, **kwargs_)
    factory = staticmethod(factory)
def get_codCargo(self): return self.codCargo
def set_codCargo(self, codCargo): self.codCargo = codCargo
def get_codFuncao(self): return self.codFuncao
def set_codFuncao(self, codFuncao): self.codFuncao = codFuncao
def get_codCateg(self): return self.codCateg
def set_codCateg(self, codCateg): self.codCateg = codCateg
def get_codCarreira(self): return self.codCarreira
def set_codCarreira(self, codCarreira): self.codCarreira = codCarreira
def get_dtIngrCarr(self): return self.dtIngrCarr
def set_dtIngrCarr(self, dtIngrCarr): self.dtIngrCarr = dtIngrCarr
def get_remuneracao(self): return self.remuneracao
def set_remuneracao(self, remuneracao): self.remuneracao = remuneracao
def get_duracao(self): return self.duracao
def set_duracao(self, duracao): self.duracao = duracao
def get_localTrabalho(self): return self.localTrabalho
def set_localTrabalho(self, localTrabalho): self.localTrabalho = localTrabalho
def get_horContratual(self): return self.horContratual
def set_horContratual(self, horContratual): self.horContratual = horContratual
def get_filiacaoSindical(self): return self.filiacaoSindical
def set_filiacaoSindical(self, filiacaoSindical): self.filiacaoSindical = filiacaoSindical
def add_filiacaoSindical(self, value): self.filiacaoSindical.append(value)
def insert_filiacaoSindical_at(self, index, value): self.filiacaoSindical.insert(index, value)
def replace_filiacaoSindical_at(self, index, value): self.filiacaoSindical[index] = value
def get_alvaraJudicial(self): return self.alvaraJudicial
def set_alvaraJudicial(self, alvaraJudicial): self.alvaraJudicial = alvaraJudicial
def get_observacoes(self): return self.observacoes
def set_observacoes(self, observacoes): self.observacoes = observacoes
def add_observacoes(self, value): self.observacoes.append(value)
def insert_observacoes_at(self, index, value): self.observacoes.insert(index, value)
def replace_observacoes_at(self, index, value): self.observacoes[index] = value
def get_servPubl(self): return self.servPubl
def set_servPubl(self, servPubl): self.servPubl = servPubl
def hasContent_(self):
if (
self.codCargo is not None or
self.codFuncao is not None or
self.codCateg is not None or
self.codCarreira is not None or
self.dtIngrCarr is not None or
self.remuneracao is not None or
self.duracao is not None or
self.localTrabalho is not None or
self.horContratual is not None or
self.filiacaoSindical or
self.alvaraJudicial is not None or
self.observacoes or
self.servPubl is not None
):
return True
else:
return False
    def export(self, outfile, level, namespace_='', name_='infoContrato', namespacedef_='', pretty_print=True):
        """Serialize this infoContrato element and its children to *outfile*,
        indented *level* deep; emits a self-closing tag when empty."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('infoContrato')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name seen when this object was parsed, if any.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='infoContrato')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='infoContrato', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No populated children: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='infoContrato'):
        # infoContrato defines no XML attributes; hook kept for subclass overrides.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='infoContrato', fromsubclass_=False, pretty_print=True):
        """Write each populated child element in schema order: simple values
        are formatted inline, complex children delegate to their own
        export(); list members emit one element per item."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.codCargo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCargo>%s</%scodCargo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codCargo), input_name='codCargo')), namespace_, eol_))
        if self.codFuncao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodFuncao>%s</%scodFuncao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codFuncao), input_name='codFuncao')), namespace_, eol_))
        if self.codCateg is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCateg>%s</%scodCateg>%s' % (namespace_, self.gds_format_integer(self.codCateg, input_name='codCateg'), namespace_, eol_))
        if self.codCarreira is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodCarreira>%s</%scodCarreira>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codCarreira), input_name='codCarreira')), namespace_, eol_))
        if self.dtIngrCarr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtIngrCarr>%s</%sdtIngrCarr>%s' % (namespace_, self.gds_format_date(self.dtIngrCarr, input_name='dtIngrCarr'), namespace_, eol_))
        # Complex children serialize themselves.
        if self.remuneracao is not None:
            self.remuneracao.export(outfile, level, namespace_, name_='remuneracao', pretty_print=pretty_print)
        if self.duracao is not None:
            self.duracao.export(outfile, level, namespace_, name_='duracao', pretty_print=pretty_print)
        if self.localTrabalho is not None:
            self.localTrabalho.export(outfile, level, namespace_, name_='localTrabalho', pretty_print=pretty_print)
        if self.horContratual is not None:
            self.horContratual.export(outfile, level, namespace_, name_='horContratual', pretty_print=pretty_print)
        for filiacaoSindical_ in self.filiacaoSindical:
            filiacaoSindical_.export(outfile, level, namespace_, name_='filiacaoSindical', pretty_print=pretty_print)
        if self.alvaraJudicial is not None:
            self.alvaraJudicial.export(outfile, level, namespace_, name_='alvaraJudicial', pretty_print=pretty_print)
        for observacoes_ in self.observacoes:
            observacoes_.export(outfile, level, namespace_, name_='observacoes', pretty_print=pretty_print)
        if self.servPubl is not None:
            self.servPubl.export(outfile, level, namespace_, name_='servPubl', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # infoContrato defines no XML attributes; nothing to parse here.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element *child_* (local tag *nodeName_*)
        into the matching member: strings/integers/dates are validated and
        converted, complex children are built via their generated classes,
        and list members append."""
        if nodeName_ == 'codCargo':
            codCargo_ = child_.text
            codCargo_ = self.gds_validate_string(codCargo_, node, 'codCargo')
            self.codCargo = codCargo_
        elif nodeName_ == 'codFuncao':
            codFuncao_ = child_.text
            codFuncao_ = self.gds_validate_string(codFuncao_, node, 'codFuncao')
            self.codFuncao = codFuncao_
        elif nodeName_ == 'codCateg':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'codCateg')
            self.codCateg = ival_
        elif nodeName_ == 'codCarreira':
            codCarreira_ = child_.text
            codCarreira_ = self.gds_validate_string(codCarreira_, node, 'codCarreira')
            self.codCarreira = codCarreira_
        elif nodeName_ == 'dtIngrCarr':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtIngrCarr = dval_
        elif nodeName_ == 'remuneracao':
            obj_ = TRemun.factory()
            obj_.build(child_)
            self.remuneracao = obj_
            obj_.original_tagname_ = 'remuneracao'
        elif nodeName_ == 'duracao':
            obj_ = duracao.factory()
            obj_.build(child_)
            self.duracao = obj_
            obj_.original_tagname_ = 'duracao'
        elif nodeName_ == 'localTrabalho':
            obj_ = localTrabalho.factory()
            obj_.build(child_)
            self.localTrabalho = obj_
            obj_.original_tagname_ = 'localTrabalho'
        elif nodeName_ == 'horContratual':
            obj_ = horContratual.factory()
            obj_.build(child_)
            self.horContratual = obj_
            obj_.original_tagname_ = 'horContratual'
        elif nodeName_ == 'filiacaoSindical':
            obj_ = filiacaoSindical.factory()
            obj_.build(child_)
            self.filiacaoSindical.append(obj_)
            obj_.original_tagname_ = 'filiacaoSindical'
        elif nodeName_ == 'alvaraJudicial':
            obj_ = alvaraJudicial.factory()
            obj_.build(child_)
            self.alvaraJudicial = obj_
            obj_.original_tagname_ = 'alvaraJudicial'
        elif nodeName_ == 'observacoes':
            obj_ = observacoes.factory()
            obj_.build(child_)
            self.observacoes.append(obj_)
            obj_.original_tagname_ = 'observacoes'
        elif nodeName_ == 'servPubl':
            obj_ = servPubl.factory()
            obj_.build(child_)
            self.servPubl = obj_
            obj_.original_tagname_ = 'servPubl'
# end class infoContrato
class codCargo(GeneratedsSuper):
    """Generated binding for the empty <codCargo> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a codCargo (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, codCargo)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = codCargo.subclass or codCargo
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='codCargo', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCargo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCargo')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCargo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCargo'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCargo', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codCargo
class codFuncao(GeneratedsSuper):
    """Generated binding for the empty <codFuncao> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a codFuncao (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, codFuncao)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = codFuncao.subclass or codFuncao
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='codFuncao', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codFuncao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codFuncao')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codFuncao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codFuncao'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codFuncao', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codFuncao
class codCateg(GeneratedsSuper):
    """Generated binding for the empty <codCateg> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a codCateg (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, codCateg)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = codCateg.subclass or codCateg
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='codCateg', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCateg')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCateg')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCateg', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCateg'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCateg', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codCateg
class codCarreira(GeneratedsSuper):
    """Generated binding for the empty <codCarreira> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a codCarreira (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, codCarreira)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = codCarreira.subclass or codCarreira
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='codCarreira', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codCarreira')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codCarreira')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codCarreira', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codCarreira'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='codCarreira', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codCarreira
class dtIngrCarr(GeneratedsSuper):
    """Generated binding for the empty <dtIngrCarr> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a dtIngrCarr (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, dtIngrCarr)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = dtIngrCarr.subclass or dtIngrCarr
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='dtIngrCarr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtIngrCarr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtIngrCarr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtIngrCarr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtIngrCarr'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtIngrCarr', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtIngrCarr
class duracao(GeneratedsSuper):
    """Duration of the employment contract (original: "Duração do Contrato
    de Trabalho"): contract type code plus, for fixed-term contracts, the
    termination date."""
    subclass = None
    superclass = None
    def __init__(self, tpContr=None, dtTerm=None):
        self.original_tagname_ = None
        # tpContr: contract-type code (integer per the eSocial schema).
        self.tpContr = tpContr
        # dtTerm may arrive as an ISO 'YYYY-MM-DD' string or a date object;
        # normalize to datetime.date.
        if isinstance(dtTerm, BaseStrType_):
            initvalue_ = datetime_.datetime.strptime(dtTerm, '%Y-%m-%d').date()
        else:
            initvalue_ = dtTerm
        self.dtTerm = initvalue_
    def factory(*args_, **kwargs_):
        # Subclass registered in CurrentSubclassModule_ wins, then the
        # class-level `subclass` hook, then duracao itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, duracao)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if duracao.subclass:
            return duracao.subclass(*args_, **kwargs_)
        else:
            return duracao(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tpContr(self): return self.tpContr
    def set_tpContr(self, tpContr): self.tpContr = tpContr
    def get_dtTerm(self): return self.dtTerm
    def set_dtTerm(self, dtTerm): self.dtTerm = dtTerm
    def hasContent_(self):
        # True when at least one child element is populated.
        if (
            self.tpContr is not None or
            self.dtTerm is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='duracao', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('duracao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='duracao')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='duracao', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='duracao'):
        # No XML attributes defined for duracao.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='duracao', fromsubclass_=False, pretty_print=True):
        """Write the populated child elements in schema order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpContr is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpContr>%s</%stpContr>%s' % (namespace_, self.gds_format_integer(self.tpContr, input_name='tpContr'), namespace_, eol_))
        if self.dtTerm is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdtTerm>%s</%sdtTerm>%s' % (namespace_, self.gds_format_date(self.dtTerm, input_name='dtTerm'), namespace_, eol_))
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member."""
        if nodeName_ == 'tpContr':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpContr')
            self.tpContr = ival_
        elif nodeName_ == 'dtTerm':
            sval_ = child_.text
            dval_ = self.gds_parse_date(sval_)
            self.dtTerm = dval_
# end class duracao
class tpContr(GeneratedsSuper):
    """Generated binding for the empty <tpContr> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a tpContr (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, tpContr)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = tpContr.subclass or tpContr
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='tpContr', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpContr')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpContr')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpContr', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpContr'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='tpContr', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpContr
class dtTerm(GeneratedsSuper):
    """Generated binding for the empty <dtTerm> element (no attributes,
    no child elements)."""
    subclass = None
    superclass = None

    def __init__(self):
        # Tag name seen in the parsed document when it differs from the default.
        self.original_tagname_ = None

    def factory(*args_, **kwargs_):
        """Create a dtTerm (or registered subclass override) instance."""
        if CurrentSubclassModule_ is not None:
            override_cls = getSubclassFromModule_(
                CurrentSubclassModule_, dtTerm)
            if override_cls is not None:
                return override_cls(*args_, **kwargs_)
        make = dtTerm.subclass or dtTerm
        return make(*args_, **kwargs_)
    factory = staticmethod(factory)

    def hasContent_(self):
        # An empty type never has content (original generated `if ():` is
        # an always-false empty tuple).
        return False

    def export(self, outfile, level, namespace_='', name_='dtTerm', namespacedef_='', pretty_print=True):
        """Write this element to *outfile* as XML (self-closing when empty)."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dtTerm')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dtTerm')
        if not self.hasContent_():
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dtTerm', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dtTerm'):
        # No attributes defined for this element.
        pass

    def exportChildren(self, outfile, level, namespace_='', name_='dtTerm', fromsubclass_=False, pretty_print=True):
        # No child elements defined for this element.
        pass

    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dtTerm
class localTrabalho(GeneratedsSuper):
    """Workplace information (original: "Informações do local de trabalho"):
    either a general workplace (localTrabGeral) or a domestic-worker address
    (localTrabDom)."""
    subclass = None
    superclass = None
    def __init__(self, localTrabGeral=None, localTrabDom=None):
        self.original_tagname_ = None
        # localTrabGeral: TLocalTrab instance (general workplace).
        self.localTrabGeral = localTrabGeral
        # localTrabDom: TEnderecoBrasil instance (domestic worker's address).
        self.localTrabDom = localTrabDom
    def factory(*args_, **kwargs_):
        # Subclass registered in CurrentSubclassModule_ wins, then the
        # class-level `subclass` hook, then localTrabalho itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, localTrabalho)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if localTrabalho.subclass:
            return localTrabalho.subclass(*args_, **kwargs_)
        else:
            return localTrabalho(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_localTrabGeral(self): return self.localTrabGeral
    def set_localTrabGeral(self, localTrabGeral): self.localTrabGeral = localTrabGeral
    def get_localTrabDom(self): return self.localTrabDom
    def set_localTrabDom(self, localTrabDom): self.localTrabDom = localTrabDom
    def hasContent_(self):
        # True when at least one child element is populated.
        if (
            self.localTrabGeral is not None or
            self.localTrabDom is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='localTrabalho', namespacedef_='', pretty_print=True):
        """Serialize this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('localTrabalho')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='localTrabalho')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='localTrabalho', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='localTrabalho'):
        # No XML attributes defined for localTrabalho.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='localTrabalho', fromsubclass_=False, pretty_print=True):
        """Write the populated complex children; each serializes itself."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.localTrabGeral is not None:
            self.localTrabGeral.export(outfile, level, namespace_, name_='localTrabGeral', pretty_print=pretty_print)
        if self.localTrabDom is not None:
            self.localTrabDom.export(outfile, level, namespace_, name_='localTrabDom', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from an ElementTree *node*; return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member via its
        generated binding class."""
        if nodeName_ == 'localTrabGeral':
            obj_ = TLocalTrab.factory()
            obj_.build(child_)
            self.localTrabGeral = obj_
            obj_.original_tagname_ = 'localTrabGeral'
        elif nodeName_ == 'localTrabDom':
            obj_ = TEnderecoBrasil.factory()
            obj_.build(child_)
            self.localTrabDom = obj_
            obj_.original_tagname_ = 'localTrabDom'
# end class localTrabalho
class horContratual(GeneratedsSuper):
"""Informações do Horário Contratual do Trabalhador. O preenchimento é
obrigatório se {tpRegJor} = [1]."""
subclass = None
superclass = None
def __init__(self, qtdHrsSem=None, tpJornada=None, dscTpJorn=None, tmpParc=None, horario=None):
self.original_tagname_ = None
self.qtdHrsSem = qtdHrsSem
self.tpJornada = tpJornada
self.dscTpJorn = dscTpJorn
self.tmpParc = tmpParc
if horario is None:
self.horario = []
else:
self.horario = horario
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, horContratual)
if subclass is not None:
return subclass(*args_, **kwargs_)
if horContratual.subclass:
return horContratual.subclass(*args_, **kwargs_)
else:
return horContratual(*args_, **kwargs_)
factory = staticmethod(factory)
def get_qtdHrsSem(self): return self.qtdHrsSem
def set_qtdHrsSem(self, qtdHrsSem): self.qtdHrsSem = qtdHrsSem
def get_tpJornada(self): return self.tpJornada
def set_tpJornada(self, tpJornada): self.tpJornada = tpJornada
def get_dscTpJorn(self): return self.dscTpJorn
def set_dscTpJorn(self, dscTpJorn): self.dscTpJorn = dscTpJorn
def get_tmpParc(self): return self.tmpParc
def set_tmpParc(self, tmpParc): self.tmpParc = tmpParc
def get_horario(self): return self.horario
def set_horario(self, horario): self.horario = horario
def add_horario(self, value): self.horario.append(value)
def insert_horario_at(self, index, value): self.horario.insert(index, value)
def replace_horario_at(self, index, value): self.horario[index] = value
def hasContent_(self):
if (
self.qtdHrsSem is not None or
self.tpJornada is not None or
self.dscTpJorn is not None or
self.tmpParc is not None or
self.horario
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='horContratual', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('horContratual')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='horContratual')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='horContratual', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='horContratual'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='horContratual', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.qtdHrsSem is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sqtdHrsSem>%s</%sqtdHrsSem>%s' % (namespace_, self.gds_format_float(self.qtdHrsSem, input_name='qtdHrsSem'), namespace_, eol_))
if self.tpJornada is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stpJornada>%s</%stpJornada>%s' % (namespace_, self.gds_format_integer(self.tpJornada, input_name='tpJornada'), namespace_, eol_))
if self.dscTpJorn is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdscTpJorn>%s</%sdscTpJorn>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscTpJorn), input_name='dscTpJorn')), namespace_, eol_))
if self.tmpParc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stmpParc>%s</%stmpParc>%s' % (namespace_, self.gds_format_integer(self.tmpParc, input_name='tmpParc'), namespace_, eol_))
for horario_ in self.horario:
horario_.export(outfile, level, namespace_, name_='horario', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element *child_* onto the matching field.

        Numeric children are converted and validated; conversion failures
        are reported through raise_parse_error with the offending node.
        """
        if nodeName_ == 'qtdHrsSem':
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                # TypeError covers an empty element (text is None).
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            fval_ = self.gds_validate_float(fval_, node, 'qtdHrsSem')
            self.qtdHrsSem = fval_
        elif nodeName_ == 'tpJornada':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpJornada')
            self.tpJornada = ival_
        elif nodeName_ == 'dscTpJorn':
            dscTpJorn_ = child_.text
            dscTpJorn_ = self.gds_validate_string(dscTpJorn_, node, 'dscTpJorn')
            self.dscTpJorn = dscTpJorn_
        elif nodeName_ == 'tmpParc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tmpParc')
            self.tmpParc = ival_
        elif nodeName_ == 'horario':
            # Repeating complex child: build a THorario and append it.
            obj_ = THorario.factory()
            obj_.build(child_)
            self.horario.append(obj_)
            # Remember the tag name used so export can reproduce it.
            obj_.original_tagname_ = 'horario'
# end class horContratual
class qtdHrsSem(GeneratedsSuper):
    """Empty generateDS binding for the <qtdHrsSem> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate qtdHrsSem, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, qtdHrsSem)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = qtdHrsSem.subclass or qtdHrsSem
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='qtdHrsSem', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('qtdHrsSem')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qtdHrsSem')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='qtdHrsSem', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='qtdHrsSem'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='qtdHrsSem', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qtdHrsSem
class tpJornada(GeneratedsSuper):
    """Empty generateDS binding for the <tpJornada> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate tpJornada, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, tpJornada)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = tpJornada.subclass or tpJornada
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='tpJornada', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpJornada')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpJornada')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpJornada', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpJornada'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpJornada', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpJornada
class dscTpJorn(GeneratedsSuper):
    """Empty generateDS binding for the <dscTpJorn> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate dscTpJorn, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, dscTpJorn)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = dscTpJorn.subclass or dscTpJorn
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='dscTpJorn', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscTpJorn')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscTpJorn')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscTpJorn', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscTpJorn'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dscTpJorn', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscTpJorn
class tmpParc(GeneratedsSuper):
    """Empty generateDS binding for the <tmpParc> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate tmpParc, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, tmpParc)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = tmpParc.subclass or tmpParc
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='tmpParc', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tmpParc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tmpParc')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='tmpParc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tmpParc'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tmpParc', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tmpParc
class filiacaoSindical(GeneratedsSuper):
    """Worker's union membership (Filiação Sindical do Trabalhador)."""
    subclass = None
    superclass = None
    def __init__(self, cnpjSindTrab=None):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
        self.cnpjSindTrab = cnpjSindTrab
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate filiacaoSindical, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, filiacaoSindical)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = filiacaoSindical.subclass or filiacaoSindical
        return cls(*args_, **kwargs_)
    def get_cnpjSindTrab(self): return self.cnpjSindTrab
    def set_cnpjSindTrab(self, cnpjSindTrab): self.cnpjSindTrab = cnpjSindTrab
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.cnpjSindTrab is not None
    def export(self, outfile, level, namespace_='', name_='filiacaoSindical', namespacedef_='', pretty_print=True):
        """Write this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('filiacaoSindical')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='filiacaoSindical')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % eol_)
            return
        outfile.write('>%s' % eol_)
        self.exportChildren(outfile, level + 1, namespace_='', name_='filiacaoSindical', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='filiacaoSindical'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='filiacaoSindical', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.cnpjSindTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scnpjSindTrab>%s</%scnpjSindTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cnpjSindTrab), input_name='cnpjSindTrab')), namespace_, eol_))
    def build(self, node):
        """Populate this object from the parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cnpjSindTrab':
            value_ = self.gds_validate_string(child_.text, node, 'cnpjSindTrab')
            self.cnpjSindTrab = value_
# end class filiacaoSindical
class cnpjSindTrab(GeneratedsSuper):
    """Empty generateDS binding for the <cnpjSindTrab> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate cnpjSindTrab, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, cnpjSindTrab)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = cnpjSindTrab.subclass or cnpjSindTrab
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='cnpjSindTrab', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cnpjSindTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cnpjSindTrab')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='cnpjSindTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cnpjSindTrab'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cnpjSindTrab', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cnpjSindTrab
class alvaraJudicial(GeneratedsSuper):
    """Judicial authorization (alvará judicial) details for hiring minors
    under 14 in any category, or those over 14 and under 16 in a
    category other than "Aprendiz" (apprentice)."""
    subclass = None
    superclass = None
    def __init__(self, nrProcJud=None):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
        self.nrProcJud = nrProcJud
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate alvaraJudicial, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, alvaraJudicial)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = alvaraJudicial.subclass or alvaraJudicial
        return cls(*args_, **kwargs_)
    def get_nrProcJud(self): return self.nrProcJud
    def set_nrProcJud(self, nrProcJud): self.nrProcJud = nrProcJud
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.nrProcJud is not None
    def export(self, outfile, level, namespace_='', name_='alvaraJudicial', namespacedef_='', pretty_print=True):
        """Write this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('alvaraJudicial')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='alvaraJudicial')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % eol_)
            return
        outfile.write('>%s' % eol_)
        self.exportChildren(outfile, level + 1, namespace_='', name_='alvaraJudicial', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='alvaraJudicial'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='alvaraJudicial', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.nrProcJud is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrProcJud>%s</%snrProcJud>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrProcJud), input_name='nrProcJud')), namespace_, eol_))
    def build(self, node):
        """Populate this object from the parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'nrProcJud':
            value_ = self.gds_validate_string(child_.text, node, 'nrProcJud')
            self.nrProcJud = value_
# end class alvaraJudicial
class nrProcJud(GeneratedsSuper):
    """Empty generateDS binding for the <nrProcJud> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate nrProcJud, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, nrProcJud)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = nrProcJud.subclass or nrProcJud
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='nrProcJud', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrProcJud')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrProcJud')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrProcJud', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrProcJud'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nrProcJud', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrProcJud
class observacoes(GeneratedsSuper):
    """Employment contract remarks (Observações do contrato de trabalho)."""
    subclass = None
    superclass = None
    def __init__(self, observacao=None):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
        self.observacao = observacao
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate observacoes, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, observacoes)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = observacoes.subclass or observacoes
        return cls(*args_, **kwargs_)
    def get_observacao(self): return self.observacao
    def set_observacao(self, observacao): self.observacao = observacao
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.observacao is not None
    def export(self, outfile, level, namespace_='', name_='observacoes', namespacedef_='', pretty_print=True):
        """Write this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('observacoes')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='observacoes')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % eol_)
            return
        outfile.write('>%s' % eol_)
        self.exportChildren(outfile, level + 1, namespace_='', name_='observacoes', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='observacoes'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='observacoes', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.observacao is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sobservacao>%s</%sobservacao>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.observacao), input_name='observacao')), namespace_, eol_))
    def build(self, node):
        """Populate this object from the parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'observacao':
            value_ = self.gds_validate_string(child_.text, node, 'observacao')
            self.observacao = value_
# end class observacoes
class observacao(GeneratedsSuper):
    """Empty generateDS binding for the <observacao> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate observacao, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, observacao)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = observacao.subclass or observacao
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='observacao', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('observacao')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='observacao')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='observacao', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='observacao'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='observacao', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class observacao
class servPubl(GeneratedsSuper):
    """Changes specific to civil servants (Alterações inerentes ao servidor público)."""
    subclass = None
    superclass = None
    def __init__(self, mtvAlter=None):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
        self.mtvAlter = mtvAlter
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate servPubl, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, servPubl)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = servPubl.subclass or servPubl
        return cls(*args_, **kwargs_)
    def get_mtvAlter(self): return self.mtvAlter
    def set_mtvAlter(self, mtvAlter): self.mtvAlter = mtvAlter
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        return self.mtvAlter is not None
    def export(self, outfile, level, namespace_='', name_='servPubl', namespacedef_='', pretty_print=True):
        """Write this element (and its children) to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('servPubl')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='servPubl')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % eol_)
            return
        outfile.write('>%s' % eol_)
        self.exportChildren(outfile, level + 1, namespace_='', name_='servPubl', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='servPubl'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='servPubl', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.mtvAlter is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smtvAlter>%s</%smtvAlter>%s' % (namespace_, self.gds_format_integer(self.mtvAlter, input_name='mtvAlter'), namespace_, eol_))
    def build(self, node):
        """Populate this object from the parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'mtvAlter':
            text_ = child_.text
            try:
                parsed_ = int(text_)
            except (TypeError, ValueError) as exp:
                # TypeError covers an empty element (text is None).
                raise_parse_error(child_, 'requires integer: %s' % exp)
            parsed_ = self.gds_validate_integer(parsed_, node, 'mtvAlter')
            self.mtvAlter = parsed_
# end class servPubl
class mtvAlter(GeneratedsSuper):
    """Empty generateDS binding for the <mtvAlter> XML element."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually used in the parsed document, if different.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Instantiate mtvAlter, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(CurrentSubclassModule_, mtvAlter)
            if override is not None:
                return override(*args_, **kwargs_)
        cls = mtvAlter.subclass or mtvAlter
        return cls(*args_, **kwargs_)
    def hasContent_(self):
        # This type declares no children, so there is never any content.
        return False
    def export(self, outfile, level, namespace_='', name_='mtvAlter', namespacedef_='', pretty_print=True):
        """Serialize this (always empty) element to *outfile*."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('mtvAlter')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        ns_attr_ = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_attr_))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='mtvAlter')
        if self.hasContent_():
            outfile.write('>%s' % eol_)
            self.exportChildren(outfile, level + 1, namespace_='', name_='mtvAlter', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % eol_)
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='mtvAlter'):
        # No attributes to serialize.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='mtvAlter', fromsubclass_=False, pretty_print=True):
        # No children to serialize.
        pass
    def build(self, node):
        """Populate from XML *node*; returns self for chaining."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element_ in node:
            tag_ = Tag_pattern_.match(element_.tag).groups()[-1]
            self.buildChildren(element_, node, tag_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class mtvAlter
class TIdeEveTrab(GeneratedsSuper):
"""Identificação do evento"""
subclass = None
superclass = None
def __init__(self, indRetif=None, nrRecibo=None, tpAmb=None, procEmi=None, verProc=None):
self.original_tagname_ = None
self.indRetif = indRetif
self.nrRecibo = nrRecibo
self.tpAmb = tpAmb
self.procEmi = procEmi
self.verProc = verProc
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, TIdeEveTrab)
if subclass is not None:
return subclass(*args_, **kwargs_)
if TIdeEveTrab.subclass:
return TIdeEveTrab.subclass(*args_, **kwargs_)
else:
return TIdeEveTrab(*args_, **kwargs_)
factory = staticmethod(factory)
def get_indRetif(self): return self.indRetif
def set_indRetif(self, indRetif): self.indRetif = indRetif
def get_nrRecibo(self): return self.nrRecibo
def set_nrRecibo(self, nrRecibo): self.nrRecibo = nrRecibo
def get_tpAmb(self): return self.tpAmb
def set_tpAmb(self, tpAmb): self.tpAmb = tpAmb
def get_procEmi(self): return self.procEmi
def set_procEmi(self, procEmi): self.procEmi = procEmi
def get_verProc(self): return self.verProc
def set_verProc(self, verProc): self.verProc = verProc
def hasContent_(self):
if (
self.indRetif is not None or
self.nrRecibo is not None or
self.tpAmb is not None or
self.procEmi is not None or
self.verProc is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='TIdeEveTrab', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('TIdeEveTrab')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='TIdeEveTrab')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='TIdeEveTrab', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TIdeEveTrab'):
pass
def exportChildren(self, outfile, level, namespace_='', name_='TIdeEveTrab', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.indRetif is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sindRetif>%s</%sindRetif>%s' % (namespace_, self.gds_format_integer(self.indRetif, input_name='indRetif'), namespace_, eol_))
if self.nrRecibo is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%snrRecibo>%s</%snrRecibo>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrRecibo), input_name='nrRecibo')), namespace_, eol_))
if self.tpAmb is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%stpAmb>%s</%stpAmb>%s' % (namespace_, self.gds_format_integer(self.tpAmb, input_name='tpAmb'), namespace_, eol_))
if self.procEmi is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sprocEmi>%s</%sprocEmi>%s' % (namespace_, self.gds_format_integer(self.procEmi, input_name='procEmi'), namespace_, eol_))
if self.verProc is not None:
showIndent(outfile, level, pretty_print)
outfile.write('<%sverProc>%s</%sverProc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.verProc), input_name='verProc')), namespace_, eol_))
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        # TIdeEveTrab declares no XML attributes, so nothing to parse.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element of TIdeEveTrab into the matching member.

        Integer members (indRetif, tpAmb, procEmi) are converted with int()
        and raise a parse error on bad input; string members are validated
        as-is. Unknown tag names are silently ignored.
        """
        if nodeName_ == 'indRetif':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'indRetif')
            self.indRetif = ival_
        elif nodeName_ == 'nrRecibo':
            nrRecibo_ = child_.text
            nrRecibo_ = self.gds_validate_string(nrRecibo_, node, 'nrRecibo')
            self.nrRecibo = nrRecibo_
        elif nodeName_ == 'tpAmb':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'tpAmb')
            self.tpAmb = ival_
        elif nodeName_ == 'procEmi':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'procEmi')
            self.procEmi = ival_
        elif nodeName_ == 'verProc':
            verProc_ = child_.text
            verProc_ = self.gds_validate_string(verProc_, node, 'verProc')
            self.verProc = verProc_
# end class TIdeEveTrab
class indRetif(GeneratedsSuper):
    """Generated binding for the simple <indRetif> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, indRetif)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if indRetif.subclass:
            return indRetif.subclass(*args_, **kwargs_)
        return indRetif(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='indRetif', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('indRetif')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='indRetif')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='indRetif', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='indRetif'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='indRetif', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class indRetif
class nrRecibo(GeneratedsSuper):
    """Generated binding for the simple <nrRecibo> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nrRecibo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nrRecibo.subclass:
            return nrRecibo.subclass(*args_, **kwargs_)
        return nrRecibo(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='nrRecibo', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrRecibo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrRecibo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrRecibo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrRecibo'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='nrRecibo', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class nrRecibo
class tpAmb(GeneratedsSuper):
    """Generated binding for the simple <tpAmb> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, tpAmb)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if tpAmb.subclass:
            return tpAmb.subclass(*args_, **kwargs_)
        return tpAmb(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='tpAmb', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpAmb')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpAmb')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpAmb', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpAmb'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='tpAmb', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class tpAmb
class procEmi(GeneratedsSuper):
    """Generated binding for the simple <procEmi> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, procEmi)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if procEmi.subclass:
            return procEmi.subclass(*args_, **kwargs_)
        return procEmi(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='procEmi', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('procEmi')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='procEmi')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='procEmi', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='procEmi'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='procEmi', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class procEmi
class verProc(GeneratedsSuper):
    """Generated binding for the simple <verProc> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, verProc)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if verProc.subclass:
            return verProc.subclass(*args_, **kwargs_)
        return verProc(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='verProc', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('verProc')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='verProc')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='verProc', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='verProc'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='verProc', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class verProc
class TEmpregador(GeneratedsSuper):
    """Generated binding for the TEmpregador complex type.

    Members:
        tpInsc -- integer-valued child element
        nrInsc -- string-valued child element
    """
    subclass = None
    superclass = None

    def __init__(self, tpInsc=None, nrInsc=None):
        self.original_tagname_ = None
        self.tpInsc = tpInsc
        self.nrInsc = nrInsc

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEmpregador)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEmpregador.subclass:
            return TEmpregador.subclass(*args_, **kwargs_)
        return TEmpregador(*args_, **kwargs_)

    def get_tpInsc(self):
        return self.tpInsc

    def set_tpInsc(self, tpInsc):
        self.tpInsc = tpInsc

    def get_nrInsc(self):
        return self.nrInsc

    def set_nrInsc(self, nrInsc):
        self.nrInsc = nrInsc

    def hasContent_(self):
        # Content exists when any member is populated.
        return (
            self.tpInsc is not None or
            self.nrInsc is not None
        )

    def export(self, outfile, level, namespace_='', name_='TEmpregador', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEmpregador')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEmpregador')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEmpregador', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEmpregador'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='TEmpregador', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpInsc = self.gds_validate_integer(ival_, node, 'tpInsc')
        elif nodeName_ == 'nrInsc':
            self.nrInsc = self.gds_validate_string(child_.text, node, 'nrInsc')
# end class TEmpregador
class TIdeVinculoNisObrig(GeneratedsSuper):
    """Employment relationship (vinculo) information.

    Members:
        cpfTrab, nisTrab, matricula -- optional string-valued child elements
    """
    subclass = None
    superclass = None

    def __init__(self, cpfTrab=None, nisTrab=None, matricula=None):
        self.original_tagname_ = None
        self.cpfTrab = cpfTrab
        self.nisTrab = nisTrab
        self.matricula = matricula

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TIdeVinculoNisObrig)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TIdeVinculoNisObrig.subclass:
            return TIdeVinculoNisObrig.subclass(*args_, **kwargs_)
        return TIdeVinculoNisObrig(*args_, **kwargs_)

    def get_cpfTrab(self):
        return self.cpfTrab

    def set_cpfTrab(self, cpfTrab):
        self.cpfTrab = cpfTrab

    def get_nisTrab(self):
        return self.nisTrab

    def set_nisTrab(self, nisTrab):
        self.nisTrab = nisTrab

    def get_matricula(self):
        return self.matricula

    def set_matricula(self, matricula):
        self.matricula = matricula

    def hasContent_(self):
        # Content exists when any member is populated.
        return (
            self.cpfTrab is not None or
            self.nisTrab is not None or
            self.matricula is not None
        )

    def export(self, outfile, level, namespace_='', name_='TIdeVinculoNisObrig', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TIdeVinculoNisObrig')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TIdeVinculoNisObrig')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TIdeVinculoNisObrig', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TIdeVinculoNisObrig'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='TIdeVinculoNisObrig', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.cpfTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scpfTrab>%s</%scpfTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cpfTrab), input_name='cpfTrab')), namespace_, eol_))
        if self.nisTrab is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snisTrab>%s</%snisTrab>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nisTrab), input_name='nisTrab')), namespace_, eol_))
        if self.matricula is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%smatricula>%s</%smatricula>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.matricula), input_name='matricula')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'cpfTrab':
            self.cpfTrab = self.gds_validate_string(child_.text, node, 'cpfTrab')
        elif nodeName_ == 'nisTrab':
            self.nisTrab = self.gds_validate_string(child_.text, node, 'nisTrab')
        elif nodeName_ == 'matricula':
            self.matricula = self.gds_validate_string(child_.text, node, 'matricula')
# end class TIdeVinculoNisObrig
class cpfTrab(GeneratedsSuper):
    """Generated binding for the simple <cpfTrab> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, cpfTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if cpfTrab.subclass:
            return cpfTrab.subclass(*args_, **kwargs_)
        return cpfTrab(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='cpfTrab', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cpfTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cpfTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cpfTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cpfTrab'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='cpfTrab', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class cpfTrab
class nisTrab(GeneratedsSuper):
    """Generated binding for the simple <nisTrab> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, nisTrab)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if nisTrab.subclass:
            return nisTrab.subclass(*args_, **kwargs_)
        return nisTrab(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='nisTrab', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nisTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nisTrab')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nisTrab', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nisTrab'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='nisTrab', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class nisTrab
class matricula(GeneratedsSuper):
    """Generated binding for the simple <matricula> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, matricula)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if matricula.subclass:
            return matricula.subclass(*args_, **kwargs_)
        return matricula(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='matricula', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('matricula')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='matricula')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='matricula', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='matricula'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='matricula', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class matricula
class TRemun(GeneratedsSuper):
    """Remuneration and payment periodicity.

    Members:
        vrSalFx -- float-valued child element
        undSalFixo -- integer-valued child element
        dscSalVar -- string-valued child element
    """
    subclass = None
    superclass = None

    def __init__(self, vrSalFx=None, undSalFixo=None, dscSalVar=None):
        self.original_tagname_ = None
        self.vrSalFx = vrSalFx
        self.undSalFixo = undSalFixo
        self.dscSalVar = dscSalVar

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TRemun)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TRemun.subclass:
            return TRemun.subclass(*args_, **kwargs_)
        return TRemun(*args_, **kwargs_)

    def get_vrSalFx(self):
        return self.vrSalFx

    def set_vrSalFx(self, vrSalFx):
        self.vrSalFx = vrSalFx

    def get_undSalFixo(self):
        return self.undSalFixo

    def set_undSalFixo(self, undSalFixo):
        self.undSalFixo = undSalFixo

    def get_dscSalVar(self):
        return self.dscSalVar

    def set_dscSalVar(self, dscSalVar):
        self.dscSalVar = dscSalVar

    def hasContent_(self):
        # Content exists when any member is populated.
        return (
            self.vrSalFx is not None or
            self.undSalFixo is not None or
            self.dscSalVar is not None
        )

    def export(self, outfile, level, namespace_='', name_='TRemun', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TRemun')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TRemun')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TRemun', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TRemun'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='TRemun', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.vrSalFx is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%svrSalFx>%s</%svrSalFx>%s' % (namespace_, self.gds_format_float(self.vrSalFx, input_name='vrSalFx'), namespace_, eol_))
        if self.undSalFixo is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sundSalFixo>%s</%sundSalFixo>%s' % (namespace_, self.gds_format_integer(self.undSalFixo, input_name='undSalFixo'), namespace_, eol_))
        if self.dscSalVar is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscSalVar>%s</%sdscSalVar>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscSalVar), input_name='dscSalVar')), namespace_, eol_))

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'vrSalFx':
            sval_ = child_.text
            try:
                fval_ = float(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires float or double: %s' % exp)
            self.vrSalFx = self.gds_validate_float(fval_, node, 'vrSalFx')
        elif nodeName_ == 'undSalFixo':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.undSalFixo = self.gds_validate_integer(ival_, node, 'undSalFixo')
        elif nodeName_ == 'dscSalVar':
            self.dscSalVar = self.gds_validate_string(child_.text, node, 'dscSalVar')
# end class TRemun
class vrSalFx(GeneratedsSuper):
    """Generated binding for the simple <vrSalFx> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, vrSalFx)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if vrSalFx.subclass:
            return vrSalFx.subclass(*args_, **kwargs_)
        return vrSalFx(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='vrSalFx', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('vrSalFx')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='vrSalFx')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='vrSalFx', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='vrSalFx'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='vrSalFx', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class vrSalFx
class undSalFixo(GeneratedsSuper):
    """Generated binding for the simple <undSalFixo> XML element.

    The schema defines no attributes and no child members for this
    element, so most hooks below are intentionally empty.
    """
    subclass = None
    superclass = None

    def __init__(self):
        self.original_tagname_ = None

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour externally registered subclasses before falling back to
        # this module's own class.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, undSalFixo)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if undSalFixo.subclass:
            return undSalFixo.subclass(*args_, **kwargs_)
        return undSalFixo(*args_, **kwargs_)

    def hasContent_(self):
        # No members are defined for this element type.
        return False

    def export(self, outfile, level, namespace_='', name_='undSalFixo', namespacedef_='', pretty_print=True):
        """Serialize this instance to *outfile* as an XML element."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('undSalFixo')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='undSalFixo')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='undSalFixo', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))

    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='undSalFixo'):
        pass  # no attributes in the schema

    def exportChildren(self, outfile, level, namespace_='', name_='undSalFixo', fromsubclass_=False, pretty_print=True):
        pass  # no child elements in the schema

    def build(self, node):
        """Populate this instance from an ElementTree/lxml element *node*."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            local_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_name)
        return self

    def buildAttributes(self, node, attrs, already_processed):
        pass  # no attributes to parse

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass  # no children to parse
# end class undSalFixo
class dscSalVar(GeneratedsSuper):
    """Generated binding for the <dscSalVar> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, dscSalVar)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = dscSalVar.subclass or dscSalVar
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='dscSalVar', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscSalVar')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscSalVar')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscSalVar', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscSalVar'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dscSalVar', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscSalVar
class TLocalTrab(GeneratedsSuper):
    """Informações do Local de Trabalho (workplace information).

    Optional children: tpInsc (serialized as integer), nrInsc and
    descComp (serialized as XML-escaped strings).
    """
    subclass = None
    superclass = None
    def __init__(self, tpInsc=None, nrInsc=None, descComp=None):
        # Tag name actually seen while parsing, when it differs.
        self.original_tagname_ = None
        self.tpInsc = tpInsc        # integer-formatted on export
        self.nrInsc = nrInsc        # string-formatted on export
        self.descComp = descComp    # string-formatted on export
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, TLocalTrab)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = TLocalTrab.subclass or TLocalTrab
        return cls_(*args_, **kwargs_)
    def get_tpInsc(self):
        return self.tpInsc
    def set_tpInsc(self, tpInsc):
        self.tpInsc = tpInsc
    def get_nrInsc(self):
        return self.nrInsc
    def set_nrInsc(self, nrInsc):
        self.nrInsc = nrInsc
    def get_descComp(self):
        return self.descComp
    def set_descComp(self, descComp):
        self.descComp = descComp
    def hasContent_(self):
        # True when at least one child element is populated.
        return (
            self.tpInsc is not None or
            self.nrInsc is not None or
            self.descComp is not None
        )
    def export(self, outfile, level, namespace_='', name_='TLocalTrab', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TLocalTrab')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TLocalTrab')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='TLocalTrab', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TLocalTrab'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TLocalTrab', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.tpInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpInsc>%s</%stpInsc>%s' % (namespace_, self.gds_format_integer(self.tpInsc, input_name='tpInsc'), namespace_, eol_))
        if self.nrInsc is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrInsc>%s</%snrInsc>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrInsc), input_name='nrInsc')), namespace_, eol_))
        if self.descComp is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdescComp>%s</%sdescComp>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.descComp), input_name='descComp')), namespace_, eol_))
    def build(self, node):
        """Populate fields from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'tpInsc':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.tpInsc = self.gds_validate_integer(ival_, node, 'tpInsc')
        elif nodeName_ == 'nrInsc':
            self.nrInsc = self.gds_validate_string(child_.text, node, 'nrInsc')
        elif nodeName_ == 'descComp':
            self.descComp = self.gds_validate_string(child_.text, node, 'descComp')
# end class TLocalTrab
class descComp(GeneratedsSuper):
    """Generated binding for the <descComp> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, descComp)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = descComp.subclass or descComp
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='descComp', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('descComp')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='descComp')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='descComp', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='descComp'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='descComp', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class descComp
class TEnderecoBrasil(GeneratedsSuper):
    """Informações do Endereço no Brasil (Brazilian address information).

    All child elements are optional. codMunic is serialized and parsed as
    an integer; every other field is handled as an XML-escaped string.
    """
    subclass = None
    superclass = None
    def __init__(self, tpLograd=None, dscLograd=None, nrLograd=None, complemento=None, bairro=None, cep=None, codMunic=None, uf=None):
        # Tag name actually seen while parsing, when it differs from the default.
        self.original_tagname_ = None
        self.tpLograd = tpLograd
        self.dscLograd = dscLograd
        self.nrLograd = nrLograd
        self.complemento = complemento
        self.bairro = bairro
        self.cep = cep
        self.codMunic = codMunic  # integer-formatted on export (see exportChildren)
        self.uf = uf
    def factory(*args_, **kwargs_):
        # Instantiate TEnderecoBrasil, or a subclass override registered via
        # CurrentSubclassModule_ / the class-level `subclass` hook.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TEnderecoBrasil)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TEnderecoBrasil.subclass:
            return TEnderecoBrasil.subclass(*args_, **kwargs_)
        else:
            return TEnderecoBrasil(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessor pairs; attributes may also be read/written directly.
    def get_tpLograd(self): return self.tpLograd
    def set_tpLograd(self, tpLograd): self.tpLograd = tpLograd
    def get_dscLograd(self): return self.dscLograd
    def set_dscLograd(self, dscLograd): self.dscLograd = dscLograd
    def get_nrLograd(self): return self.nrLograd
    def set_nrLograd(self, nrLograd): self.nrLograd = nrLograd
    def get_complemento(self): return self.complemento
    def set_complemento(self, complemento): self.complemento = complemento
    def get_bairro(self): return self.bairro
    def set_bairro(self, bairro): self.bairro = bairro
    def get_cep(self): return self.cep
    def set_cep(self, cep): self.cep = cep
    def get_codMunic(self): return self.codMunic
    def set_codMunic(self, codMunic): self.codMunic = codMunic
    def get_uf(self): return self.uf
    def set_uf(self, uf): self.uf = uf
    def hasContent_(self):
        # True when any child element has been populated.
        if (
            self.tpLograd is not None or
            self.dscLograd is not None or
            self.nrLograd is not None or
            self.complemento is not None or
            self.bairro is not None or
            self.cep is not None or
            self.codMunic is not None or
            self.uf is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='TEnderecoBrasil', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TEnderecoBrasil')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Re-use the tag name the element had in the source document.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='TEnderecoBrasil')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='TEnderecoBrasil', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='TEnderecoBrasil'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='TEnderecoBrasil', fromsubclass_=False, pretty_print=True):
        # Emits children in fixed schema order; only non-None fields appear.
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.tpLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%stpLograd>%s</%stpLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.tpLograd), input_name='tpLograd')), namespace_, eol_))
        if self.dscLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdscLograd>%s</%sdscLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.dscLograd), input_name='dscLograd')), namespace_, eol_))
        if self.nrLograd is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%snrLograd>%s</%snrLograd>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.nrLograd), input_name='nrLograd')), namespace_, eol_))
        if self.complemento is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scomplemento>%s</%scomplemento>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.complemento), input_name='complemento')), namespace_, eol_))
        if self.bairro is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sbairro>%s</%sbairro>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.bairro), input_name='bairro')), namespace_, eol_))
        if self.cep is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scep>%s</%scep>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.cep), input_name='cep')), namespace_, eol_))
        if self.codMunic is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodMunic>%s</%scodMunic>%s' % (namespace_, self.gds_format_integer(self.codMunic, input_name='codMunic'), namespace_, eol_))
        if self.uf is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%suf>%s</%suf>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.uf), input_name='uf')), namespace_, eol_))
    def build(self, node):
        """Populate fields from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch by child tag name; codMunic requires an integer value.
        if nodeName_ == 'tpLograd':
            tpLograd_ = child_.text
            tpLograd_ = self.gds_validate_string(tpLograd_, node, 'tpLograd')
            self.tpLograd = tpLograd_
        elif nodeName_ == 'dscLograd':
            dscLograd_ = child_.text
            dscLograd_ = self.gds_validate_string(dscLograd_, node, 'dscLograd')
            self.dscLograd = dscLograd_
        elif nodeName_ == 'nrLograd':
            nrLograd_ = child_.text
            nrLograd_ = self.gds_validate_string(nrLograd_, node, 'nrLograd')
            self.nrLograd = nrLograd_
        elif nodeName_ == 'complemento':
            complemento_ = child_.text
            complemento_ = self.gds_validate_string(complemento_, node, 'complemento')
            self.complemento = complemento_
        elif nodeName_ == 'bairro':
            bairro_ = child_.text
            bairro_ = self.gds_validate_string(bairro_, node, 'bairro')
            self.bairro = bairro_
        elif nodeName_ == 'cep':
            cep_ = child_.text
            cep_ = self.gds_validate_string(cep_, node, 'cep')
            self.cep = cep_
        elif nodeName_ == 'codMunic':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'codMunic')
            self.codMunic = ival_
        elif nodeName_ == 'uf':
            uf_ = child_.text
            uf_ = self.gds_validate_string(uf_, node, 'uf')
            self.uf = uf_
# end class TEnderecoBrasil
class tpLograd(GeneratedsSuper):
    """Generated binding for the <tpLograd> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, tpLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = tpLograd.subclass or tpLograd
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='tpLograd', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='tpLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='tpLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='tpLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='tpLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class tpLograd
class dscLograd(GeneratedsSuper):
    """Generated binding for the <dscLograd> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, dscLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = dscLograd.subclass or dscLograd
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='dscLograd', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dscLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dscLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dscLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dscLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dscLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dscLograd
class nrLograd(GeneratedsSuper):
    """Generated binding for the <nrLograd> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, nrLograd)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = nrLograd.subclass or nrLograd
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='nrLograd', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('nrLograd')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='nrLograd')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='nrLograd', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='nrLograd'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='nrLograd', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class nrLograd
class complemento(GeneratedsSuper):
    """Generated binding for the <complemento> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, complemento)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = complemento.subclass or complemento
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='complemento', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('complemento')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='complemento')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='complemento', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='complemento'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='complemento', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class complemento
class bairro(GeneratedsSuper):
    """Generated binding for the <bairro> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, bairro)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = bairro.subclass or bairro
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='bairro', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('bairro')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='bairro')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='bairro', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='bairro'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='bairro', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class bairro
class cep(GeneratedsSuper):
    """Generated binding for the <cep> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, cep)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = cep.subclass or cep
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='cep', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('cep')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cep')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='cep', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='cep'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='cep', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cep
class codMunic(GeneratedsSuper):
    """Generated binding for the <codMunic> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, codMunic)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = codMunic.subclass or codMunic
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='codMunic', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codMunic')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codMunic')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codMunic', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codMunic'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='codMunic', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codMunic
class uf(GeneratedsSuper):
    """Generated binding for the <uf> element; defines no attributes or children."""
    subclass = None
    superclass = None
    def __init__(self):
        # Tag name actually encountered during parsing, if it differed.
        self.original_tagname_ = None
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, preferring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, uf)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = uf.subclass or uf
        return cls_(*args_, **kwargs_)
    def hasContent_(self):
        # No child elements are defined for this type.
        return False
    def export(self, outfile, level, namespace_='', name_='uf', namespacedef_='', pretty_print=True):
        """Serialize this element to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('uf')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='uf')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='uf', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='uf'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='uf', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this instance from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class uf
class THorario(GeneratedsSuper):
    """Informações de Horário Contratual (contractual working-hours entry).

    Optional children: dia (serialized as integer) and codHorContrat
    (serialized as an XML-escaped string).
    """
    subclass = None
    superclass = None
    def __init__(self, dia=None, codHorContrat=None):
        # Tag name actually seen while parsing, when it differs.
        self.original_tagname_ = None
        self.dia = dia                      # integer-formatted on export
        self.codHorContrat = codHorContrat  # string-formatted on export
    @staticmethod
    def factory(*args_, **kwargs_):
        """Create an instance, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            override = getSubclassFromModule_(
                CurrentSubclassModule_, THorario)
            if override is not None:
                return override(*args_, **kwargs_)
        cls_ = THorario.subclass or THorario
        return cls_(*args_, **kwargs_)
    def get_dia(self):
        return self.dia
    def set_dia(self, dia):
        self.dia = dia
    def get_codHorContrat(self):
        return self.codHorContrat
    def set_codHorContrat(self, codHorContrat):
        self.codHorContrat = codHorContrat
    def hasContent_(self):
        # True when at least one child element is populated.
        return self.dia is not None or self.codHorContrat is not None
    def export(self, outfile, level, namespace_='', name_='THorario', namespacedef_='', pretty_print=True):
        """Write this element and its children to *outfile* as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('THorario')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='THorario')
        if not self.hasContent_():
            # Nothing inside: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
            return
        outfile.write('>%s' % (eol_, ))
        self.exportChildren(outfile, level + 1, namespace_='', name_='THorario', pretty_print=pretty_print)
        showIndent(outfile, level, pretty_print)
        outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='THorario'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='THorario', fromsubclass_=False, pretty_print=True):
        eol_ = '\n' if pretty_print else ''
        if self.dia is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%sdia>%s</%sdia>%s' % (namespace_, self.gds_format_integer(self.dia, input_name='dia'), namespace_, eol_))
        if self.codHorContrat is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<%scodHorContrat>%s</%scodHorContrat>%s' % (namespace_, self.gds_encode(self.gds_format_string(quote_xml(self.codHorContrat), input_name='codHorContrat')), namespace_, eol_))
    def build(self, node):
        """Populate fields from a parsed XML *node*; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            nodeName_ = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'dia':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            self.dia = self.gds_validate_integer(ival_, node, 'dia')
        elif nodeName_ == 'codHorContrat':
            self.codHorContrat = self.gds_validate_string(child_.text, node, 'codHorContrat')
# end class THorario
class dia(GeneratedsSuper):
    """Empty generateDS binding for a bare ``dia`` element (no attributes, no children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None  # set by the parser when built from a differently named tag
    def factory(*args_, **kwargs_):
        # Allow an externally registered subclass module to override construction.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, dia)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if dia.subclass:
            return dia.subclass(*args_, **kwargs_)
        else:
            return dia(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fix: the generated body was ``if ():`` — a constant empty tuple, so the
        # True branch was unreachable. This element has no attributes or
        # children, so it never has content; behavior is unchanged.
        return False
    def export(self, outfile, level, namespace_='', name_='dia', namespacedef_='', pretty_print=True):
        # Serialize this (always empty) element; emits the self-closing form.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('dia')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='dia')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='dia', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='dia'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='dia', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        # Populate from an XML node; nothing to store, but keep the generated protocol.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class dia
class codHorContrat(GeneratedsSuper):
    """Empty generateDS binding for a bare ``codHorContrat`` element (no attributes, no children)."""
    subclass = None
    superclass = None
    def __init__(self):
        self.original_tagname_ = None  # set by the parser when built from a differently named tag
    def factory(*args_, **kwargs_):
        # Allow an externally registered subclass module to override construction.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, codHorContrat)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if codHorContrat.subclass:
            return codHorContrat.subclass(*args_, **kwargs_)
        else:
            return codHorContrat(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        # Fix: the generated body was ``if ():`` — a constant empty tuple, so the
        # True branch was unreachable. This element has no attributes or
        # children, so it never has content; behavior is unchanged.
        return False
    def export(self, outfile, level, namespace_='', name_='codHorContrat', namespacedef_='', pretty_print=True):
        # Serialize this (always empty) element; emits the self-closing form.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('codHorContrat')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='codHorContrat')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='codHorContrat', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='codHorContrat'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='codHorContrat', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        # Populate from an XML node; nothing to store, but keep the generated protocol.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class codHorContrat
# Maps recognised root-element tag names to their binding classes; consulted
# by get_root_tag() before falling back to a globals() lookup.
GDSClassesMapping = {
    'horario': THorario,
    'ideEmpregador': TEmpregador,
    'ideEvento': TIdeEveTrab,
    'ideVinculo': TIdeVinculoNisObrig,
    'localTrabDom': TEnderecoBrasil,
    'localTrabGeral': TLocalTrab,
    'remuneracao': TRemun,
}
# Command-line help text printed by usage().
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    # Print CLI usage and terminate with a non-zero exit status.
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Return ``(tag, binding_class)`` for *node*; the class is None if unknown."""
    # Strip any namespace prefix from the element's tag name.
    tag_name = Tag_pattern_.match(node.tag).groups()[-1]
    # Prefer the explicit tag->class mapping; otherwise fall back to a
    # module-level class with the same name (may be None).
    binding_class = GDSClassesMapping.get(tag_name, globals().get(tag_name))
    return tag_name, binding_class
def parse(inFileName, silence=False):
    # Parse the XML file into a binding-object tree and, unless `silence`,
    # re-serialize the tree to stdout. Unrecognised root tags fall back to
    # the eSocial root class.
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='',
            pretty_print=True)
    return rootObj
def parseEtree(inFileName, silence=False):
    # Like parse(), but also converts the binding tree back into an lxml
    # element tree and returns node mappings in both directions.
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    mapping = {}
    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
    if not silence:
        content = etree_.tostring(
            rootElement, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(content)
        sys.stdout.write('\n')
    return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString, silence=False):
    # Parse XML from a byte string; wraps it in an in-memory buffer because
    # parsexml_ expects a file-like object (StringIO on Py2, BytesIO on Py3).
    if sys.version_info.major == 2:
        from StringIO import StringIO as IOBuffer
    else:
        from io import BytesIO as IOBuffer
    parser = None
    doc = parsexml_(IOBuffer(inString), parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        rootObj.export(
            sys.stdout, 0, name_=rootTag,
            namespacedef_='')
    return rootObj
def parseLiteral(inFileName, silence=False):
    # Parse the XML file and, unless `silence`, emit Python source on stdout
    # that reconstructs the parsed object via the evtAltContratual module.
    parser = None
    doc = parsexml_(inFileName, parser)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'eSocial'
        rootClass = eSocial
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    if not silence:
        sys.stdout.write('#from evtAltContratual import *\n\n')
        sys.stdout.write('import evtAltContratual as model_\n\n')
        sys.stdout.write('rootObj = model_.rootClass(\n')
        rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
        sys.stdout.write(')\n')
    return rootObj
def main():
    """CLI entry point: parse the single XML file named on the command line."""
    argv = sys.argv[1:]
    # usage() exits the process, so parse() is only reached with one argument.
    if len(argv) != 1:
        usage()
    parse(argv[0])
# Run the CLI only when executed as a script (not when imported as a binding module).
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public re-export list: only the generated schema binding classes.
# NOTE(review): the parse helpers (parse, parseString, ...) and the empty
# element classes (dia, codHorContrat) are not exported — confirm this matches
# the generateDS convention used elsewhere in the project.
__all__ = [
    "TEmpregador",
    "TEnderecoBrasil",
    "THorario",
    "TIdeEveTrab",
    "TIdeVinculoNisObrig",
    "TLocalTrab",
    "TRemun",
    "eSocial"
]
| StarcoderdataPython |
3414155 | # -*- coding: utf-8 -*-
"""Assignment_6_notebook_.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1UqM-uY5syVVVrflH5Uaj6hi1qnMuXuG2
**Import** *and* setup some auxiliary functions
"""
# Don't edit this cell
import os
import timeit
import time
import numpy as np
from collections import OrderedDict
from pprint import pformat
from tqdm import tqdm
from google.colab import drive
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
from torch.utils.data.sampler import *
from torchvision import transforms, datasets
torch.multiprocessing.set_sharing_strategy('file_system')
cudnn.benchmark = True
# TODO: Main model definition + any utilities such as weight initialization or custom layers, ADD DROPOUT, BATCHNORM, SKIP CONNECTION,
class BasicBlock(nn.Module):
    """Residual block: Conv3x3 -> BN on the main path, plus a skip connection.

    When the stride is not 1 or the channel counts differ, the skip path uses
    its own Conv3x3 + BN projection so the two branches have matching shapes
    for the addition; otherwise the skip is the identity.
    """
    def __init__(self, in_channels, out_channels, stride):
        super(BasicBlock, self).__init__()
        self.layer = nn.Sequential()
        self.layer.add_module("Conv", nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1))
        self.layer.add_module("Bn", nn.BatchNorm2d(out_channels))
        # Identity skip by default. Fix: the original re-assigned a second,
        # redundant nn.Sequential() inside the if-branch before populating it.
        self.skip = nn.Sequential()
        if stride != 1 or in_channels != out_channels:
            self.skip.add_module("Conv", nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1))
            self.skip.add_module("Bn", nn.BatchNorm2d(out_channels))

    def forward(self, x):
        out = self.layer(x)
        # Residual addition (out-of-place to avoid in-place ops on autograd outputs).
        out = out + self.skip(x)
        return F.relu(out)
class ResNet(nn.Module):
    """Small ResNet for 32x32 inputs (e.g. CIFAR-10) built from BasicBlock-style blocks.

    Shape flow for a 3x32x32 input: layer1 -> 32ch @ 32x32; layer2 (stride 1)
    keeps 32x32; layer3 halves to 64ch @ 16x16; layer4 halves to 128ch @ 8x8;
    the 4x4/stride-4 max pool yields 128x2x2 = 512 features for the head.
    """
    def __init__(self, block, num_class=10):
        super(ResNet, self).__init__()
        # Stem: plain conv + BN + ReLU (no residual).
        self.layer1 = nn.Sequential()
        self.layer1.add_module("Conv", nn.Conv2d(in_channels=3, out_channels=32, kernel_size=3, padding=1))
        self.layer1.add_module("Bn", nn.BatchNorm2d(32))
        self.layer1.add_module("Relu", nn.ReLU())
        self.pool = nn.MaxPool2d(kernel_size=4, stride=4)
        self.layer2 = nn.Sequential(
            block(32, 32, 1),
            block(32, 32, 1),
        )
        self.layer3 = nn.Sequential(
            block(32, 64, 2),
            block(64, 64, 1),
        )
        self.layer4 = nn.Sequential(
            block(64, 128, 2),
            block(128, 128, 1),
        )
        self.linear1 = nn.Sequential(
            nn.Dropout(p=0.1),
            nn.Linear(512, num_class),  # 512 == 128 channels * 2 * 2 spatial; assumes 32x32 input
        )
    def forward(self, x):
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.pool(x)
        # Flatten to (batch, 512) before the classifier head.
        x = self.linear1(x.view(x.size(0), -1))
        return x
# TODO: Cifar-10 dataloading
def load_data(config):
    """
    Load CIFAR-10 via torchvision and return (train, valid, test) dataloaders.

    The first 10% of the training indices become the validation split (note:
    the original docstring claimed "the last 10k"; the code takes the *first*
    10%).

    Fix: the validation loader previously sampled from the *augmented*
    training dataset, so validation images received random crops/flips. A
    second dataset view with the deterministic test-time transform is now
    used for validation, making reported validation accuracy stable.
    """
    transform_train = transforms.Compose([
        transforms.RandomCrop(32, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    transform_test = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
    ])
    CIFAR10_training = datasets.CIFAR10("/CIFAR10_dataset/", train=True, download=True, transform=transform_train)
    # Same underlying images as the training set, but with eval-time transforms.
    CIFAR10_validation = datasets.CIFAR10("/CIFAR10_dataset/", train=True, download=True, transform=transform_test)
    CIFAR10_test = datasets.CIFAR10("/CIFAR10_dataset/", train=False, download=True, transform=transform_test)
    dataset_size = len(CIFAR10_training)
    indices = list(range(dataset_size))
    split = int(np.floor(0.1 * dataset_size))
    train_indices, valid_indices = indices[split:], indices[:split]
    CIFAR10_training_sampler = SubsetRandomSampler(train_indices)
    CIFAR10_validation_sampler = SubsetRandomSampler(valid_indices)
    train_dataloader = torch.utils.data.DataLoader(CIFAR10_training, batch_size=config['batch_size'], sampler=CIFAR10_training_sampler, num_workers=2)
    valid_dataloader = torch.utils.data.DataLoader(CIFAR10_validation, batch_size=config['batch_size'], sampler=CIFAR10_validation_sampler, num_workers=2)
    test_dataloader = torch.utils.data.DataLoader(CIFAR10_test)
    return train_dataloader, valid_dataloader, test_dataloader
# TODO : Main trainig + validation, returns the final model, save your best checkpoint based on the best validation accuracy
def train(trainloader, testloader, device, config):
    """Train a ResNet on *trainloader*, validating on *testloader* every 5 epochs.

    Fix: the surrounding TODO asked for the best checkpoint by validation
    accuracy, but the final-epoch model was returned. The best-scoring
    weights are now tracked and restored before returning.
    """
    model = ResNet(BasicBlock).to(device)
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=config['lr'], momentum=config['momentum'], weight_decay=config['regular_constant'])
    best_accuracy = -1.0
    best_state = None
    for epoch in range(1, config['num_epochs'] + 1):
        model.train()
        for images, labels in trainloader:
            images = images.to(device)
            labels = labels.to(device)
            optimizer.zero_grad()
            outputs = model(images)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
        # Periodic validation pass.
        if epoch % 5 == 0:
            model.eval()
            correct = 0
            total = 0
            with torch.no_grad():
                for images, labels in testloader:
                    images = images.to(device)
                    labels = labels.to(device)
                    outputs = model(images)
                    _, pred = torch.max(outputs.data, 1)
                    correct += (pred == labels).sum().item()
                    total += labels.size(0)
            accuracy = 100. * correct / total
            print("epoch: {}. Accuracy: {:.2f}.".format(epoch, accuracy))
            if accuracy > best_accuracy:
                best_accuracy = accuracy
                # Snapshot on CPU so the copy is independent of later updates.
                best_state = {k: v.detach().cpu().clone() for k, v in model.state_dict().items()}
    if best_state is not None:
        model.load_state_dict(best_state)
    return model
def save_model_colab_for_submission(model): # if you are running on colab
    # Mounts Google Drive and saves the whole module object (not just the
    # state_dict), moved to CPU so it loads on CPU-only graders.
    drive.mount('/content/gdrive/', force_remount=True)
    torch.save(model.to(torch.device("cpu")), '/content/gdrive/My Drive/model.pt') # you will find the model in your home drive
def save_model_local_for_submission(model): # if you are running on your local machine
    # Saves the whole module object to ./model.pt on CPU for submission.
    torch.save(model.to(torch.device("cpu")), 'model.pt')
#TODO: Implement testing
def test(net, testloader, device):
    # Evaluate *net* on *testloader*; returns (accuracy_percent, correct, total).
    # Accuracy is top-1: argmax over the class dimension vs. integer labels.
    correct = 0
    total = 0
    net.eval()
    with torch.no_grad():
        for images, labels in testloader:
            images = images.to(device)
            labels = labels.to(device)
            outputs = net(images)
            _, pred = torch.max(outputs.data, 1)
            correct += (pred == labels).sum().item()
            total += labels.size(0)
    ###
    return 100.*correct/total, correct, total
def run():
    """Train on CIFAR-10, checkpoint the weights, and time evaluation on the test set.

    Returns (test_accuracy_percent, num_correct, test_wall_time_seconds).
    """
    # set parameters cifar10
    config = {
        'lr': 0.001,
        'num_epochs': 20,
        'batch_size': 128,
        'num_classes': 10,
        'momentum': 0.97,
        'regular_constant': 5e-3,
    }
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    train_dataloader, valid_dataloader, test_dataloader = load_data(config)
    model = train(train_dataloader, valid_dataloader, device, config)
    # Testing and saving for submission happen on CPU for grading parity.
    device = torch.device("cpu")
    # Fix: the original abused `assert os.path.isdir(...)` (stripped under -O)
    # inside try/except AssertionError to create the directory; makedirs with
    # exist_ok=True is the idiomatic, -O-safe equivalent.
    os.makedirs('./checkpoint', exist_ok=True)
    torch.save(model.state_dict(), './checkpoint/ckpt.pth')
    checkpoint = torch.load('./checkpoint/ckpt.pth')
    model.load_state_dict(checkpoint)
    model.eval()
    start_time = timeit.default_timer()
    test_acc, test_correct, test_total = test(model.to(device), test_dataloader, device)
    end_time = timeit.default_timer()
    test_time = (end_time - start_time)
    save_model_colab_for_submission(model)
    return test_acc, test_correct, test_time
"""Main loop. Run time and total score will be shown below."""
# Don't edit this cell
def compute_score(acc, min_thres=65, max_thres=8):
    """Map accuracy (percent) to a 0-100 score by linear interpolation between thresholds."""
    # NOTE(review): max_thres defaults to 8, which is *below* min_thres=65.
    # As written, any acc > 65 satisfies `acc >= max_thres` and scores 100,
    # the interpolation branch is unreachable, and its divisor would be
    # negative. The intended default was presumably 85 — confirm against the
    # assignment spec before changing (this is grading logic in a
    # "Don't edit" cell).
    # Your Score thresholds
    if acc <= min_thres:
        base_score = 0.0
    elif acc >= max_thres:
        base_score = 100.0
    else:
        base_score = float(acc - min_thres) / (max_thres - min_thres) * 100
    return base_score
def main():
    """Run the experiment, persist the summary to result.txt, and print it."""
    accuracy, correct, run_time = run()
    summary = OrderedDict(
        correct=correct,
        accuracy=accuracy,
        run_time=run_time,
        score=compute_score(accuracy),
    )
    formatted = pformat(summary, indent=4)
    with open('result.txt', 'w') as out_file:
        out_file.writelines(formatted)
    print("\nResult:\n", formatted)
main() | StarcoderdataPython |
22301 | from odoo import models, fields, api
from odoo.exceptions import ValidationError
class DemoOdooWizardTutorial(models.Model):
    """Demo model for the Odoo wizard tutorial: button actions and context propagation."""
    _name = 'demo.odoo.wizard.tutorial'
    _description = 'Demo Odoo Wizard Tutorial'
    name = fields.Char('Description', required=True)
    partner_id = fields.Many2one('res.partner', string='Partner')
    @api.multi  # NOTE(review): @api.multi was removed in Odoo 13 — confirm the target version.
    def action_context_demo(self):
        # Demo endpoint: always raises; the message depends on whether the
        # caller passed `context_data` in the evaluation context.
        # if self._context.get('context_data', False):
        if self.env.context.get('context_data'):
            raise ValidationError('have context data')
        raise ValidationError('hello')
    @api.multi
    def action_button(self):
        # Re-invokes the demo with context_data set, so the first branch fires.
        for record in self:
            record.with_context(context_data=True).action_context_demo()
6569101 | <reponame>joepetrini/bike-counter
from django.conf import settings
from django.http import HttpResponseRedirect
#from django.core.urlresolvers import reverse
from django.contrib.auth import login as auth_login, logout, authenticate
#from django.views.generic import ListView, DetailView
from django.contrib.auth.forms import AuthenticationForm
from django.views.generic.edit import FormView, View, CreateView
from .forms import ProfileForm, UserForm
class LoginView(FormView):
    """Username/password login using Django's stock AuthenticationForm."""
    form_class = AuthenticationForm
    template_name = 'login.html'
    def form_valid(self, form):
        # AuthenticationForm has already verified the credentials; attach the session.
        auth_login(self.request, form.get_user())
        return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
        #return super(LoginView, self).form_valid(form)
    def form_invalid(self, form):
        # Redundant override (defers to FormView); kept for symmetry with form_valid.
        return super(LoginView, self).form_invalid(form)
class RegisterView(CreateView):
    """Create a new user account, then log the new user straight in."""
    form_class = UserForm
    template_name = 'register.html'
    success_url = settings.LOGIN_REDIRECT_URL #'/orgs'
    def form_valid(self, form):
        # Let CreateView save the new user first (side effect we rely on);
        # its redirect response is intentionally discarded below.
        super(RegisterView, self).form_valid(form)
        # Fix: this call had been mangled by a credential-scrubbing pass
        # (`password=<PASSWORD>.cleaned_data['<PASSWORD>']`), which is a
        # SyntaxError. Re-authenticate with the submitted credentials.
        # TODO(review): confirm the UserForm password field name
        # ('password' vs. 'password1' for UserCreationForm-style forms).
        user = authenticate(
            username=form.cleaned_data['username'],
            password=form.cleaned_data['password'],
        )
        auth_login(self.request, user)
        return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
class LogoutView(View):
    """Log the current user out and redirect to the configured landing page."""
    def get(self, request, *args, **kwargs):
        logout(request)
        return HttpResponseRedirect(settings.LOGOUT_REDIRECT_URL)
class ProfileView(FormView):
    """Render the profile form; no success redirect is configured here."""
    form_class = ProfileForm
    template_name = 'profile.html'
5123172 | <filename>camkes/parser/tests/testexamples.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017, Data61
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# ABN 41 687 119 230.
#
# This software may be distributed and modified according to the terms of
# the BSD 2-Clause license. Note that NO WARRANTY is provided.
# See "LICENSE_BSD2.txt" for details.
#
# @TAG(DATA61_BSD)
#
'''
Tests for input from good/ subdirectory, that are intended to be legitimate
CAmkES input.
'''
from __future__ import absolute_import, division, print_function, \
unicode_literals
import functools, os, re, sys, unittest
ME = os.path.abspath(__file__)
# Make CAmkES importable
sys.path.append(os.path.join(os.path.dirname(ME), '../../..'))
from camkes.ast import ASTError
from camkes.internal.tests.utils import CAmkESTest, cpp_available
from camkes.parser import ParseError
from camkes.parser.stage0 import CPP, Reader
from camkes.parser.stage1 import Parse1
from camkes.parser.stage2 import Parse2
from camkes.parser.stage3 import Parse3
from camkes.parser.stage4 import Parse4
from camkes.parser.stage5 import Parse5
from camkes.parser.stage6 import Parse6
from camkes.parser.stage7 import Parse7
from camkes.parser.stage8 import Parse8
from camkes.parser.stage9 import Parse9
from camkes.parser.stage10 import Parse10
PARSERS = ('reader', 'cpp', 's1', 's2', 's3', 's4', 's5', 's6', 's7', 's8', 's9', 's10')
class TestExamples(CAmkESTest):
    """Dynamically populated test case: one test method per example file per
    parser stage (methods are attached at import time, below this class)."""
    def setUp(self):
        super(TestExamples, self).setUp()
        # Build the full staged parser pipeline; each stage wraps the previous one.
        self.reader = Reader()
        self.cpp = CPP()
        self.s1 = Parse1(self.cpp)
        self.s2 = Parse2(self.s1)
        self.s3 = Parse3(self.s2)
        self.s4 = Parse4(self.s3)
        self.s5 = Parse5(self.s4)
        self.s6 = Parse6(self.s5)
        self.s7 = Parse7(self.s6)
        self.s8 = Parse8(self.s7)
        self.s9 = Parse9(self.s8)
        self.s10 = Parse10(self.s9)
        # Sanity check: every name listed in PARSERS is now an attribute.
        assert all([hasattr(self, p) for p in PARSERS])
# Locate all the test files in good/*.camkes and add each as a separate test
# case for each parser. The f=path, p=parser defaults bind the loop variables
# eagerly, avoiding the late-binding closure pitfall.
added_good = False
for eg in os.listdir(os.path.join(os.path.dirname(ME), 'good')):
    if re.match(r'.*\.camkes$', eg) is not None:
        path = os.path.join(os.path.dirname(ME), 'good', eg)
        for parser in PARSERS:
            test_name = 'test_good_%s_%s' % (parser, re.sub(r'[^\w]', '_', eg))
            setattr(TestExamples, test_name,
                lambda self, f=path, p=parser: getattr(self, p).parse_file(f))
            added_good = True
if not added_good:
    # We didn't find any valid tests.
    def no_good(self):
        self.fail('no good example input found')
    TestExamples.test_no_good = no_good
def _check_until(tester, filename, limit):
    # Run `filename` through the parser stages in order: every stage before
    # `limit` must succeed, and the `limit` stage itself must raise an
    # ASTError or ParseError (the loop stops there).
    for p in PARSERS:
        if p == limit:
            with tester.assertRaises((ASTError, ParseError)):
                getattr(tester, p).parse_file(filename)
            break
        else:
            getattr(tester, p).parse_file(filename)
# Locate all the files in bad-at-*/*.camkes and add each as a separate test
# case, failing at the specific parser level. Defaults again bind the loop
# variables eagerly in the lambda.
for p in PARSERS:
    dirname = os.path.join(os.path.dirname(ME), 'bad-at-%s' % p)
    if not os.path.exists(dirname):
        continue
    for eg in os.listdir(dirname):
        if re.match(r'.*\.camkes$', eg) is not None:
            path = os.path.join(dirname, eg)
            test_name = 'test_bad_at_%s_%s' % (p, re.sub(r'[^\w]', '_', eg))
            setattr(TestExamples, test_name,
                lambda self, f=path, limit=p: _check_until(self, f, limit))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
1962332 | """
Reader for the hashtable, in combination with the
:class:`SpatialRegion` objects from ``regions.py``.
Use the :class:`SpatialLoader` class to set up and
read from the hashtables.
Note that all large data is actually contained in the
region objects, and the loader class is really just
a convenience object.
"""
from pathlib import Path
from typing import Dict, List
import attr
import h5py
import numpy as np
from sparepo.particle_types import ParticleType
from sparepo.regions import SpatialRegion
@attr.s
class ChunkFileHashtable:
"""
Hashtable for a single chunk file and particle type.
"""
filename: Path = attr.ib(converter=Path)
file_number: int = attr.ib(converter=int)
hashtable: np.ndarray = attr.ib()
@attr.s
class SpatialLoader:
"""
Spatially load data from files based on the pre-generated
hashtable. If you need to create a hashtable, see the
``build_hashtable.py``.
Note that there is no built-in periodic wrapping.
Parameters
----------
hashtable: Path
Path to the hashtable hdf5 file.
snapshot: Path
Path to the first snapshot (the one including ``.0.hdf5``)
"""
hashtable: Path = attr.ib(converter=Path)
snapshot: Path = attr.ib(converter=Path)
box_size: float
number_of_chunks: int
unit: str
hubble_param: float
hubble_param_scaling: int
available_part_types: List[ParticleType]
centers: np.ndarray
counts: Dict[ParticleType, np.ndarray]
cell_size: float
number_of_cells: int
cells_per_axis: int
def __attrs_post_init__(self):
"""
Loads in metadata from the hashtable.
"""
with h5py.File(self.hashtable, "r") as handle:
header_attrs = handle["Header"].attrs
cell_centers = handle["Cells/Centers"][...]
cell_counts = {
ParticleType(int(name[-1])): value[:]
for name, value in handle["Cells/Counts"].items()
}
cell_attrs = handle["Cells"].attrs
self.box_size = header_attrs["BoxSize"]
self.number_of_chunks = header_attrs["NumberOfChunks"]
self.unit = header_attrs["Units"]
self.hubble_param = header_attrs["HubbleParam"]
self.hubble_param_scaling = header_attrs["HubbleParamScaling"]
self.centers = cell_centers
self.counts = cell_counts
self.available_part_types = list(cell_counts.keys())
self.cell_size = cell_attrs["Size"]
self.number_of_cells = cell_attrs["NumberOfCells"]
self.cells_per_axis = cell_attrs["CellsPerAxis"]
def snapshot_filename_for_chunk(self, chunk: int):
"""
Gets the snapshot filename for a given chunk.
"""
return self.snapshot.parent / (
self.snapshot.stem.split(".")[0] + f".{chunk}.hdf5"
)
def read_dataset(
self,
part_type: ParticleType,
field_name: str,
region: SpatialRegion,
) -> np.ndarray:
"""
Reads a dataset in a given spatial region.
Parameters
----------
part_type: ParticleType
Particle type to read. Example: ParticleType.Gas
field_name: str
Particle field to read. Example: Coordinates
region: SpatialRegion
Spatial region to load data within.
Returns
-------
dataset: np.ndarray
Particle dataset within the specified spatial region.
"""
if not region.mask_calculated:
region.set_cell_mask(
centers=self.centers,
cell_size=self.cell_size,
)
# First, read out the cell data from the hashtable file.
# This is one contiguous read so doesn't need to be cached,
# as relative to the particle data reading it is very fast.
file_mask, file_count = region.get_file_mask(
hashtable=self.hashtable, part_type=part_type
)
particles_to_read = sum(file_count.values())
dataset_path = f"PartType{part_type.value}/{field_name}"
with h5py.File(self.snapshot, "r") as handle:
dataset = handle[dataset_path]
shape = list(dataset.shape)
dtype = dataset.dtype
# Truncate the shape
shape[0] = particles_to_read
output = np.empty(shape, dtype=dtype)
already_read = 0
for file_number, ranges in file_mask.items():
with h5py.File(
self.snapshot_filename_for_chunk(chunk=file_number), "r"
) as handle:
dataset = handle[dataset_path]
for read_start, read_end in ranges:
if read_end == read_start:
continue
# Because we read inclusively
size_of_range = read_end - read_start
# Construct selectors so we can use read_direct to prevent creating
# copies of data from the hdf5 file.
hdf5_read_sel = np.s_[read_start:read_end]
output_dest_sel = np.s_[already_read : size_of_range + already_read]
dataset.read_direct(
output, source_sel=hdf5_read_sel, dest_sel=output_dest_sel
)
already_read += size_of_range
return output
| StarcoderdataPython |
8023202 | <filename>tests.py
from textgen import TextGenerator
def test_add_item():
    # _add should store the value under the key; _get returns the stored list.
    generator = TextGenerator()
    generator._add("x", "a")
    assert generator._get("x")[0] == "a"
def test_add_two_items():
    # Repeated _add calls under one key must append in insertion order.
    generator = TextGenerator()
    generator._add("x", "a")
    generator._add("x", "b")
    assert generator._get("x")[0] == "a"
    assert generator._get("x")[1] == "b"
def test_process_corpus():
    # Order-1 model: each word maps to the list of words that follow it,
    # with duplicates preserved ("this" is followed by "is" twice).
    corpus = "this is a test. this is only an example."
    generator = TextGenerator()
    generator.process_corpus(corpus)
    assert generator._get("this")[0] == "is"
    assert generator._get("this")[1] == "is"
    assert generator._get("is")[0] == "a"
    assert generator._get("is")[1] == "only"
def test_process_corpus_k2():
    # Order-2 model: keys are 2-word tuples mapping to following words.
    corpus = "this is a test. this is only an example."
    generator = TextGenerator(2)
    generator.process_corpus(corpus)
    assert generator._get(("this", "is"))[0] == "a"
    assert generator._get(("this", "is"))[1] == "only"
    assert generator._get(("is", "a"))[0] == "test"
    assert generator._get(("is", "only"))[0] == "an"
| StarcoderdataPython |
4935352 | <reponame>StudyForCoding/BEAKJOON
import sys
# BOJ 1932 (integer triangle): maximum top-to-bottom path sum via row-by-row DP.
n_rows = int(sys.stdin.readline())
triangle = [list(map(int, sys.stdin.readline().split())) for _ in range(n_rows)]

# best[j] holds the maximum path sum ending at column j of the current row.
best = [triangle[0][0]]
for row in range(1, n_rows):
    prev = best
    best = []
    for col in range(row + 1):
        if col == 0:
            parent = prev[0]           # left edge: only the cell directly above
        elif col == row:
            parent = prev[col - 1]     # right edge: only the upper-left cell
        else:
            parent = max(prev[col - 1], prev[col])
        best.append(parent + triangle[row][col])
print(max(best))
3404109 | <reponame>MatthewTsan/Leetcode
# Definition for a binary tree node.
from collections import defaultdict, deque
from typing import List
class TreeNode:
    """Minimal binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution:
def distanceK(self, root: TreeNode, target: TreeNode, K: int) -> List[int]:
parMap = defaultdict(TreeNode)
def DFS(node, par):
if node is None:
return
parMap[node] = par
DFS(node.left, node)
DFS(node.right, node)
DFS(root, None)
queue = deque()
queue.append([target, 0])
visit = []
visit.append(target)
while queue:
# print([node.val for node, _ in queue])
if queue[0][1] == K:
return [node.val for node, _ in queue]
node, distence = queue.popleft()
for neibor in (node.left, node.right, parMap[node]):
if neibor and neibor not in visit:
visit.append(neibor)
queue.append([neibor, distence + 1])
return [] | StarcoderdataPython |
9606469 | <filename>fips-generators/util/hlslcompiler.py<gh_stars>10-100
'''
Python wrapper for HLSL compiler (fxc.exe)
NOTE: this module contains Windows specific code and should
only be imported when running on Windows.
'''
import subprocess, platform, os, sys
import genutil as util
if sys.version_info[0] < 3:
import _winreg as winreg
else:
import winreg
#-------------------------------------------------------------------------------
def findFxc() :
    '''
    fcx.exe is located in the 'Windows Kits' SDKs, we first check the
    registry for the installation paths, and then see if we can
    find the fxc.exe file there under 'bin/x86', if it's not there
    try any of the subdirectories.
    Returns an unicode path string of fxc.exe if found, or None if
    not found.
    '''
    fxcPath = None;
    fxcSubPath = u'\\x86\\fxc.exe'
    # first get the preferred kit name (either 8.1 or 10, are there others?)
    try :
        with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, 'Software\\Microsoft\\Windows Kits\\Installed Roots') as key :
            for kit in ['KitsRoot10', 'KitsRoot81'] :
                try :
                    winkit_dir, _ = winreg.QueryValueEx(key, kit)
                    fxcPath = winkit_dir + u'bin' + fxcSubPath
                    if os.path.isfile(fxcPath) :
                        return fxcPath
                    # try subdirectories
                    for cur_dir in os.listdir(winkit_dir + u'bin'):
                        fxcPath = winkit_dir + u'bin\\' + cur_dir + fxcSubPath
                        if os.path.isfile(fxcPath):
                            return fxcPath
                # NOTE(review): bare except swallows *all* errors for this kit
                # (missing registry value, unreadable dir, ...) — consider
                # narrowing to (OSError, WindowsError).
                except :
                    fxcPath = None
        # if registry is not found, try a few other likely paths
        # (only reached when the registry loop completed without returning)
        for path in [
            'C:\\Program Files (x86)\\Windows Kits\\10\\',
            'C:\\Program Files (x86)\\Windows Kits\\8.1\\'
        ] :
            if os.path.isdir(path) :
                fxcPath = path + fxcSubPath
                if os.path.isfile(fxcPath) :
                    return fxcPath
        return None
    except WindowsError :
        # Registry key itself missing/unreadable: treat as "fxc not installed".
        return None
#-------------------------------------------------------------------------------
def callFxc(cmd) :
    '''
    Run the fxc compiler command *cmd* and return its stderr output as text
    (fxc writes its diagnostics to stderr).

    Fix: the previous implementation busy-waited on ``child.poll()`` while
    repeatedly reading stderr, spinning the CPU; ``communicate()`` drains the
    pipe and waits for process exit in one call, with no polling loop.
    '''
    print(cmd)
    child = subprocess.Popen(cmd, stderr=subprocess.PIPE)
    _, err = child.communicate()
    return bytes.decode(err)
#-------------------------------------------------------------------------------
def parseOutput(output, lines) :
    '''
    Parse error output lines from FXC,
    map them to the original source code location and output
    an error message compatible with Xcode or VStudio.

    `lines` is the list of original source Line objects (with .path,
    .lineNumber, .content) used to map generated-shader line numbers back
    to their origin. Exits the process with status 10 on any error.
    '''
    hasError = False
    hasWarning = False  # NOTE(review): set but never read; warnings don't affect exit status
    outLines = output.splitlines()
    for outLine in outLines :
        # extract generated shader source column, line and message
        # format is 'filename(line,startcol-endcol): msg
        lineStartIndex = outLine.find('(', 0) + 1
        if lineStartIndex == 0 :
            continue
        lineEndIndex = outLine.find(',', lineStartIndex)
        if lineEndIndex == -1 :
            continue
        colStartIndex = lineEndIndex + 1
        colEndIndex = outLine.find('-', colStartIndex)
        if colEndIndex == -1 :
            # no end column; fall back to the closing parenthesis
            colEndIndex = outLine.find(')', colStartIndex)
        if colEndIndex == -1 :
            continue
        msgStartIndex = outLine.find(':', colStartIndex+1)
        if msgStartIndex == -1 :
            continue
        colNr = int(outLine[colStartIndex:colEndIndex])  # parsed but currently unused
        lineNr = int(outLine[lineStartIndex:lineEndIndex])
        msg = outLine[msgStartIndex:]
        # map to original location (clamp to the last known source line)
        lineIndex = lineNr - 1
        if lineIndex >= len(lines) :
            lineIndex = len(lines) - 1
        srcPath = lines[lineIndex].path
        srcLineNr = lines[lineIndex].lineNumber
        # and output...
        util.setErrorLocation(srcPath, srcLineNr)
        if 'error' in outLine :
            hasError = True
            util.fmtError(msg, False)
        elif 'warning' in outLine :
            hasWarning = True
            util.fmtWarning(msg)
    if hasError :
        # Dump the generated source for context, then abort the generator.
        for line in lines :
            print(line.content)
        sys.exit(10)
#-------------------------------------------------------------------------------
def compile(lines, base_path, type, c_name, args) :
    '''
    Compile the HLSL source for one shader stage with fxc.exe.

    lines     -- original source Line objects (for mapping fxc errors back)
    base_path -- reads <base_path>.hlsl, writes the header <base_path>.hlsl.h
    type      -- shader stage, 'vs' or 'fs' (shadows the builtin; name kept
                 for call-site compatibility)
    c_name    -- C identifier for the generated byte-array in the header
    args      -- generator args dict; args['debug'] == 'true' selects /Zi /Od

    Fixes: the final statement had dataset-separator residue fused onto the
    line (a SyntaxError), and an unused `ext` dict was removed.
    '''
    fxcPath = findFxc()
    if not fxcPath :
        util.fmtError("fxc.exe not found!\n")
    profile = {
        'vs': 'vs_5_0',
        'fs': 'ps_5_0'
    }
    hlsl_src_path = base_path + '.hlsl'
    out_path = base_path + '.hlsl.h'
    # /Gec is backward compatibility mode
    cmd = [fxcPath, '/T', profile[type], '/Fh', out_path, '/Vn', c_name, '/Gec']
    if 'debug' in args and args['debug'] == 'true' :
        cmd.extend(['/Zi', '/Od'])
    else :
        cmd.append('/O3')
    cmd.append(hlsl_src_path)
    output = callFxc(cmd)
    parseOutput(output, lines)
74996 | <reponame>sgondala/Automix<filename>yahoo_with_mixtext/hyperopt_eval_single.py<gh_stars>1-10
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
import numpy as np
from FastAutoAugment.read_data import *
from FastAutoAugment.classification_models.MixText import *
import pickle
import wandb
import argparse
from tqdm import tqdm
from hyperopt import fmin, tpe, hp, Trials
# Command-line configuration for the hyperopt evaluation run.
parser = argparse.ArgumentParser(description='PyTorch MixText')
parser.add_argument('--batch-size', default=64, type=int, metavar='N',
                    help='train batchsize')
parser.add_argument('--checkpoint-path', type=str, default='checkpoints/train_yahoo_on_mixtext_10_per_class_no_augmentations/model_best.pth', help='Saved model checkpoint')
parser.add_argument('--sub-policies-per-policy', type=int, default=3)
parser.add_argument('--number-of-policies-to-evaluate', type=int, default=50)
parser.add_argument('--alpha', type=float, default=2)
parser.add_argument('--mix-layers', nargs='+',
                    default=[7,9,12], type=int, help='define mix layer set')
args = parser.parse_args()
# Seeds: fix numpy and torch (CPU + all GPUs) RNGs and force deterministic
# cuDNN kernels so repeated hyperopt evaluations are reproducible.
np.random.seed(42)
torch.manual_seed(42)
torch.cuda.manual_seed_all(42)
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
def own_loss(logits, target, num_labels):
    """Soft-label cross-entropy: mean over the batch of
    ``-sum(log_softmax(logits) * target)`` per example.

    :param logits: raw model outputs, shape (batch, num_labels)
    :param target: soft label distribution, same shape as ``logits``
    :param num_labels: expected number of classes (sanity check only;
        previously accepted but unused)
    :return: scalar mean loss tensor
    """
    assert logits.shape == target.shape
    assert logits.shape[1] == num_labels
    # Per-example cross-entropy against the soft target distribution.
    loss = -torch.sum(F.log_softmax(logits, dim=1) * target, dim=1)
    assert loss.shape[0] == target.shape[0]
    return loss.mean()
def optimization_function(input_arguments):
    """Hyperopt objective: evaluate one candidate triple of BERT mix layers.

    Builds an Inter-LADA mixed validation set, runs the frozen checkpoint
    over it with random mixup interpolation, logs to wandb, and returns the
    mean soft-label loss (lower is better).

    :param input_arguments: (arg1, arg2, arg3) candidate layer indices; one
        is drawn at random per batch as the mixup layer.
    :return: mean validation loss (float) minimised by hyperopt
    """
    arg1, arg2, arg3 = input_arguments
    # Fresh wandb run per candidate so each evaluation is logged separately.
    wandb.init(project="auto_augment", reinit=True)
    wandb_name = f'hyperopt_single_inter_lada_layers_{arg1}_{arg2}_{arg3}'
    model_name = 'bert-base-uncased'
    dataset_identifier = 'val_200'
    val = pickle.load(open('data/paper_yahoo_split/yahoo_val_200_per_class.pkl', 'rb'))
    # knn = arg2
    # mu = arg2
    # Inter-LADA neighbourhood parameters fixed for this search; earlier
    # runs searched them instead (see commented lines above).
    knn = 7
    mu = 0.23
    val_dataset = create_dataset(val['X'], val['y'], model_name, 256, mix='Inter_LADA', num_classes=10,knn_lada=knn, mu_lada=mu, dataset_identifier = dataset_identifier)
    wandb.run.name = wandb_name
    wandb.run.save()
    val_dataloader = DataLoader(val_dataset, batch_size=args.batch_size, num_workers=4)
    base_model = torch.load(args.checkpoint_path).cuda()
    base_model.eval()
    with torch.no_grad():
        loss_total = 0
        total_sample = 0
        for batch in tqdm(val_dataloader, desc='Validation loop'):
            encoded_1, encoded_2, label_1, label_2 = batch
            assert encoded_1.shape == encoded_2.shape
            # mix_layer = np.random.choice(args.mix_layers)
            # Pick one of the three candidate layers for this batch.
            mix_layer = np.random.choice([arg1, arg2, arg3])
            # Beta-distributed mixing ratio, biased towards the first input.
            l = np.random.beta(args.alpha, args.alpha)
            l = max(l, 1-l)
            logits = base_model(encoded_1.cuda(), encoded_2.cuda(), l, mix_layer)
            combined_labels = label_1 * l + label_2 * (1-l)
            loss = own_loss(logits, combined_labels.cuda(), num_labels=10)
            # Weight by batch size so the final mean is per-example.
            loss_total += loss.item() * encoded_1.shape[0]
            total_sample += encoded_1.shape[0]
    loss_total = loss_total/total_sample
    wandb.log({'Test loss' : loss_total})
    print('Test loss ', loss_total)
    return loss_total
if __name__ == "__main__":
    # TPE search over three BERT mix-layer indices (1..11 each), minimising
    # the interpolation loss returned by optimization_function.
    trials = Trials()
    space = [
        hp.choice('arg1', list(range(1, 12))),
        hp.choice('arg2', list(range(1, 12))),
        hp.choice('arg3', list(range(1, 12))),
    ]
    best = fmin(fn=optimization_function,
                space=space,
                algo=tpe.suggest,
                max_evals=args.number_of_policies_to_evaluate,
                trials=trials)
    # Persist the full trial history for later analysis; use a context
    # manager so the file handle is always closed.
    out_path = f'data/saved_logs/hyperopt_single_inter_lada_layers_changes_{args.number_of_policies_to_evaluate}.pkl'
    with open(out_path, 'wb') as f:
        pickle.dump(trials, f)
11229715 | from abc import ABC
from src.model.BagOfWords import BagOfWords
class CoefficientStrategy(ABC):
    """Common interface for set-similarity coefficient computations."""

    def exec(self):
        """Compute the coefficient; concrete strategies override this."""
class DiceStrategy(CoefficientStrategy):
    """Dice coefficient: 2 * |A ∩ B| / (|A| + |B|)."""

    def __init__(self, bag1: BagOfWords, bag2: BagOfWords):
        self.__bag1 = bag1
        self.__bag2 = bag2

    def exec(self):
        """Return the Dice similarity of the two bags."""
        shared = self.__bag1.intersection(self.__bag2)
        total = len(self.__bag1) + len(self.__bag2)
        return 2.0 * len(shared) / total
class JaccardStrategy(CoefficientStrategy):
    """Jaccard coefficient: |A ∩ B| / |A ∪ B|."""

    def __init__(self, bag1: BagOfWords, bag2: BagOfWords):
        self.__bag1 = bag1
        self.__bag2 = bag2

    def exec(self):
        """Return the Jaccard similarity of the two bags."""
        common = self.__bag1.intersection(self.__bag2)
        combined = self.__bag1.union(self.__bag2)
        return len(common) / len(combined)
class CosineStrategy(CoefficientStrategy):
    """Cosine (Otsuka-Ochiai) coefficient: |A ∩ B| / sqrt(|A| * |B|).

    Bug fix: the denominator was previously ``|A| * |B|`` without the square
    root, which is not the cosine coefficient and severely under-reports
    similarity for larger bags (e.g. two identical bags scored 1/|A|, not 1).
    """

    def __init__(self, bag1: BagOfWords, bag2: BagOfWords):
        self.__bag1 = bag1
        self.__bag2 = bag2

    def exec(self):
        """Return the cosine similarity of the two bags."""
        shared = len(self.__bag1.intersection(self.__bag2))
        return shared / (len(self.__bag1) * len(self.__bag2)) ** 0.5
class OverlappingStrategy(CoefficientStrategy):
    """Overlap coefficient: |A ∩ B| / min(|A|, |B|)."""

    def __init__(self, bag1: BagOfWords, bag2: BagOfWords):
        self.__bag1 = bag1
        self.__bag2 = bag2

    def exec(self):
        """Return the overlap coefficient of the two bags."""
        shared = self.__bag1.intersection(self.__bag2)
        smaller = min(len(self.__bag1), len(self.__bag2))
        return len(shared) / smaller
| StarcoderdataPython |
3429871 | <reponame>TheShadow29/subreddit-classification-dataset
"""
Creates the final json file to be submitted
Author: <NAME>
"""
import json
from pathlib import Path
import pandas as pd
def get_corpus_from_csv(csvf):
    """Read a two-column CSV and return the corpus.

    :param csvf: path to a CSV file whose first column is the post title
        and second column is the subreddit label (first row is the header)
    :return: list of {"data": <title>, "label": <subreddit>} dicts
    """
    csv_data = pd.read_csv(csvf)
    corpus_list = []
    for _, row in csv_data.iterrows():
        # Positional access via .iloc: integer keys on a labelled Series
        # (row[0]) are deprecated and removed in newer pandas versions.
        corpus_list.append({"data": str(row.iloc[0]), "label": str(row.iloc[1])})
    return corpus_list
if __name__ == '__main__':
    OUT_JSON = Path('../data/proposal.json')
    DESCRIPTION = ["The following dictionary contains corpus for subreddit "
                   "classification task. The corpus contains the title of the post "
                   "and the sub-reddit it was chosen from."]
    # Placeholder values: author names/emails were anonymised upstream.
    AUTHORS = {"author1": "<NAME>", "author2": "<NAME>"}
    EMAILS = {"email1": "<EMAIL>", "email2": "<EMAIL>"}
    CSV_FILE = Path('../data/coarse_out/sample1.csv')
    CORPUS = get_corpus_from_csv(CSV_FILE)
    # Assemble the submission dictionary and write it as JSON.
    # NOTE(review): OUT_JSON.open('w') is never explicitly closed; consider
    # a `with` block.
    OUT_DICT = dict()
    OUT_DICT['description'] = DESCRIPTION
    OUT_DICT['authors'] = AUTHORS
    OUT_DICT['emails'] = EMAILS
    OUT_DICT['corpus'] = CORPUS
    json.dump(OUT_DICT, OUT_JSON.open('w'))
| StarcoderdataPython |
6485526 | """Unit tests for ProductManifold."""
import random
import geomstats.backend as gs
import geomstats.tests
from geomstats.geometry.euclidean import Euclidean
from geomstats.geometry.hyperboloid import Hyperboloid
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.minkowski import Minkowski
from geomstats.geometry.product_manifold import (
NFoldManifold,
NFoldMetric,
ProductManifold,
)
from geomstats.geometry.product_riemannian_metric import ProductRiemannianMetric
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
from tests.conftest import Parametrizer
from tests.data_generation import _ManifoldTestData, _RiemannianMetricTestData
from tests.geometry_test_cases import ManifoldTestCase, RiemannianMetricTestCase
smoke_manifolds_1 = [Hypersphere(dim=2), Hyperboloid(dim=2)]
smoke_metrics_1 = [Hypersphere(dim=2).metric, Hyperboloid(dim=2).metric]
smoke_manifolds_2 = [Euclidean(3), Minkowski(3)]
smoke_metrics_2 = [Euclidean(3).metric, Minkowski(3).metric]
class TestProductManifold(ManifoldTestCase, metaclass=Parametrizer):
space = ProductManifold
skip_test_random_tangent_vec_is_tangent = True
skip_test_projection_belongs = True
class ProductManifoldTestData(_ManifoldTestData):
n_list = random.sample(range(2, 4), 2)
default_point_list = ["vector", "matrix"]
manifolds_list = [[Hypersphere(dim=n), Hyperboloid(dim=n)] for n in n_list]
space_args_list = [
(manifold, None, default_point)
for manifold, default_point in zip(manifolds_list, default_point_list)
]
shape_list = [
(n + 1, n + 1) if default_point == "matrix" else (2 * (n + 1),)
for n, default_point in zip(n_list, default_point_list)
]
n_points_list = random.sample(range(2, 5), 2)
n_vecs_list = random.sample(range(2, 5), 2)
def dimension_test_data(self):
smoke_data = [
dict(
manifold=smoke_manifolds_1,
default_point_type="vector",
expected=4,
),
dict(
manifold=smoke_manifolds_1,
default_point_type="matrix",
expected=4,
),
]
return self.generate_tests(smoke_data)
def regularize_test_data(self):
smoke_data = [
dict(
manifold=smoke_manifolds_1,
default_point_type="vector",
point=ProductManifold(
smoke_manifolds_1, default_point_type="vector"
).random_point(5),
),
dict(
manifold=smoke_manifolds_1,
default_point_type="matrix",
point=ProductManifold(
smoke_manifolds_1, default_point_type="matrix"
).random_point(5),
),
]
return self.generate_tests(smoke_data)
def random_point_belongs_test_data(self):
smoke_space_args_list = [
(smoke_manifolds_1, None, "vector"),
(smoke_manifolds_1, None, "matrix"),
]
smoke_n_points_list = [1, 2]
return self._random_point_belongs_test_data(
smoke_space_args_list,
smoke_n_points_list,
self.space_args_list,
self.n_points_list,
)
def projection_belongs_test_data(self):
return self._projection_belongs_test_data(
self.space_args_list,
self.shape_list,
self.n_points_list,
belongs_atol=1e-1,
)
def to_tangent_is_tangent_test_data(self):
return self._to_tangent_is_tangent_test_data(
ProductManifold,
self.space_args_list,
self.shape_list,
self.n_vecs_list,
is_tangent_atol=gs.atol * 100,
)
def random_tangent_vec_is_tangent_test_data(self):
return self._random_tangent_vec_is_tangent_test_data(
ProductManifold,
self.space_args_list,
self.n_vecs_list,
is_tangent_atol=gs.atol * 100,
)
testing_data = ProductManifoldTestData()
def test_dimension(self, manifolds, default_point_type, expected):
space = self.space(manifolds, default_point_type=default_point_type)
self.assertAllClose(space.dim, expected)
def test_regularize(self, manifolds, default_point_type, point):
space = self.space(manifolds, default_point_type=default_point_type)
result = space.regularize(point)
self.assertAllClose(result, point)
class TestProductRiemannianMetric(RiemannianMetricTestCase, metaclass=Parametrizer):
metric = connection = ProductRiemannianMetric
skip_test_parallel_transport_ivp_is_isometry = True
skip_test_parallel_transport_bvp_is_isometry = True
skip_test_exp_geodesic_ivp = True
class ProductRiemannianMetricTestData(_RiemannianMetricTestData):
n_list = random.sample(range(2, 3), 1)
default_point_list = ["vector", "matrix"]
manifolds_list = [[Hypersphere(dim=n), Hyperboloid(dim=n)] for n in n_list]
metrics_list = [
[Hypersphere(dim=n).metric, Hyperboloid(dim=n).metric] for n in n_list
]
metric_args_list = list(zip(metrics_list, default_point_list))
shape_list = [
(n + 1, n + 1) if default_point == "matrix" else (2 * (n + 1),)
for n, default_point in zip(n_list, default_point_list)
]
space_list = [
ProductManifold(manifolds, None, default_point_type)
for manifolds, default_point_type in zip(manifolds_list, default_point_list)
]
n_points_list = random.sample(range(2, 5), 1)
n_tangent_vecs_list = random.sample(range(2, 5), 1)
n_points_a_list = random.sample(range(2, 5), 1)
n_points_b_list = [1]
alpha_list = [1] * 1
n_rungs_list = [1] * 1
scheme_list = ["pole"] * 1
        def inner_product_matrix_test_data(self):
            """Smoke data for ``test_inner_product_matrix``.

            NOTE(review): the two dicts use different keywords (``metric=``
            vs ``manifold=``) while the consuming test declares its first
            parameter as ``manifolds``, and both pass metric objects
            (``smoke_metrics_2``) alongside points sampled from manifolds —
            verify the intended keyword/values against ``generate_tests``
            before trusting this data; it looks inconsistent.
            """
            smoke_data = [
                dict(
                    metric=smoke_metrics_2,
                    default_point_type="vector",
                    point=ProductManifold(
                        smoke_manifolds_1, default_point_type="vector"
                    ).random_point(5),
                    base_point=ProductManifold(
                        smoke_manifolds_1, default_point_type="vector"
                    ).random_point(5),
                ),
                dict(
                    manifold=smoke_metrics_2,
                    default_point_type="matrix",
                    point=ProductManifold(
                        smoke_manifolds_2, default_point_type="matrix"
                    ).random_point(5),
                    base_point=ProductManifold(
                        smoke_manifolds_2, default_point_type="matrix"
                    ).random_point(5),
                ),
            ]
            return self.generate_tests(smoke_data)
def exp_shape_test_data(self):
return self._exp_shape_test_data(
self.metric_args_list, self.space_list, self.shape_list
)
def log_shape_test_data(self):
return self._log_shape_test_data(self.metric_args_list, self.space_list)
def squared_dist_is_symmetric_test_data(self):
return self._squared_dist_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
atol=gs.atol * 1000,
)
def exp_belongs_test_data(self):
return self._exp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
belongs_atol=gs.atol * 1000,
)
def log_is_tangent_test_data(self):
return self._log_is_tangent_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
is_tangent_atol=1e-1,
)
def geodesic_ivp_belongs_test_data(self):
return self._geodesic_ivp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_points_list,
belongs_atol=gs.atol * 1000,
)
def geodesic_bvp_belongs_test_data(self):
return self._geodesic_bvp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
belongs_atol=gs.atol * 1000,
)
def log_then_exp_test_data(self):
return self._log_then_exp_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
rtol=gs.rtol * 1000,
atol=1e-1,
)
def exp_then_log_test_data(self):
return self._exp_then_log_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
amplitude=10,
rtol=gs.rtol * 1000,
atol=1e-1,
)
def exp_ladder_parallel_transport_test_data(self):
return self._exp_ladder_parallel_transport_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
self.n_rungs_list,
self.alpha_list,
self.scheme_list,
)
def exp_geodesic_ivp_test_data(self):
return self._exp_geodesic_ivp_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
self.n_points_list,
rtol=gs.rtol * 100000,
atol=gs.atol * 100000,
)
def parallel_transport_ivp_is_isometry_test_data(self):
return self._parallel_transport_ivp_is_isometry_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
is_tangent_atol=gs.atol * 1000,
atol=gs.atol * 1000,
)
def parallel_transport_bvp_is_isometry_test_data(self):
return self._parallel_transport_bvp_is_isometry_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
is_tangent_atol=gs.atol * 1000,
atol=gs.atol * 1000,
)
def dist_is_symmetric_test_data(self):
return self._dist_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_is_positive_test_data(self):
return self._dist_is_positive_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def squared_dist_is_positive_test_data(self):
return self._squared_dist_is_positive_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_is_norm_of_log_test_data(self):
return self._dist_is_norm_of_log_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_point_to_itself_is_zero_test_data(self):
return self._dist_point_to_itself_is_zero_test_data(
self.metric_args_list, self.space_list, self.n_points_list
)
def inner_product_is_symmetric_test_data(self):
return self._inner_product_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
)
def inner_product_matrix_vector_test_data(self):
random_data = [
dict(default_point_type="matrix"),
dict(default_point_type="vector"),
]
return self.generate_tests([], random_data)
def dist_log_then_exp_norm_test_data(self):
smoke_data = [
dict(
space=smoke_manifolds_1,
default_point_type="vector",
n_samples=10,
einsum_str="..., ...j->...j",
expected=gs.ones(10),
),
dict(
space=smoke_manifolds_1,
default_point_type="matrix",
n_samples=10,
einsum_str="..., ...jl->...jl",
expected=gs.ones(
10,
),
),
]
return self.generate_tests(smoke_data)
testing_data = ProductRiemannianMetricTestData()
@geomstats.tests.np_autograd_and_torch_only
def test_inner_product_matrix(
self, manifolds, default_point_type, point, base_point
):
metric = self.metric(manifolds, default_point_type=default_point_type)
logs = metric.log(point, base_point)
result = metric.inner_product(logs, logs)
expected = metric.squared_dist(base_point, point)
self.assertAllClose(result, expected)
@geomstats.tests.np_autograd_and_torch_only
def test_inner_product_matrix_vector(self, default_point_type):
euclidean = Euclidean(3)
minkowski = Minkowski(3)
space = ProductManifold(manifolds=[euclidean, minkowski])
point = space.random_point(1)
expected = gs.eye(6)
expected[3, 3] = -1
result = space.metric.metric_matrix(point)
self.assertAllClose(result, expected)
@geomstats.tests.np_and_autograd_only
def test_dist_log_then_exp_norm(
self, manifolds, default_point_type, n_samples, einsum_str, expected
):
space = ProductManifold(
manifolds=manifolds, default_point_type=default_point_type
)
point = space.random_point(n_samples)
base_point = space.random_point(n_samples)
logs = space.metric.log(point, base_point)
normalized_logs = gs.einsum(
einsum_str,
1.0 / space.metric.norm(logs, base_point),
logs,
)
point = space.metric.exp(normalized_logs, base_point)
result = space.metric.dist(point, base_point)
self.assertAllClose(result, expected)
class TestNFoldManifold(ManifoldTestCase, metaclass=Parametrizer):
space = NFoldManifold
skip_test_random_tangent_vec_is_tangent = True
class NFoldManifoldTestData(_ManifoldTestData):
n_list = random.sample(range(2, 4), 2)
base_list = [SpecialOrthogonal(n) for n in n_list]
power_list = random.sample(range(2, 4), 2)
space_args_list = list(zip(base_list, power_list))
shape_list = [(power, n, n) for n, power in zip(n_list, power_list)]
n_points_list = random.sample(range(2, 5), 2)
n_vecs_list = random.sample(range(2, 5), 2)
def belongs_test_data(self):
smoke_data = [
dict(
base=SpecialOrthogonal(3),
power=2,
point=gs.stack([gs.eye(3) + 1.0, gs.eye(3)])[None],
expected=gs.array(False),
),
dict(
base=SpecialOrthogonal(3),
power=2,
point=gs.array([gs.eye(3), gs.eye(3)]),
expected=gs.array(True),
),
]
return self.generate_tests(smoke_data)
def shape_test_data(self):
smoke_data = [dict(base=SpecialOrthogonal(3), power=2, shape=(2, 3, 3))]
return self.generate_tests(smoke_data)
def random_point_belongs_test_data(self):
smoke_space_args_list = [
(SpecialOrthogonal(2), 2),
(SpecialOrthogonal(2), 2),
]
smoke_n_points_list = [1, 2]
return self._random_point_belongs_test_data(
smoke_space_args_list,
smoke_n_points_list,
self.space_args_list,
self.n_points_list,
)
def projection_belongs_test_data(self):
return self._projection_belongs_test_data(
self.space_args_list,
self.shape_list,
self.n_points_list,
belongs_atol=1e-1,
)
def to_tangent_is_tangent_test_data(self):
return self._to_tangent_is_tangent_test_data(
NFoldManifold,
self.space_args_list,
self.shape_list,
self.n_vecs_list,
is_tangent_atol=gs.atol * 1000,
)
def random_tangent_vec_is_tangent_test_data(self):
return self._random_tangent_vec_is_tangent_test_data(
NFoldManifold, self.space_args_list, self.n_vecs_list
)
def test_belongs(self, base, power, point, expected):
space = self.space(base, power)
self.assertAllClose(space.belongs(point), expected)
def test_shape(self, base, power, expected):
space = self.space(base, power)
self.assertAllClose(space.shape, expected)
testing_data = NFoldManifoldTestData()
class TestNFoldMetric(RiemannianMetricTestCase, metaclass=Parametrizer):
metric = connection = NFoldMetric
skip_test_parallel_transport_ivp_is_isometry = True
skip_test_parallel_transport_bvp_is_isometry = True
skip_test_exp_geodesic_ivp = True
skip_test_geodesic_bvp_belongs = True
skip_test_geodesic_ivp_belongs = True
skip_test_log_is_tangent = True
class NFoldMetricTestData(_RiemannianMetricTestData):
n_list = random.sample(range(3, 5), 2)
power_list = random.sample(range(2, 5), 2)
base_list = [SpecialOrthogonal(n) for n in n_list]
metric_args_list = [
(base.metric, power) for base, power in zip(base_list, power_list)
]
shape_list = [(power, n, n) for n, power in zip(n_list, power_list)]
space_list = [
NFoldManifold(base, power) for base, power in zip(base_list, power_list)
]
n_points_list = random.sample(range(2, 5), 2)
n_tangent_vecs_list = random.sample(range(2, 5), 2)
n_points_a_list = random.sample(range(2, 5), 2)
n_points_b_list = [1]
alpha_list = [1] * 2
n_rungs_list = [1] * 2
scheme_list = ["pole"] * 2
def exp_shape_test_data(self):
return self._exp_shape_test_data(
self.metric_args_list, self.space_list, self.shape_list
)
def log_shape_test_data(self):
return self._log_shape_test_data(self.metric_args_list, self.space_list)
def squared_dist_is_symmetric_test_data(self):
return self._squared_dist_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
atol=gs.atol * 1000,
)
def exp_belongs_test_data(self):
return self._exp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
belongs_atol=gs.atol * 1000,
)
def log_is_tangent_test_data(self):
return self._log_is_tangent_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
is_tangent_atol=1e-1,
)
def geodesic_ivp_belongs_test_data(self):
return self._geodesic_ivp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_points_list,
belongs_atol=gs.atol * 100000,
)
def geodesic_bvp_belongs_test_data(self):
return self._geodesic_bvp_belongs_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
belongs_atol=gs.atol * 100000,
)
def log_then_exp_test_data(self):
return self._log_then_exp_test_data(
self.metric_args_list,
self.space_list,
self.n_points_list,
rtol=gs.rtol * 10000,
atol=1e-1,
)
def exp_then_log_test_data(self):
return self._exp_then_log_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
amplitude=10.0,
rtol=gs.rtol * 10000,
atol=1e-1,
)
def exp_ladder_parallel_transport_test_data(self):
return self._exp_ladder_parallel_transport_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
self.n_rungs_list,
self.alpha_list,
self.scheme_list,
)
def exp_geodesic_ivp_test_data(self):
return self._exp_geodesic_ivp_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
self.n_points_list,
rtol=gs.rtol * 100000,
atol=gs.atol * 100000,
)
def parallel_transport_ivp_is_isometry_test_data(self):
return self._parallel_transport_ivp_is_isometry_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
is_tangent_atol=gs.atol * 1000,
atol=gs.atol * 1000,
)
def parallel_transport_bvp_is_isometry_test_data(self):
return self._parallel_transport_bvp_is_isometry_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
is_tangent_atol=gs.atol * 1000,
atol=gs.atol * 1000,
)
def dist_is_symmetric_test_data(self):
print()
return self._dist_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_is_positive_test_data(self):
return self._dist_is_positive_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def squared_dist_is_positive_test_data(self):
return self._squared_dist_is_positive_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_is_norm_of_log_test_data(self):
return self._dist_is_norm_of_log_test_data(
self.metric_args_list,
self.space_list,
self.n_points_a_list,
self.n_points_b_list,
)
def dist_point_to_itself_is_zero_test_data(self):
return self._dist_point_to_itself_is_zero_test_data(
self.metric_args_list, self.space_list, self.n_points_list
)
def inner_product_is_symmetric_test_data(self):
return self._inner_product_is_symmetric_test_data(
self.metric_args_list,
self.space_list,
self.shape_list,
self.n_tangent_vecs_list,
)
def inner_product_shape_test_data(self):
space = NFoldManifold(SpecialOrthogonal(3), 2)
n_samples = 4
point = gs.stack([gs.eye(3)] * space.n_copies * n_samples)
point = gs.reshape(point, (n_samples, *space.shape))
tangent_vec = space.to_tangent(gs.zeros((n_samples, *space.shape)), point)
smoke_data = [
dict(space=space, n_samples=4, point=point, tangent_vec=tangent_vec)
]
return self.generate_tests(smoke_data)
testing_data = NFoldMetricTestData()
def test_inner_product_shape(self, space, n_samples, point, tangent_vec):
result = space.metric.inner_product(tangent_vec, tangent_vec, point)
expected = gs.zeros(n_samples)
self.assertAllClose(result, expected)
point = point[0]
result = space.metric.inner_product(tangent_vec, tangent_vec, point)
expected = gs.zeros(n_samples)
self.assertAllClose(result, expected)
result = space.metric.inner_product(tangent_vec[0], tangent_vec, point)
self.assertAllClose(result, expected)
expected = 0.0
result = space.metric.inner_product(tangent_vec[0], tangent_vec[0], point)
self.assertAllClose(result, expected)
| StarcoderdataPython |
6608308 | <gh_stars>0
from azure.quantum.target.ionq import IonQ
from azure.quantum.target.honeywell import Honeywell
from azure.quantum.target.target import Target
| StarcoderdataPython |
8086773 | #!/usr/bin/env python
"""
_NewWorkflow_
MySQL implementation of NewWorkflow
"""
from WMCore.Database.DBFormatter import DBFormatter
class New(DBFormatter):
    """
    Create a workflow ready for subscriptions.

    MySQL DAO: inserts a single row into wmbs_workflow.
    """
    # Named bind parameters (:name) are filled from the ``binds`` dict in
    # execute() by the DBCore layer.
    sql = """insert into wmbs_workflow (spec, owner, name, task, type, alt_fs_close, priority)
                values (:spec, :owner, :name, :task, :type, :alt_fs_close, :priority)"""
    def execute(self, spec = None, owner = None, name = None, task = None,
                wfType = None, alt_fs_close = False, priority = None,
                conn = None, transaction = False):
        """Insert the workflow row.

        :param wfType: stored in the ``type`` column
        :param alt_fs_close: coerced to int for the database boolean column
        :param conn: optional existing connection to reuse
        :param transaction: run inside an existing transaction when True
        """
        binds = {"spec": spec, "owner": owner, "name": name, "task": task,
                 "type": wfType, "alt_fs_close": int(alt_fs_close), "priority" : priority}
        self.dbi.processData(self.sql, binds, conn = conn, transaction = transaction)
        return
| StarcoderdataPython |
1607209 | from typing import Iterable
from django.db.models import QuerySet
from django.utils import timezone
from accounts.models import User
from schedules.models import Event, Attendant
def get_events(church_name: str = None, limit: int = None, order_by_start: str = None) -> QuerySet[Event]:
    """Return public events that have not yet ended.

    :param church_name: optional church filter; '-' is treated as ' ' and
        the name match is case-insensitive
    :param limit: optional cap on the number of events returned
    :param order_by_start: 'desc' or 'asc' to sort by start time.
        NOTE(review): 'desc' maps to order_by('start') (ascending) and
        'asc' to order_by('-start') (descending) — this looks inverted;
        confirm the intended semantics with callers before changing.
    """
    event_list = Event.objects.filter(visibility='public', end__gt=timezone.now())
    if church_name:
        event_list = event_list.filter(church__name__iexact=church_name.replace('-', ' '))
    if order_by_start:
        if order_by_start == 'desc':
            event_list = event_list.order_by('start')
        elif order_by_start == 'asc':
            event_list = event_list.order_by('-start')
    if limit:
        # Slicing the queryset applies a LIMIT at the database level.
        return event_list[:limit]
    return event_list
def get_admin_events(user: User, current_events_only: bool = False, order_by_start: str = None) -> QuerySet[Event]:
    """Return events of every church the user is a member of.

    :param user: the requesting (admin) user
    :param current_events_only: when True, exclude events that already ended
    :param order_by_start: 'desc' or 'asc' to sort by start time.
        NOTE(review): same inverted-looking mapping as get_events —
        'desc' sorts ascending, 'asc' descending; confirm intent.
    """
    event_list: QuerySet[Event] = Event.objects.filter(church__members=user)
    if current_events_only:
        event_list = event_list.filter(end__gt=timezone.now())
    if order_by_start:
        if order_by_start == 'desc':
            event_list = event_list.order_by('start')
        elif order_by_start == 'asc':
            event_list = event_list.order_by('-start')
    return event_list
def get_admin_member_attendants(user: User) -> QuerySet[Attendant]:
    """
    Returns all attendants of church where user is member
    """
    # Follows Attendant -> Event -> Church -> members to scope the queryset
    # to churches the user belongs to.
    return Attendant.objects.filter(event__church__members=user)
| StarcoderdataPython |
1932708 | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Register each installed app's admin module with the admin site
# (required explicitly in Django < 1.7).
admin.autodiscover()
urlpatterns = patterns('',
    url( r'^admin/', include(admin.site.urls) ), # admin site, eg host/project_x/admin/
    url( r'^', include('ebook_finder.urls_app') ), # all other paths go to the ebook_finder app, eg host/project_x/anything/
)
| StarcoderdataPython |
357258 | <reponame>AbrahamSanders/ir-dialogue-eval<filename>ir-dialogue-eval/dataset_loaders/frames_dataset_loader.py
"""
DatasetLoader implementation for the Frames dataset
"""
from os import path
import json
import re
from dataset_loaders.dataset_loader import DatasetLoader
from domain import Domain
class FramesDatasetLoader(DatasetLoader):
    """DatasetLoader implementation for Frames dataset
    """
    def __init__(self, dataset_path):
        # Directory containing frames.json; stored by the base class.
        super().__init__(dataset_path)
    def _load_dataset(self):
        """ See docstring in base class.

        Reads frames.json and returns (ids, dialogs, domains), where each
        dialog is a list of (author, normalized_text) tuples and domains is
        a per-dialog list of Domain values inferred by keyword matching.
        """
        # Import the dialogs from the dataset
        frames_filepath = path.join(self.dataset_path, "frames.json")
        with open(frames_filepath, encoding="utf-8") as f:
            frames_json = json.load(f)
        ids = [dialog["id"] for dialog in frames_json]
        dialogs = [[(utterance["author"], self._normalize_whitespace(utterance["text"])) for utterance in dialog["turns"]]
                   for dialog in frames_json]
        domains = []
        for dialog in dialogs:
            # Concatenate all utterances of the dialog for keyword search.
            dialog_text = " ".join([utt[1] for utt in dialog])
            # TRAVEL is always assigned; finer domains are added when the
            # corresponding keywords appear anywhere in the dialog.
            domains.append([Domain.TRAVEL.value])
            if re.search("(?:fly|flight|airline|plane)", dialog_text, flags=re.IGNORECASE):
                domains[-1].append(Domain.FLIGHTS.value)
            if re.search("(?:hotel|motel|inn|resort|room|guest)", dialog_text, flags=re.IGNORECASE):
                domains[-1].append(Domain.HOTELS.value)
        return ids, dialogs, domains
9600158 | <reponame>rjzamora/dask-cuda<filename>dask_cuda/tests/test_device_host_file.py
from random import randint
import dask.array as da
from dask_cuda.device_host_file import (
DeviceHostFile,
device_to_host,
host_to_device,
)
from distributed.protocol import deserialize_bytes, serialize_bytelist
import numpy as np
import pytest
cupy = pytest.importorskip("cupy")
@pytest.mark.parametrize("num_host_arrays", [1, 10, 100])
@pytest.mark.parametrize("num_device_arrays", [1, 10, 100])
@pytest.mark.parametrize("array_size_range", [(1, 1000), (100, 100), (1000, 1000)])
def test_device_host_file_short(
    tmp_path, num_device_arrays, num_host_arrays, array_size_range
):
    """Insert a shuffled mix of host (numpy) and device (cupy) arrays into a
    DeviceHostFile, read every value back unchanged, and check all three
    tiers are empty once every key has been deleted."""
    tmpdir = tmp_path / "storage"
    tmpdir.mkdir()
    dhf = DeviceHostFile(
        device_memory_limit=1024 * 16, memory_limit=1024 * 16, local_directory=tmpdir
    )
    host = [
        ("x-%d" % i, np.random.random(randint(*array_size_range)))
        for i in range(num_host_arrays)
    ]
    device = [
        ("dx-%d" % i, cupy.random.random(randint(*array_size_range)))
        for i in range(num_device_arrays)
    ]
    import random
    # Shuffle so host and device insertions are interleaved arbitrarily.
    full = host + device
    random.shuffle(full)
    for k, v in full:
        dhf[k] = v
    random.shuffle(full)
    for k, original in full:
        acquired = dhf[k]
        da.assert_eq(original, acquired)
        del dhf[k]
    # After deleting every key, no tier should retain anything.
    assert set(dhf.device.keys()) == set()
    assert set(dhf.host.keys()) == set()
    assert set(dhf.disk.keys()) == set()
assert set(dhf.disk.keys()) == set()
def test_device_host_file_step_by_step(tmp_path):
    """Insert host/device arrays one at a time and assert, after each step,
    which tier (device / host / disk) every key occupies — exercising the
    spill order of DeviceHostFile under tight memory limits."""
    tmpdir = tmp_path / "storage"
    tmpdir.mkdir()
    dhf = DeviceHostFile(
        device_memory_limit=1024 * 16, memory_limit=1024 * 16, local_directory=tmpdir
    )
    a = np.random.random(1000)   # host (numpy) array
    b = cupy.random.random(1000)  # device (cupy) array
    dhf["a1"] = a
    assert set(dhf.device.keys()) == set()
    assert set(dhf.host.keys()) == set(["a1"])
    assert set(dhf.disk.keys()) == set()
    dhf["b1"] = b
    assert set(dhf.device.keys()) == set(["b1"])
    assert set(dhf.host.keys()) == set(["a1"])
    assert set(dhf.disk.keys()) == set()
    dhf["b2"] = b
    assert set(dhf.device.keys()) == set(["b1", "b2"])
    assert set(dhf.host.keys()) == set(["a1"])
    assert set(dhf.disk.keys()) == set()
    # Third device array exceeds the device limit: b1 spills to host.
    dhf["b3"] = b
    assert set(dhf.device.keys()) == set(["b2", "b3"])
    assert set(dhf.host.keys()) == set(["a1", "b1"])
    assert set(dhf.disk.keys()) == set()
    # Host limit exceeded next: a1 spills to disk.
    dhf["a2"] = a
    assert set(dhf.device.keys()) == set(["b2", "b3"])
    assert set(dhf.host.keys()) == set(["a2", "b1"])
    assert set(dhf.disk.keys()) == set(["a1"])
    dhf["b4"] = b
    assert set(dhf.device.keys()) == set(["b3", "b4"])
    assert set(dhf.host.keys()) == set(["a2", "b2"])
    assert set(dhf.disk.keys()) == set(["a1", "b1"])
    # Re-setting an existing key must not change tier occupancy.
    dhf["b4"] = b
    assert set(dhf.device.keys()) == set(["b3", "b4"])
    assert set(dhf.host.keys()) == set(["a2", "b2"])
    assert set(dhf.disk.keys()) == set(["a1", "b1"])
    # Every value reads back unchanged, regardless of where it lives now.
    da.assert_eq(dhf["a1"], a)
    del dhf["a1"]
    da.assert_eq(dhf["a2"], a)
    del dhf["a2"]
    da.assert_eq(dhf["b1"], b)
    del dhf["b1"]
    da.assert_eq(dhf["b2"], b)
    del dhf["b2"]
    da.assert_eq(dhf["b3"], b)
    del dhf["b3"]
    da.assert_eq(dhf["b4"], b)
    del dhf["b4"]
    assert set(dhf.device.keys()) == set()
    assert set(dhf.host.keys()) == set()
    assert set(dhf.disk.keys()) == set()
@pytest.mark.parametrize("collection", [dict, list, tuple])
@pytest.mark.parametrize("length", [0, 1, 3, 6])
@pytest.mark.parametrize("value", [10, {"x": [1, 2, 3], "y": [4.0, 5.0, 6.0]}])
def test_serialize_cupy_collection(collection, length, value):
    """Round-trip device objects (bare or wrapped in a collection) through
    serialize_bytelist/deserialize_bytes and verify serializer choice and
    content equality."""
    # Avoid running test for length 0 (no collection) multiple times
    if length == 0 and collection is not list:
        return
    if isinstance(value, dict):
        cudf = pytest.importorskip("cudf")
        dd = pytest.importorskip("dask.dataframe")
        x = cudf.DataFrame(value)
        assert_func = dd.assert_eq
    else:
        x = cupy.arange(10)
        assert_func = da.assert_eq
    if length == 0:
        obj = device_to_host(x)
    elif collection is dict:
        obj = device_to_host(dict(zip(range(length), (x,) * length)))
    else:
        obj = device_to_host(collection((x,) * length))
    # Collections longer than 5 elements fall back to pickle; shorter ones
    # use the "cuda" serializer per element.
    if length > 5:
        assert obj.header["serializer"] == "pickle"
    elif length > 0:
        assert all(h["serializer"] == "cuda" for h in obj.header["sub-headers"])
    else:
        assert obj.header["serializer"] == "cuda"
    btslst = serialize_bytelist(obj)
    bts = deserialize_bytes(b"".join(btslst))
    res = host_to_device(bts)
    if length == 0:
        assert_func(res, x)
    else:
        assert isinstance(res, collection)
        values = res.values() if collection is dict else res
        # Plain loop instead of a list comprehension used only for its
        # side effect (the assertion) — no throwaway list allocation.
        for v in values:
            assert_func(v, x)
| StarcoderdataPython |
1635406 | # -*- coding: utf-8 -*-
# @Time : 2018/6/7 下午5:22
# @Author : waitWalker
# @Email : <EMAIL>
# @File : MTTDataBase.py
# @Software: PyCharm
# 数据连接
import pymysql
import time
class MTTDataBase:
    """Small convenience wrapper around a pymysql connection.

    Retries the initial connection every 5 seconds until ``timeout``
    seconds have been spent, then raises.  Rows come back as dicts
    (DictCursor).  Failed writes are rolled back and return False.
    """
    error_code = ''   # last pymysql error code observed
    instance = None
    # db = None
    # cursor = None
    timeout = 30      # total seconds to spend retrying the connection
    time_count = 0    # seconds of retrying already spent
    # Constructor: connect to the database, retrying on failure.
    def __init__(self, config):
        """Connect using config keys: host, user, password, db, charset."""
        try:
            self.db = pymysql.connect(
                host=config['host'],
                user=config['user'],
                password=config['password'],
                db=config['db'],
                charset=config['charset'],
                cursorclass=pymysql.cursors.DictCursor)
            print("connect database success")
        except pymysql.Error as error:
            self.error_code = error.args[0]
            # BUG FIX: exception objects are not subscriptable in
            # Python 3 -- use error.args[1] for the message text.
            error_msg = 'mysql connect error !', error.args[1]
            print(error_msg)
            if self.time_count < self.timeout:
                interval = 5
                self.time_count += interval
                time.sleep(interval)
                return self.__init__(config)
            else:
                raise Exception(error_msg)
        # BUG FIX: every query method below uses self.cursor; the cursor
        # was previously stored as self.c, so each call raised
        # AttributeError.
        self.cursor = self.db.cursor()
    # Run a read query; returns the affected row count or False on error.
    def query(self, sql):
        try:
            result = self.cursor.execute(sql)
        except pymysql.Error as error:
            print('query error:', error)
            self.error_code = error.args[0]
            result = False
        return result
    # Run an UPDATE; commits on success, rolls back and returns False on error.
    def update(self, sql):
        try:
            result = self.cursor.execute(sql)
            self.db.commit()
        except pymysql.Error as error:
            print("update database error:", error)
            self.error_code = error.args[0]
            result = False
            self.rollback()
        return result
    # Run an INSERT; commits on success, rolls back and returns False on error.
    def insert(self, sql):
        try:
            result = self.cursor.execute(sql)
            self.db.commit()
        except pymysql.Error as error:
            print("insert error:",error)
            self.error_code = error.args[0]
            result = False
            self.rollback()
        return result
    # Run a DELETE; commits on success, rolls back and returns False on error.
    def delete(self, sql):
        try:
            result = self.cursor.execute(sql)
            self.db.commit()
        except pymysql.Error as error:
            print("delete error:",error)
            self.error_code = error.args[0]
            result = False
            self.rollback()
        return result
    # Fetch all rows produced by the last executed statement.
    def fetchall(self):
        return self.cursor.fetchall()
    # Roll back the current transaction (used after failed writes).
    def rollback(self):
        self.db.rollback()
    # Close the cursor and the underlying connection.
    def close(self):
        try:
            self.cursor.close()
            self.db.close()
        except pymysql.Error as error:
            print(error)
| StarcoderdataPython |
228888 | import numpy as np
def deadband(value, band_radius):
    """Apply a symmetric deadband: inputs within +/-band_radius map to 0,
    larger inputs are shifted toward zero by band_radius."""
    above = max(value - band_radius, 0)
    below = min(value + band_radius, 0)
    return above + below
def clipped_first_order_filter(input, target, max_rate, tau):
    """First-order approach rate toward *target* with time constant *tau*,
    saturated to the range [-max_rate, max_rate]."""
    error = target - input
    return np.clip(error / tau, -max_rate, max_rate)
| StarcoderdataPython |
1925268 | <filename>WhatsAppManifest/automator/whatsapp/database/companion_devices.py
from WhatsAppManifest.manifest.whatsapp.path import Path
from WhatsAppManifest.automator.whatsapp.database.base import WhatsAppDatabase
class WhatsAppDatabaseCompanionDevices(WhatsAppDatabase):
    """
    WhatsApp Companion Devices Database

    Concrete WhatsAppDatabase bound to the companion-devices database;
    only the database path is overridden here.
    """
    # On-device path of the companion_devices database file.
    _database = Path.companion_devices
| StarcoderdataPython |
8068720 | <reponame>RaviPandey33/gym-electric-motor-1
import numpy as np
from gym.spaces import Box
from ..random_component import RandomComponent
from ..core import ReferenceGenerator
from ..utils import instantiate
class SwitchedReferenceGenerator(ReferenceGenerator, RandomComponent):
    """Reference Generator that switches randomly between multiple sub generators with a certain probability p for each.
    """
    def __init__(self, sub_generators, p=None, super_episode_length=(100, 10000)):
        """
        Args:
            sub_generators(list(ReferenceGenerator)): ReferenceGenerator instances to be used as the sub_generators.
            p(list(float)/None): (Optional) Probabilities for each sub_generator. If None a uniform
                probability for each sub_generator is used.
            super_episode_length(Tuple(int, int)): Minimum and maximum number of time steps a sub_generator is used.
        """
        ReferenceGenerator.__init__(self)
        RandomComponent.__init__(self)
        self.reference_space = Box(-1, 1, shape=(1,), dtype=np.float64)
        self._reference = None
        self._k = 0  # time steps elapsed with the current sub generator
        self._sub_generators = list(sub_generators)
        assert len(self._sub_generators) > 0, 'No sub generator was passed.'
        # All sub generators must reference the same state variables.
        ref_names = self._sub_generators[0].reference_names
        assert all(sub_gen.reference_names == ref_names for sub_gen in self._sub_generators),\
            'The passed sub generators have different referenced states.'
        self._reference_names = ref_names
        # Uniform selection probabilities unless explicitly provided.
        self._probabilities = p or [1/len(sub_generators)] * len(sub_generators)
        self._current_episode_length = 0
        # A scalar super_episode_length is treated as a one-value range.
        if type(super_episode_length) in [float, int]:
            super_episode_length = super_episode_length, super_episode_length + 1
        self._super_episode_length = super_episode_length
        self._current_ref_generator = self._sub_generators[0]
    def set_modules(self, physical_system):
        """
        Args:
            physical_system(PhysicalSystem): The physical system of the environment.
        """
        super().set_modules(physical_system)
        for sub_generator in self._sub_generators:
            sub_generator.set_modules(physical_system)
        # The joint reference space spans the extremes of all sub spaces.
        ref_space_low = np.min([sub_generator.reference_space.low for sub_generator in self._sub_generators], axis=0)
        ref_space_high = np.max([sub_generator.reference_space.high for sub_generator in self._sub_generators], axis=0)
        self.reference_space = Box(ref_space_low, ref_space_high)
        self._referenced_states = self._sub_generators[0].referenced_states
        for sub_generator in self._sub_generators:
            assert np.all(sub_generator.referenced_states == self._referenced_states), \
                'Reference Generators reference different state variables'
            assert sub_generator.reference_space.shape == self.reference_space.shape, \
                'Reference Generators have differently shaped reference spaces'
    def reset(self, initial_state=None, initial_reference=None):
        # Pick a fresh sub generator and episode length for the new episode.
        self.next_generator()
        self._reset_reference()
        return self._current_ref_generator.reset(initial_state, initial_reference)
    def get_reference(self, state, **kwargs):
        # Delegate to the currently selected sub generator.
        self._reference = self._current_ref_generator.get_reference(state, **kwargs)
        return self._reference
    def get_reference_observation(self, state, *_, **kwargs):
        # Switch to a new sub generator once the sub-episode expires.
        if self._k >= self._current_episode_length:
            self._reset_reference()
            _, obs, _ = self._current_ref_generator.reset(state, self._reference)
        else:
            obs = self._current_ref_generator.get_reference_observation(state, **kwargs)
        self._k += 1
        return obs
    def _reset_reference(self):
        # Draw a new sub-episode length and randomly select a sub generator.
        self._current_episode_length = self.random_generator.integers(
            self._super_episode_length[0], self._super_episode_length[1]
        )
        self._k = 0
        self._current_ref_generator = self.random_generator.choice(self._sub_generators, p=self._probabilities)
    def seed(self, seed=None):
        # Seed this component and spawn independent seeds for random subs.
        super().seed(seed)
        for sub_generator in self._sub_generators:
            if isinstance(sub_generator, RandomComponent):
                seed = self._seed_sequence.spawn(1)[0]
                sub_generator.seed(seed)
| StarcoderdataPython |
12830687 | <gh_stars>0
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = ['Dataset']
class Dataset(pulumi.CustomResource):
    """Generated Pulumi resource for a Google Cloud Healthcare Dataset.

    See ``__init__`` for argument documentation.
    """
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 time_zone: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        A Healthcare `Dataset` is a toplevel logical grouping of `dicomStores`, `fhirStores` and `hl7V2Stores`.

        To get more information about Dataset, see:

        * [API documentation](https://cloud.google.com/healthcare/docs/reference/rest/v1/projects.locations.datasets)
        * How-to Guides
            * [Creating a dataset](https://cloud.google.com/healthcare/docs/how-tos/datasets)

        ## Example Usage

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] location: The location for the Dataset.
        :param pulumi.Input[str] name: The resource name for the Dataset.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] time_zone: The default timezone used by this dataset. Must be a either a valid IANA time zone name such as
               "America/New_York" or empty, which defaults to UTC. This is used for parsing times in resources
               (e.g., HL7 messages) where no explicit timezone is specified.
        """
        # Legacy positional aliases, kept for backwards compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            if location is None:
                raise TypeError("Missing required property 'location'")
            __props__['location'] = location
            __props__['name'] = name
            __props__['project'] = project
            __props__['time_zone'] = time_zone
            # Output-only property; populated by the provider after creation.
            __props__['self_link'] = None
        super(Dataset, __self__).__init__(
            'gcp:healthcare/dataset:Dataset',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None,
            self_link: Optional[pulumi.Input[str]] = None,
            time_zone: Optional[pulumi.Input[str]] = None) -> 'Dataset':
        """
        Get an existing Dataset resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] location: The location for the Dataset.
        :param pulumi.Input[str] name: The resource name for the Dataset.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] self_link: The fully qualified name of this dataset
        :param pulumi.Input[str] time_zone: The default timezone used by this dataset. Must be a either a valid IANA time zone name such as
               "America/New_York" or empty, which defaults to UTC. This is used for parsing times in resources
               (e.g., HL7 messages) where no explicit timezone is specified.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        __props__["location"] = location
        __props__["name"] = name
        __props__["project"] = project
        __props__["self_link"] = self_link
        __props__["time_zone"] = time_zone
        return Dataset(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The location for the Dataset.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The resource name for the Dataset.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> pulumi.Output[str]:
        """
        The fully qualified name of this dataset
        """
        return pulumi.get(self, "self_link")
    @property
    @pulumi.getter(name="timeZone")
    def time_zone(self) -> pulumi.Output[str]:
        """
        The default timezone used by this dataset. Must be a either a valid IANA time zone name such as
        "America/New_York" or empty, which defaults to UTC. This is used for parsing times in resources
        (e.g., HL7 messages) where no explicit timezone is specified.
        """
        return pulumi.get(self, "time_zone")
    def translate_output_property(self, prop):
        """Map provider (camelCase) property names to Python (snake_case)."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        """Map Python (snake_case) property names to provider (camelCase)."""
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| StarcoderdataPython |
1942254 | from schematics.models import Model
from schematics.types import StringType
from schematics.types.compound import DictType, ModelType
from .schema import Schema
from .headers import Headers
class Response(Model):
    """Swagger response object: a description plus optional schema and headers."""
    description = StringType(required=True, serialize_when_none=False)
    schema = ModelType(Schema, serialize_when_none=False)
    # NOTE(review): assigns the imported Headers object directly instead of
    # wrapping it in a field type -- confirm Headers is already a field
    # instance in .headers.
    headers = Headers
    # examples
# Mapping of status code (or "default") -> Response definition.
Responses = DictType(ModelType(Response), serialize_when_none=False)
3555790 | <filename>ancillary/Outliner.py
# Legacy truth constants from before Python had builtin booleans.
# NOTE(review): in modern Python these names are keywords and this
# assignment is a SyntaxError -- this module is Python 1.x/2.x era code.
True = 1
False = None
class OutlinerNode:
    """A node in an outline tree.

    Tracks ordered children, a parent back-link, a cached depth, and an
    expanded/collapsed display flag.  Depths are maintained eagerly by
    the mutation methods via _redepthify.
    """
    _expanded_p = True
    _parent = None
    _depth = 0
    def __init__(self):
        self._children = []
    def __repr__(self):
        # Indent by depth and tag the node: '+' expanded branch,
        # '-' collapsed branch, ' ' leaf.
        tabdepth = self._depth - 1
        if self.leaf_p(): tag = ' '
        elif self.expanded_p(): tag = '+'
        else: tag = '-'
        return (' ' * (tabdepth * 3)) + tag
    def clone(self):
        """Deep-copy this node and its subtree (the copy's parent is unset)."""
        newnode = OutlinerNode()
        newnode._expanded_p = self._expanded_p
        newnode._depth = self._depth
        for child in self._children:
            newchild = child.clone()
            newchild._parent = newnode
            newnode._children.append(newchild)
        return newnode
    def close(self):
        """Break parent links throughout the subtree so it can be collected."""
        self._parent = None
        for child in self._children: child.close()
    def _redepthify(self, node):
        # Recompute cached depths for node's descendants after a move.
        depth = node.depth()
        for child in node.children():
            child._depth = depth + 1
            self._redepthify(child)
    def append_child(self, node):
        """Add *node* as the last child, fixing its parent link and depths."""
        self._children.append(node)
        node._parent = self
        node._depth = self._depth + 1
        self._redepthify(node)
    def insert_child(self, node, index):
        """Insert *node* as a child at *index*, fixing parent link and depths."""
        self._children.insert(index, node)
        node._parent = self
        node._depth = self._depth + 1
        self._redepthify(node)
    def del_child(self, node):
        """Remove *node* from the children; return it, or False when absent."""
        try:
            child_i = self._children.index(node)
            rtnnode = self._children[child_i]
            del self._children[child_i]
            return rtnnode
        except (ValueError, IndexError):
            return False
    def replace_child(self, node, newnode):
        """Swap *newnode* in for *node*; return the old node, or False.

        NOTE(review): newnode._parent is not updated here and its
        descendants' depths are not recomputed -- confirm callers
        handle both.
        """
        newnode._depth = self._depth + 1
        try:
            child_i = self._children.index(node)
            rtnnode = self._children[child_i]
            self._children[child_i] = newnode
            return rtnnode
        except (ValueError, IndexError):
            return False
    # Display-state toggles and simple accessors.
    def expand(self): self._expanded_p = True
    def collapse(self): self._expanded_p = False
    def children(self): return self._children
    def parent(self): return self._parent
    def expanded_p(self): return self._expanded_p
    def leaf_p(self): return not self._children
    def depth(self): return self._depth
class OutlinerViewer:
    """Maintains the flat list of currently visible nodes of an outline
    tree and mirrors it into a subclass-supplied display.

    Subclasses override _insert/_delete/_select/_clear to drive the
    actual widget; this base class keeps the node bookkeeping.
    """
    def __init__(self, root, follow_all_children=None, shared_root=None):
        """Create a new viewer for a tree of nodes.

        If follow_all_children is true, then child links are followed
        even if the child is collapsed.  If false, then only expanded
        child links are followed.

        If shared_root is true, then the tree is not close()'d when
        the viewer is destroyed.  This can be cause memory leaks if
        misused.
        """
        self._root = root
        self._nodes = []
        self._shared_root = shared_root
        self._follow_all_children_p = follow_all_children
    def __del__(self):
        if not self._shared_root:
            self._root.close()
    ## Derived class specializations
    def _insert(self, node, index=None): pass
    def _delete(self, start, end=None): pass
    def _select(self, index): pass
    def _clear(self): pass
    def _populate(self, node):
        """Append *node* and its visible descendants to the flat list."""
        # insert into linear list
        self._nodes.append(node)
        # calculate the string to insert into the list box
        self._insert(node)
        # NOTE(review): relies on nodes providing get_nodetype(); the
        # OutlinerNode class in this file does not define it -- confirm
        # that concrete node classes do.
        if node.get_nodetype() == "Folder" \
           and (node.expanded_p() or self._follow_all_children_p):
            for child in node.children():
                self._populate(child)
    ## API methods
    def populate(self, showroot=0):
        """Fill the view from the tree, optionally showing the root itself."""
        if showroot:
            self._populate(self._root)
        else:
            for child in self._root.children():
                OutlinerViewer._populate(self, child)
    def clear(self):
        """Empty the display and the internal node list."""
        self._clear()
        self._nodes = []
    def insert_nodes(self, at_index, node_list, before_p=0):
        """Insert node_list at at_index (after it unless before_p is true)."""
        if not before_p: at_index = at_index + 1
        for node in node_list:
            self._nodes.insert(at_index, node)
            self._insert(node, at_index)
            at_index = at_index + 1
    def delete_nodes(self, start, end):
        """Remove the nodes at indices start..end inclusive."""
        self._delete(start, end)
        del self._nodes[start:end+1]
    def update_node(self, node):
        """Refresh a single node's display entry in place."""
        index = self.index(node)
        # TBD: is there a more efficient way of doing this?
        self._delete(index)
        self._insert(node, index)
    def _expand(self, node, at):
        """Insert node's visible subtree starting at index *at*.

        Returns the index just past the last inserted descendant.
        """
        for child in node.children():
            self.insert_nodes(at, [child], True)
            at = at + 1
            if not child.leaf_p() and child.expanded_p():
                # BUG FIX: the recursive call previously omitted the
                # required `at` argument (a TypeError at runtime) and did
                # not advance the insertion point past the grandchildren.
                at = self._expand(child, at)
        return at
    def expand_node(self, node):
        """Make node's children visible, inserting them just after it."""
        self._expand(node, self.index(node)+1)
    def select_node(self, node):
        """Highlight *node* in the display."""
        self._select(self.index(node))
    def node(self, index):
        """Return the node at *index*, or None when out of range."""
        if 0 <= index < len(self._nodes):
            return self._nodes[index]
        else:
            return None
    def index(self, node):
        """Return node's position in the visible list, or None."""
        try:
            return self._nodes.index(node)
        except ValueError:
            return None
    def count(self):
        """Number of currently visible nodes."""
        return len(self._nodes)
class OutlinerController:
    """Mediates between an outline tree and its viewer: expand/collapse,
    node promotion/demotion (shift left/right), reordering (shift
    up/down), and backup/revert of the whole tree."""
    def __init__(self, root=None, viewer=None):
        # BUG FIX: the defaults were previously applied *after*
        # root.clone(), so OutlinerController() with no arguments raised
        # AttributeError on None.clone().
        if not root: root = OutlinerNode()
        self._viewer = viewer
        self._root = root
        self._backup = root.clone()   # pristine copy used by revert()
        self._aggressive_p = None
        if not viewer: self._viewer = OutlinerViewer(self._root)
    # Accessors and simple state management.
    def root(self): return self._root
    def set_root(self, newroot):
        """Replace the tree (and its backup) with *newroot*."""
        self._root.close()
        self._backup.close()
        self._root = newroot
        self._backup = newroot.clone()
    def update_backup(self):
        """Snapshot the current tree as the new revert point."""
        self._backup.close()
        self._backup = self._root.clone()
    def root_redisplay(self):
        """Rebuild the viewer's contents from scratch."""
        self._viewer.clear()
        self._viewer.populate()
    def revert(self):
        """Discard the current tree and restore a clone of the backup."""
        self._root.close()
        self._root = self._backup.clone()
    def viewer(self): return self._viewer
    def set_viewer(self, viewer): self._viewer = viewer
    def set_aggressive_collapse(self, flag): self._aggressive_p = flag
    def aggressive_collapse_p(self): return self._aggressive_p
    def _sibi(self, node):
        # Return (parent, index-in-siblings, sibling-list) for *node*;
        # (None, None, []) when it has no parent.
        parent = node.parent()
        if not parent: return (None, None, [])
        sibs = parent.children()
        sibi = sibs.index(node)
        return parent, sibi, sibs
    def collapsable_p(self, node):
        # This node is only collapsable if it is an unexpanded branch
        # node, or the aggressive collapse flag is set.
        if node.leaf_p() or not node.expanded_p(): return False
        else: return True
    def collapse_node(self, node):
        """Collapse *node* (or, aggressively, its parent); return the
        collapsed node, or None when nothing could be collapsed."""
        if not self.collapsable_p(node):
            if self.aggressive_collapse_p():
                node = node.parent()
                if not self.collapsable_p(node): return
            else: return
        node.collapse()
        self.root_redisplay()
        return node
    def expand_node(self, node):
        """Expand a collapsed branch node and redisplay."""
        # don't expand a leaf or an already expanded node
        if node.leaf_p() or node.expanded_p(): return
        # now toggle the expanded flag and update the listbox
        node.expand()
        self.root_redisplay()
    def show_node(self, node):
        """Expand every ancestor so that *node* becomes visible."""
        # travel up tree from this node, making sure all ancestors are
        # expanded (i.e. visible)
        node = node.parent()
        while node:
            node.expand()
            node = node.parent()
        self.root_redisplay()
    def shift_left(self, node):
        """Promote *node*: it becomes a sibling of its parent, and its
        later siblings become its children."""
        # find the index of the node in the sib list.
        parent, sibi, sibs = self._sibi(node)
        if not parent: return
        grandparent, parenti, aunts = self._sibi(parent)
        if not grandparent: return
        # node now becomes a sibling of it's parent, and all of node's
        # later siblings become the node's children
        parent.del_child(node)
        grandparent.insert_child(node, parenti+1)
        if sibi < len(sibs):
            for sib in sibs[sibi:]:
                parent.del_child(sib)
                node.append_child(sib)
        self.root_redisplay()
    def shift_right(self, node):
        """Demote *node*: make it the last child of its preceding sibling."""
        # find the index of the node in the sib list.
        parent, sibi, sibs = self._sibi(node)
        # cannot shift right the first child in the sib list
        if sibi == 0: return
        # reparent the node such that it is now the child of the
        # preceding sibling in the sib list
        newparent = sibs[sibi-1]
        # cannot shift right if the above node is a leaf
        if newparent.leaf_p(): return
        parent.del_child(node)
        newparent.append_child(node)
        newparent.expand()
        # update the viewer
        self.root_redisplay()
    def shift_up(self, node):
        """Move *node* up one visible position, reparenting as needed."""
        # find the viewer index of the node, and get the node just
        # above it.  if it's the first visible node, it cannot be
        # shifted up.
        nodevi = self._viewer.index(node)
        if nodevi == 0: return
        above = self._viewer.node(nodevi-1)
        parent, sibi, sibs = self._sibi(node)
        if not parent: return
        # if node and above are at the same depth, just rearrange.
        if node.depth() == above.depth():
            parent.del_child(node)
            parent.insert_child(node, sibi-1)
        # if node is deeper than above, node becomes a sibling of
        # above and move just above *it*
        elif node.depth() > above.depth():
            aparent, asibi, asibs = self._sibi(above)
            if not aparent: return
            parent.del_child(node)
            aparent.insert_child(node, asibi)
            aparent.expand()
        # if above is deeper than node, then above becomes a sibling
        # of node and gets appended to the end of node's sibling list.
        else:
            aparent, asibi, asibs = self._sibi(above)
            if not aparent: return
            parent.del_child(node)
            aparent.append_child(node)
            aparent.expand()
        self.root_redisplay()
    def shift_down(self, node):
        """Move *node* down one visible position, reparenting as needed."""
        # find the viewer index of the node, and get the node just
        # below it.  if it's the last visible node, it cannot be
        # shifted down.
        nodevi = self._viewer.index(node)
        if nodevi is None or nodevi >= self._viewer.count()-1: return
        below = self._viewer.node(nodevi+1)
        parent, sibi, sibs = self._sibi(node)
        if not parent: return
        # if below is really node's first child, then what we want to
        # do is try to shift into node's next sibling's child list
        # NOTE(review): get_nodetype() is not defined on OutlinerNode in
        # this file -- confirm concrete node classes provide it.
        if node.get_nodetype() == "Folder":
            children = node.children()
            if len(children) > 0 and below == children[0]:
                if sibi+1 < len(sibs) and not sibs[sibi+1].leaf_p():
                    below = sibs[sibi+1]
        # if node and below are at the same depth, then what happens
        # depends on the state of below.  If below is an expanded
        # branch, then node becomes it's first sibling, otherwise it
        # just swaps places
        if node.depth() == below.depth():
            if not below.leaf_p() and below.expanded_p():
                parent.del_child(node)
                below.insert_child(node, 0)
            else:
                parent.del_child(node)
                parent.insert_child(node, sibi+1)
        # if node is deeper than below, node becomes a sibling of it's parent
        elif node.depth() > below.depth():
            grandparent, parenti, aunts = self._sibi(parent)
            if not grandparent: return
            parent.del_child(node)
            grandparent.insert_child(node, parenti+1)
        # if below is deeper than node, then node actually swaps
        # places with it's next sibling
        else:
            # if it's the last of the sibling, then it actually shifts left
            if sibi >= len(sibs)-1:
                self.shift_left(node)
                return
            else:
                parent.del_child(node)
                parent.insert_child(node, sibi+1)
        self.root_redisplay()
| StarcoderdataPython |
6629380 | from mock import MagicMock
from nose.tools import assert_equals, assert_not_equals, raises, with_setup
import json
from hdijupyterutils.configuration import override, override_all, with_override
from hdijupyterutils.configuration import _merge_conf
# This is a sample implementation of how a module would use the config methods.
# We'll use these three functions to test it works.
d = {}  # module-level override store handed to the configuration helpers
path = "~/.testing/config.json"  # config-file path (not actually read by these tests)
original_value = 0  # default value returned by the sample config functions
def module_override(config, value):
    """Override the single config named *config* with *value* in this module's store."""
    global d, path
    override(d, path, config, value)
def module_override_all(obj):
    """Replace this module's entire override dictionary with *obj*."""
    global d
    override_all(d, obj)
# Configs
@with_override(d, path)
def my_config():
    """Sample config value; returns original_value unless overridden in d."""
    global original_value
    return original_value
@with_override(d, path)
def my_config_2():
    """Second sample config, used to check overrides do not leak across configs."""
    global original_value
    return original_value
# Test helper functions
def _setup():
    """Clear all overrides before each test."""
    module_override_all({})
def _teardown():
    """Clear all overrides after each test."""
    module_override_all({})
# Unit tests begin
@with_setup(_setup, _teardown)
def test_original_value_without_overrides():
    """With no overrides in place the config reports its default value."""
    expected = original_value
    assert_equals(expected, my_config())
@with_setup(_setup, _teardown)
def test_original_value_with_overrides():
    """An override replaces the default value of the targeted config."""
    overridden = 2
    module_override(my_config.__name__, overridden)
    assert_equals(overridden, my_config())
@with_setup(_setup, _teardown)
def test_original_values_when_others_override():
    """Overriding one config must not affect a different config."""
    overridden = 2
    module_override(my_config.__name__, overridden)
    assert_equals(overridden, my_config())
    assert_equals(original_value, my_config_2())
@with_setup(_setup, _teardown)
def test_resetting_values_when_others_override():
    """Overriding one config and then clearing all overrides restores defaults."""
    overridden = 2
    module_override(my_config.__name__, overridden)
    assert_equals(overridden, my_config())
    assert_equals(original_value, my_config_2())
    # Clear every override.
    module_override_all({})
    assert_equals(original_value, my_config())
    assert_equals(original_value, my_config_2())
@with_setup(_setup, _teardown)
def test_configuration_merge_required():
    """_merge_conf overlays required confs onto the current session confs
    in place: top-level scalars are replaced and the nested 'conf'
    dictionaries are merged key by key."""
    current_session_confs = {
        "archives": ["s3://my-test-archive"],
        "numExecutors": 5,
        "conf": {
            "spark.dynamicAllocation.enabled":
            "false",
            "spark.sql.shuffle.partitions":
            20,
            "spark.yarn.tags":
            "my=tag,wee=wa",
            "spark.jars.packages":
            "net.snowflake:spark-snowflake_2.11:2.5.1-spark_2.4"
        }
    }
    required_session_confs = {
        "conf": {
            "spark.yarn.tags":
            "created-by=vaatu-raava"
        },
        "numExecutors": 10
    }
    _merge_conf(current_session_confs, required_session_confs)
    # numExecutors and spark.yarn.tags are overwritten; every other key
    # survives untouched.
    assert_equals(current_session_confs, {
        "archives": ["s3://my-test-archive"],
        "numExecutors": 10,
        "conf": {
            "spark.dynamicAllocation.enabled":
            "false",
            "spark.sql.shuffle.partitions":
            20,
            "spark.yarn.tags":
            "created-by=vaatu-raava",
            "spark.jars.packages":
            "net.snowflake:spark-snowflake_2.11:2.5.1-spark_2.4"
        }
    })
| StarcoderdataPython |
96320 |
# -*- coding:utf-8 -*-
import re
def parse(s):
    """Parse an s-expression string into nested Python lists of symbol strings."""
    # Lowercase, swap parens for brackets, pad so tokens are space-delimited.
    bracketed = (' ' + s.lower() + ' ').replace('(', '[').replace(')', ']')
    # Collapse whitespace runs into comma separators, trimming the padding.
    listed = re.sub(r'\s+', ', ', bracketed)[2:-2]
    # Quote every symbol token, then let eval build the nested lists.
    quoted = re.sub(r'(?P<symbol>[\w#%\\/^*+_\|~<>?!:-]+)',
                    lambda m: '"%s"' % m.group('symbol'), listed)
    return eval(quoted)
def cons(a, d):
    """Prepend *a* to *d*: dotted pair when *d* is an atom, fresh list otherwise."""
    if atom(d):
        return (a, d)
    return [a] + list(d)
def car(s):
    """Return the head (first element) of a pair or list."""
    head = s[0]
    return head
def cdr(s):
    """Return the tail of *s*: second element of a pair, rest of a list (or [])."""
    if isinstance(s, tuple):
        return s[1]
    return s[1:] if len(s) > 1 else []
def atom(s):
    """True for anything that is not a list (symbols, numbers, dotted pairs)."""
    is_list = isinstance(s, list)
    return not is_list
def eq(s, t):
    """Structural equality test between two values."""
    result = (s == t)
    return result
def cond(l, d):
    """Evaluate a (cond (p1 e1) (p2 e2) ...) form in environment *d*.

    The expression paired with the first true predicate is evaluated and
    returned; returns None when no predicate holds.
    """
    for [p, e] in cdr(l):
        if eval_(p, d):
            return eval_(e, d)
class lambda_object:
    """Callable closure for a (lambda (params...) body) form.

    NOTE(review): argument binding mutates the shared defining
    environment (self.dic) rather than creating a fresh frame, so calls
    can clobber outer bindings -- presumably intentional for this toy
    (dynamic-scope style); confirm.
    """
    count = 0  # total lambdas created; used to number the printed form
    def __init__(self, l, d):
        self.dic = d    # defining environment (shared, mutated on call)
        self.li = l[1]  # formal parameter symbols
        self.ex = l[2]  # body expression
        lambda_object.count += 1
        self.serial = lambda_object.count
    def __call__(self, *args):
        # Bind actuals to formals in the shared environment, then evaluate.
        for i in range(len(self.li)):
            self.dic[self.li[i]] = args[i]
        return eval_(self.ex, self.dic)
    def __str__(self):
        return '<COMPOND-PROCEDURE-#%d>' % self.serial
    __repr__ = __str__
def label(l, d):
    """Handle (label name expr): evaluate expr and bind it to name in *d*.

    NOTE(review): eval_(l[2]) is called without passing *d*, so the
    expression is evaluated in the default global environment -- confirm
    this is intended.
    """
    d[l[1]] = eval_(l[2])
    try:
        # Remember a printable name for newly defined compound procedures.
        if re.match(r'<COMPOND-PROCEDURE-#\d+>', str(d[l[1]])):
            symbol_t[str(d[l[1]])] = '%s' % l[1]
    finally:
        # NOTE(review): try/finally with no except clause does not
        # swallow exceptions; this pass is effectively a no-op.
        pass
def quote(l, d):
    """Handle (quote x): return x unevaluated; the environment *d* is ignored."""
    quoted = l[1]
    return quoted
# Built-in procedures and truth constants visible to evaluated code.
symbol_s = {'cons':cons, 'car':car, 'cdr':cdr, 'atom?':atom, 'eq?':eq, '#t':True, '#f':False}
# Special forms handled by eval_ without evaluating their arguments first.
syntax_s = {'cond':cond, 'lambda':lambda_object, 'quote':quote, 'label':label}
# Reverse map: str() of a Python value -> lisp symbol, used when printing.
symbol_t = dict()
for k, v in symbol_s.items():
    symbol_t[str(v)] = '%s' % k
symbol_t[True] = '#t'
symbol_t[False] = '#f'
def sstring(l, align=0):
    """Format a parsed expression back into s-expression text.

    Values registered in symbol_t (builtins, named procedures) print as
    their lisp symbol; dotted pairs print as "a . b"; None prints as
    unspecific-return-value.  *align* is accepted but unused.
    """
    if atom(l):
        if str(l) in symbol_t:
            return symbol_t[str(l)]
        elif l == None:
            return 'unspecific-return-value'
        elif isinstance(l, tuple):
            return '%s . %s' % (l[0], l[1])
        else:
            return str(l)
    elif l == []:
        return '()'
    # Non-empty list: recurse into each element and join with spaces.
    s = '('
    for x in l:
        s += sstring(x, align) + ' '
    return s[:-1] + ')'
def eval_(l, s=symbol_s):
    """Evaluate expression *l* in environment *s*, printing a trace.

    Atoms are variable lookups; special forms dispatch through syntax_s
    with their arguments unevaluated; anything else is evaluated
    applicative-order and applied.  (Python 2 source: print statements.)
    """
    if atom(l):
        # NOTE(review): looks up the global symbol_s rather than the
        # passed environment *s* -- confirm whether lookup in *s* was
        # intended.
        return symbol_s[l]
    eval_.depth += 1
    print '; ='+'>'*eval_.depth, sstring(l)
    if atom(l[0]) and l[0] in syntax_s:
        # Special form: hand the whole expression to its handler.
        u = syntax_s[l[0]](l, s)
        print '; ='+'|'*eval_.depth, sstring(u), '--', l[0]
        eval_.depth -= 1
        return u
    else:
        # Application: evaluate operator and operands, then apply.
        operator = eval_(l[0], s)
        operands = map(lambda e: eval_(e,s), l[1:])
        #print 'sval ='+'|'*eval_.depth, sstring(cons(operator, operands))
        u = operator(*operands)
        print '; -' +'|'*eval_.depth, sstring(u), '<<', '%s[%s]' % (sstring(operator), (len(operands) > 1) and str.join(', ', map(sstring, operands)) or sstring(*operands))
        eval_.depth -= 1
        return u
# Current trace-nesting depth, stored on the function itself.
eval_.depth = 0
if __name__ == '__main__':
    # Demo: define a recursive procedure ff (first atom of a nested
    # tree) via label, then evaluate a few traced expressions.
    code = '''
    (label ff
        (lambda (s)
            (cond
                ((atom? s) s)
                (#t (ff (car s))))))
    '''
    print eval_(parse(code))
    print symbol_s
    print symbol_t
    print sstring(eval_(parse("(cons (ff (quote (((a b) c)))) (quote (d)))")))
    eval_(parse('''
    ((cond (#f cdr) (#t car)) (quote a b c))'''))
| StarcoderdataPython |
79825 | <reponame>PrinceOfPuppers/qbot<filename>qbot/density.py
import numpy as np
import numpy.linalg as linalg
from qbot.helpers import ensureSquare, log2
import qbot.qgates as gates
def ketsToDensity(kets:[np.ndarray],probs: [float] = None) -> np.ndarray:
    '''Convert an ensemble of kets into a density matrix: rho = sum_i p_i |k_i><k_i|.

    With probs omitted, the first ket is treated as a pure state.
    Raises when the kets and probabilities differ in number.
    '''
    if probs is None:
        return ketToDensity(kets[0])
    if len(kets) != len(probs):
        raise Exception("number of state vectors an number of probabilites must equal")
    result = np.zeros( (kets[0].shape[0],kets[0].shape[0]),dtype=complex )
    for i, ket in enumerate(kets):
        # BUG FIX: |k><k| is outer(ket, ket.conj()); without the conjugate
        # the result is not Hermitian/positive for complex amplitudes.
        result += probs[i] * np.outer(ket, ket.conj())
    return result
def ketToDensity(ket: np.ndarray) -> np.ndarray:
    '''Return the pure-state density matrix |ket><ket|.'''
    # BUG FIX: the second argument must be conjugated; np.outer(ket, ket)
    # yields a non-Hermitian matrix for complex amplitudes.
    return np.outer(ket, ket.conj())
# def normalizeDensity(density: np.ndarray):
# density /= np.trace(density)
def partialTrace(density:np.ndarray, nQubits, mQubits, traceN = True):
    '''
    Partial trace of a bipartite density matrix of nQubits (+) mQubits.

    NOTE(review): with traceN=True the summed axes are (1, 3), which
    traces out the *latter* m-qubit subsystem and returns the leading
    n-qubit reduced density matrix (the original comment claimed the
    opposite); confirm against callers before flipping.
    '''
    dimN = 2**nQubits
    dimM = 2**mQubits
    size = ensureSquare(density)
    # BUG FIX: the joint dimension is the product 2**n * 2**m, not the
    # sum; the old sum-based check only coincided with the correct one
    # for n == m == 1 and rejected valid larger inputs.
    if dimN * dimM != size:
        raise Exception("incorrect number of qubits")
    axis1,axis2 = (1,3) if traceN else (0,2)
    return np.trace(
        density.reshape(dimN,dimM,dimN,dimM),
        axis1=axis1, axis2=axis2
    )
def partialTraceBoth(density,nQubits,mQubits):
    '''
    Return both reduced density matrices of an (n + m)-qubit state as the
    tuple (first-n-qubit density, last-m-qubit density); used by the
    measurement simulation.
    '''
    numQubits = log2(ensureSquare(density))
    if nQubits + mQubits != numQubits:
        raise Exception("incorrect number of qubits")
    # Reshape once into the (n, m, n, m) index structure and trace each
    # subsystem out of it.
    reshaped = density.reshape(2**nQubits, 2**mQubits, 2**nQubits, 2**mQubits)
    return (
        np.trace(reshaped, axis1=1, axis2=3),
        np.trace(reshaped, axis1=0, axis2=2),
    )
def partialTraceArbitrary(density: np.ndarray, numQubits: int, systemAQubits: [int]):
    """Partial trace over an arbitrary subset of qubits.

    Permutes the chosen qubits (systemAQubits) to the front via a
    generated swap gate, then traces both subsystems, returning
    (system-A density, system-B density).  Note: systemAQubits is
    sorted in place.
    """
    size = ensureSquare(density)
    systemAQubits.sort()
    systemBQubits = [i for i in range(0,numQubits) if i not in systemAQubits]
    numSysAQubits = len(systemAQubits)
    numSysBQubits = len(systemBQubits)
    def stateMap(state):
        # Map a basis-state index to the permuted ordering: system-A
        # bits first (most significant), then system-B bits.
        res = 0
        for i,aQubit in enumerate(systemAQubits):
            mask = 1 << numQubits - aQubit - 1
            res |= ((mask & state)!= 0) << (numQubits - 1 - i)
        for i,bQubit in enumerate(systemBQubits):
            mask = 1 << numQubits - bQubit - 1
            res |= ((mask & state) != 0) << numSysBQubits - i - 1
        return res
    swapGate = gates.genArbitrarySwap(size, stateMap)
    # Conjugate by the permutation, then trace each subsystem.
    swappedDensity = swapGate @ density @ swapGate.conj().T
    return partialTraceBoth(swappedDensity,numSysAQubits,numSysBQubits)
def combineDensity(d1: np.ndarray, d2: np.ndarray):
    """Tensor (Kronecker) product of two density matrices."""
    combined = np.kron(d1, d2)
    return combined
class MeasurementResult:
    """Bundle describing the outcome of a simulated projective measurement."""
    __slots__ = (
        'unMeasuredDensity', # [np.ndarray] state of the unmeasured qubits after the measurement
        'toMeasureDensity', # [np.ndarray] state of the qubits to measure, before measurement
        'probs', # [float] probabilities of getting each of the basis states
    )
    def __init__(self, unMeasuredDensity, toMeasureDensity, probs):
        self.unMeasuredDensity = unMeasuredDensity
        self.toMeasureDensity = toMeasureDensity
        self.probs = probs
    def __repr__(self):
        # Multi-line debug representation showing both densities and
        # the outcome probabilities.
        return (
            f'MeasurementResult:\n'
            f'unMeasuredDensity: \n{np.array_repr(self.unMeasuredDensity)}\n'
            f'toMeasureDensity: \n{np.array_repr(self.toMeasureDensity)}\n'
            f'probs: \n{self.probs.__repr__()}\n'
        )
def measureTopNQubits(density: np.ndarray, basisDensity: [np.ndarray], N: int) -> MeasurementResult:
    '''
    Measures the top N Qubits with respect to the provided basis (must also be density matrices)
    '''
    numQubits = log2(ensureSquare(density))
    if numQubits == N:
        # The whole register is measured; nothing is left over.
        toMeasureDensity = density
        unMeasuredDensity = np.array([], dtype=complex)
    else:
        toMeasureDensity, unMeasuredDensity = partialTraceBoth(density, N, numQubits - N)
    # Born rule: p_i = Tr(rho * B_i); normalize so the probabilities sum to 1.
    rawProbs = [abs(np.trace(np.matmul(toMeasureDensity, b))) for b in basisDensity]
    norm = sum(rawProbs)
    probs = [p / norm for p in rawProbs]
    return MeasurementResult(unMeasuredDensity, toMeasureDensity, probs)
def measureArbitrary(density: np.ndarray, basisDensity: [np.ndarray], toMeasure: [int]) -> MeasurementResult:
    '''
    Measures all qubits in toMeasure
    '''
    numQubits = log2(ensureSquare(density))
    if len(toMeasure) == numQubits:
        # Every qubit is measured; no remainder subsystem.
        toMeasureDensity = density
        unMeasuredDensity = np.array([], dtype=complex)
    else:
        toMeasureDensity, unMeasuredDensity = partialTraceArbitrary(density, numQubits, toMeasure)
    # Born rule: p_i = Tr(rho * B_i); normalize so the probabilities sum to 1.
    rawProbs = [abs(np.trace(np.matmul(toMeasureDensity, b))) for b in basisDensity]
    norm = sum(rawProbs)
    probs = [p / norm for p in rawProbs]
    return MeasurementResult(unMeasuredDensity, toMeasureDensity, probs)
def densityToStateEnsable(density: np.ndarray) -> [(float, np.ndarray)]:
    '''Return (probability, state) pairs from the eigendecomposition of a density matrix.

    Each nonzero eigenvalue of the density matrix is the probability of the
    ensemble being in the corresponding eigenvector (a pure state).
    '''
    _ = ensureSquare(density)
    eigVals, eigVecs = linalg.eig(density)
    eigPair = []
    for i, eigVal in enumerate(eigVals):
        if eigVal != 0:
            # BUGFIX: linalg.eig returns eigenvectors as the *columns* of
            # eigVecs, so the state paired with eigVals[i] is eigVecs[:, i].
            # The previous code took row i, which is wrong in general.
            eigPair.append((abs(eigVal), eigVecs[:, i]))
    return eigPair
if __name__ == "__main__":
    # Demo: build a mixed state, decompose it into a (probability, state)
    # ensemble, and reconstruct the density matrix from that ensemble twice.
    density = ketsToDensity([ np.array([1j,0],dtype=complex),np.array([0,1j],dtype=complex) ],[3/4,1/4])
    print(density)
    #print(partialTrace(density,1,1,False))
    reconstructed = np.zeros((density.shape[0],density.shape[1]),dtype=complex)
    for pair in densityToStateEnsable(density):
        print(pair)
        # NOTE(review): for complex states |s><s| = outer(s, s.conj());
        # confirm that omitting the conjugate here is intentional.
        reconstructed += pair[0] * np.outer(pair[1],pair[1])
    print(reconstructed)
    # Round-trip a second time using the reconstructed matrix as input.
    density = reconstructed
    reconstructed = np.zeros((density.shape[0],density.shape[1]),dtype=complex)
    for pair in densityToStateEnsable(density):
        print(pair)
        reconstructed += pair[0] * np.outer(pair[1],pair[1])
    print(reconstructed) | StarcoderdataPython |
8066305 | <gh_stars>1-10
# -*- coding: utf-8 -*
# Automation script: likes every song listed in song_list.txt on a web page
# (url_sc) by driving the mouse/keyboard with pyautogui via search_functions.
from search_functions import *
from config import url_sc
import time
# Moving the mouse to a screen corner aborts the run (pyautogui failsafe).
pyautogui.FAILSAFE = True
screenWidth, screenHeight = pyautogui.size()
# Switch focus to the browser window before navigating.
pyautogui.hotkey('alt', 'Tab')
# NOTE(review): screenHeight / 0.06 is ~16x the screen height; possibly meant
# to be screenHeight * 0.06 — confirm against open_url's coordinate handling.
open_url(url_sc, screenWidth * 0.2, screenHeight / 0.06)
time.sleep(5)
for song in open('song_list.txt', 'r', encoding='utf-8'):
    navigate_to_find_bar(screenWidth * 0.5, screenHeight * 0.11)
    clear_text()
    type_line(song)
    time.sleep(2)
    click_like(screenWidth * 0.4, screenHeight * 0.42)
    click_like(screenWidth * 0.4, screenHeight * 0.47) # he's drunk and can't push like at the first try
| StarcoderdataPython |
8196161 | <reponame>roedoejet/wordweaver-legacy
# Entry point: run the wordweaver Flask app with settings from ENV_CONFIG.
from wordweaver.app import app
from wordweaver.config import ENV_CONFIG
DEBUG = ENV_CONFIG['DEBUG']
HOST = ENV_CONFIG['HOST']
# PORT may arrive as a string (e.g. from the environment); coerce to int.
PORT = int(ENV_CONFIG['PORT'])
THREADED = ENV_CONFIG['THREADED']
app.run(debug=DEBUG, host=HOST, port=PORT, threaded=THREADED) | StarcoderdataPython |
38486 | <filename>eventi/core/admin.py
# coding: utf-8
# Register the Club and Info models with the Django admin site.
from django.contrib import admin
from eventi.core.models import Club, Info
admin.site.register(Club)
admin.site.register(Info)
| StarcoderdataPython |
6682506 | from copy import copy
from graphviz import Digraph
from typing import List, Tuple, Dict
from DataObjects.ClassState import State
from DataObjects.ClassArchitecture import Architecture
from DataObjects.ClassMachine import Machine
from DataObjects.ClassSystemTuple import SystemTuple
from Parser.ForkTree import ForkTree
from Algorithms.General.Tracing.TraceNode import TraceNodeObj
from itertools import permutations
class ModelChecker:
    """Explores the reachable system-state space of cache-coherence
    architectures and classifies system state tuples as allowed or
    forbidden (single-directory / multi-cache model checking).
    """
    def __init__(self, archs: List[Architecture], longest_trace: bool = True, handle_evicts: bool = True):
        """Store the architectures under test and the search options."""
        self.archs: List[Architecture] = archs
        self.allowed_state_tuples: Dict[SystemTuple, SystemTuple] = {}
        self.forbidden_state_tuples: Dict[SystemTuple, SystemTuple] = {}
        # Iteration count
        self.search_iterations = 0
        # Options
        self.longest_trace = longest_trace # Only selects the longest trace if multiple actions compete
        self.handle_evicts = handle_evicts # Only if enabled evict accesses are being served
    def set_longest_trace(self, longest_trace: bool = True):
        """Toggle whether only the longest competing trace is selected."""
        self.longest_trace = longest_trace
    def set_handle_evicts(self, handle_evicts: bool = True):
        """Toggle whether evict accesses are served during the search."""
        self.handle_evicts = handle_evicts
    def single_cache_directory_state_space(self) -> List[SystemTuple]:
        """Build the state space for one directory with multiple caches,
        optionally extended with eviction-only-reachable tuples."""
        state_tuples = []
        evict_state_tuples = []
        safe_tuple_upper_bound = None
        if self.handle_evicts:
            evict_state_tuples = self.gen_mc_sd_state_space()
            safe_tuple_upper_bound = len(evict_state_tuples[0]) - 1
        # The safe state tuples do not include evictions and are always safe
        safe_state_tuples = self.gen_mc_sd_state_space(False, safe_tuple_upper_bound)
        for state_tuple in safe_state_tuples:
            self.allowed_state_tuples[state_tuple] = state_tuple
        self.search_iterations = len(safe_state_tuples) - 1
        # Build evict tree to include system state tuples that are only reachable through evictions
        if self.handle_evicts:
            state_tuples = self.build_evict_tree(evict_state_tuples)
        return state_tuples
    # Generate state space for a single directory and for multiple caches
    def gen_mc_sd_state_space(self, handle_evicts: bool = True, loop_count: int = None) -> List[SystemTuple]:
        """Iteratively add caches until the reachable state space (trace
        coverage and reduced tuple sets) stops growing, or until
        loop_count iterations have been performed."""
        ind_iter = 0
        old_complete_cnt = 0
        state_tuples = []
        old_reduced_set_tuples = {}
        if loop_count is not None:
            assert loop_count != 0, "The iteration loop count must be at least one"
        while True:
            init_tuple = SystemTuple(self.gen_mult_cc_single_dc(ind_iter))
            machine_state_tuple_dict = self.single_access_new_state_tuples(init_tuple, handle_evicts)
            new_complete_cnt = self.check_machine_completion(init_tuple)
            new_reduced_set_tuples = dict([state_tuple.get_reduced_set()
                                           for state_tuple in machine_state_tuple_dict.values()])
            # Compare state space based on trace coverage and more important sets of different state space combinations!
            if (new_complete_cnt == old_complete_cnt and
                set(new_reduced_set_tuples.keys()) == set(old_reduced_set_tuples.keys())) and \
                    loop_count is None:
                # Remove the init tuple from the state space as it is only a dummy start point
                # and does not contain actual traces
                #state_tuples.pop(0)
                return state_tuples
            # Save the additional state space
            state_tuples = [init_tuple] + list(machine_state_tuple_dict.values())
            # Add cache
            ind_iter += 1
            old_complete_cnt = new_complete_cnt
            old_reduced_set_tuples = new_reduced_set_tuples
            if loop_count is not None and ind_iter >= loop_count:
                # Remove the init tuple from the state space as it is only a dummy start point
                # and does not contain actual traces
                #state_tuples.pop(0)
                return state_tuples
    def check_machine_completion(self, system_tuple: SystemTuple) -> int:
        """Count covered traces over one representative machine per
        architecture — used as a search progress metric."""
        archs = []
        machines = []
        for machine in system_tuple.system_tuple:
            if machine.arch not in archs:
                archs.append(machine.arch)
                machines.append(machine)
        trace_cnt = 0
        for machine in machines:
            trace_cnt += len(machine.covered_traces)
        return trace_cnt
    def gen_mult_cc_single_dc(self, ind_iter=0):
        """Build the initial machine tuple: ind_iter+1 copies of the first
        architecture (caches) plus one instance of the second (directory)."""
        mach_list = []
        for arch in self.archs:
            mach_list.append(Machine(arch))
        mach_comb = []
        for ind in range(0, ind_iter + 1):
            mach_comb.append(copy(mach_list[0]))
        mach_comb.append(mach_list[1])
        mach_comb = tuple(mach_comb)
        return mach_comb
    def single_access_new_state_tuples(self, initial_tuple: SystemTuple, handle_evicts: bool = True) \
            -> Dict[int, SystemTuple]:
        """Fixed-point search: from initial_tuple, repeatedly permute the
        machines, serve single accesses, and collect all newly reachable
        system state tuples (keyed by tuple hash)."""
        cur_tuples = [initial_tuple]
        next_tuples = {}
        state_tuples = {}
        # Make new tuple for system state [ll_CC_State, Dir_State, hl_CC_State]
        while cur_tuples:
            for cur_tuple in cur_tuples:
                # Permutate tuple
                pre_perm_list = list(permutations(cur_tuple.system_tuple))
                # Reduce permutation list
                perm_list = self.reduce_permutation_states(pre_perm_list)
                for perm in perm_list:
                    # Execute accesses
                    next_tuples.update(self.single_access_find_next_tuple(perm, handle_evicts))
            cur_tuples = []
            for next_tuple in next_tuples:
                if next_tuple not in state_tuples:
                    cur_tuples.append(next_tuples[next_tuple])
                    state_tuples[next_tuple] = next_tuples[next_tuple]
            next_tuples = {}
        return state_tuples
    def single_access_find_next_tuple(self, cur_tuple: Tuple[Machine], handle_evicts: bool = True):
        """Serve every access trace of the first (requesting) machine,
        expanding the responses of the remote machines into a fork tree,
        and return the resulting new system tuples keyed by hash."""
        # If no access can be found this is not as bad as if multiple accesses cannot be served
        new_tuples = {}
        new_traces = []
        request_machine: Machine = cur_tuple[0]
        remote_machines: List[Machine] = cur_tuple[1:]
        # self.debug_assert(cur_tuple)
        for trace in request_machine.arch.traces.start_state_dict[request_machine.final_state]:
            if not trace.access:
                continue
            else:
                # Skip traces containing evictions when eviction handling is off.
                evict_exists = 0
                for access in trace.access:
                    if access in request_machine.arch.evict_def and not handle_evicts:
                        evict_exists = 1
                        break
                if evict_exists:
                    continue
                trace_tree = ForkTree()
                basenode = trace_tree.insertnode(TraceNodeObj(request_machine, trace))
                nextlist = self.find_next_trace_nodes([basenode], remote_machines)
                while nextlist:
                    endnodes = []
                    for nextnode in nextlist:
                        endnodes += trace_tree.append_data_list(nextnode[1], nextnode[0])
                    nextlist = []
                    for node in endnodes:
                        nextlist += self.find_next_trace_nodes([node], remote_machines)
                new_traces += self.validate_traces(trace_tree.gettraces())
        if self.longest_trace:
            longest_new_traces = self.find_longest_traces(new_traces)
            # Register transactions taken
            self.make_new_system_tuple(cur_tuple, [x for x in new_traces if x not in longest_new_traces])
            new_traces = longest_new_traces
        new_system_tuples = self.make_new_system_tuple(cur_tuple, new_traces)
        for new_system_tuple in new_system_tuples:
            new_tuples[new_system_tuple.__hash__()] = new_system_tuple
        return new_tuples
    def debug_assert(self, machines: Tuple[Machine]):
        """Debug helper: flag a specific 3-machine state combination.
        NOTE(review): the comparison uses sets, which collapse the
        duplicate 'I' — any tuple containing {'I', 'S'} states matches."""
        # Tuple size
        if len(machines) != 3:
            return
        if set([str(machine.final_state) for machine in machines]) == {'I', 'S', 'I'}:
            print('FOUND')
    def find_next_trace_nodes(self, nodes: List[TraceNodeObj],
                              remote_machines: List[Machine]) -> List[Tuple[TraceNodeObj, List[List[TraceNodeObj]]]]:
        """For each open trace node, find the remote machines whose traces
        consume a message produced so far, clustered by transition id."""
        next_nodes = []
        for node in nodes:
            nextlist = []
            # Collect the machines and transitions along the path back to the root.
            prev_machines = [node.data.state]
            prev_traces = [node.data.transition]
            cur_node = node
            while cur_node.predecessor:
                prev_machines.append(cur_node.predecessor.data.state)
                prev_traces.append(cur_node.predecessor.data.transition)
                cur_node = cur_node.predecessor
            outmsg_list = [str(outmsg) for trace in prev_traces for outmsg in trace.outmsg]
            inmsg_list = [str(inmsg) for trace in prev_traces for inmsg in trace.inmsg]
            pending_mach = set(remote_machines) - set(prev_machines)
            # trace is complete
            if set(inmsg_list) == set(outmsg_list):
                continue
            # while new traces are found
            for machine in pending_mach:
                for trace in machine.arch.traces.start_state_dict[machine.final_state]:
                    # No parallel accesses allowed!
                    if trace.access:
                        continue
                    trace_in_msg_list = [str(in_msg) for in_msg in trace.inmsg]
                    if set(trace_in_msg_list).intersection(set(outmsg_list)):
                        new_trace_object = TraceNodeObj(machine, trace)
                        nextlist.append(new_trace_object)
            # Cluster machines based on transition ids
            cluster_nextlist = {}
            for trace_node in nextlist:
                if id(trace_node.transition) in cluster_nextlist:
                    cluster_nextlist[id(trace_node.transition)].append(trace_node)
                else:
                    cluster_nextlist[id(trace_node.transition)] = [trace_node]
            clusters = list(cluster_nextlist.values())
            next_nodes.append((node, clusters))
        return next_nodes
    @staticmethod
    def validate_traces(traces: List[List[TraceNodeObj]]) -> List[List[TraceNodeObj]]:
        """Keep only complete traces, i.e. those whose set of consumed
        messages equals the set of produced messages."""
        validated_traces = []
        for trace in traces:
            prev_machines = []
            prev_traces = []
            for node in trace:
                prev_machines.append(node.state)
                prev_traces.append(node.transition)
            outmsg_list = [str(outmsg) for trace in prev_traces for outmsg in trace.outmsg]
            inmsg_list = [str(inmsg) for trace in prev_traces for inmsg in trace.inmsg]
            if set(inmsg_list) != set(outmsg_list):
                continue
            validated_traces.append(trace)
        return validated_traces
    @staticmethod
    def find_longest_traces(traces: List[List[TraceNodeObj]]):
        """Group traces by their triggering access and keep, per access,
        only the longest trace(s)."""
        trace_access_map = {}
        for trace in traces:
            access = trace[-1].transition.access[0]
            if access in trace_access_map:
                if len(trace_access_map[access][0]) < len(trace):
                    trace_access_map[access] = [trace]
                elif len((trace_access_map[access])[0]) == len(trace):
                    trace_access_map[access].append(trace)
            else:
                trace_access_map[access] = [trace]
        return [trace for access in trace_access_map for trace in trace_access_map[access]]
    @staticmethod
    def make_new_system_tuple(cur_tuple: Tuple[Machine],
                              traces: List[List[TraceNodeObj]]):
        """Apply each complete trace to the involved machines (idle-stepping
        the uninvolved ones) and return the resulting SystemTuples."""
        system_tupels = []
        for trace in traces:
            prev_machines = []
            prev_traces = []
            for node in trace:
                prev_machines.append(node.state)
                prev_traces.append(node.transition)
            outmsg_list = [str(outmsg) for trace in prev_traces for outmsg in trace.outmsg]
            inmsg_list = [str(inmsg) for trace in prev_traces for inmsg in trace.inmsg]
            if set(inmsg_list) != set(outmsg_list):
                continue
            mach_copies = []
            for machine, mach_trace in zip(prev_machines, prev_traces):
                mach_copies.append(machine.add_trace(mach_trace))
            mach_idle = []
            # Process idle machines
            for machine in list(set(cur_tuple) - set(prev_machines)):
                mach_idle.append(machine.add_idle())
            system_tupels.append(SystemTuple(tuple(mach_copies + mach_idle)))
        return system_tupels
    # Reduce permutation start state
    @staticmethod
    def reduce_permutation_states(tuple_list: List[Tuple[Machine]]):
        """Drop permutations whose leading (requesting) machine has a
        start state already seen — they would explore the same accesses."""
        start_states: List[State] = []
        reduced_tuple_list: List[Tuple[Machine]] = []
        for system_tuple in tuple_list:
            start_state = system_tuple[0].final_state
            if start_state not in start_states:
                start_states.append(system_tuple[0].final_state)
                reduced_tuple_list.append(system_tuple)
        return reduced_tuple_list
    def build_evict_tree(self, system_tuple_list: List[SystemTuple]) -> List[SystemTuple]:
        """Propagate 'safe' markings through eviction transitions and
        partition the tuples into allowed and forbidden sets."""
        evicts = []
        for arch in self.archs:
            for evict in arch.evict_def:
                if evict not in evicts:
                    evicts.append(evict)
        # Trace to make evict tree, the list should be ordered in a way that all evictions are anyway leading to previous
        # evict states (Single for loop over all states sufficient, but check if all evict traces have been covered
        # The init tuple is always safe
        assert system_tuple_list, "No system tuple found by model checker"
        safe_states = [system_tuple_list[0].get_final_state_set()] + \
                      [system_tuple.get_final_state_set() for system_tuple in self.allowed_state_tuples.values()]
        cur_len = 0
        # Iterate to a fixed point: keep marking states safe until no new ones appear.
        while cur_len < len(safe_states):
            cur_len = len(safe_states)
            for state_tuple in system_tuple_list:
                for access_trace in state_tuple.get_arch_access_trace():
                    for access in access_trace.access:
                        if access in evicts:
                            if state_tuple.get_final_state_set() in safe_states:
                                if state_tuple.get_start_state_set() not in safe_states:
                                    safe_states.append(state_tuple.get_start_state_set())
                        # If no interaction with the directory is required, it is also a safe trace as it is atomic
                        elif not access_trace.outmsg:
                            if state_tuple.get_start_state_set() in safe_states:
                                if state_tuple.get_final_state_set() not in safe_states:
                                    safe_states.append(state_tuple.get_final_state_set())
        for system_tuple in system_tuple_list:
            if system_tuple.get_start_state_set() in safe_states and \
                    system_tuple.get_final_state_set() in safe_states:
                self.allowed_state_tuples[system_tuple] = system_tuple
            else:
                self.forbidden_state_tuples[system_tuple] = system_tuple
        return list(self.allowed_state_tuples.values())
    def draw_allowed_system_tuples(self):
        """Render only the allowed system tuples with graphviz."""
        self.draw_system_tuples(list(self.allowed_state_tuples.values()))
    @staticmethod
    def draw_system_tuples(system_tuple_list: List[SystemTuple]):
        """Render the given tuples as a graphviz digraph; writes and opens
        'level_state_tuples/SystemTupleOutput.gv'."""
        for system_tuple in system_tuple_list:
            system_tuple.get_permutation_machines()
        if system_tuple_list:
            name = "SystemTupleOutput"
        else:
            return
        graph = Digraph(comment=name, engine='dot')
        # De-duplicate edges by (start, final, access) label triple.
        state_tuples = {}
        for state_tuple in system_tuple_list:
            tuple_id = (state_tuple.start_state_tuple_str(),
                        state_tuple.final_state_tuple_str(),
                        state_tuple.access_state_tuple_str())
            state_tuples[tuple_id] = state_tuple
        for state_tuple in state_tuples.values():
            graph.edge(state_tuple.start_state_tuple_str(),
                       state_tuple.final_state_tuple_str(),
                       label=state_tuple.access_state_tuple_str())
        graph.render('level_state_tuples/' + name + '.gv', view=True)
| StarcoderdataPython |
3488976 | """
package have crawling stuffs
"""
import logging
import typing
from http.client import responses
import requests
logger = logging.getLogger(__name__)
class CodeForcesHTTPClient(object):
    """Thin HTTP client for the CodeForces REST API."""
    # Base URL of the CodeForces server.
    host: str
    # TCP port to connect to.
    port: int
    # Locale passed to the API (currently unused by send_request).
    lang: str
    # generate API key at: https://codeforces.cc/settings/api
    # (public, secret)
    api_key: typing.Optional[typing.Tuple[str, str]]
    def __init__(self, host="http://codeforces.cc", port=80, lang="en",
                 api_key: typing.Optional[typing.Tuple[str, str]] = None):
        """Store connection settings; the API key pair is kept but not yet verified."""
        self.host = host
        self.port = port
        self.lang = lang
        # TODO: verify key pair
        self.api_key = api_key
    def ping(self) -> bool:
        """Send an simple requests to verify CodeForces server is still up running"""
        try:
            self.send_request("recentActions", dict(maxCount=1))
            return True
        except requests.exceptions.RequestException:
            return False
    def send_request(self, method_name: str, params: dict) -> typing.Any:
        """generic request action

        Returns the 'result' payload on HTTP 200; raises RequestException
        with the server's 'comment' on HTTP 400, or the status error otherwise.
        """
        url = "{host}:{port}/api/{method}".format(host=self.host, port=self.port, method=method_name)
        try:
            response = requests.get(url, params=params)
            logger.info(f"Send: {url} - {responses.get(response.status_code)}")
            if response.status_code == requests.codes.OK:
                return response.json().get("result")
            elif response.status_code == requests.codes.BAD_REQUEST:
                # CodeForces puts the error description in the 'comment' field.
                raise requests.exceptions.RequestException(response.json().get("comment"))
            else:
                logger.debug(f"{url} return: {response.status_code} - {response.text}")
                response.raise_for_status()
        except requests.exceptions.RequestException as e:
            raise e
| StarcoderdataPython |
1634712 | """shader_noise shader function and texture generator
as described in "GPU Gems" chapter 5:
http://http.developer.nvidia.com/GPUGems/gpugems_ch05.html
"""
__version__ = "$Id: shader_noise.py 37 2008-06-27 22:25:39Z casey.duncan $"
from noise import pnoise3
import ctypes
from pyglet.gl import *
class ShaderNoiseTexture:
    """tiling 3D noise texture with two channels for use by the
    shader noise functions.
    """
    def __init__(self, freq=8, width=32):
        """Generate the 3D noise texture.

        freq -- frequency of generated noise over the width of the
        texture.

        width -- Width of the texture in texels. The texture is cubic,
        thus all sides are the same width. Must be a power of two.
        Using a larger width can reduce artifacts caused by linear
        interpolation of the noise texture, at the cost of video
        memory, and possibly slower texture access.
        """
        self.freq = freq
        self.width = width
        scale = float(freq) / width
        width2 = width**2
        # Two unsigned-short channels per texel (luminance + alpha).
        texel = (ctypes.c_ushort * (2 * width**3))()
        for z in range(width):
            for y in range(width):
                for x in range(width):
                    # Channel 0: pnoise3 output mapped from [-1, 1] to [0, 65534].
                    texel[(x + (y * width) + (z * width2)) * 2] = int((pnoise3(
                        x * scale, y * scale, z * scale,
                        repeatx=freq, repeaty=freq, repeatz=freq) + 1.0) * 32767)
                    # Channel 1: second, independent noise field (different base seed).
                    texel[(x + (y * width) + (z * width2)) * 2 + 1] = int((pnoise3(
                        x * scale, y * scale, z * scale,
                        repeatx=freq, repeaty=freq, repeatz=freq, base=freq + 1) + 1.0) * 32767)
        self.data = texel
    def load(self):
        """Load the noise texture data into the current texture unit"""
        glTexImage3D(GL_TEXTURE_3D, 0, GL_LUMINANCE16_ALPHA16,
            self.width, self.width, self.width, 0, GL_LUMINANCE_ALPHA,
            GL_UNSIGNED_SHORT, ctypes.byref(self.data))
    def enable(self):
        """Convenience method to enable 3D texturing state so the texture may be used by the
        ffpnoise shader function
        """
        glEnable(GL_TEXTURE_3D)
        # Repeat wrapping in all three axes makes the tiling noise seamless.
        glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, GL_REPEAT)
        glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_REPEAT)
        glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_REPEAT)
        glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
shader_noise_glsl = '''
/*
* GLSL Shader functions for fast fake Perlin 3D noise
*
* The required shader_noise_tex texture can be generated using the
* ShaderNoiseTexture class. It is a toroidal tiling 3D texture with each texel
* containing two 16-bit noise source channels. The shader permutes the source
* texture values by combining the channels such that the noise repeats at a
* much larger interval than the input texture.
*/
uniform sampler3D shader_noise_tex;
const float twopi = 3.1415926 * 2.0;
/* Simple perlin noise work-alike */
float
pnoise(vec3 position)
{
vec4 hi = 2.0 * texture3D(shader_noise_tex, position.xyz) - 1.0;
vec4 lo = 2.0 * texture3D(shader_noise_tex, position.xyz / 9.0) - 1.0;
return hi.r * cos(twopi * lo.r) + hi.a * sin(twopi * lo.r);
}
/* Multi-octave fractal brownian motion perlin noise */
float
fbmnoise(vec3 position, int octaves)
{
float m = 1.0;
vec3 p = position;
vec4 hi = vec4(0.0);
/* XXX Loops may not work correctly on all video cards */
for (int x = 0; x < octaves; x++) {
hi += (2.0 * texture3D(shader_noise_tex, p.xyz) - 1.0) * m;
p *= 2.0;
m *= 0.5;
}
vec4 lo = 2.0 * texture3D(shader_noise_tex, position.xyz / 9.0) - 1.0;
return hi.r * cos(twopi * lo.r) + hi.a * sin(twopi * lo.r);
}
/* Multi-octave turbulent noise */
float
fbmturbulence(vec3 position, int octaves)
{
float m = 1.0;
vec3 p = position;
vec4 hi = vec4(0.0);
/* XXX Loops may not work correctly on all video cards */
for (int x = 0; x < octaves; x++) {
hi += abs(2.0 * texture3D(shader_noise_tex, p.xyz) - 1.0) * m;
p *= 2.0;
m *= 0.5;
}
vec4 lo = texture3D(shader_noise_tex, position.xyz / 9.0);
return 2.0 * mix(hi.r, hi.a, cos(twopi * lo.r) * 0.5 + 0.5) - 1.0;
}
'''
if __name__ == '__main__':
    # Demo using a simple noise-textured rotating sphere
    import shader
    win = pyglet.window.Window(width=640, height=640, resizable=True, visible=False)
    vert_shader = shader.VertexShader('stupid', '''
        /* simple vertex shader that stores the vertex position in a varying
         * for easy access by the frag shader
         */
        varying vec3 position;

        void main(void) {
            position = gl_Vertex.xyz * 5.0;
            gl_Position = ftransform();
        }
        ''')
    # The fragment shader splits the sphere into three angular sectors, one
    # per noise function (pnoise / fbmnoise / fbmturbulence).
    frag_shader = shader.FragmentShader('noise_test', shader_noise_glsl + '''
        varying vec3 position;

        void main(void) {
            float v;
            float a = atan(position.y, position.x);
            float arc = 3.14159 / 3.0;
            if (a > -arc && a < arc) {
                v = pnoise(position) * 0.5 + 0.5;
            } else if (a > arc && a < arc * 4.0) {
                v = fbmnoise(position, 4) * 0.5 + 0.5;
            } else {
                v = fbmturbulence(position, 4) * 0.5 + 0.5;
            }
            gl_FragColor = vec4(v, v, v, 1.0);
        }
        ''')
    shader_prog = shader.ShaderProgram(vert_shader, frag_shader)
    shader_prog.install()
    tex = ShaderNoiseTexture()
    tex.load()
    tex.enable()
    # Bind the noise sampler to texture unit 0.
    shader_prog.uset1I('shader_noise_tex', 0)
    quadratic = gluNewQuadric()
    gluQuadricNormals(quadratic, GLU_SMOOTH)
    gluQuadricTexture(quadratic, GL_TRUE)
    glEnable(GL_CULL_FACE)
    # NOTE(review): 'global' at module level is a no-op; spin is simply a
    # module-level variable shared with on_draw/update via their own globals.
    global spin
    spin = 0
    def on_resize(width, height):
        # Keep a perspective projection matched to the window aspect ratio.
        glViewport(0, 0, width, height)
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        gluPerspective(70, 1.0*width/height, 0.1, 1000.0)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
    win.on_resize = on_resize
    @win.event
    def on_draw():
        global spin
        win.clear()
        glLoadIdentity()
        glTranslatef(0, 0, -1.5)
        glRotatef(spin, 1.0, 1.0, 1.0)
        gluSphere(quadratic, 0.65, 60, 60)
    def update(dt):
        # Advance the rotation at 10 degrees per second.
        global spin
        spin += dt * 10.0
    pyglet.clock.schedule_interval(update, 1.0/30.0)
    win.set_visible()
    pyglet.app.run()
| StarcoderdataPython |
1922097 | from dataclasses import dataclass
from typing import List, Union
import numpy as np
import pytest
from pytest_cases import cases_data, THIS_MODULE
from eddington import (
constant,
exponential,
hyperbolic,
linear,
parabolic,
polynom,
cos,
sin,
straight_power,
inverse_power,
FitFunctionRuntimeError,
FitFunctionLoadError,
FitFunction,
)
@dataclass()
class FittingFunctionTestCase:
    """Bundle of expected values for exercising one eddington fit function."""
    # Fit function under test.
    func: FitFunction
    # Expected function name / plot title.
    func_name: str
    title: str
    # Expected number of parameters.
    n: int
    # Expected syntax string (None when the function defines none).
    syntax: Union[str, None]
    # Parameter vector and sample x values used for evaluation.
    a: np.ndarray
    x: np.ndarray
    # Expected f(a, x) values and derivatives at each sample point.
    y: List[float]
    x_derivatives: List[float]
    a_derivatives: List[List[float]]
    # Comparison tolerances.
    eps: float = 1e-5
    decimal: int = 5
def case_constant():
    """Test case for the constant function f(x) = a[0]."""
    return FittingFunctionTestCase(
        func=constant,
        func_name="constant",
        title="Constant",
        n=1,
        syntax="a[0]",
        a=np.array([2]),
        x=np.arange(5),
        y=[2, 2, 2, 2, 2],
        x_derivatives=[0, 0, 0, 0, 0],
        a_derivatives=[[1], [1], [1], [1], [1]],
    )
def case_linear():
    """Test case for the linear function f(x) = a[0] + a[1] * x."""
    return FittingFunctionTestCase(
        func=linear,
        func_name="linear",
        title="Linear",
        n=2,
        syntax="a[0] + a[1] * x",
        a=np.array([-7, 2]),
        x=np.arange(5),
        y=[-7, -5, -3, -1, 1],
        x_derivatives=[2, 2, 2, 2, 2],
        a_derivatives=[[1, 0], [1, 1], [1, 2], [1, 3], [1, 4]],
    )
def case_polynom_1():
    """Degree-1 polynom should behave exactly like the linear function."""
    return FittingFunctionTestCase(
        func=polynom(1),
        func_name="linear",
        title="Linear",
        n=2,
        syntax="a[0] + a[1] * x",
        a=np.array([-7, 2]),
        x=np.arange(5),
        y=[-7, -5, -3, -1, 1],
        x_derivatives=[2, 2, 2, 2, 2],
        a_derivatives=[[1, 0], [1, 1], [1, 2], [1, 3], [1, 4]],
    )
def case_parabolic():
    """Test case for f(x) = a[0] + a[1] * x + a[2] * x^2."""
    return FittingFunctionTestCase(
        func=parabolic,
        func_name="parabolic",
        title="Parabolic",
        n=3,
        syntax="a[0] + a[1] * x + a[2] * x ^ 2",
        a=np.array([3, 4, -2]),
        x=np.arange(5),
        y=[3, 5, 3, -3, -13],
        x_derivatives=[4, 0, -4, -8, -12],
        a_derivatives=[[1, 0, 0], [1, 1, 1], [1, 2, 4], [1, 3, 9], [1, 4, 16]],
    )
def case_hyperbolic():
    """Test case for f(x) = a[0] / (x + a[1]) + a[2]."""
    return FittingFunctionTestCase(
        func=hyperbolic,
        func_name="hyperbolic",
        title="Hyperbolic",
        n=3,
        syntax="a[0] / (x + a[1]) + a[2]",
        a=np.array([3, 4, -2]),
        x=np.arange(5),
        y=[-1.25, -1.4, -1.5, -1.57142, -1.625],
        x_derivatives=[-0.1875, -0.12, -0.0833333, -0.0612244, -0.046875],
        a_derivatives=[
            [0.25, -0.1875, 1],
            [0.2, -0.12, 1],
            [0.1666666, -0.0833333, 1],
            [0.1428571, -0.0612244, 1],
            [0.125, -0.046875, 1],
        ],
    )
def case_exponential():
    """Test case for f(x) = a[0] * exp(a[1] * x) + a[2]."""
    return FittingFunctionTestCase(
        func=exponential,
        func_name="exponential",
        title="Exponential",
        syntax="a[0] * exp(a[1] * x) + a[2]",
        n=3,
        a=np.array([3, 0.5, -1]),
        x=np.arange(5),
        y=[2, 3.94616, 7.15484, 12.44506, 21.16716],
        x_derivatives=[1.5, 2.47308, 4.07742, 6.72253, 11.08358],
        a_derivatives=[
            [1, 0, 1],
            [1.64872, 4.94616, 1],
            [2.71828, 16.30969, 1],
            [4.48169, 40.3352, 1],
            [7.38906, 88.66867, 1],
        ],
    )
def case_cos():
    """Test case for f(x) = a[0] * cos(a[1] * x + a[2]) + a[3]."""
    return FittingFunctionTestCase(
        func=cos,
        func_name="cos",
        title="Cos",
        syntax="a[0] * cos(a[1] * x + a[2]) + a[3]",
        n=4,
        a=np.array([3, 0.5 * np.pi, 0.25 * np.pi, 2]),
        x=np.arange(5),
        y=[4.12132, -0.12132, -0.12132, 4.12132, 4.12132],
        x_derivatives=[-3.33216, -3.33216, 3.33216, 3.33216, -3.33216],
        a_derivatives=[
            [0.70711, -0, -2.12132, 1],
            [-0.70711, -2.12132, -2.12132, 1],
            [-0.70711, 4.24264, 2.12132, 1],
            [0.70711, 6.36396, 2.12132, 1],
            [0.70711, -8.48528, -2.12132, 1],
        ],
    )
def case_sin():
    """Test case for f(x) = a[0] * sin(a[1] * x + a[2]) + a[3]."""
    return FittingFunctionTestCase(
        func=sin,
        func_name="sin",
        title="Sin",
        syntax="a[0] * sin(a[1] * x + a[2]) + a[3]",
        n=4,
        a=np.array([3, 0.5 * np.pi, 0.25 * np.pi, 2]),
        x=np.arange(5),
        y=[4.12132, 4.12132, -0.12132, -0.12132, 4.12132],
        x_derivatives=[3.33216, -3.33216, -3.33216, 3.33216, 3.33216],
        a_derivatives=[
            [0.70711, -0, 2.12132, 1],
            [0.70711, -2.12132, -2.12132, 1],
            [-0.70711, -4.24264, -2.12132, 1],
            [-0.70711, 6.36396, 2.12132, 1],
            [0.70711, 8.48528, 2.12132, 1],
        ],
    )
def case_polynom_3():
    """Test case for the cubic polynom a[0] + a[1]*x + a[2]*x^2 + a[3]*x^3."""
    return FittingFunctionTestCase(
        func=polynom(3),
        func_name="polynom_3",
        title="Polynom 3",
        syntax="a[0] + a[1] * x + a[2] * x ^ 2 + a[3] * x ^ 3",
        n=4,
        a=np.array([3, 4, -2, 1]),
        x=np.arange(5),
        y=[3, 6, 11, 24, 51],
        x_derivatives=[4, 3, 8, 19, 36],
        a_derivatives=[
            [1, 0, 0, 0],
            [1, 1, 1, 1],
            [1, 2, 4, 8],
            [1, 3, 9, 27],
            [1, 4, 16, 64],
        ],
    )
def case_straight_power_2():
    """straight_power with exponent a[2] = 2."""
    return FittingFunctionTestCase(
        func=straight_power,
        func_name="straight_power",
        title="Straight Power",
        syntax=None,
        n=4,
        a=np.array([2, 1, 2, -3]),
        x=np.arange(5),
        y=[-1, 5, 15, 29, 47],
        x_derivatives=[4, 8, 12, 16, 20],
        a_derivatives=[
            [1, 4, 0, 1],
            [4, 8, 5.54518, 1],
            [9, 12, 19.77502, 1],
            [16, 16, 44.36142, 1],
            [25, 20, 80.4719, 1],
        ],
    )
def case_straight_power_3():
    """straight_power with exponent a[2] = 3."""
    return FittingFunctionTestCase(
        func=straight_power,
        func_name="straight_power",
        title="Straight Power",
        syntax=None,
        n=4,
        a=np.array([2, 1, 3, -3]),
        x=np.arange(5),
        y=[-1, 13, 51, 125, 247],
        x_derivatives=[6, 24, 54, 96, 150],
        a_derivatives=[
            [1, 6, 0, 1],
            [8, 24, 11.09035, 1],
            [27, 54, 59.32506, 1],
            [64, 96, 177.44568, 1],
            [125, 150, 402.35948, 1],
        ],
    )
def case_inverse_power_2():
    """inverse_power with exponent a[2] = 2."""
    return FittingFunctionTestCase(
        func=inverse_power,
        func_name="inverse_power",
        title="Inverse Power",
        syntax=None,
        n=4,
        a=np.array([2, 1, 2, -3]),
        x=np.arange(5),
        y=[-1, -2.5, -2.77777, -2.875, -2.92],
        x_derivatives=[-4, -0.5, -0.1481481, -0.0625, -0.032],
        a_derivatives=[
            [1, -4, 0, 1],
            [0.25, -0.5, -5.54518, 1],
            [0.1111111, -0.1481481, -19.77502, 1],
            [0.0625, -0.0625, -44.36142, 1],
            [0.04, -0.032, -80.4719, 1],
        ],
    )
def assert_raises_unfit_parameters(case, n0):
    """Assert that calling case.func with n0 (!= case.n) parameters raises
    FitFunctionRuntimeError with the expected message."""
    with pytest.raises(
        FitFunctionRuntimeError, match=f"^Input length should be {case.n}, got {n0}$"
    ):
        case.func(np.random.random(n0), np.random.random())
@cases_data(module=THIS_MODULE)
def test_number_of_parameters(case_data):
    """Each function reports its parameter count and rejects n-1/n+1 inputs."""
    case = case_data.get()
    assert case.n == case.func.n, "Func gets unexpected number of parameters"
    if case.n > 1:
        assert_raises_unfit_parameters(case, case.n - 1)
    assert_raises_unfit_parameters(case, case.n + 1)
@cases_data(module=THIS_MODULE)
def test_name(case_data):
    """The function's name matches the expected one."""
    case = case_data.get()
    assert case.func_name == case.func.name, "Func name is different than expected"
@cases_data(module=THIS_MODULE)
def test_title_name(case_data):
    """The function's title name matches the expected one."""
    case = case_data.get()
    assert (
        case.title == case.func.title_name
    ), "Func title name is different than expected"
@cases_data(module=THIS_MODULE)
def test_signature(case_data):
    """The function's signature equals its name."""
    case = case_data.get()
    assert (
        case.func_name == case.func.signature
    ), "Func signature is different than expected"
@cases_data(module=THIS_MODULE)
def test_syntax(case_data):
    """The function's syntax string matches the expected one (or None)."""
    case = case_data.get()
    assert case.syntax == case.func.syntax, "Func syntax is different than expected"
@cases_data(module=THIS_MODULE)
def test_assign(case_data):
    """Assigning the parameter vector yields a single-argument function
    producing the expected y values."""
    case = case_data.get()
    assigned_func = case.func.assign(case.a)
    for i, (x_val, y_val) in enumerate(zip(case.x, case.y), start=1):
        assert y_val == pytest.approx(assigned_func(x_val), rel=case.eps), (
            "Y value is different than expected in assigned function "
            f"for the {i} value"
        )
    # Reset fixed parameters so later tests see an unassigned function.
    case.func.clear_fixed()
@cases_data(module=THIS_MODULE)
def test_execute_on_single_value(case_data):
    """Calling func(a, x) on scalar x yields the expected y values."""
    case = case_data.get()
    for x_val, y_val in zip(case.x, case.y):
        assert y_val == pytest.approx(
            case.func(case.a, x_val), rel=case.eps
        ), "Y value is different than expected in called function"
@cases_data(module=THIS_MODULE)
def test_execute_on_array(case_data):  # pylint: disable=W0613
    """Calling func(a, x) on an array x yields the expected y array."""
    case = case_data.get()
    y_array_calculation = case.func(case.a, case.x)
    assert y_array_calculation == pytest.approx(
        case.y, rel=case.eps
    ), "Y value is different than expected in array function"
@cases_data(module=THIS_MODULE)
def test_execute_x_derivative_on_single_value(case_data):
    """df/dx evaluated at scalar x matches the expected values."""
    case = case_data.get()
    for x_val, x_derivative in zip(case.x, case.x_derivatives):
        assert x_derivative == pytest.approx(
            case.func.x_derivative(case.a, x_val), rel=case.eps
        ), f"X derivative of ({case.a}, {x_val}) is different than expected"
@cases_data(module=THIS_MODULE)
def test_execute_x_derivative_on_array(case_data):  # pylint: disable=W0613
    """df/dx evaluated on an array x matches the expected values."""
    case = case_data.get()
    x_derivative_array_calculation = case.func.x_derivative(case.a, case.x)
    assert x_derivative_array_calculation == pytest.approx(
        case.x_derivatives, rel=case.eps
    ), "Array calculation of x derivative is different than expected"
@cases_data(module=THIS_MODULE)
def test_execute_a_derivative_on_single_value(case_data):  # pylint: disable=W0613
    """The parameter gradient at scalar x matches the expected vectors."""
    case = case_data.get()
    for i, (x_val, a_derivative) in enumerate(zip(case.x, case.a_derivatives), start=1):
        assert a_derivative == pytest.approx(
            case.func.a_derivative(case.a, x_val), rel=case.eps
        ), f"A derivative is different than expected on value {i}"
@cases_data(module=THIS_MODULE)
def test_execute_a_derivative_on_array(case_data):  # pylint: disable=W0613
    """The parameter gradient on an array x matches the expected vectors.

    NOTE(review): the loop variable names are swapped — the first element of
    each zipped pair is the *computed* value, the second is the expected one.
    """
    case = case_data.get()
    a_derivative_array_calculation = case.func.a_derivative(case.a, case.x)
    for i, (expected_a_derivative, actual_a_derivative) in enumerate(
        zip(a_derivative_array_calculation.T, case.a_derivatives), start=1
    ):
        assert np.array(expected_a_derivative) == pytest.approx(
            np.array(actual_a_derivative), rel=case.eps
        ), (
            "Array calculation of a derivative is different than expected "
            f"on value {i}"
        )
def test_initialize_polynom_with_0_degree_raises_error():
    """A zero-degree polynom must be rejected: the degree has to be positive."""
    expected_message = "^n must be positive, got 0$"
    with pytest.raises(FitFunctionLoadError, match=expected_message):
        polynom(0)
def test_initialize_polynom_with_negative_degree_raises_error():
    """A negative-degree polynom must be rejected with a clear message."""
    expected_message = "^n must be positive, got -1$"
    with pytest.raises(FitFunctionLoadError, match=expected_message):
        polynom(-1)
| StarcoderdataPython |
4848343 | # basic of simple calculator app
# you only calculate between two numbers
# the operation list are : +, -, *, /, and %
# the ">" for adding a new number
# set the global variable for store the current total
subtotal = 0  # running result carried between chained operations
total = 0  # final result, assigned once when the user exits
# error handling for input
def error_handling(int_type, float_type1, float_type2):
    """Validate that the inputs are numeric; restart the app on failure.

    int_type is checked as an int, float_type1/float_type2 as floats.
    The converted values are discarded — only validity is checked.
    """
    # check if type of input is correct
    try:
        int(int_type)
        float(float_type1)
        float(float_type2)
    except ValueError:
        print("You enter characters or symbols or selected the wrong operation.\nPlease try again.")
        # NOTE(review): restarting via main() recurses — repeated bad input
        # grows the call stack. Confirm whether a retry loop is intended.
        main()
# exit the program and then get the total calculation
def break_the_code():
    """Promote the running subtotal to the final total, report it, and quit."""
    global subtotal, total
    total = subtotal
    farewell = "Total calculation is {}.\nThanks for using this calculator."
    print(farewell.format(total))
    exit()
# list of operation that you can used
def operation_list():
    """Show the usage instructions and the numbered menu of operations."""
    menu = (
        "Enter the two numbers first and then select the operation.",
        "1. add",
        "2. subtract",
        "3. multiply",
        "4. divide",
        "5. modulus",
        "6. exit",
    )
    for entry in menu:
        print(entry)
def additional_operation(x):
    """Show the running total and read the next operand from the user.

    Entering 6 exits the program (mirrors the menu's exit option); any
    other number is chained with the current total via a new operation.
    """
    print("Current total: {}".format(x))
    new_number = float(input("> "))
    # NOTE(review): 6.0 doubles as the exit sentinel, so the number 6 can
    # never be used as an operand here — confirm this is intended.
    if new_number == 6:
        break_the_code()
    else:
        operation_action(x, new_number)
def operation_action(number_one, number_two):
    """Ask the user for an operation code and apply it to the two numbers."""
    operation = input("Select the operation: ")
    error_handling(operation, 0, 0)
    handlers = {
        "1": add_numbers,
        "2": subtract_numbers,
        "3": multiply_numbers,
        "4": divide_numbers,
        "5": modulus_numbers,
    }
    if operation == "6":
        break_the_code()
    elif operation in handlers:
        handlers[operation](number_one, number_two)
    # Any other numeric input silently falls through, as before.
def add_numbers(number_one, number_two):
    """Store the sum as the new running subtotal and continue the session."""
    global subtotal
    subtotal = number_one + number_two
    additional_operation(subtotal)
def subtract_numbers(number_one, number_two):
    """Store the difference as the new running subtotal and continue."""
    global subtotal
    subtotal = number_one - number_two
    additional_operation(subtotal)
def multiply_numbers(number_one, number_two):
    """Store the product as the new running subtotal and continue."""
    global subtotal
    subtotal = number_one * number_two
    additional_operation(subtotal)
def divide_numbers(number_one, number_two):
    """Divide number_one by number_two and continue with the result.

    Guards against division by zero, which previously crashed the whole
    program with an unhandled ZeroDivisionError; in that case the running
    subtotal is left unchanged and the user is re-prompted.
    """
    global subtotal
    if number_two == 0:
        print("Cannot divide by zero.\nPlease try again.")
        additional_operation(subtotal)
        return
    current = number_one / number_two
    subtotal = current
    additional_operation(subtotal)
def modulus_numbers(number_one, number_two):
    """Take number_one modulo number_two and continue with the result.

    Guards against a zero modulus, which previously crashed the program
    with an unhandled ZeroDivisionError; the subtotal is kept unchanged
    and the user is re-prompted.
    """
    global subtotal
    if number_two == 0:
        print("Cannot take modulus by zero.\nPlease try again.")
        additional_operation(subtotal)
        return
    current = number_one % number_two
    subtotal = current
    additional_operation(subtotal)
def main():
    """Entry point: show the menu, read two numbers, run the first operation."""
    operation_list()
    number_one = input("> ")
    number_two = input("> ")
    # Validate both inputs before converting (0 fills the unused int slot).
    error_handling(0, number_one, number_two)
    # convert the number to float
    number_one = float(number_one)
    number_two = float(number_two)
    operation_action(number_one, number_two)
main()
| StarcoderdataPython |
258858 | <gh_stars>1-10
import json
import os
from pathlib import Path
from collections import Mapping
from abc import abstractmethod
import moodle.models as models
# TODO, mebbe add locks for async usage.
def _read_json(filename):
    """Load and return the JSON document stored at *filename*."""
    return json.loads(Path(filename).read_text())
def _dump_json(filename, data):
    """Serialize *data* to *filename* as indented, key-sorted JSON."""
    serialized = json.dumps(data, indent=2, ensure_ascii=False, sort_keys=True)
    Path(filename).write_text(serialized)
class CachedMapping(Mapping):
    """Read-only mapping that computes values via ``_read_data`` and memoizes them.

    NOTE(review): the module imports Mapping via ``from collections import
    Mapping``, which was removed in Python 3.10 — it should come from
    ``collections.abc``.
    """

    def __init__(self):
        self._cache = {}

    def __getitem__(self, key):
        if key not in self._cache:
            # Cache miss: fetch from the backing resource and memoize.
            self._cache[key] = self._read_data(key)
        return self._cache[key]

    @abstractmethod
    def _read_data(self, key):
        """Fetch the value for *key* from the backing store (subclass hook)."""
        pass
class CachedFileMapping(Mapping):
    """Read-only mapping backed by a file, parsed lazily on first access.

    The previous version repeated the ``if self._cache is None`` lazy-load
    check in all three protocol methods; it is now centralized in
    ``_ensure_loaded``.
    """

    def __init__(self, file_path):
        self._cache = None
        self.path = file_path

    def _ensure_loaded(self):
        """Parse the backing file once; later calls reuse the cached result."""
        if self._cache is None:
            self._cache = self._read_file(self.path)
        return self._cache

    def __iter__(self):
        return iter(self._ensure_loaded())

    def __getitem__(self, key):
        return self._ensure_loaded()[key]

    def __len__(self):
        return len(self._ensure_loaded())

    @abstractmethod
    def _read_file(self, file_path):
        """Parse *file_path* into a mapping (subclass hook)."""
        return {}
class CachedJsonFile(CachedFileMapping):
    """A CachedFileMapping whose backing file is parsed as JSON."""

    def _read_file(self, file_path):
        # Delegates to the module-level JSON reader.
        return _read_json(file_path)
class JsonDataFolder(CachedMapping):
    """Folder of JSON files, one file per integer key, with in-memory caching."""

    def __init__(self, root_folder: Path, init=False):
        super().__init__()
        self._folder = root_folder / self.folder_name
        if init:
            # Create the backing folder on first use.
            self._folder.mkdir(exist_ok=True)

    def _read_data(self, key):  # CachedMapping
        # One file per key; a missing file means a missing key.
        filename = self._folder / str(key)
        try:
            return _read_json(filename)
        except FileNotFoundError:
            raise KeyError(key)

    def _write_data(self, key, value):  # CachedMutableMapping
        filename = self._folder / str(key)
        _dump_json(filename, value)

    def _setitem(self, key, value):
        # Write-through: update the cache and persist to disk.
        self._cache[key] = value
        self._write_data(key, value)

    def __iter__(self):
        # NOTE(review): assumes every file name in the folder is an int;
        # a stray file (e.g. an editor backup) raises ValueError — confirm.
        for file in self._folder.iterdir():
            yield int(file.name)

    def __len__(self):
        return len(list(self.__iter__()))

    @property
    @abstractmethod
    def folder_name(self):
        """Subfolder name under the root (subclass hook)."""
        return 'folder_name'
class JsonMetaDataFolder(JsonDataFolder):
    """JsonDataFolder that also persists its public attributes as metadata.

    Metadata lives next to the folder in a file named
    '<folder_name>_meta' and is loaded back into instance attributes.
    """

    _meta_file_suffix = '_meta'

    def __init__(self, root_folder: Path, init=False):
        super().__init__(root_folder, init)
        self._meta_file_path = root_folder / (self.folder_name + self._meta_file_suffix)
        self._read_meta()

    def _read_meta(self):
        filename = self._meta_file_path
        try:
            meta = _read_json(filename)
            for k, v in meta.items():
                setattr(self, k, v)
        except FileNotFoundError:
            # No metadata yet: keep class-level defaults.
            pass

    def _write_meta(self):
        # Persist every non-private instance attribute.
        meta = {k: v for k, v in vars(self).items() if not k.startswith('_')}
        _dump_json(self._meta_file_path, meta)

    def __iter__(self):
        # NOTE(review): identical to JsonDataFolder.__iter__ — redundant override.
        for file in self._folder.iterdir():
            yield int(file.name)

    def __len__(self):
        return len(set(self._folder.iterdir()))

    @property
    @abstractmethod
    def folder_name(self):
        return 'folder_name'
class AssignmentFolder(JsonDataFolder):
    """Stores Moodle assignments on disk, keyed by assignment id."""

    @property
    def folder_name(self):
        return 'assignments'

    def update(self, json_data):
        """Merge a course-assignment response into the store.

        Returns a dict with 'new', 'updated' and 'unchanged' counters.
        Changes from the original: the unused locals ``key``/``value`` are
        gone and the ``try`` body is reduced to the one statement that can
        actually raise KeyError.
        """
        response = models.CourseAssignmentResponse(json_data)
        result = dict.fromkeys(['new', 'updated', 'unchanged'], 0)
        for course in response.courses:
            for assignment in course.assignments:
                try:
                    local_data = models.MoodleAssignment(self[assignment.id])
                except KeyError:
                    # Not stored yet: persist it as new.
                    self._setitem(assignment.id, assignment.raw)
                    result['new'] += 1
                    continue
                if local_data.time_modified < assignment.time_modified:
                    self._setitem(assignment.id, assignment.raw)
                    result['updated'] += 1
                else:
                    result['unchanged'] += 1
        return result
class SubmissionFolder(JsonMetaDataFolder):
    """Stores per-assignment submission lists, keyed by assignment id."""

    @property
    def folder_name(self):
        return 'submissions'

    # Timestamp of the last successful sync; persisted via the metadata file.
    last_sync = 0

    def _update_submissions(self, assignment_id, submissions):
        """Merge new submissions into the stored list; newest data wins per id."""
        local_list = models.MoodleSubmissionList(self[assignment_id])
        local_submissions = {sub.id: sub for sub in local_list}
        for submission in submissions:
            local_submissions[submission.id] = submission
        raw = [sub.raw for sub in local_submissions.values()]
        self._setitem(assignment_id, raw)

    def update(self, json_data, time_of_sync):
        """Ingest a submissions response; return new/updated/unchanged counts."""
        result = dict.fromkeys(['new', 'updated', 'unchanged'], 0)
        response = models.AssignmentSubmissionResponse(json_data)
        for assignment in response.assignments:
            if assignment.id in self and len(assignment.submissions) > 0:
                self._update_submissions(assignment.id, assignment.submissions)
                result['updated'] += 1
            elif len(assignment.submissions) > 0:
                result['new'] += 1
                self._setitem(assignment.id, assignment.submissions.raw)
            else:
                # No submissions at all for this assignment.
                result['unchanged'] += 1
        self.last_sync = time_of_sync
        self._write_meta()
        return result
class GradeFolder(JsonMetaDataFolder):
    """Stores per-assignment grade lists, keyed by assignment id."""

    @property
    def folder_name(self):
        return 'grades'

    # Timestamp of the last successful sync; persisted via the metadata file.
    last_sync = 0

    def _update_grades(self, assignment_id, grades):
        """Merge new grades into the stored list; newest data wins per id."""
        local_list = models.MoodleGradeList(self[assignment_id])
        local_grades = {grd.id: grd for grd in local_list}
        # local_grades = {grade[Jn.id]: grade for grade in self[assignment_id]}
        for grade in grades:
            local_grades[grade.id] = grade
        raw = [grd.raw for grd in local_grades.values()]
        self._setitem(assignment_id, raw)

    def update(self, json_data, time_of_sync):
        """Ingest a grades response; return new/updated/unchanged counts."""
        # g_config_file = self.grade_meta + str(assignment[Jn.assignment_id])
        # self._write_meta(g_config_file, assignment)
        response = models.AssignmentGradeResponse(json_data)
        result = dict.fromkeys(['new', 'updated', 'unchanged'], 0)
        for assignment in response.assignments:
            if assignment.id in self and len(assignment.grades) > 0:
                self._update_grades(assignment.id, assignment.grades)
                result['updated'] += 1
            elif len(assignment.grades) > 0:
                self._setitem(assignment.id, assignment.grades.raw)
                result['new'] += 1
            else:
                result['unchanged'] += 1
        self.last_sync = time_of_sync
        self._write_meta()
        return result
class Config(models.JsonDictWrapper):
    """Typed accessor over the mdt JSON configuration.

    Required keys (token, user_id, url) abort the program with a helpful
    message when absent; the triplicated try/except from the original is
    now factored into ``_required``.
    """

    error_msg = """
    '{}' couldn't be found in your config file.
    Maybe it's corrupted.
    Either check your config file
    or delete the entire file and create a new one.
    """

    def _required(self, key):
        """Return self[key], exiting with ``error_msg`` if it is missing."""
        try:
            return self[key]
        except KeyError:
            raise SystemExit(self.error_msg.format(key))

    @property
    def service(self):
        return self['service']

    @property
    def token(self):
        return self._required('token')

    @property
    def user_id(self):
        return self._required('user_id')

    @property
    def url(self):
        return self._required('url')

    @property
    def user_name(self):
        return self['user_name']

    def add_overrides(self, overrides):
        """Merge command-line/environment overrides into the config data."""
        self._data.update(overrides)

    def __str__(self):
        return str(self._data)
class MdtConfig:
    """Locates and loads the mdt configuration (global and per-project)."""

    _file_name = 'config'

    @classmethod
    def global_config_locations(cls):
        """Return candidate global config paths, most specific first."""
        locations = []
        try:
            locations.append(os.environ['XDG_CONFIG_HOME'] + '/mdtconfig')
        except KeyError:
            # XDG_CONFIG_HOME not set; fall back to the defaults below.
            pass
        locations.append(os.path.expanduser('~/.config/mdtconfig'))
        locations.append(os.path.expanduser('~/.mdtconfig'))
        return locations

    def __init__(self, meta_root=None, prefer_local=False, init=False):
        self.prefer_local = prefer_local
        self.global_cfg = self.read_global(init)
        # NOTE(review): local_cfg only exists when meta_root is truthy —
        # code reading self.local_cfg unconditionally would raise.
        if meta_root:
            self.local_cfg = {}

    def read_global(self, init):
        """Load the first existing global config, creating one if allowed.

        NOTE(review): mixed return types — a Config on success, an empty
        dict after creating a fresh file, or None when prefer_local is set
        and nothing exists. Callers must handle all three.
        """
        locations = self.global_config_locations()
        for file_name in locations:
            try:
                with open(file_name) as file:
                    return Config(json.load(file))
            except FileNotFoundError:
                pass
        if not self.prefer_local or init:
            text = 'could not find global config, creating {}'
            print(text.format(locations[0]))
            with open(locations[0], 'w') as cfg_file:
                cfg_file.write('{}')
            return {}
        return None
| StarcoderdataPython |
1972500 | <gh_stars>10-100
import unittest
import types
from reverso_api.context import *
# TODO: refactor
class TestReversoContextAPI(unittest.TestCase):
    """TestCase for ReversoContextAPI
    Includes tests for:
    -- .get_examples()
    -- .get_translations()
    """

    # Shared fixture; constructing it (and consuming the generators below)
    # performs live HTTP requests to Reverso Context — network required.
    api = ReversoContextAPI(source_text="Github",
                            source_lang="en",
                            target_lang="ru")

    def test__properties(self):
        """Tests the ReversoContextAPI properties:
        -- supported_langs
        -- source_text
        -- target_text
        -- source_lang
        -- target_lang
        -- total_pages
        """
        # NOTE(review): not implemented — this test passes vacuously.
        pass

    def test__eq(self):
        """
        Tests the equality of ReversoContextAPI instances (ReversoContextAPI.__eq__).
        -- tests the equality of instances with different source text
        -- tests the equality of instances with the same attributes after .__data["npages"] was modified
        """
        api2 = ReversoContextAPI(source_text="hello",
                                 source_lang="en",
                                 target_lang="ru")
        api3 = ReversoContextAPI(source_text="Github",
                                 source_lang="en",
                                 target_lang="ru")
        self.assertFalse(api2 == api3)
        self.assertTrue(self.api == api3)
        # .__data["npages"] was modified in both self.api and api3
        next(self.api.get_examples())
        next(api3.get_examples())
        self.assertTrue(self.api == api3)

    def test__get_examples(self):
        """Tests the ReversoContextAPI.get_examples() method.
        -- tests the correctness of types
        -- tests attributes of related classes (WordUsageContext)
        -- tests the length of examples: must be 2 (one for source, and one for target text)
        -- tests the length of pairs of indexes (items of the context.highlighted)
        -- tests if 0 <= index <= len(context.text) is True for all indexes
        """
        examples = self.api.get_examples()
        self.assertTrue(isinstance(examples, types.GeneratorType))
        for example in examples:
            self.assertTrue(isinstance(example, tuple))
            self.assertTrue(len(example) == 2)
            for context in example:
                # Tests the WordUsageContext class
                self.assertTrue(isinstance(context, WordUsageContext))
                for attr in ("text", "highlighted"):
                    self.assertTrue(hasattr(context, attr))
                self.assertTrue(isinstance(context.text, str))
                self.assertTrue(isinstance(context.highlighted, tuple))
                for indexes in context.highlighted:
                    self.assertTrue(isinstance(indexes, tuple))
                    self.assertTrue(len(indexes) == 2)
                    for index in indexes:
                        self.assertTrue(isinstance(index, int))
                        self.assertTrue(0 <= index <= len(context.text))

    def test__get_translations(self):
        """Tests the ReversoContextAPI.get_translations()
        -- tests the correctness of types
        -- tests attributes of related classes (Translation, InflectedForm)
        """
        translations = self.api.get_translations()
        self.assertTrue(isinstance(translations, types.GeneratorType))
        for translation in translations:
            self.assertTrue(isinstance(translation, Translation))
            self.assertTrue(len(translation) == 5)
            # Tests the Translation class
            for attr in ("source_word", "translation",
                         "frequency", "part_of_speech",
                         "inflected_forms"):
                self.assertTrue(hasattr(Translation, attr))
            self.assertTrue(translation.source_word == self.api.source_text)
            self.assertTrue(isinstance(translation.translation, str))
            self.assertTrue(isinstance(translation.frequency, int))
            self.assertTrue(isinstance(translation.part_of_speech, str) \
                            or translation.part_of_speech is None)
            self.assertTrue(isinstance(translation.inflected_forms, tuple))
            # Tests the InflectedForms class
            for inflected_form in translation.inflected_forms:
                self.assertTrue(isinstance(inflected_form, InflectedForm))
                for attr in ("translation", "frequency"):
                    self.assertTrue(hasattr(inflected_form, attr))
                self.assertTrue(isinstance(inflected_form.translation, str))
                self.assertTrue(isinstance(inflected_form.frequency, int))
| StarcoderdataPython |
382345 | from concurrent.futures import ThreadPoolExecutor
import time
import requests
def fetch(a,const):
    """GET httpbin's echo endpoint with ?a=<a>; return (echoed args, const).

    `const` is passed through untouched so callers can tag each request.
    """
    url = 'http://httpbin.org/get?a={0}'.format(a)
    r = requests.get(url)
    result = r.json()['args']
    return (result,const)
start = time.time()

# if max_workers is None or not given, it will default to the number of processors, multiplied by 5
with ThreadPoolExecutor(max_workers=None) as executor:
    # BUG FIX: the original called executor.submit(fetch, range(42),
    # test='aylmao', timeout=None, chunksize=1). submit() forwards extra
    # kwargs to the callable, so that scheduled ONE call
    # fetch(range(42), test=..., timeout=..., chunksize=...) — a TypeError,
    # since fetch accepts (a, const) — and then printed the Future object.
    # (timeout/chunksize are Executor.map parameters, not submit's.)
    # Fan out one request per value and collect the actual results instead.
    futures = [executor.submit(fetch, a, 'aylmao') for a in range(42)]
    results = [future.result() for future in futures]

print(results)
print('time: {0}'.format(time.time() - start))
387833 | from indice_pollution.extensions import db
from importlib import import_module
from indice_pollution.extensions import cache
class Zone(db.Model):
    """Geographic zone (region, EPCI, département, air basin or commune)."""

    __table_args__ = {"schema": "indice_schema"}
    id = db.Column(db.Integer, primary_key=True)
    # Kind of zone; must be one of the keys of `libtypes`.
    type = db.Column(db.String)
    # Code identifying the zone within its type.
    code = db.Column(db.String)

    # French display metadata plus the model module/class backing each type.
    libtypes = {
        "region": {"article": "la ", "preposition": "région", "module": "region", "clsname": "Region"},
        "epci": {"article": "l’", "preposition": "EPCI" , "module": "epci", "clsname": "EPCI"},
        "departement": {"article": "le ", "preposition": "département", "module": "departement", "clsname": "Departement"},
        "bassin_dair": {"article": "le ", "preposition": "bassin d’air", "module": "bassin_dair", "clsname": "BassinDAir"},
        "commune": {"article": "la ", "preposition": "commune", "module": "commune", "clsname": "Commune"},
    }

    @classmethod
    def get(cls, code, type_):
        """Return the first Zone matching (code, type), or None."""
        return Zone.query.filter_by(code=code, type=type_).first()

    @property
    @cache.memoize(timeout=0)
    # NOTE(review): the extra defaulted parameters are unreachable through
    # property access — callers can never pass them. Confirm whether this
    # was meant to be a plain method.
    def lib(self, with_preposition=True, with_article=True, nom_charniere=True):
        """French label of the zone, e.g. "la région de ..."."""
        t = self.libtypes.get(self.type)
        if not t:
            return ""
        o = self.attached_obj
        if not o:
            return ""
        r = ""
        if with_preposition:
            if with_article:
                r = t["article"]
            r += t["preposition"] + " "
        if nom_charniere and hasattr(o, 'nom_charniere'):
            return r + o.nom_charniere
        if hasattr(o, "preposition"):
            r += (o.preposition or "") + " "
        r += o.nom or ""
        return r

    @property
    @cache.memoize(timeout=0)
    # NOTE(review): same unreachable-parameter issue as `lib` above.
    def attached_obj(self, with_preposition=True, with_article=True):
        """The concrete model row (Region, Commune, ...) backing this zone."""
        t = self.libtypes.get(self.type)
        if not t:
            return None
        # Import lazily to avoid circular imports between zone and its models.
        m = import_module(f"indice_pollution.history.models.{t['module']}")
        c = getattr(m, t["clsname"])
        return db.session.query(c).filter(c.zone_id == self.id).first()
8147483 | <filename>econtent.py
# MIT License
# Copyright (c) 2016-2021 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import re
import datetime
def read(input):
    """Parse econtent markup into a dict of tags plus an ordered '_' content map.

    Recognized syntax:
      @tag@ value            -> obj['tag'] = value (underscore tags mostly skipped)
      @left|right@ value     -> obj['left']['right'] = value
      @@begin|type:code@@ ... @@end@@ -> a typed content section
      @@type:code@@ inside a section  -> starts a new sub-part

    NOTE(review): the parameter name shadows the builtin `input`; renaming
    would break keyword callers, so it is only flagged here.
    """
    obj = { }
    body = []              # lines of the current plain-text run
    index = 0              # next slot in the ordered content map
    section_data = None    # {'type','code'} of the currently open section
    content = {}
    format_content = None  # sub-parts of the current section, by position
    for line in input.split('\n'):
        if len(line) == 0:
            body.append(line)
            continue
        line_handled = False
        if line.startswith('@@'):
            if line.startswith('@@begin|'):
                line_handled = True
                sr = re.search("@@begin\|(?P<type>[0-9a-zA-Z_]+)\:(?P<code>[0-9a-zA-Z_]+)@@", line)
                if sr != None:
                    # Flush the plain text gathered so far, then open a section.
                    type = sr.group(1)
                    code = sr.group(2)
                    content[index] = '\n'.join(body).strip("\n")
                    index = index + 1
                    body = []
                    section_data = { 'type': type, 'code': code }
                    format_index = 0
                    format_content = {}
            elif section_data is not None and line.startswith('@@'):
                line_handled = True
                if line == '@@end@@':
                    # Close the open section (single- or multi-part).
                    if format_content is None:
                        content[index] = {
                            section_data['type']: section_data['code'],
                            '_': '\n'.join(body).strip("\n")
                        }
                    else:
                        format_content[format_index] = {
                            section_data['type']: section_data['code'],
                            '_': '\n'.join(body).strip("\n")
                        }
                        content[index] = format_content
                    index = index + 1
                    body = []
                    section_data = None
                    format_content = None
                else:
                    line_handled = True
                    sr = re.search("^@@(?P<type>[0-9a-zA-Z_]+)\:(?P<code>[0-9a-zA-Z_]+)@@", line)
                    if sr != None:
                        # A @@type:code@@ marker inside an open section closes
                        # the previous sub-part and starts a new one.
                        type = sr.group(1)
                        code = sr.group(2)
                        if format_content is None:
                            format_content = {}
                            format_index = 0
                        format_content[format_index] = {
                            section_data['type']: section_data['code'],
                            '_': '\n'.join(body).strip("\n")
                        }
                        section_data = { 'type': type, 'code': code }
                        format_index = format_index + 1
                        body = []
            if not line_handled:
                # this allows for @@asdfasdf@@ to start a line; meaning @@footnote@@ can start a line
                body.append(line)
        elif line[0] == '@':
            sr = re.search("^@(?P<type>[0-9a-zA-Z_\|]+)@(?P<content>.*)", line)
            if sr != None:
                tag_type = sr.group(1)
                tag_content = sr.group(2).strip()
                if '|' in tag_type:
                    # @left|right@ value  ->  obj[left][right] = value
                    (bar_left, bar_right) = tag_type.split('|', 1)
                    if bar_left not in obj:
                        obj[bar_left] = {}
                    obj[bar_left][bar_right] = tag_content
                else:
                    #+ don't save most stuff with prefix; it's my universal code for disabled (or system)
                    #+ it's VERY common to overwrite _created and _modified (since they are often killed
                    #+ when they go across FTP; but you can't mess with immutable stuff (e.g. filename)
                    if not tag_type.startswith('_') or tag_type in ('_created', '_modified'):
                        obj[tag_type] = tag_content
        else:
            sr = re.search("@@(?P<type>[0-9a-zA-Z_]+)\|(?P<code>[0-9a-zA-Z_]+)@@", line)
            if sr != None:
                type = sr.group(1)
                code = sr.group(2)
                ##+ don't really do anything; just good to know about
            body.append(line)
    if len(body) > 0:
        content[index] = '\n'.join(body).strip("\n")
    obj['_'] = content
    return obj
def read_file(path):
    """Parse an econtent file from disk, adding filesystem metadata fields."""
    with open(path, 'r') as handle:
        obj = read(handle.read())

    stat_info = os.stat(path)
    #+ due to a file system design flaw, not all file systems have a file created date
    for field, timestamp in (('_created', stat_info.st_ctime),
                             ('_modified', stat_info.st_mtime)):
        if field not in obj:
            obj[field] = datetime.datetime.fromtimestamp(timestamp).replace(microsecond=0).isoformat() + 'Z'

    obj['_filename'] = os.path.basename(path)
    base, ext = os.path.splitext(obj['_filename'])
    if len(ext) == 0:
        # Dotfiles like ".gitignore": splitext leaves the extension slot empty.
        obj['_extension'] = base[1:]
        obj['_basename'] = ''
    else:
        obj['_extension'] = ext if ext[0] != '.' else ext[1:]
        obj['_basename'] = base
    return obj
5025538 | import os
from django.conf import settings
from django.contrib.staticfiles.finders import BaseFinder, AppDirectoriesFinder
from django.contrib.staticfiles.storage import AppStaticStorage
from django.core.files.storage import FileSystemStorage
from django.utils._os import safe_join
class AppMediaStorage(AppStaticStorage):
    """App-level storage that collects from each app's 'media' directory."""
    source_dir = 'media'
class MediaFinder(AppDirectoriesFinder):
    """Staticfiles finder that discovers per-app 'media' directories."""
    storage_class = AppMediaStorage
class MediaRootFinder(BaseFinder):
    """
    Since the static files runserver can not find media definitions, it is now
    added by this finder. This way you don't have to define anything in urls.py
    to make django server both static and media files.
    """

    def find(self, path, all=False):
        """
        Looks for files in the MEDIA_ROOT
        """
        # NOTE(review): `all` shadows the builtin but matches Django's
        # finder API, so it must stay.
        media_prefix = settings.MEDIA_URL.replace(settings.STATIC_URL, '')
        if path.startswith(media_prefix):
            # NOTE(review): joins under STATIC_ROOT, not MEDIA_ROOT, despite
            # the docstring — confirm which root is actually intended.
            location = safe_join(settings.STATIC_ROOT, path)
            if os.path.exists(location):
                if not all:
                    return location
                return [location]
        return []

    def list(self, ignore_patterns):
        """
        List all files in all locations.
        """
        # FileSystemStorage() defaults its location to MEDIA_ROOT.
        yield settings.MEDIA_ROOT, FileSystemStorage()
| StarcoderdataPython |
4950764 | <reponame>broaddeep/gdparser
from setuptools import find_packages, setup
# Read the long description from the README so PyPI renders it. The explicit
# encoding prevents UnicodeDecodeError on platforms whose default locale
# encoding is not UTF-8 (e.g. Windows cp1252).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

package_name = 'gdparser'
version = '0.0.2'
description = "Google Docstring Parser"
url = "https://github.com/broaddeep/gdparser.git"

setup(
    name=package_name,
    version=version,
    author="broaddeep",
    author_email="<EMAIL>",
    description=description,
    long_description=long_description,
    long_description_content_type="text/markdown",
    keywords='parser docstring google text',
    license='Apache',
    url=url,
    packages=find_packages(),
    install_requires=[],
    package_data={package_name: ['*', '*/*', '*/*/*']},
    python_requires='>=3.6.0',
    classifiers=[
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
    ],
)
11212753 | # 1910. <NAME>: сокрытый вход
# solved
# Read the declared section count (kept for input-protocol compatibility;
# the list length is what actually drives the loop below).
sections_num = int(input())
sections_list = input().split(' ')

max_power_sum = 0
max_power_mid_num = 0
# Slide a window of three consecutive sections: track the strongest window's
# total power and the 1-based index of its middle section. Replaces the
# original's manual inner accumulation loop and mid-index bookkeeping.
for i in range(len(sections_list) - 2):
    power_sum = sum(int(section) for section in sections_list[i:i + 3])
    if power_sum > max_power_sum:
        max_power_sum = power_sum
        max_power_mid_num = i + 2  # middle of window [i, i+1, i+2], 1-based
print(str(max_power_sum) + ' ' + str(max_power_mid_num))
8118991 | <filename>deluca/lung/utils/__init__.py<gh_stars>1-10
from deluca.lung.utils.core import BreathWaveform
from deluca.lung.utils.data.analyzer import Analyzer
from deluca.lung.utils.data.munger import Munger
# from deluca.lung.utils.data.featurizer import Featurizer
# from deluca.lung.utils.data.featurizer import ScalingHistoryFeaturizer
# from deluca.lung.utils.data.featurizer import TriangleErrorFeaturizer
from deluca.lung.utils.sim.nn import SNN
from deluca.lung.utils.sim.nn import ShallowBoundaryModel
from deluca.lung.utils.sim.nn import ConstantModel
from deluca.lung.utils.sim.nn_jax import InspiratoryModel_jax
from deluca.lung.utils.sim.testing import open_loop_test
# from deluca.lung.utils.scripts.run_calibration import run_calibration
from deluca.lung.utils.scripts.save_data_and_plot import save_data_and_plot
from deluca.lung.utils.scripts.run_controller import run_controller, run_controller_scan
from deluca.lung.utils.scripts.find_best_pid import find_best_pid, find_global_best_pid, plot_pid
# from deluca.lung.utils.scripts.run_explorer import run_explorer
# from deluca.lung.utils.scripts.run_pid_grid import run_pid_grid
from deluca.lung.utils.scripts.train_controller import train_controller, train_controller_multipip
from deluca.lung.utils.scripts.train_simulator import train_simulator
from deluca.lung.utils.scripts.convert_venti import convert_sim
# Public API of deluca.lung.utils — keep in sync with the imports above.
__all__ = [
    "BreathWaveform",
    "Analyzer",
    "Munger",
    # "Featurizer",
    # "ScalingHistoryFeaturizer",
    # "TriangleErrorFeaturizer",
    "SNN",
    "ShallowBoundaryModel",
    "ConstantModel",
    "InspiratoryModel_jax",
    "open_loop_test",
    # "run_calibration",
    "save_data_and_plot",
    "run_controller",
    "find_best_pid",
    "find_global_best_pid",
    "plot_pid",
    # "run_explorer",
    # "run_pid_grid",
    "train_controller",
    "train_simulator",
    "convert_sim",
    "train_controller_multipip",
]
| StarcoderdataPython |
3274466 | # Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree.
from ..remote_objects import RemoteObject
class MockRemoteObject(RemoteObject):
    """Test double for RemoteObject with settable canned values."""

    def __init__(self):
        # Canned defaults mirroring an "undefined" remote value.
        self._properties = {'result': []}
        self._serialized_value = {'type': 'undefined'}

    @property
    def serialized_value(self):
        return self._serialized_value

    @serialized_value.setter
    def serialized_value(self, value):
        self._serialized_value = value

    @property
    def properties(self):
        return self._properties

    @properties.setter
    def properties(self, properties):
        self._properties = properties
| StarcoderdataPython |
3370398 | <gh_stars>0
from lib.imports.default import *
import lib.settings.templates.parse_cursor as parse_cursor
def call(**kwargs):
    """Return every settings template from the webplatform DB, parsed.

    **kwargs is accepted for interface compatibility but currently unused.
    """
    manager = Manager()
    db = manager.db('webplatform')
    cursor = db.settings_templates.find()
    # Parse each template as it streams from the cursor — the original
    # first materialized the raw documents into a throwaway list.
    return [parse_cursor.call(template) for template in cursor]
| StarcoderdataPython |
4902862 | from ipaddress import IPv4Address
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from sfdo_template_helpers.addresses import get_remote_ip
class AdminRestrictMiddleware:
    """
    A middleware that restricts all access to the admin prefix to allowed IPs.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        # Subnets allowed to reach restricted areas.
        self.ip_ranges = settings.ADMIN_API_ALLOWED_SUBNETS

    def __call__(self, request):
        restricted_areas = tuple(getattr(settings, "RESTRICTED_PREFIXES", ()))
        unrestricted_areas = tuple(getattr(settings, "UNRESTRICTED_PREFIXES", ()))
        admin_area = getattr(settings, "ADMIN_AREA_PREFIX", None)
        if admin_area is not None:
            restricted_areas += (admin_area,)
        # Unrestricted prefixes win: matching requests skip the IP check.
        for area in unrestricted_areas:
            area = area.rstrip("/")
            if request.path.startswith(f"/{area}"):
                return self.get_response(request)
        for area in restricted_areas:
            area = area.rstrip("/")
            # NOTE(review): plain startswith means "/admin" also matches
            # "/administrator" — confirm the over-matching is intended.
            if request.path.startswith(f"/{area}"):
                self._validate_ip(request)
        return self.get_response(request)

    def _validate_ip(self, request):
        """Raise SuspiciousOperation unless the client IP is in an allowed subnet."""
        ip_str = get_remote_ip(request)
        # NOTE(review): IPv4Address raises ValueError (a 500) for IPv6
        # clients rather than SuspiciousOperation — confirm IPv6 handling.
        ip_addr = IPv4Address(ip_str)
        if not any(ip_addr in subnet for subnet in self.ip_ranges):
            raise SuspiciousOperation(f"Disallowed IP address: {ip_addr}")
| StarcoderdataPython |
4931754 | import matplotlib.pyplot as plt
# Path of the comma-separated letter counts (26 integers, one per a-z letter).
in_path = "../res/terminal_freq.csv"
with open(in_path) as f:
    data = f.read()
data = [int(i) for i in data.split(",")]
# Tick labels 'a'..'z' (97 is ord('a')).
labels = [chr(i+97) for i in range(26)]
ticks = range(26)
plt.bar(ticks, data, align="center")
plt.xticks(ticks, labels)
plt.title("Terminal frequencies")
plt.show()
3287770 | <gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
import math
import statistics
from scipy import stats
infilename = "RTK_DATA.txt"
outfilename = "new_plot_data.txt"

# Copy the raw log into a new file, dropping duplicate lines while keeping
# first-occurrence order. The original iterated `open(infilename)` directly
# without ever closing the input handle — both files are now managed by a
# single `with` block.
lines_seen = set()  # holds lines already seen
with open(infilename, "r") as infile, open(outfilename, "w") as outfile:
    for line in infile:
        if line not in lines_seen:  # not a duplicate
            outfile.write(line)
            lines_seen.add(line)
# Load the de-duplicated log and split each tab-separated record.
lines = []
with open(outfilename) as f:
    lines = f.readlines()
parsed_lines = []
for line in lines:
    parsed_lines.append(line.split('\t'))
# Columns 8-11: heading in degrees and the x/y/z base-vector components.
deg = []
vec_x = []
vec_y = []
vec_z = []
for line in parsed_lines:
    deg.append(float(line[8]))
    vec_x.append(float(line[9]))
    vec_y.append(float(line[10]))
    vec_z.append(float(line[11]))
    # print(line)
# Derive the Euclidean length and azimuth (degrees, clockwise from north)
# of each base vector. (Note: "lenth" is a historic misspelling kept
# because the name is referenced throughout the rest of the script.)
v_lenth = []
v_angle = []
# print(vec_x)
for i in range( len(vec_x) ):
    v_lenth.append( math.sqrt( pow(vec_x[i],2) + pow(vec_y[i],2) + pow(vec_z[i],2) ) )
    v_angle.append(360 - (math.degrees(math.atan2(vec_x[i], vec_y[i])) - 90)%360)
# Summary statistics of the vector lengths.
len_m = statistics.mean(v_lenth)
len_std = statistics.stdev(v_lenth)
# expected = stats.norm.ppf(0.95, loc = len_m)
x_exp = np.linspace(min(v_lenth), max(v_lenth), len(v_lenth))
x_deg = np.linspace(min(deg), max(deg), len(v_lenth))
# rv = stats.norm.pdf(x_exp, loc=len_m)
# NOTE(review): `rv` is a fresh random sample, so the chisquare below
# compares the data against random (unseeded, non-reproducible) expected
# frequencies — confirm that is the intended reference distribution.
rv = np.random.normal(len_m, 0.0022, len(v_lenth))
# print(rv)
len_chi_2, len_p_2 = stats.chisquare(v_lenth, rv)
len_chi, len_p = stats.normaltest(v_lenth)
print("len_chi = {}, len_p = {}".format(len_chi, len_p))
# Same summary statistics and normality test for the azimuths.
ang_m = statistics.mean(deg)
ang_std = statistics.stdev(deg)
# ang_chi = stats.chisquare(deg)
ang_chi, ang_p = stats.normaltest(deg)
print("ang_chi = {}, ang_p = {}".format(ang_chi, ang_p))
print("Sample size = {}".format(len(v_lenth)))
"""Chi-squared test for normal destribution"""
# Build equiprobable bins from the fitted normal's quantiles, then compare
# observed vs expected counts (custom Pearson chi-squared, df = bins - 3).
fr_num = 18
bins = [-float('inf')]
for i in range(fr_num - 1):
    p = (i+1)/fr_num; z = stats.norm.ppf(p); bound = len_m + z*len_std
    print(p, '{:6.4f}'.format(z), '{:6.4f}'.format(bound))
    bins.append(bound)
bins.append(float('inf'))
frequency = []
for i in range(fr_num):
    observed, expected = sum(num >= bins[i] and num < bins[i+1] for num in v_lenth), (len(v_lenth)/( fr_num ))
    print('{:2d}'.format(observed), expected)
    frequency.append((observed, expected))
chi_square = sum([(x[0]-x[1])**2./x[1] for x in frequency])
crit = stats.chi2.ppf(0.95, (fr_num) - 3)
p_value = 1 - stats.chi2.cdf(chi_square, (fr_num) - 3)
print("\nCustom Chi2")
print("Chi = {}\tCriteria = {}\tP-value = {}\n".format(chi_square, crit, p_value))
#--------------------------------------------#
print("Normaltest K**2")
print("K = {}\tP-value = {}\n".format(len_chi, len_p))
print("Chisquered SciPy")
print("Chi = {}\tCriteria = {}\tP-value = {}\n".format(len_chi_2, crit, len_p_2))
# Two-panel figure: vector-length histogram (left), azimuth histogram (right).
f,axs = plt.subplots(1,2)
axs[0].hist(v_lenth, bins=(fr_num))
# axs[0].plot(x_exp, stats.norm.pdf(x_exp, len_m, len_std))
# axs[0].plot(x_exp, rv)
axs[0].set_xlabel('Values, m')
axs[0].set_ylabel('Density')
axs[0].set_title('Base vector length')
# axs[0].plot(, rv*1000)
# axs[0].legend("m = {}, STD = {}".format(len_m, len_std))
# axs[0].axis([2.075, 2.095, 0, 100])
axs[0].grid()
axs[1].hist(deg, bins=(fr_num)) #35
# axs[1].plot(x_deg, stats.norm.pdf(x_deg, ang_m, ang_std))
axs[1].set_xlabel('Values, °')
axs[1].set_ylabel('Density')
axs[1].set_title('Azimuth')
axs[1].grid()
# Figure-level annotations: mean/STD and normality p-values above each panel.
plt.figtext(0.125,0.97, "\nMean = {: .4f}\nSTD = {: .4f}".format(len_m, len_std),
            horizontalalignment ="left",
            verticalalignment ="center",
            wrap = True,
            in_layout = True,
            # fontsize = 10,
            # color ="blue"
            )
plt.figtext(0.48,0.97, "\nP-value = {: .4f}\n".format(len_p),
            horizontalalignment ="right",
            verticalalignment ="center",
            wrap = True,
            in_layout = True,
            # fontsize = 10,
            # color ="blue"
            )
plt.figtext(0.9,0.97, "\nP-value = {: .4f}\n".format(ang_p),
            horizontalalignment ="right",
            verticalalignment ="center",
            wrap = True,
            in_layout = True,
            # fontsize = 10,
            # color ="blue"
            )
plt.figtext(0.55,0.97, "\nMean = {: .4f}\nSTD = {: .4f}".format(ang_m, ang_std),
            horizontalalignment ="left",
            verticalalignment ="center",
            wrap = True,
            in_layout = True,
            # fontsize = 10,
            # color ="blue"
            )
# axs[2].hist(v_angle, bins=35)
# axs[2].set_xlabel('Values, °')
# axs[2].set_ylabel('Density')
# axs[2].set_title('Azimuth')
# axs[2].grid()
plt.show()
# plt.savefig("matplotlib.png") #savefig, don't show
# plt.hist(x, bins=35)
# plt.xlabel('Values, m')
# plt.ylabel('Density')
# plt.title('Base vector lenth')
# plt.axis([2.075, 2.095, 0, 100])
# plt.grid()
# plt.show()
| StarcoderdataPython |
9722678 | """
Build the various language SDK packages for release
"""
import argparse
import glob
import os
import platform
import shutil
import subprocess
import sys
from os.path import join, abspath, dirname
from typing import Dict
# requests is not in the stdlib; bootstrap-install it on first run if missing.
try:
    import requests
except ImportError:
    # Install into the interpreter actually running this script (os.system
    # 'pip install' could target a different Python) and then retry the import.
    subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests'])
    import requests
def parse_version_tag():
    """Parse the release version out of a tag string. Placeholder: not implemented yet."""
    raise NotImplementedError
def get_os_arch_path(extract_dir, windows_path):
    """Return the per-OS/arch native-library directory under *extract_dir*/libs.

    windows_path selects the Windows flavor (e.g. 'windows' or 'windows-gnu').
    Returns '' when the OS/architecture combination is not recognized.
    """
    libs_dir = join(extract_dir, 'libs')
    system = platform.system().lower()
    machine = platform.machine().lower()
    if system == 'windows':
        return join(libs_dir, windows_path)
    if system == 'linux':
        linux_dirs = {
            'x86_64': 'linux',
            'armv7l': 'linux-armv7',
            'aarch64': 'linux-aarch64',
        }
        if machine in linux_dirs:
            return join(libs_dir, linux_dirs[machine])
        return ''
    if system == 'darwin':
        return join(libs_dir, 'macos')
    return ''
def copy_okapi_libs(copy_to: str, windows_path='windows'):
    """Copy the platform-specific okapi shared libraries and the C header into *copy_to*.

    windows_path selects which Windows library flavor to copy (e.g. 'windows-gnu' for Go).
    Paths are resolved relative to this script's parent directory.
    """
    okapi_dir = abspath(join(dirname(__file__), '..'))
    copy_from = get_os_arch_path(okapi_dir, windows_path)
    print(f"Copying okapi libs from: {copy_from}\nto: {copy_to}")
    for copy_file in glob.glob(join(copy_from, '*.*')):
        shutil.copy2(copy_file, copy_to)
    # The C header ships alongside the binaries for consumers that compile against it.
    shutil.copy2(join(okapi_dir, 'libs', 'C_header', 'okapi.h'), copy_to)
def update_line(file_name: str, replace_lines: Dict[str, str]) -> None:
    """Rewrite *file_name* in place, substituting every line whose stripped text
    starts with one of the keys of *replace_lines* (see replace_line_if_needed)."""
    with open(file_name, 'r') as src:
        lines = src.readlines()
    updated = [replace_line_if_needed(line, replace_lines) for line in lines]
    with open(file_name, 'w') as dst:
        dst.writelines(updated)
def replace_line_if_needed(line: str, replace_lines: Dict[str, str]) -> str:
    """Return *line*, replaced by the mapped value (newline appended) whenever the
    stripped line starts with a stripped key; later matching keys win."""
    for prefix, replacement in replace_lines.items():
        if line.strip().startswith(prefix.strip()):
            line = replacement + '\n'
    return line
def build_python(args) -> None:
    """Stamp the release version into python/setup.cfg and stage the native libs."""
    # Update version in setup.cfg
    python_dir = abspath(join(dirname(__file__), '..', 'python'))
    update_line(join(python_dir, 'setup.cfg'),
                {'version = ': f'version = {get_package_versions(args)}'})
    copy_okapi_libs(abspath(join(python_dir, '..', 'libs')))
def build_java(args) -> None:
    """Stamp the release version into java/build.gradle and stage the native libs."""
    # Update jarVersion in build.gradle
    java_dir = abspath(join(dirname(__file__), '..', 'java'))
    update_line(join(java_dir, 'build.gradle'),
                {'def jarVersion': f'def jarVersion = "{get_package_versions(args)}"'})
    copy_okapi_libs(abspath(join(java_dir, '..', 'libs')))
def build_ruby(args) -> None:
    """Stamp the release version into ruby/lib/version.rb (ruby needs no native libs copied here)."""
    # Update the VERSION constant in lib/version.rb
    ruby_dir = abspath(join(dirname(__file__), '..', 'ruby'))
    update_line(join(ruby_dir, 'lib', 'version.rb'),
                {' VERSION =': f" VERSION = '{get_package_versions(args)}'"})
def build_golang(args) -> None:
    """Stage the native okapi libraries (windows-gnu flavor) into go/services.

    Unlike the other build_* helpers, no version stamping is done for Go.
    """
    golang_dir = abspath(join(dirname(__file__), '..', 'go', 'services'))
    # Copy in the binaries
    copy_okapi_libs(golang_dir, 'windows-gnu')
def get_package_versions(args) -> str:
    """Resolve the release version: the CLI override when given, otherwise the
    latest GitHub release tag; any leading 'v' characters are stripped."""
    version = args.package_version or get_github_version()
    return version.lstrip('v')
def get_github_version(github_token: str = None) -> str:
    """Return the tag name of the latest okapi GitHub release.

    Falls back to the API_GITHUB_TOKEN environment variable when no token is
    passed; when neither is available the request is made unauthenticated.
    """
    if not github_token:
        github_token = os.getenv('API_GITHUB_TOKEN')
    # Bug fix: GitHub expects the "token <value>" scheme in the Authorization
    # header; sending the bare token is rejected with 401. requests drops
    # None-valued headers, so unauthenticated calls keep working when no
    # token is configured.
    auth_header = f'token {github_token}' if github_token else None
    github_release_request = requests.get('https://api.github.com/repos/trinsic-id/okapi/releases/latest',
                                          headers={'Authorization': auth_header})
    github_json = github_release_request.json()
    version = github_json['tag_name']
    return version
def parse_arguments():
    """Build and evaluate the command-line interface for the SDK build script."""
    cli = argparse.ArgumentParser(description='Process SDK building')
    cli.add_argument('--package-version', help='Manual override package version')
    return cli.parse_args()
def main():
    """Entry point: parse CLI arguments, then prepare every language SDK for release."""
    # Get command line arguments
    args = parse_arguments()
    # Stamp version information and stage native libraries per language
    build_python(args)
    build_java(args)
    build_ruby(args)
    build_golang(args)
# Run the build only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| StarcoderdataPython |
3430904 | from setuptools import setup, find_packages
# Package metadata for the NeuralEDUSeg discourse segmenter.
# NOTE(review): the license field says "Apache License 2.0" but the classifier
# below declares "MIT License" — confirm which one actually applies.
setup(
    name="neuraleduseg",
    version="1.0.0",
    description="Discourse segmentation",
    license="Apache License 2.0",
    url="https://github.com/rknaebel/NeuralEDUSeg",
    packages=find_packages(),
    author="<NAME>",
    author_email="<EMAIL>",
    classifiers=["Development Status :: 3 - Alpha",
                 "Environment :: Console",
                 "Intended Audience :: Science/Research",
                 "License :: OSI Approved :: MIT License",
                 "Natural Language :: English",
                 "Operating System :: Unix",
                 "Operating System :: MacOS",
                 "Programming Language :: Python :: 3",
                 "Topic :: Text Processing :: Linguistic"],
    keywords="discourse NLP linguistics")
| StarcoderdataPython |
4952641 | <filename>account/forms.py
from django import forms
from django.core.exceptions import ValidationError
import re
from django.contrib.auth import authenticate, login
class ChangePasswordForm(forms.Form):
    """Form for changing the current user's password.

    Validates that the new password is 8-100 characters of [A-Za-z0-9_-],
    matches its confirmation, and differs from the old password; then
    re-authenticates and logs the user back in with the new credentials.
    """
    old_password = forms.CharField(max_length=64, widget=forms.PasswordInput())
    password = forms.CharField(max_length=64, widget=forms.PasswordInput())
    password_repeat = forms.CharField(max_length=64, widget=forms.PasswordInput())

    def clean_password_repeat(self):
        """Validate the new password and its confirmation.

        NOTE(review): reads cleaned_data['password'] / ['old_password'], which
        relies on those fields being declared before password_repeat and having
        passed their own validation; otherwise this raises KeyError. It also
        returns the *password* value rather than password_repeat (equal at this
        point, but Django convention is to return the field's own value) — confirm.
        """
        temp_password_repeat = self.cleaned_data['password_repeat']
        temp_password = self.cleaned_data['password']
        temp_old_password = self.cleaned_data['old_password']
        # Allowed alphabet: ASCII letters, digits, hyphen and underscore.
        userexp = re.compile('^[A-Za-z0-9_-]{8,100}$')
        if len(temp_password) < 8:
            raise ValidationError("Минимальная длина пароля - 8.")
        if len(temp_password) > 100:
            raise ValidationError("Максимальная длина пароля - 100.")
        if temp_password != temp_password_repeat:
            raise ValidationError("Пароли не совпадают.")
        if not userexp.match(temp_password):
            raise ValidationError("В пароле могут присутствовать лишь английские буквы,"+
                                  " цифры, дефис и знак подчёркивания")
        if temp_old_password == temp_password:
            raise ValidationError('Старый пароль совпадает с новым')
        return temp_password

    def change_password(self, user, request):
        """Verify the old password; on success store the new one and re-login.

        Returns True on success, False when the old password does not authenticate.
        """
        check_user = authenticate(username=user.username, password=self.cleaned_data['old_password'])
        if check_user is not None:
            user.set_password(self.cleaned_data['password'])
            user.save()
            # set_password invalidated the session credentials: authenticate
            # with the new password and log the user back in.
            user = authenticate(username=user.username, password=self.cleaned_data['password'])
            login(request, user)
            return True
        else:
            return False
| StarcoderdataPython |
3212645 | # -*- coding: utf-8 -*-
"""mnistfashionclassification.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1bkbxEu12u2eqexsnO4lROJFJMvFSZslu
"""
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
class_names = ['T-shirt', 'Pants', 'Pullover', 'Dress', 'Coat', 'Sandal', 'Shirt',
'Sneaker', 'Bag', 'Ankle boot']
train_images.shape
train_labels, test_images.shape
plt.figure()
plt.imshow(train_images[0])
plt.colorbar()
plt.grid(False)
plt.show()
train_images = train_images / 255.0
test_images = test_images / 255.0
plt.figure(figsize=(10, 10))
for i in range(25):
plt.subplot(5,5,i+1)
plt.xticks([])
plt.yticks([])
plt.grid(False)
plt.imshow(train_images[i],
cmap=plt.cm.binary)
plt.xlabel(class_names[train_labels[i]])
plt.show()
model = keras.Sequential([
keras.layers.Flatten(input_shape= (28, 28)),
keras.layers.Dense(128,
activation=tf.nn.relu),
keras.layers.Dense(10,
activation=tf.nn.softmax)
])
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(train_images, train_labels, epochs=10)
test_loss, test_acc = model.evaluate(test_images, test_labels)
print('Test accuracy', test_acc)
probability_model = tf.keras.Sequential([model,
tf.keras.layers.Softmax()])
predictions = probability_model.predict(test_images)
test_labels[0]
np.argmax(predictions[0])
model.save('clothing_pred.h5') | StarcoderdataPython |
5093870 | #
# This is an extremely simple demo application to showcase the
# basic structure, features and use of cvui.
#
# Copyright (c) 2018 <NAME> <<EMAIL>>
# Licensed under the MIT license.
#
import numpy as np
import cv2
import cvui
WINDOW_NAME = 'CVUI Hello World!'
def main():
    """Run the cvui hello-world demo: one window with a click-counting button."""
    # BGR canvas (height 200, width 500) that the UI is redrawn onto each frame.
    frame = np.zeros((200, 500, 3), np.uint8)
    count = 0;
    # Init cvui and tell it to create a OpenCV window, i.e. cv::namedWindow(WINDOW_NAME).
    cvui.init(WINDOW_NAME)
    while (True):
        # Fill the frame with a nice color
        frame[:] = (49, 52, 49)
        # Buttons will return true if they were clicked, which makes
        # handling clicks a breeze.
        if (cvui.button(frame, 110, 80, "Hello, world!")):
            # The button was clicked, so let's increment our counter.
            count += 1
        # Sometimes you want to show text that is not that simple, e.g. strings + numbers.
        # You can use cvui::printf for that. It accepts a variable number of parameter, pretty
        # much like printf does.
        # Let's show how many times the button has been clicked.
        cvui.printf(frame, 250, 90, 0.4, 0xff0000, "Button click count: %d", count);
        # Update cvui stuff and show everything on the screen
        cvui.imshow(WINDOW_NAME, frame);
        # Check if ESC key was pressed
        if cv2.waitKey(20) == 27:
            break
# Run the demo only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
8188003 | <filename>hityper/typeobject.py
import re
from hityper.stdtypes import stdtypes, exporttypemap, inputtypemap, typeequalmap
from hityper import logger
logger.name = __name__
class TypeObject(object):
    def __init__(self, t, category, added = False):
        """A (possibly generic) type with provenance information.

        t: type name; category: 0 builtins, 1 standard libraries, 2 user defined.
        added: whether this type was added later during inference.
        """
        self.type = t
        #categories: 0 - builtins
        #1 - standard libraries
        #2 - user defined
        self.category = category
        # Names this type may be implicitly converted to (numeric tower for numbers).
        self.compatibletypes = [t]
        # Where this type was first introduced in the type graph (node name / order).
        self.startnodename = None
        self.startnodeorder = None
        self.added = added
        if t in ["bool", "int", "float", "complex"]:
            self.compatibletypes = ["int", "float", "complex", "bool"]
        # Generic parameters: elementtype for sequences/unions, key/value types for dicts.
        self.elementtype = []
        self.keytype = []
        self.valuetype = []
    def buildTuple(self, t):
        """Turn this object into a Tuple whose element types are *t* (a list)."""
        self.type = "Tuple"
        self.elementtype = t
    def buildDict(self, key, value):
        """Turn this object into a Dict with the given key/value type lists.

        NOTE(review): elementtype is also set to *key* — presumably so a Dict
        "iterates like" its keys; confirm downstream reliance before changing.
        """
        self.type = "Dict"
        self.elementtype = key
        self.keytype = key
        self.valuetype = value
    def buildList(self, t):
        """Turn this object into a List whose element types are *t* (a list)."""
        self.type = "List"
        self.elementtype = t
    def buildSet(self, t):
        """Turn this object into a Set whose element types are *t* (a list)."""
        self.type = "Set"
        self.elementtype = t
@property
def getBuiltinTypes(self):
#ref: https://docs.python.org/zh-cn/3/library/typing.html
#ref: https://docs.python.org/3/library/stdtypes.html
self.builintypes = {}
self.builintypes["element"] = ["bool", "int", "float", "None", "Any", "Text", "type", "bytes"]
self.builintypes["generic"] = [ "List", "Tuple", "Set", "Dict", "Union", "Optional", "Callable", "Iterable", "Sequence", "Generator"]
self.builintypes["rare"] = ["complex", "bytearray", "Frozenset", "memoryview", "range"]
return self.builintypes
@staticmethod
def isCompatible(l, r):
for t in l.compatibletypes:
if t == r.type:
return True
return False
@staticmethod
def existCompatible(l, listr):
for r in listr:
if TypeObject.isCompatible(l, r):
return True
if TypeObject.existSame(l, listr):
return True
return False
@staticmethod
def existNumbers(l, listr, exact = False):
#now we conduct exact match
if not exact:
return False
if l.type in ["int", "float"]:
for r in listr:
if r.type in ["int", "float"]:
return True
return False
    # l is x and Optional[x] in listr will return true
    @staticmethod
    def existOptional(l, listr):
        """True when *listr* contains Optional[X] whose single X equals l (by builtin code)."""
        for t in listr:
            if t.type.lower() == "optional" and len(t.elementtype) == 1 and typeequalmap[t.elementtype[0].type.lower()] == typeequalmap[l.type.lower()]:
                return True
        return False
    @staticmethod
    def existSame( l, listr):
        """True when l matches some member of *listr*.

        Members may be plain strings (compared loosely via isSimilar) or
        TypeObjects (compared structurally via isIdentical); placeholder
        strings of the form "<...>" are skipped.
        """
        for r in listr:
            if isinstance(r, str):
                if r.startswith("<") and r.endswith(">"):
                    continue
                if TypeObject.isSimilar(l, TypeObject(r,0)):
                    return True
            elif TypeObject.isIdentical(l, r):
                return True
        return False
@staticmethod
def existSimilar(l, listr):
for r in listr:
if TypeObject.isSimilar(l,r):
return True
return False
    @staticmethod
    def findSame(l, listr):
        """Return the first member of *listr* matching l (strings: loose match via
        isSimilar; TypeObjects: structural match via isIdentical), else None."""
        for r in listr:
            if isinstance(r, str) and TypeObject.isSimilar(l, TypeObject(r,0)):
                return r
            elif isinstance(r, TypeObject) and TypeObject.isIdentical(l,r):
                return r
        return None
    @staticmethod
    def isIdentical( l, r):
        """Structural equality between two TypeObjects.

        Non-builtins compare by name (user-defined types also by unqualified
        name). Builtins compare via typeequalmap, recursing into element /
        key / value types for generic containers. Special cases: Literal
        matches scalar builtins (codes <= 3), Iterable matches any concrete
        iterable (codes 11..17), and builtin type[X] unwraps to X when
        compared against a user-defined type.
        """
        if l.category != 0 and r.category != 0:
            if l.type == r.type:
                return True
            elif l.category == r.category and l.category == 2 and (l.type.split(".")[-1] == r.type.split(".")[-1]):
                return True
            else:
                return False
        if l.category == 0 and r.category == 0:
            if typeequalmap[l.type.lower()] == typeequalmap[r.type.lower()]:
                if l.type.lower() not in ["list", "tuple", "set", "iterable", "optional", "union", "sequence", "generator", "dict"]:
                    return True
                else:
                    if l.type.lower() == "dict" and TypeObject.isIdenticalSet(l.keytype, r.keytype) and TypeObject.isIdenticalSet(l.valuetype, r.valuetype):
                        return True
                    elif l.type.lower() in ["list", "tuple", "set", "iterable", "optional", "union", "sequence", "generator"] and TypeObject.isIdenticalSet(l.elementtype, r.elementtype):
                        return True
            elif (l.type.lower() == "literal" and typeequalmap[r.type.lower()] <= 3) or (r.type.lower() == "literal" and typeequalmap[l.type.lower()] <= 3):
                return True
            elif (l.type.lower() == "iterable" and typeequalmap[r.type.lower()] <= 17 and typeequalmap[r.type.lower()] >= 11) or (r.type.lower() == "iterable" and typeequalmap[l.type.lower()] <= 17 and typeequalmap[l.type.lower()] >= 11):
                return True
        if l.category == 0 and r.category == 2 and l.type.lower() == "type" and len(l.elementtype) == 1:
            return TypeObject.isIdentical(l.elementtype[0], r)
        if r.category == 0 and l.category == 2 and r.type.lower() == "type" and len(r.elementtype) == 1:
            return TypeObject.isIdentical(r.elementtype[0], l)
        return False
@staticmethod
def isSimilar(l,r):
if l.category == 0 and r.category == 0 and typeequalmap[l.type.lower()] == typeequalmap[r.type.lower()]:
return True
elif l.type.lower() == r.type.lower():
return True
else:
return False
    @staticmethod
    def isIdenticalSet( llist, rlist):
        """Set-wise structural equality of two type lists.

        Side effect: non-TypeObject entries are removed from BOTH input lists
        before comparing. An 'Any' on either side makes the sets count as equal.
        """
        invalidtypes = []
        for l in llist:
            if not isinstance(l, TypeObject):
                invalidtypes.append(l)
        for r in rlist:
            if not isinstance(r, TypeObject):
                invalidtypes.append(r)
        for t in invalidtypes:
            if t in llist:
                llist.remove(t)
        for t in invalidtypes:
            if t in rlist:
                rlist.remove(t)
        for l in llist:
            if l.type.lower() == "any":
                return True
            if not TypeObject.existSame(l, rlist) and l.type.lower() != "any":
                return False
        for r in rlist:
            if r.type.lower() == "any":
                return True
            if not TypeObject.existSame(r, llist) and r.type.lower() != "any":
                return False
        return True
    @staticmethod
    def existType(t, listr):
        """True when *t* (a type-name string or a TypeObject) occurs in *listr*.

        Builtins compare via typeequalmap; otherwise exact name equality.
        NOTE(review): a string *t* that is not a builtin name raises KeyError
        when compared against a builtin r — confirm callers only pass known names.
        """
        for r in listr:
            if isinstance(t, str):
                if (r.category == 0 and typeequalmap[t.lower()] == typeequalmap[r.type.lower()]) or (r.category == 2 and r.type == t):
                    return True
            elif isinstance(t, TypeObject):
                if (r.category == 0 and t.category == 0 and typeequalmap[t.type.lower()] == typeequalmap[r.type.lower()]) or (t.type == r.type):
                    return True
        return False
@staticmethod
def equal2type(t, typestr):
if typeequalmap[t.type.lower()] == typeequalmap[typestr.lower()]:
return True
return False
@staticmethod
def equal2onetype(t, typestrs):
for s in typestrs:
if typeequalmap[t.type.lower()] == typeequalmap[s.lower()]:
return True
return False
@staticmethod
def combineTypes(listt):
if len(listt) > 1:
typeobject = TypeObject("Union", 0)
typeobject.elementtype = listt
return typeobject
elif len(listt) == 1:
return listt[0]
else:
return None
@staticmethod
def usertypeCompare(l, rlist):
for r in rlist:
if l.category == r.category and l.category == 2 and ((l.type.split(".")[-1] == r.type.split(".")[-1])):
return True
return False
@staticmethod
def existIncluded(l, rlist):
for r in rlist:
if TypeObject.isIncluded(l,r):
return True
return False
    # if l is included in r; for generic types, List[a] is included in List[a, b]
    @staticmethod
    def isIncluded(l, r):
        """Subsumption check: X fits Optional[X]; a generic fits the same generic
        whose element (or key/value) types are a superset, recursively.

        NOTE(review): identical non-generic types fall through every branch and
        return None (falsy) — confirm that is intended rather than True.
        """
        if r.type == "Optional" and len(r.elementtype) == 1 and l.type == r.elementtype[0].type:
            return True
        elif l.type != r.type:
            return False
        elif l.type == r.type and l.type in ["List", "Tuple", "Dict", "Set", "Iterable", "Optional", "Union", "Sequence", "Generator"]:
            if l.type == "Dict":
                for t in l.keytype:
                    if not TypeObject.existSame(t, r.keytype) and not TypeObject.existOptional(t, r.keytype) and not TypeObject.existIncluded(t, r.keytype):
                        return False
                for t in l.valuetype:
                    if not TypeObject.existSame(t, r.valuetype) and not TypeObject.existOptional(t, r.valuetype) and not TypeObject.existIncluded(t, r.valuetype):
                        return False
                return True
            else:
                for t in l.elementtype:
                    if not TypeObject.existSame(t, r.elementtype) and not TypeObject.existOptional(t, r.elementtype) and not TypeObject.existIncluded(t, r.elementtype):
                        return False
                return True
    @staticmethod
    def isSetIncluded(llist, rlist):
        """True when every member of *rlist* is covered by *llist*: by structural
        match, numeric match, user-type name match, or subsumption (isIncluded)."""
        for r in rlist:
            if TypeObject.existSame(r, llist) or TypeObject.existNumbers(r, llist) or TypeObject.usertypeCompare(r, llist):
                continue
            else:
                included = False
                for l in llist:
                    if TypeObject.isIncluded(r, l):
                        included = True
                        break
                if included:
                    continue
                return False
        return True
    @staticmethod
    def isSetIncluded2(llist, rlist):
        """Variant of isSetIncluded using loose name matching (existSimilar)
        and exact numeric matching (existNumbers with exact=True)."""
        for r in rlist:
            if TypeObject.existSimilar(r, llist) or TypeObject.existNumbers(r, llist, exact = True) or TypeObject.usertypeCompare(r, llist):
                continue
            else:
                included = False
                for l in llist:
                    if TypeObject.isIncluded(r, l):
                        included = True
                        break
                if included:
                    continue
                return False
        return True
    @staticmethod
    def simplifyGenericType(t):
        """Normalize a generic type in place: dedupe/collapse its parameter lists,
        and rewrite Optional[..., None, ...] by dropping the explicit None.
        Non-TypeObject inputs are returned unchanged."""
        if not isinstance(t, TypeObject):
            return t
        if t.type in ["Set", "Tuple", "List", "Awaitable", "Iterable", "Union"]:
            t.elementtype = TypeObject.removeInclusiveTypes(t.elementtype)
        elif t.type == "Dict":
            t.keytype = TypeObject.removeInclusiveTypes(t.keytype)
            t.valuetype = TypeObject.removeInclusiveTypes(t.valuetype)
        elif t.type == "Optional":
            t.elementtype = TypeObject.removeRedundantTypes(t.elementtype)
            # None is implicit in Optional, so an explicit None element is redundant.
            rm = None
            for et in t.elementtype:
                if et.type == "None":
                    rm = et
                    break
            if rm != None and rm in t.elementtype:
                t.elementtype.remove(rm)
        return t
    @staticmethod
    def removeRedundantTypes(listt):
        """Simplify each type and drop structural duplicates, preserving order."""
        outs = []
        for t in listt:
            typeobj = TypeObject.simplifyGenericType(t)
            if not TypeObject.existSame(typeobj, outs):
                outs.append(typeobj)
        return outs
    # Example: if List[] and List[a] exist at the same time, then List[] is removed
    @staticmethod
    def removeInclusiveTypes(listt):
        """Deduplicate, then repeatedly drop any member subsumed by another
        (one removal per pass until a fixed point is reached)."""
        outs = TypeObject.removeRedundantTypes(listt)
        removed = True
        while removed:
            removed = False
            for i in range(0, len(outs)):
                for j in range(0, len(outs)):
                    if i != j and TypeObject.isIncluded(outs[i], outs[j]):
                        removed = True
                        target = outs[i]
                        break
            # Remove (at most) the last subsumed type found this pass, then rescan.
            if removed and target in outs:
                outs.remove(target)
        return outs
@staticmethod
def removeInvalidTypes(t):
if isinstance(t, TypeObject):
elementtype = []
for tt in t.elementtype:
if isinstance(tt, TypeObject):
elementtype.append(TypeObject.removeInvalidTypes(tt))
t.elementtype = elementtype
keytype = []
for tt in t.keytype:
if isinstance(tt, TypeObject):
keytype.append(TypeObject.removeInvalidTypes(tt))
t.keytype = keytype
valuetype = []
for tt in t.valuetype:
if isinstance(tt, TypeObject):
valuetype.append(TypeObject.removeInvalidTypes(tt))
return t
    def __str__(self):
        """Render this type as an annotation string (see resolveTypeName)."""
        return TypeObject.resolveTypeName(self)
@staticmethod
def resolveTypeName(t):
if isinstance(t, TypeObject):
t = TypeObject.removeInvalidTypes(t)
if t.category != 0:
return t.type
elif t.type.lower() not in exporttypemap:
raise TypeError("Unknown type: " + t.type)
typestr = exporttypemap[t.type.lower()]
if t.type.lower() in ["dict", "callable"]:
typestr = typestr + "["
if len(t.keytype) == 0:
typestr += ", "
elif len(t.keytype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.keytype[0]) + ", "
else:
typestr += "typing.Union["
for n in t.keytype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "], "
if len(t.valuetype) == 0:
pass
elif len(t.valuetype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.valuetype[0])
else:
typestr += "typing.Union["
for n in t.valuetype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "]"
typestr += "]"
elif t.type.lower() in ["set", "tuple", "list", "awaitable", "iterable", "sequence", "generator"]:
typestr = typestr + "["
if len(t.elementtype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.elementtype[0])
elif len(t.elementtype) == 2 and (t.elementtype[0].type == "None" or t.elementtype[1].type == "None"):
typestr += "typing.Optional["
for i in t.elementtype:
if i.type != "None":
typestr = typestr + TypeObject.resolveTypeName(i)
typestr += "]"
elif len(t.elementtype) >= 2:
typestr += "typing.Union["
for n in t.elementtype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "]"
typestr += "]"
elif t.type.lower() == "optional":
typestr += "["
if len(t.elementtype) > 1:
typestr += "typing.Union["
for n in t.elementtype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "]"
elif len(t.elementtype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.elementtype[0]) + "]"
else:
typestr += "]"
elif t.type.lower() == "union":
typestr += "["
if len(t.elementtype) == 0:
typestr += "]"
if len(t.elementtype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.elementtype[0]) + "]"
elif len(t.elementtype) > 1:
for n in t.elementtype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "]"
return typestr
else:
raise TypeError("t should be a TypeObject.")
    @staticmethod
    def resolveTypeNames(tlist):
        """Render a list of TypeObjects as a human-readable debug listing.

        Raises TypeError when *tlist* is not a list.
        """
        typestr = "Possible Types {"
        if isinstance(tlist, list):
            for i, t in enumerate(tlist):
                typestr = typestr + " " + str(i+1) + "." + str(t.category) + "- " + TypeObject.resolveTypeName(t)
        else:
            raise TypeError("tlist must be a list of TypeObject.")
        return typestr + " }"
    @staticmethod
    def resolveTypeNames2(tlist):
        """Render a list of TypeObjects as one "Union[...]" annotation string.

        Raises TypeError when *tlist* is not a list.
        """
        typestr = "Union["
        if isinstance(tlist, list):
            for i, t in enumerate(tlist):
                typestr = typestr + TypeObject.resolveTypeName(t) + ","
            # Drop the trailing comma left by the loop (absent for an empty list).
            if typestr[-1] == ",":
                typestr = typestr[:len(typestr)-1]
        else:
            raise TypeError("tlist must be a list of TypeObject.")
        return typestr + "]"
    @staticmethod
    def checkType(typestr):
        """Classify an annotation string: 'simple' (bare builtin), 'generic'
        (parameterized builtin), 'user-defined', or None when unparseable."""
        typeobjs = TypeObject.Str2Obj(typestr)
        if len(typeobjs) == 0:
            return None
        elif typeobjs[0].category == 0 and len(typeobjs[0].elementtype) == 0 and len(typeobjs[0].keytype) == 0 and len(typeobjs[0].valuetype) == 0:
            return "simple"
        elif typeobjs[0].category == 0:
            return "generic"
        elif typeobjs[0].category == 2:
            return "user-defined"
        else:
            return None
    @staticmethod
    def Str2Obj(typestr):
        """Parse a type annotation string into a list of TypeObjects.

        Unions and Optionals are flattened into multiple entries; generic
        parameters are split with bracket-depth tracking and parsed
        recursively. Unknown names become user-defined (category 2) types.
        Empty, over-long (>500 chars) or bare-Union inputs yield [].
        """
        strobjs = []
        # Normalize: strip spaces and common module prefixes.
        typestr = typestr.replace(" ", "")
        typestr = typestr.replace("builtins.", "")
        typestr = typestr.replace("typing_extensions.", "typing.")
        if len(typestr) > 2 and typestr[0] == "[" and typestr[-1] == "]":
            typestr = typestr[1:len(typestr) - 1]
        if typestr == None or typestr == "":
            return strobjs
        if len(typestr) > 500:
            #logger.warning("Type name is too long.")
            return strobjs
        if typestr in ["Union", "typing.Union"] and "[" not in typestr:
            return strobjs
        elif typestr.lower() in inputtypemap:
            strobjs.append(TypeObject(inputtypemap[typestr.lower()], 0))
            return strobjs
        elif "[" in typestr and "]" in typestr:
            typestr = typestr.replace("t.", "typing.")
            index1 = typestr.index("[")
            index2 = typestr.rfind("]")
            innerstr = typestr[index1 + 1:index2]
            if "Union" in typestr[:index1]:
                # Split the Union's arguments at top-level commas (depth-tracked).
                strs = innerstr.split(",")
                leftnum = 0
                rightnum = 0
                cur_str = ""
                for s in strs:
                    cur_str += s
                    leftnum += s.count("[")
                    rightnum += s.count("]")
                    if leftnum == rightnum:
                        strobjs += TypeObject.Str2Obj(cur_str)
                        cur_str = ""
                    else:
                        cur_str += ","
                return strobjs
            elif "Optional" in typestr[:index1] or "typing.Optional" in typestr[:index1]:
                # Optional[X] flattens to [X..., None].
                strobjs += TypeObject.Str2Obj(innerstr)
                strobjs.append(TypeObject("None", 0))
                return strobjs
            if typestr[:index1].lower() in inputtypemap:
                typeobj = TypeObject(inputtypemap[typestr[:index1].lower()], 0)
                if "Dict" in typestr[:index1] or "Mapping" in typestr[:index1] or "Callable" in typestr[:index1]:
                    if "," in innerstr:
                        # First depth-balanced chunk is the key type, the rest are values.
                        commaindex = innerstr.split(",")
                        leftnum = 0
                        rightnum = 0
                        cur_str = ""
                        count = 0
                        for s in commaindex:
                            cur_str += s
                            leftnum += s.count("[")
                            rightnum += s.count("]")
                            if leftnum == rightnum:
                                if count == 0:
                                    typeobj.keytype += TypeObject.Str2Obj(cur_str)
                                else:
                                    typeobj.valuetype += TypeObject.Str2Obj(cur_str)
                                count += 1
                                cur_str = ""
                            else:
                                cur_str += ","
                        strobjs.append(typeobj)
                        return strobjs
                    else:
                        return strobjs
                else:
                    strs = innerstr.split(",")
                    leftnum = 0
                    rightnum = 0
                    cur_str = ""
                    for s in strs:
                        cur_str += s
                        leftnum += s.count("[")
                        rightnum += s.count("]")
                        if leftnum == rightnum:
                            typeobj.elementtype += TypeObject.Str2Obj(cur_str)
                            cur_str = ""
                        else:
                            cur_str += ","
                    '''
                    if "[" in innerstr and "]" in innerstr:
                        typeobj.elementtype = TypeObject.Str2Obj(innerstr)
                    else:
                        strs = innerstr.split(",")
                        for s in strs:
                            typeobj.elementtype += TypeObject.Str2Obj(s)
                    '''
                    strobjs.append(typeobj)
                    return strobjs
            else:
                typeobj = TypeObject(typestr.replace("[typing.Any]", ""), 2)
                strobjs.append(typeobj)
                return strobjs
        elif typestr.startswith("typing") and "[" not in typestr and typestr.lower() in inputtypemap:
            typeobj = TypeObject(inputtypemap[typestr.lower()], 0)
            strobjs.append(typeobj)
            return strobjs
        else:
            typeobj = TypeObject(typestr, 2)
            strobjs.append(typeobj)
            return strobjs
    @staticmethod
    def DumpObject(typeobj):
        """Print a TypeObject and its generic parameters for debugging."""
        print("Type: " + typeobj.type)
        print("Element Type:" + TypeObject.resolveTypeNames(typeobj.elementtype))
        print("Key Type:" + TypeObject.resolveTypeNames(typeobj.keytype))
        print("Value Type:" + TypeObject.resolveTypeNames(typeobj.valuetype))
@staticmethod
def DumpOriObject(typeobj):
elementtypestr = ""
for t in typeobj.elementtype:
elementtypestr += TypeObject.DumpOriObject(t) + " [SEP] "
keytypestr = ""
for t in typeobj.keytype:
keytypestr += TypeObject.DumpOriObject(t) + " [SEP] "
valuetypestr = ""
for t in typeobj.valuetype:
valuetypestr += TypeObject.DumpOriObject(t) + " [SEP] "
return "@Type: {}, Element Type: [{}], Key Type: [{}], Value Type: [{}]@".format(typeobj.type, elementtypestr, keytypestr, valuetypestr)
@staticmethod
def DumpOriObjects(typeobjs):
typestr = ""
for i, obj in enumerate(typeobjs):
typestr += "{} - {} \n".format(i, TypeObject.DumpOriObject(obj))
return typestr
def dump(self):
obj = {"type": self.type, "category": self.category, "added": self.added, "compatibletypes": self.compatibletypes, "startnodename": self.startnodename, "startnodeorder": self.startnodeorder}
elementtype = []
for i in self.elementtype:
elementtype.append(i.dump())
obj["elementtype"] = elementtype
keytype = []
for i in self.keytype:
keytype.append(i.dump())
obj["keytype"] = keytype
valuetype = []
for i in self.valuetype:
valuetype.append(i.dump())
obj["valuetype"] = valuetype
return obj
@staticmethod
def load(dictobj):
obj = TypeObject(dictobj["type"], dictobj["category"], added = dictobj["added"])
obj.compatibletypes = dictobj["compatibletypes"]
obj.startnodename = dictobj["startnodename"]
obj.startnodeorder = dictobj["startnodeorder"]
for i in dictobj["elementtype"]:
obj.elementtype.append(TypeObject.load(i))
for i in dictobj["keytype"]:
obj.keytype.append(TypeObject.load(i))
for i in dictobj["valuetype"]:
obj.valuetype.append(TypeObject.load(i))
return obj
| StarcoderdataPython |
6513402 | import pytest
from steputils.strings import step_encoder, step_decoder, StringDecodingError, StringBuffer, EOF
def test_buffer():
b = StringBuffer('test')
assert b.look() == 't'
assert b.look(1) == 'e'
assert b.get() == 't'
assert b.look() == 'e'
assert b.get() == 'e'
assert b.get() == 's'
assert b.get() == 't'
assert b.get() == EOF
assert b.get() == EOF
assert b.get() == EOF
assert b.look() == EOF
assert b.look(3) == EOF
def test_string_encoder():
assert step_encoder('ABC') == 'ABC'
assert step_encoder('"') == '"'
assert step_encoder("'") == "''"
assert step_encoder('\'') == '\'\''
assert step_encoder('\\') == '\\\\'
assert step_encoder('ABCÄ') == 'ABC\\X2\\00C4\\X0\\'
assert step_encoder('ABCÄÖ') == 'ABC\\X2\\00C400D6\\X0\\'
assert step_encoder('CÄÖC') == 'C\\X2\\00C400D6\\X0\\C'
assert step_encoder('CÄ\\ÖC') == 'C\\X2\\00C4\\X0\\\\\\\\X2\\00D6\\X0\\C'
assert step_encoder('CÄ\'ÖC') == 'C\\X2\\00C4\\X0\\\'\'\\X2\\00D6\\X0\\C'
def test_string_decoder():
assert step_decoder('ABC') == 'ABC'
assert step_decoder("\"") == "\""
assert step_decoder("'") == "'"
assert step_decoder("''") == "''", "Apostrophe decoding has to be done by the lexer."
assert step_decoder("x''x") == "x''x"
assert step_decoder("x\"x") == "x\"x"
assert step_decoder("\\\\") == "\\"
assert step_decoder("x\\\\x") == "x\\x"
assert step_decoder('ABC\\X2\\00C4\\X0\\') == 'ABCÄ'
assert step_decoder('ABC\\X2\\00C400D6\\X0\\') == 'ABCÄÖ'
assert step_decoder('C\\X2\\00C400D6\\X0\\C') == 'CÄÖC'
assert step_decoder('C\\X2\\00C4\\X0\\\\\\\\X2\\00D6\\X0\\C') == 'CÄ\\ÖC'
# does not decode escaped apostrophes '
assert step_decoder('C\\X2\\00C4\\X0\\\'\'\\X2\\00D6\\X0\\C') == 'CÄ\'\'ÖC'
def test_extended_string_decoderx2():
assert step_decoder("\\X2\\00E4\\X0\\") == '\u00E4'
def test_extended_string_decoder_multi_x2():
assert step_decoder("\\X2\\00E400E4\\X0\\") == '\u00E4\u00E4'
def test_extended_string_decoder_x4():
assert step_decoder("\\X4\\000000E4\\X0\\") == '\u00E4'
def test_extended_string_decoder_error():
# invalid count of hex chars
pytest.raises(StringDecodingError, step_decoder, "\\X2\\0E4\\X0\\")
pytest.raises(StringDecodingError, step_decoder, "\\X4\\00000E4\\X0\\")
if __name__ == '__main__':
pytest.main([__file__])
| StarcoderdataPython |
11371002 | <reponame>cheperuiz/unlearn-python
from dataclasses import dataclass, field
from typing import List
@dataclass
class Ingredient:
name: str = field()
@dataclass
class SliceableIngredient(Ingredient):
slice_into: List[str] = field(default_factory=list, repr=False)
def __init__(self, name, slice_into, *args, **kwargs):
super().__init__(name, *args, **kwargs)
self.slice_into = slice_into
if not len(self.slice_into):
raise TypeError("__init__() missing 1 required positional argument: 'slice_into'")
def to_slices(self):
slices = self.slice_into[:]
self.slice_into = []
print(f"Slicing {self} into {len(slices)} parts.")
return [Ingredient(s) for s in slices]
@dataclass
class GrillableIngredient(Ingredient):
temp: int = field(default=4)
BAD_DONENESS = ["raw", "burned"]
def apply_heat(self, delta_temp=10):
self.temp += delta_temp
print(f"Heating {self}.")
def acceptable_doneness(self):
return self.doneness not in self.BAD_DONENESS
def __repr__(self):
return f"{self.__class__.__name__}(temp='{self.temp}', doneness='{self.doneness}')"
@property
def doneness(self):
if self.temp < 40:
return "raw"
if self.temp < 50:
return "rare"
if self.temp < 60:
return "medium"
if self.temp < 70:
return "well-done"
return "burned"
@dataclass(repr=False)
class Bun(SliceableIngredient):
name: str = "bun"
slice_into: List[str] = field(default_factory=lambda: ["bun-bottom", "bun-top"])
@dataclass(repr=False)
class Lettuce(SliceableIngredient):
name: str = "lettuce"
slice_into: List[str] = field(default_factory=lambda: 10 * ["lettuce-slice"])
@dataclass(repr=False)
class Tomato(SliceableIngredient):
name: str = "tomato"
slice_into: List[str] = field(default_factory=lambda: 5 * ["tomato-slice"])
@dataclass(repr=False)
class Onion(SliceableIngredient):
name: str = "onion"
slice_into: List[str] = field(default_factory=lambda: 5 * ["onion-ring"])
@dataclass(repr=False)
class Pickle(SliceableIngredient):
name: str = "pickle"
slice_into: List[str] = field(default_factory=lambda: 5 * ["pickle-slice"])
@dataclass(repr=False)
class Cheese(SliceableIngredient):
name: str = "cheese"
slice_into: List[str] = field(default_factory=lambda: 5 * ["cheese-slice"])
@dataclass(repr=False)
class BeefPatty(GrillableIngredient):
name: str = "beefpatty"
@dataclass(repr=False)
class ChickenPatty(GrillableIngredient):
name: str = "chickenpatty"
@dataclass(repr=False)
class VeggiePatty(GrillableIngredient):
name: str = "veggiepatty"
@dataclass(repr=False)
class Bacon(GrillableIngredient):
name: str = "bacon"
| StarcoderdataPython |
6667970 | SECS_PER_MIN = 60
SECS_PER_HOUR = SECS_PER_MIN * 60
SECS_PER_DAY = SECS_PER_HOUR * 24
def secs_to_str(secs):
days = int(secs) // SECS_PER_DAY
secs -= days * SECS_PER_DAY
hours = int(secs) // SECS_PER_HOUR
secs -= hours * SECS_PER_HOUR
mins = int(secs) // SECS_PER_MIN
secs -= mins * SECS_PER_MIN
if days > 0:
return '%dd%02dh%02dm' % (days, hours, mins)
elif hours > 0:
return '%dh%02dm%02ds' % (hours, mins, int(secs))
elif mins > 0:
return '%dm%02ds' % (mins, int(secs))
elif secs >= 1:
return '%.1fs' % secs
return '%.2fs' % secs
def get_prf(tp, fp, fn, get_str=False):
    """Get precision, recall, f1 from true pos, false pos, false neg."""
    # Guard each ratio against a zero denominator, defaulting to 0.
    precision = float(tp) / (tp + fp) if tp + fp else 0
    recall = float(tp) / (tp + fn) if tp + fn else 0
    f1 = 2 * precision * recall / (precision + recall) if precision + recall else 0
    if get_str:
        return '\n'.join([
            'Precision: %.2f%%' % (100.0 * precision),
            'Recall : %.2f%%' % (100.0 * recall),
            'F1 : %.2f%%' % (100.0 * f1)])
    return precision, recall, f1
322797 | # imports
import author_rank as ar
import json
# Demo script: build an AuthorRank graph from a sample co-authorship
# network and print the top-ranked authors.
# read in sample json
with open("../data/author_network.json", 'r') as f:
    data = json.load(f)
# create an AuthorRank object
ar_graph = ar.Graph()
# fit to the data
ar_graph.fit(
    documents=data["documents"]
)
# get the top authors for a set of documents
# top is assumed to be a (authors, scores) pair — TODO confirm against author_rank docs
top = ar_graph.top_authors(normalize_scores=True)
# print the results
for i, j in zip(top[0], top[1]):
    print(i, j)
| StarcoderdataPython |
6642005 | <filename>chainercb/policies/linear_thompson.py<gh_stars>1-10
from math import factorial
from chainer import cuda, functions as F, as_variable
from chainercb.policies.linear import LinearPolicy
from chainercb.util import select_items_per_row
class ThompsonPolicy(LinearPolicy):
    """
    A strictly linear policy that uses thompson sampling to draw actions.
    """
    def draw(self, x):
        """Draw one action per row of x: argmax over per-action Thompson samples."""
        xp = cuda.get_array_module(x)
        # One Thompson sample per action, stacked into an (n, k) matrix.
        ts = [self.regressors[a].thompson(x).data for a in range(self.k)]
        ts = [xp.reshape(t, (t.shape[0], 1)) for t in ts]
        ts = xp.hstack(ts)
        return F.argmax(ts, axis=1)
    def propensity(self, x, action):
        """Probability that each given action is the argmax of the Thompson draw."""
        xp = cuda.get_array_module(x)
        """: type: numpy"""
        # Compute independent thompson sample distributions
        z_means = xp.zeros((x.shape[0], self.k))
        z_std = xp.zeros((x.shape[0], self.k))
        for a in range(self.k):
            m, s = self.regressors[a].thompson_distribution(x)
            z_means[:, a] = m.data
            z_std[:, a] = s.data
        # Compute the argmax probability
        # Pairwise mean differences / std sums with the diagonal removed:
        # P(sample_a > sample_b) via the Gaussian CDF (erf), multiplied over b != a.
        m_i, m_j = _tiles(z_means)
        s_i, s_j = _tiles(z_std)
        c_m = _cut_diagonals(m_i - m_j).data
        c_s = _cut_diagonals(s_i + s_j).data
        # NOTE(review): `opts` is computed but never used — looks vestigial; verify.
        opts = factorial(self.k - 1)
        res = xp.prod(0.5 * (1 + F.erf(c_m / (xp.sqrt(2) * c_s)).data), axis=2)
        a = F.reshape(action, (action.shape[0], 1))
        res = select_items_per_row(as_variable(res), a)
        return F.reshape(res, action.shape)
    def log_propensity(self, x, action):
        """Log of propensity(); no numerical-stability shift is applied."""
        return F.log(self.propensity(x, action))
def _tiles(x):
    """Broadcast a (n, k) matrix into two (n, k, k) tensors.

    Returns (x_i, x_j) where x_i varies along axis 1 and x_j along axis 2,
    so x_i - x_j gives all pairwise differences per row.
    """
    xp = cuda.get_array_module(x)
    x = x.data
    x_i = xp.reshape(x, (x.shape[0], x.shape[1], 1))
    x_j = xp.reshape(x, (x.shape[0], 1, x.shape[1]))
    x_i = xp.broadcast_to(x_i, (x.shape[0], x.shape[1], x.shape[1]))
    x_j = xp.broadcast_to(x_j, (x.shape[0], x.shape[1], x.shape[1]))
    return as_variable(x_i), as_variable(x_j)
def _cut_diagonals(x):
    """Drop the diagonal of each (k, k) slice of a (n, k, k) tensor.

    Returns an (n, k, k-1) tensor containing the off-diagonal entries.
    """
    xp = cuda.get_array_module(x)
    x = x.data
    # Boolean mask selecting everything except the per-slice diagonal.
    e = xp.reshape(xp.eye(x.shape[1]), (1, x.shape[1], x.shape[2]))
    e = xp.broadcast_to(e, x.shape)
    res = xp.reshape(x[e == 0.0], (x.shape[0], x.shape[1], x.shape[2] - 1))
    return as_variable(res)
| StarcoderdataPython |
1816219 | from django.http.response import HttpResponseRedirect
from django.shortcuts import render
from django.views.generic import TemplateView, CreateView
from .forms import SignUpForm
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib import messages
# Create your views here.
class Home(TemplateView):
    """Public landing page."""
    template_name = "basics/home.html"
class Test(TemplateView):
    """Scratch/test page."""
    template_name = "basics/test.html"
class DashboardView(LoginRequiredMixin, TemplateView):
    """Authenticated dashboard; anonymous users are redirected to login."""
    template_name = "basics/dashboard.html"
    login_url = reverse_lazy('Login')
class SettingsView(LoginRequiredMixin,TemplateView):
    """Authenticated settings page; anonymous users are redirected to login."""
    template_name = 'basics/settings.html'
    login_url = reverse_lazy('Login')
class SignUpView(CreateView):
    """User registration via SignUpForm; redirects to Home on success."""
    form_class = SignUpForm
    success_url = reverse_lazy('Home')
    template_name = 'basics/register.html'
from django.http import HttpResponse
from django.shortcuts import redirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse_lazy
from django.contrib import messages
from .forms import UserForm, ProfileForm
from django.contrib.auth.models import User
from user.models import Profile
class ProfileView(LoginRequiredMixin, TemplateView):
    """Read-only profile page for the logged-in user."""
    template_name = 'basics/profile.html'
class ProfileUpdateView(LoginRequiredMixin, TemplateView):
    """Edit the logged-in user's account and profile with two bound forms."""
    user_form = UserForm
    profile_form = ProfileForm
    template_name = 'basics/profile-update.html'

    def post(self, request):
        submitted = request.POST or None
        uploaded = request.FILES or None
        uform = UserForm(submitted, instance=request.user)
        pform = ProfileForm(submitted, uploaded, instance=request.user.profile)
        if uform.is_valid() and pform.is_valid():
            uform.save()
            pform.save()
            messages.success(request, 'Your profile was successfully updated!')
            return HttpResponseRedirect(reverse_lazy('Profile'))
        # Re-render with the bound (possibly invalid) forms so errors show.
        context = self.get_context_data(user_form=uform, profile_form=pform)
        return self.render_to_response(context)

    def get(self, request, *args, **kwargs):
        # GET reuses the POST path; with no POST data the forms render unbound.
        return self.post(request, *args, **kwargs)
4863157 | # created by <NAME>
# hide or show a widget
def hide_widget(wid, dohide=True):
    """Hide or re-show *wid* by zeroing/restoring its layout attributes.

    Hiding stashes (height, size_hint_y, opacity, disabled) on
    ``wid.saved_attrs`` and collapses the widget; showing restores them.
    Calls are idempotent: hiding twice or showing an unhidden widget is a no-op.
    """
    attrs = ('height', 'size_hint_y', 'opacity', 'disabled')
    if hasattr(wid, 'saved_attrs'):
        if not dohide:
            for name, value in zip(attrs, wid.saved_attrs):
                setattr(wid, name, value)
            del wid.saved_attrs
    elif dohide:
        wid.saved_attrs = tuple(getattr(wid, name) for name in attrs)
        for name, value in zip(attrs, (0, None, 0, True)):
            setattr(wid, name, value)
# inheriting this class, allows the class that inherited it to be cloned.
# inheriting this class, allows the class that inherited it to be cloned.
class CloneWidget:
    """Mixin that records constructor kwargs so an instance can be re-created.

    NOTE(review): super().__init__(**kwargs) forwards kwargs up the MRO, so
    this only works when mixed with a class that accepts them — confirm usage.
    """
    def __init__(self, **kwargs):
        # Keep the original kwargs for copy(), and mirror them as attributes.
        self.kwargs = kwargs
        self.__dict__.update(kwargs)
        super().__init__(**kwargs)
    def copy(self):
        """Return a fresh instance built with the same constructor kwargs."""
        return self.__class__(**self.kwargs)
| StarcoderdataPython |
6435633 | <reponame>Nic30/hwtHdlParsers
class RedefinitionErr(Exception):
    """Raised when a key would be rebound to a different object."""
    pass
class NonRedefDict(dict):
    """Dict that refuses to rebind an existing key to a *different* object.

    Comparison is by identity, so storing the very same object again is
    allowed; anything else raises RedefinitionErr.
    """
    def __setitem__(self, key, val):
        already_bound = key in self
        if already_bound and self[key] is not val:
            raise RedefinitionErr(key)
        super().__setitem__(key, val)
| StarcoderdataPython |
3586844 | ## This script requires root
import docker
import click
import tempfile
client = docker.from_env()
def export_flatduck(image_name):
    """Pull *image_name* from the registry and archive it to a local tarball.

    Fixes over the original:
    - pulls the requested image (the original passed the literal string
      'image_name' instead of the variable);
    - streams tar chunks via ``Image.save()`` (the original iterated the
      Image object itself);
    - uses a context manager so the file is closed on error;
    - echoes the actual image name (the original f-string printed a stray '$').
    """
    image = client.images.pull(image_name)
    # NOTE(review): output path is still hard-coded, as in the original.
    with open('/tmp/busybox-latest.tar', 'wb') as f:
        for chunk in image.save():
            f.write(chunk)
    click.echo(f"Exported {image_name} as:")
3578637 | <reponame>smelehy/wifi-scan
# Shell command template and default interface for wifi scanning.
WIFI_SCAN_CMD = 'sudo iwlist %(nwinterface)s scan'
WIFI_CARD_NAME = 'wlan0'

# wifi scan parsing rules.
# Each entry maps an ordinal to (field label, options). When the scanner finds
# the label in the raw iwlist output, it takes the text after 'parse_char' up
# to 'end_char', strips any 'replace_char', and (optionally) reports it under
# 'friendly_name'.
STR_RULES = {
    0: ('Cell',
        {'parse_char': ' ', 'end_char': '-'}),
    1: ('ESSID',
        {'parse_char': ':', 'replace_char': '"', 'end_char': '\n',
         'friendly_name': 'Name'}),
    2: ('Encryption key',
        {'parse_char': ':', 'end_char': '\n',
         'friendly_name': 'Secured'}),
    3: ('Quality',
        {'parse_char': '=', 'end_char': 'Sig',
         'friendly_name': 'Signal Quality'}),
    4: ('Signal level',
        {'parse_char': '=', 'replace_char': 'dBm', 'end_char': '\n'}),
}
| StarcoderdataPython |
8150253 | import torch
import numpy as np
from typing import Tuple, Union
from torchvision import transforms as T
def to_3dim(X: torch.Tensor, target_size: Tuple[int, int, int], dtype=torch.float32) -> torch.Tensor:
    """
    Rearragne data matrix X of size (n_styles*dim_x, n_contents)
    to (n_styles, n_contents, dim_x)
    Args:
    - X: torch.Tensor of 2dim data matrix
    - target_size: tuple of n_style, n_contents, dim_x
    """
    assert X.ndim == 2
    n_styles, n_contents, dim_x = target_size
    assert X.shape[0] == n_styles * dim_x
    assert X.shape[1] == n_contents
    # Rows are grouped per style, so a reshape exposes (style, pixel, content);
    # swapping the last two axes gives (style, content, pixel).
    rearranged = X.reshape(n_styles, dim_x, n_contents).permute(0, 2, 1)
    return rearranged.contiguous().to(dtype)
def unnormalize(x: Union[torch.Tensor, np.ndarray],
                mean: Union[torch.Tensor, np.ndarray],
                std: Union[torch.Tensor, np.ndarray]):
    """
    :param x: a mini-batch of 3Dim torch.Tensor in order of (bs, c, h, w)
    :param mean: channelwise_mean; (c,)
    :param std: channelwise_std; (c,)
    :return: a mini-batch of unnormalized 3dim tensors; same shape as input x
    """
    # Inverse of Normalize(mean, std): applying Normalize(-mean/std, 1/std)
    # computes (x - (-mean/std)) / (1/std) == x * std + mean.
    return T.Normalize((-mean / std).tolist(), (1.0 / std).tolist())(x)
def to_monochrome(x: torch.Tensor,
                  color: str,
                  preserve_energy: bool = False) -> torch.Tensor:
    """
    Transform a single-channel grayscale (3dim, (1,h,w) tensor)
    to a mono-chrome 3channel tensor (3,h,w), of either gray, red, green, blue.
    - If color is one of [red, green, blue], the input channel is placed into
      the matching output channel; the other two stay zero.
    - If color is gray, the input is copied into all three channels; with
      preserve_energy=True each channel gets input/3 so the total is unchanged.
    Args:
        x: a single 3dim torch.Tensor with a single channel; (1, h, w)
        color: str - one of ['red', 'green', 'blue', 'gray'] (case-insensitive)
    returns:
        - a single (3dim) torch.Tensor with 3 channels: (3, h, w)
    """
    channel_of = {"red": 0, "green": 1, "blue": 2}
    key = color.lower()
    out = torch.zeros_like(x).repeat((3, 1, 1))
    if key == 'gray':
        divisor = 3.0 if preserve_energy else 1.0
        out[:] = x / divisor  # broadcasts (1,h,w) across all three channels
    else:
        out[channel_of[key]] = x
    return out
11283347 | import sys
from data_storing.assets.common import Timespan
import fundamentals.miscellaneous as fund_utils
from utilities.common_methods import getDebugInfo
from utilities.common_methods import Methods as methods
from utilities import log
def get_return_on_assets(equity, year):
    """
    @fn get_return_on_assets
    @brief This ratio is simply net income divided by average total assets.
    It shows how well the assets are being utilized to generate profit.
    Returns None when any required statement or figure is unavailable,
    or (implicitly) when an exception is logged.
    """
    try:
        pr_year = year - 1
        # Balance sheets for this year and last; the average of the two
        # total-assets figures is used as the denominator.
        balance_sheet_curr_y = fund_utils.gm.get_annual_financial_statement(equity.fundamentals.balance_sheet, year)
        balance_sheet_prev_y = fund_utils.gm.get_annual_financial_statement(equity.fundamentals.balance_sheet, pr_year)
        income_statement = fund_utils.gm.get_annual_financial_statement(equity.fundamentals.income_statement, year)
        if not balance_sheet_curr_y or not balance_sheet_prev_y or not income_statement:
            return None
        # get the net income
        net_income = methods.validate(income_statement.net_income)
        # get the total assets
        total_assets_curr_year = methods.validate(balance_sheet_curr_y.total_assets)
        total_assets_prev_year = methods.validate(balance_sheet_prev_y.total_assets)
        return_on_assets = None
        if net_income is not None and \
           total_assets_curr_year is not None and \
           total_assets_prev_year is not None:
            # ROA = net_income / average(total_assets).  NOTE(review): the
            # epsilon only guards an exact-zero denominator; it is negligible
            # against real asset totals.
            return_on_assets = 2 * net_income / (total_assets_curr_year + total_assets_prev_year + sys.float_info.epsilon)
        return return_on_assets
        ###### OR QUICKER ######
        # # get the latest return on assets ttm
        # ratios = equity.fundamentals.ratios
        # roa_ttm = methods.validate(ratios.return_on_assets_ttm)
        # return roa_ttm
    except Exception as e:
        log.error(f"There is a problem in the code!: {e}\n{getDebugInfo()}")
| StarcoderdataPython |
3319746 | # Copyright (c) 2018, salesforce.com, inc.
# All rights reserved.
# Licensed under the BSD 3-Clause license.
# For full license text, see the LICENSE file in the repo root
# or https://opensource.org/licenses/BSD-3-Clause
import torch
from torch.autograd import Variable
import matchbox
from matchbox import functional as F
from matchbox import MaskedBatch
from matchbox.test_utils import mb_test, mb_assert
import random
def test_embedding():
    """Masked-batch embedding lookup agrees with per-example lookup."""
    xs = [Variable(torch.LongTensor(1, random.randint(1, 3)).random_(5))
          for i in range(4)]
    W = Variable(torch.rand(5, 2))
    xb = MaskedBatch.fromlist(xs, (True,))
    mb_assert(F.embedding, (xs, W), (xb, W), 4)
def test_mean():
    """Mean over the last (static) dimension works on masked batches."""
    mb_test(lambda x: x.mean(2),
            (4, (True, 3), (False, 2)))
def test_std():
    """Std over the last (static) dimension works on masked batches."""
    mb_test(lambda x: x.std(2),
            (4, (True, 3), (False, 2)))
def test_matmul():
    """Batched matmul with complementary dynamic dimensions."""
    mb_test(lambda a, b: a @ b,
            (4, (True, 3), (False, 2)), (4, (False, 2), (True, 3)))
def test_transpose():
    """Transposing the two trailing dimensions of a masked batch."""
    mb_test(lambda x: x.transpose(1, 2),
            (4, (True, 3), (False, 2)))
def test_causal_mask():
    """Causal masking composed with softmax-attention style products."""
    mb_test(lambda x: x.causal_mask(2, 1).softmax() @ x,
            (4, (False, 3), (False, 3)))
    mb_test(lambda x: (x @ x.transpose(1, 2)).causal_mask(2, 1).softmax() @ x,
            (4, (True, 3), (False, 2)))
| StarcoderdataPython |
6695932 | <gh_stars>0
# Copyright 2013 Violin Memory, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Violin Memory iSCSI Driver for OpenStack Cinder
Provides iSCSI specific LUN services for V6000 series flash arrays.
This driver requires VMOS v6.3.0.4 or newer software on the array.
You will need to install the Violin Memory REST client library:
sudo pip install vmemclient
Set the following in the cinder.conf file to enable the VMEM V6000
ISCSI Driver along with the required flags:
volume_driver=cinder.volume.drivers.violin.v6000_iscsi.V6000ISCSIDriver
NOTE: this driver file requires the use of synchronization points for
certain types of backend operations, and as a result may not work
properly in an active-active HA configuration. See OpenStack Cinder
driver documentation for more information.
"""
import random
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import units
from cinder import context
from cinder.db.sqlalchemy import models
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import utils
from cinder.volume import driver
from cinder.volume.drivers.san import san
from cinder.volume.drivers.violin import v6000_common
LOG = logging.getLogger(__name__)
class V6000ISCSIDriver(driver.ISCSIDriver):
    """Executes commands relating to iSCSI-based Violin Memory Arrays.
    Version history:
        1.0 - Initial driver
        1.0.1 - Fixes polling for export completion
    """
    VERSION = '1.0.1'
    # All exports share one backend target group of this name.
    TARGET_GROUP_NAME = 'openstack'
    def __init__(self, *args, **kwargs):
        super(V6000ISCSIDriver, self).__init__(*args, **kwargs)
        self.array_info = []
        self.gateway_iscsi_ip_addresses_mga = []
        self.gateway_iscsi_ip_addresses_mgb = []
        self.stats = {}
        self.configuration.append_config_values(v6000_common.violin_opts)
        self.configuration.append_config_values(san.san_opts)
        self.common = v6000_common.V6000Common(self.configuration)
        LOG.info(_LI("Initialized driver %(name)s version: %(vers)s."),
                 {'name': self.__class__.__name__, 'vers': self.VERSION})
    def do_setup(self, context):
        """Any initialization the driver does while starting."""
        super(V6000ISCSIDriver, self).do_setup(context)
        self.common.do_setup(context)
        # Collect the usable iSCSI IPs on each gateway so exports can be
        # spread across both nodes (see _get_iscsi_target).
        self.gateway_iscsi_ip_addresses_mga = self._get_active_iscsi_ips(
            self.common.mga)
        for ip in self.gateway_iscsi_ip_addresses_mga:
            self.array_info.append({"node": self._get_hostname('mga'),
                                    "addr": ip,
                                    "conn": self.common.mga})
        self.gateway_iscsi_ip_addresses_mgb = self._get_active_iscsi_ips(
            self.common.mgb)
        for ip in self.gateway_iscsi_ip_addresses_mgb:
            self.array_info.append({"node": self._get_hostname('mgb'),
                                    "addr": ip,
                                    "conn": self.common.mgb})
        # setup global target group for exports to use
        self._create_iscsi_target_group()
    def check_for_setup_error(self):
        """Returns an error if prerequisites aren't met."""
        self.common.check_for_setup_error()
        bn = "/vshare/config/iscsi/enable"
        resp = self.common.vip.basic.get_node_values(bn)
        if resp[bn] is not True:
            raise exception.ViolinInvalidBackendConfig(
                reason=_('iSCSI is not enabled'))
        if len(self.gateway_iscsi_ip_addresses_mga) == 0:
            raise exception.ViolinInvalidBackendConfig(
                reason=_('no available iSCSI IPs on mga'))
        if len(self.gateway_iscsi_ip_addresses_mgb) == 0:
            raise exception.ViolinInvalidBackendConfig(
                reason=_('no available iSCSI IPs on mgb'))
    def create_volume(self, volume):
        """Creates a volume."""
        self.common._create_lun(volume)
    def delete_volume(self, volume):
        """Deletes a volume."""
        self.common._delete_lun(volume)
    def extend_volume(self, volume, new_size):
        """Extends a volume to new_size."""
        self.common._extend_lun(volume, new_size)
    def create_snapshot(self, snapshot):
        """Creates a snapshot from an existing volume."""
        self.common._create_lun_snapshot(snapshot)
    def delete_snapshot(self, snapshot):
        """Deletes a snapshot."""
        self.common._delete_lun_snapshot(snapshot)
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot."""
        ctxt = context.get_admin_context()
        snapshot['size'] = snapshot['volume']['size']
        self.common._create_lun(volume)
        self.copy_volume_data(ctxt, snapshot, volume)
    def create_cloned_volume(self, volume, src_vref):
        """Creates a full clone of the specified volume."""
        ctxt = context.get_admin_context()
        self.common._create_lun(volume)
        self.copy_volume_data(ctxt, src_vref, volume)
    def ensure_export(self, context, volume):
        """Synchronously checks and re-exports volumes at cinder start time."""
        pass
    def create_export(self, context, volume, connector):
        """Exports the volume."""
        pass
    def remove_export(self, context, volume):
        """Removes an export for a logical volume."""
        pass
    def initialize_connection(self, volume, connector):
        """Initializes the connection (target<-->initiator)."""
        igroup = None
        if self.configuration.use_igroups:
            #
            # Most drivers don't use igroups, because there are a
            # number of issues with multipathing and iscsi/fcp where
            # lun devices either aren't cleaned up properly or are
            # stale (from previous scans).
            #
            # If the customer really wants igroups for whatever
            # reason, we create a new igroup for each host/hypervisor.
            # Every lun that is exported to the particular
            # hypervisor/host will be contained in this igroup. This
            # should prevent other hosts from seeing luns they aren't
            # using when they perform scans.
            #
            igroup = self.common._get_igroup(volume, connector)
            self._add_igroup_member(connector, igroup)
        tgt = self._get_iscsi_target()
        target_name = self.TARGET_GROUP_NAME
        # A models.Volume is a real LUN; anything else is treated as a
        # snapshot export.
        if isinstance(volume, models.Volume):
            lun = self._export_lun(volume, connector, igroup)
        else:
            lun = self._export_snapshot(volume, connector, igroup)
        iqn = "%s%s:%s" % (self.configuration.iscsi_target_prefix,
                           tgt['node'], target_name)
        self.common.vip.basic.save_config()
        properties = {}
        properties['target_discovered'] = False
        properties['target_portal'] = '%s:%d' \
            % (tgt['addr'], self.configuration.iscsi_port)
        properties['target_iqn'] = iqn
        properties['target_lun'] = lun
        properties['volume_id'] = volume['id']
        properties['auth_method'] = 'CHAP'
        properties['auth_username'] = ''
        properties['auth_password'] = ''
        return {'driver_volume_type': 'iscsi', 'data': properties}
    def terminate_connection(self, volume, connector, force=False, **kwargs):
        """Terminates the connection (target<-->initiator)."""
        if isinstance(volume, models.Volume):
            self._unexport_lun(volume)
        else:
            self._unexport_snapshot(volume)
        self.common.vip.basic.save_config()
    def get_volume_stats(self, refresh=False):
        """Get volume stats."""
        if refresh or not self.stats:
            self._update_stats()
        return self.stats
    def _create_iscsi_target_group(self):
        """Creates a new target for use in exporting a lun.
        Create an HA target on the backend that will be used for all
        lun exports made via this driver.
        The equivalent CLI commands are "iscsi target create
        <target_name>" and "iscsi target bind <target_name> to
        <ip_of_mg_eth_intf>".
        """
        v = self.common.vip
        target_name = self.TARGET_GROUP_NAME
        # Idempotent: skip creation if the target group already exists.
        bn = "/vshare/config/iscsi/target/%s" % target_name
        resp = self.common.vip.basic.get_node_values(bn)
        if resp:
            LOG.debug("iscsi target group %s already exists.", target_name)
            return
        LOG.debug("Creating iscsi target %s.", target_name)
        try:
            self.common._send_cmd_and_verify(v.iscsi.create_iscsi_target,
                                             self._wait_for_target_state,
                                             '', [target_name], [target_name])
        except Exception:
            LOG.exception(_LE("Failed to create iscsi target!"))
            raise
        try:
            self.common._send_cmd(self.common.mga.iscsi.bind_ip_to_target,
                                  '', target_name,
                                  self.gateway_iscsi_ip_addresses_mga)
            self.common._send_cmd(self.common.mgb.iscsi.bind_ip_to_target,
                                  '', target_name,
                                  self.gateway_iscsi_ip_addresses_mgb)
        except Exception:
            LOG.exception(_LE("Failed to bind iSCSI targets!"))
            raise
    def _get_iscsi_target(self):
        """Get a random target IP for OpenStack to connect to.
        For the non-multipath case we pick a single random target for
        the OpenStack infrastructure to use. This at least allows us
        to evenly distribute LUN connections across the storage
        cluster.
        """
        return self.array_info[random.randint(0, len(self.array_info) - 1)]
    @utils.synchronized('vmem-export')
    def _export_lun(self, volume, connector=None, igroup=None):
        """Generates the export configuration for the given volume.
        The equivalent CLI command is "lun export container
        <container_name> name <lun_name>"
        Arguments:
            volume -- volume object provided by the Manager
            connector -- connector object provided by the Manager
            igroup -- name of igroup to use for exporting
        Returns:
            lun_id -- the LUN ID assigned by the backend
        """
        lun_id = -1
        export_to = ''
        v = self.common.vip
        # Export to the igroup when one is in use, otherwise directly to
        # the host's initiator IQN.
        if igroup:
            export_to = igroup
        elif connector:
            export_to = connector['initiator']
        else:
            raise exception.Error(_("No initiators found, cannot proceed"))
        target_name = self.TARGET_GROUP_NAME
        LOG.debug("Exporting lun %s.", volume['id'])
        try:
            self.common._send_cmd_and_verify(
                v.lun.export_lun, self.common._wait_for_export_state, '',
                [self.common.container, volume['id'], target_name,
                 export_to, 'auto'], [volume['id'], None, True])
        except Exception:
            LOG.exception(_LE("LUN export for %s failed!"), volume['id'])
            raise
        lun_id = self.common._get_lun_id(volume['id'])
        return lun_id
    @utils.synchronized('vmem-export')
    def _unexport_lun(self, volume):
        """Removes the export configuration for the given volume.
        The equivalent CLI command is "no lun export container
        <container_name> name <lun_name>"
        Arguments:
            volume -- volume object provided by the Manager
        """
        v = self.common.vip
        LOG.debug("Unexporting lun %s.", volume['id'])
        try:
            self.common._send_cmd_and_verify(
                v.lun.unexport_lun, self.common._wait_for_export_state, '',
                [self.common.container, volume['id'], 'all', 'all', 'auto'],
                [volume['id'], None, False])
        except exception.ViolinBackendErrNotFound:
            # Already gone on the backend: treat as success.
            LOG.debug("Lun %s already unexported, continuing.", volume['id'])
        except Exception:
            LOG.exception(_LE("LUN unexport for %s failed!"), volume['id'])
            raise
    @utils.synchronized('vmem-export')
    def _export_snapshot(self, snapshot, connector=None, igroup=None):
        """Generates the export configuration for the given snapshot.
        The equivalent CLI command is "snapshot export container
        PROD08 lun <snapshot_name> name <volume_name>"
        Arguments:
            snapshot -- snapshot object provided by the Manager
            connector -- connector object provided by the Manager
            igroup -- name of igroup to use for exporting
        Returns:
            lun_id -- the LUN ID assigned by the backend
        """
        lun_id = -1
        export_to = ''
        v = self.common.vip
        target_name = self.TARGET_GROUP_NAME
        LOG.debug("Exporting snapshot %s.", snapshot['id'])
        if igroup:
            export_to = igroup
        elif connector:
            export_to = connector['initiator']
        else:
            raise exception.Error(_("No initiators found, cannot proceed"))
        try:
            self.common._send_cmd(v.snapshot.export_lun_snapshot, '',
                                  self.common.container, snapshot['volume_id'],
                                  snapshot['id'], export_to, target_name,
                                  'auto')
        except Exception:
            LOG.exception(_LE("Snapshot export for %s failed!"),
                          snapshot['id'])
            raise
        else:
            self.common._wait_for_export_state(snapshot['volume_id'],
                                               snapshot['id'], state=True)
            lun_id = self.common._get_snapshot_id(snapshot['volume_id'],
                                                  snapshot['id'])
        return lun_id
    @utils.synchronized('vmem-export')
    def _unexport_snapshot(self, snapshot):
        """Removes the export configuration for the given snapshot.
        The equivalent CLI command is "no snapshot export container
        PROD08 lun <snapshot_name> name <volume_name>"
        Arguments:
            snapshot -- snapshot object provided by the Manager
        """
        v = self.common.vip
        LOG.debug("Unexporting snapshot %s.", snapshot['id'])
        try:
            self.common._send_cmd(v.snapshot.unexport_lun_snapshot, '',
                                  self.common.container, snapshot['volume_id'],
                                  snapshot['id'], 'all', 'all', 'auto', False)
        except Exception:
            LOG.exception(_LE("Snapshot unexport for %s failed!"),
                          snapshot['id'])
            raise
        else:
            self.common._wait_for_export_state(snapshot['volume_id'],
                                               snapshot['id'], state=False)
    def _add_igroup_member(self, connector, igroup):
        """Add an initiator to an igroup so it can see exports.
        The equivalent CLI command is "igroup addto name <igroup_name>
        initiators <initiator_name>"
        Arguments:
            connector -- connector object provided by the Manager
        """
        v = self.common.vip
        LOG.debug("Adding initiator %s to igroup.", connector['initiator'])
        resp = v.igroup.add_initiators(igroup, connector['initiator'])
        if resp['code'] != 0:
            raise exception.Error(
                _('Failed to add igroup member: %(code)d, %(message)s') % resp)
    def _update_stats(self):
        """Update array stats.
        Gathers array stats from the backend and converts them to GB values.
        """
        data = {}
        total_gb = 0
        free_gb = 0
        v = self.common.vip
        master_cluster_id = list(v.basic.get_node_values(
            '/cluster/state/master_id').values())[0]
        bn1 = "/vshare/state/global/%s/container/%s/total_bytes" \
            % (master_cluster_id, self.common.container)
        bn2 = "/vshare/state/global/%s/container/%s/free_bytes" \
            % (master_cluster_id, self.common.container)
        resp = v.basic.get_node_values([bn1, bn2])
        # On a failed read, fall back to the previously cached value (if any).
        if bn1 in resp:
            total_gb = resp[bn1] // units.Gi
        else:
            LOG.warning(_LW("Failed to receive update for total_gb stat!"))
            if 'total_capacity_gb' in self.stats:
                total_gb = self.stats['total_capacity_gb']
        if bn2 in resp:
            free_gb = resp[bn2] // units.Gi
        else:
            LOG.warning(_LW("Failed to receive update for free_gb stat!"))
            if 'free_capacity_gb' in self.stats:
                free_gb = self.stats['free_capacity_gb']
        backend_name = self.configuration.volume_backend_name
        data['volume_backend_name'] = backend_name or self.__class__.__name__
        data['vendor_name'] = 'Violin Memory, Inc.'
        data['driver_version'] = self.VERSION
        data['storage_protocol'] = 'iSCSI'
        data['reserved_percentage'] = 0
        data['QoS_support'] = False
        data['total_capacity_gb'] = total_gb
        data['free_capacity_gb'] = free_gb
        for i in data:
            LOG.debug("stat update: %(name)s=%(data)s.",
                      {'name': i, 'data': data[i]})
        self.stats = data
    def _get_short_name(self, volume_name):
        """Creates a vSHARE-compatible iSCSI target name.
        The Folsom-style volume names are prefix(7) + uuid(36), which
        is too long for vSHARE for target names. To keep things
        simple we can just truncate the name to 32 chars.
        Arguments:
            volume_name -- name of volume/lun
        Returns:
            Shortened volume name as a string.
        """
        # NOTE(review): not referenced elsewhere in this class — possibly
        # vestigial from per-volume-target versions of the driver.
        return volume_name[:32]
    def _get_active_iscsi_ips(self, mg_conn):
        """Get a list of gateway IP addresses that can be used for iSCSI.
        Arguments:
            mg_conn -- active XG connection to one of the gateways
        Returns:
            active_gw_iscsi_ips -- list of IP addresses
        """
        active_gw_iscsi_ips = []
        # Management/loopback interfaces are never used for iSCSI traffic.
        interfaces_to_skip = ['lo', 'vlan10', 'eth1', 'eth2', 'eth3']
        bn = "/net/interface/config/*"
        intf_list = mg_conn.basic.get_node_values(bn)
        for i in intf_list:
            if intf_list[i] in interfaces_to_skip:
                continue
            bn1 = "/net/interface/state/%s/addr/ipv4/1/ip" % intf_list[i]
            bn2 = "/net/interface/state/%s/flags/link_up" % intf_list[i]
            resp = mg_conn.basic.get_node_values([bn1, bn2])
            # Keep only interfaces that have an IPv4 address AND link up.
            if len(resp.keys()) == 2 and resp[bn2] is True:
                active_gw_iscsi_ips.append(resp[bn1])
        return active_gw_iscsi_ips
    def _get_hostname(self, mg_to_query=None):
        """Get the hostname of one of the mgs (hostname is used in IQN).
        If the remote query fails then fall back to using the hostname
        provided in the cinder configuration file.
        Arguments:
            mg_to_query -- name of gateway to query 'mga' or 'mgb'
        Returns: hostname -- hostname as a string
        """
        hostname = self.configuration.san_ip
        conn = self.common.vip
        if mg_to_query == "mga":
            hostname = self.configuration.gateway_mga
            conn = self.common.mga
        elif mg_to_query == "mgb":
            hostname = self.configuration.gateway_mgb
            conn = self.common.mgb
        ret_dict = conn.basic.get_node_values("/system/hostname")
        if ret_dict:
            hostname = list(ret_dict.items())[0][1]
        else:
            LOG.debug("Unable to fetch gateway hostname for %s.", mg_to_query)
        return hostname
    def _wait_for_target_state(self, target_name):
        """Polls backend to verify an iscsi target configuration.
        This function will try to verify the creation of an iscsi
        target on both gateway nodes of the array every 5 seconds.
        Arguments:
            target_name -- name of iscsi target to be polled
        Returns:
            True if the target state was correctly added
        """
        bn = "/vshare/state/local/target/iscsi/%s" % (target_name)
        def _loop_func():
            # Loop terminates only when BOTH gateways report the target.
            status = [False, False]
            mg_conns = [self.common.mga, self.common.mgb]
            LOG.debug("Entering _wait_for_target_state loop: target=%s.",
                      target_name)
            for node_id in range(2):
                resp = mg_conns[node_id].basic.get_node_values(bn)
                if len(resp.keys()):
                    status[node_id] = True
            if status[0] and status[1]:
                raise loopingcall.LoopingCallDone(retvalue=True)
        timer = loopingcall.FixedIntervalLoopingCall(_loop_func)
        success = timer.start(interval=5).wait()
        return success
| StarcoderdataPython |
3504036 | <filename>buffer_and_clip_to_basins.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 25 10:49:37 2018
@author: charlie
Hopefully a GRASS script to clip mosaiced DEM to study basins
PLUS A 5 KM BUFFER ON EVERY SIDE so that local relief is appropriately
calculated.
"""
import sys
import os
#import grass.script as grass
from grass.pygrass.modules.shortcuts import raster as r
from grass.pygrass.modules.shortcuts import vector as v
#first buffer the subcatchments layer by 5 km
#(largest relief window extent I will need)
vector_to_be_buffered = 'subcatchments_vector'
output_name = 'subcatchments_buffer'
buffer_distance = 5000 #map units are m, so this is 5 km
v.buffer(input=vector_to_be_buffered, output=output_name,
distance=buffer_distance, minordistance=buffer_distance,
type='area')
#then use the basins to mask the mosaiced raster
mosaic_name = 'study_area_mosaic'
vector_mask_extent = 'subcatchments_buffer'
r.mask(vector=vector_mask_extent)
#then use mapcalc
r.mapcalc(expression="cropped_with_buffer=study_area_mosaic")
#then to transfer color table:
r.colors(map='cropped_with_buffer', raster=mosaic_name) # may be required to transfer the color table | StarcoderdataPython |
1790639 | <filename>lxserv/replay_fileSaveAs.py
# python
import lx, modo, replay
from replay import message as message
"""A simple example of a blessed MODO command using the commander module.
https://github.com/adamohern/commander for details"""
class CommandClass(replay.commander.CommanderClass):
    """Saves the current Macro() object to the destination stored in its
    `file_path` property. If `file_path` is `None`, prompt for a destination. Unlike
    `replay.fileExport`, this command only supports saving to the LXM format."""
    # Last save location, remembered across invocations (class-level).
    _path = lx.eval('query platformservice alias ? {scripts:untitled}')
    def commander_arguments(self):
        return [
            {
                'name': 'path',
                'datatype': 'string',
                'flags': ['optional']
            }
        ]
    def commander_execute(self, msg, flags):
        # Stop recording
        lx.eval('replay.record stop')
        macro = replay.Macro()
        # NOTE(review): file_path is unconditionally None here, so the
        # branch below always runs — presumably intentional for "Save As".
        file_path = None
        file_format = macro.file_format
        # If there is no associated file path try to get from command line or prompt the user for new destination
        if file_path is None:
            # Try to get the path from the command line:
            file_path = self.commander_arg_value(0)
            file_format = "lxm"
            # Prompt the user
            if not file_path:
                file_path = modo.dialogs.customFile(
                    dtype = 'fileSave',
                    title = message("MECCO_REPLAY", "SAVE_DIALOG_TITLE"),
                    names = ('LXM',),
                    unames = ('LXM file',),
                    ext=('LXM',),
                    path = self._path
                )
                if file_path is None:
                    return
                self.__class__._path = file_path
        # And save it for the next time
        macro.file_path = file_path
        macro.render(file_format, file_path)
        lx.eval('!!replay.fileClose')
        lx.eval('replay.fileOpen {%s}' % file_path)
        # Add to recently-opened
        lx.eval('replay.fileOpenAddRecent {%s}' % file_path)
    def basic_Enable(self, msg):
        # Disable the menu item when there is nothing recorded to save.
        if replay.Macro().is_empty:
            return False
        return True
lx.bless(CommandClass, 'replay.fileSaveAs')
| StarcoderdataPython |
1888998 | <filename>scripts/common/css/parse.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from urllib2 import urlopen
from codecs import EncodedFile
import css, csslex, cssyacc
from uri import uri
__all__ = ('parse','export')
def parse(data):
    """Build a fresh CSS parser/lexer pair and parse *data* into a stylesheet."""
    css_parser = cssyacc.yacc()
    css_parser.lexer = csslex.lex()
    return css_parser.parse(data, debug=True)
def export(base, stylesheet, recursive=False):
    """Print every rule of *stylesheet*; optionally follow @import rules.

    base -- URI against which @import targets are resolved
    stylesheet -- iterable of parsed css rule objects
    recursive -- when True, fetch and export imported stylesheets instead of
                 printing the @import rule itself
    """
    def recur(rule):
        # The import target may be a css.Uri wrapper or a plain string.
        url = rule.source
        if isinstance(url, css.Uri):
            url = url.url
        url = uri.resolve(base, url)
        # NOTE(review): the recursive call passes the original *base*, not the
        # imported sheet's own URL, so nested imports resolve relative to the
        # top-level document -- confirm this is intended.
        export(base, parse(urlopen(url).read()), recursive)
    for rule in stylesheet:
        if recursive and isinstance(rule, css.Import):
            recur(rule)
        else:
            # Python 2 print statement (this module targets Python 2).
            print rule.datum(unicode)
def main(fileuri, options):
    """Fetch the stylesheet at *fileuri*, parse it, and print its rules.

    *options* is accepted for the CLI interface but currently unused.
    """
    stylesheet = parse(urlopen(fileuri).read())
    export(fileuri, stylesheet)
# CLI entry point: parse one stylesheet given as a positional argument.
if '__main__' == __name__:
    from optparse import OptionParser
    opts = OptionParser("usage: %prog [options] filename")
    options, args = opts.parse_args()
    # Exactly one positional argument (the stylesheet URI) is required.
    if 1 != len(args):
        opts.error("no filename given")
    main(args[0],options)
| StarcoderdataPython |
228596 | <reponame>ysc3839/vcmp-python-test
# pylint: disable=missing-docstring
from typing import Tuple
from _vcmp import functions as func
# 3-component vector (x, y, z) used for positions, offsets, and Euler angles.
Vector = Tuple[float, float, float]
# 4-component rotation quaternion; component order assumed from the _vcmp
# API -- confirm against the server documentation.
Quaternion = Tuple[float, float, float, float]
class Object:
    """Wrapper around a server-side map object, delegating to the `_vcmp` C API.

    Instances store only the numeric object id; every property and method
    forwards to the matching `_vcmp.functions` call.
    """
    def __init__(self, object_id):
        # Server-side object id passed to every _vcmp call.
        self._id = object_id

    # Read-write properties
    @property
    def world(self):
        """Game world the object belongs to."""
        return func.get_object_world(self._id)

    @world.setter
    def world(self, value: int):
        func.set_object_world(self._id, value)

    @property
    def pos(self):
        """World position of the object."""
        return func.get_object_position(self._id)

    @pos.setter
    def pos(self, value: Vector):
        func.set_object_position(self._id, *value)

    @property
    def shot_report(self):
        """Whether shooting this object is reported to script handlers."""
        return func.is_object_shot_report_enabled(self._id)

    @shot_report.setter
    def shot_report(self, value: bool):
        func.set_object_shot_report_enabled(self._id, value)

    @property
    def touched_report(self):
        """Whether touching this object is reported to script handlers."""
        return func.is_object_touched_report_enabled(self._id)

    @touched_report.setter
    def touched_report(self, value: bool):
        func.set_object_touched_report_enabled(self._id, value)

    # Read-only properties
    @property
    def alpha(self):
        return func.get_object_alpha(self._id)

    @property
    def model(self):
        return func.get_object_model(self._id)

    @property
    def rotation(self):
        return func.get_object_rotation(self._id)

    @property
    def rotation_euler(self):
        return func.get_object_rotation_euler(self._id)

    @property
    def id(self):
        return self._id

    # Functions
    def delete(self) -> None:
        func.delete_object(self._id)

    def move_to(self, pos: Vector, time: int) -> None:
        # Animated move to an absolute position over `time` (units defined by
        # the _vcmp API -- presumably milliseconds; confirm).
        func.move_object_to(self._id, *pos, time)

    def move_by(self, offset: Vector, time: int) -> None:
        func.move_object_by(self._id, *offset, time)

    def rotate_to(self, rotation: Quaternion, time: int) -> None:
        func.rotate_object_to(self._id, *rotation, time)

    def rotate_by(self, rot_offset: Quaternion, time: int) -> None:
        func.rotate_object_by(self._id, *rot_offset, time)

    def rotate_to_euler(self, rotation: Vector, time: int) -> None:
        # NOTE(review): calls func.rotate_to_euler, not rotate_object_to_euler
        # like the sibling wrappers -- confirm this is the correct C API name.
        func.rotate_to_euler(self._id, *rotation, time)

    def rotate_by_euler(self, rot_offset: Vector, time: int) -> None:
        # NOTE(review): same naming inconsistency as rotate_to_euler above.
        func.rotate_by_euler(self._id, *rot_offset, time)

    def set_alpha(self, alpha: int, fade_time: int) -> None:
        func.set_object_alpha(self._id, alpha, fade_time)
| StarcoderdataPython |
1913431 | <filename>files/migrations/0001_initial.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import markupfield.fields
class Migration(migrations.Migration):
    """Initial schema for the files app: Release, Download, and Theme models.

    Auto-generated by Django's makemigrations. Applied migrations must not be
    edited; schema changes belong in a new migration.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Download',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('filename', models.CharField(max_length=50)),
                ('size', models.IntegerField(default=0)),
                ('md5', models.CharField(max_length=32)),
                ('sha1', models.CharField(max_length=40)),
                ('signed', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ['-release__version_num', 'filename'],
            },
        ),
        migrations.CreateModel(
            name='Release',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('version', models.CharField(unique=True, max_length=50)),
                ('version_num', models.IntegerField(default=0, unique=True)),
                ('release_notes', markupfield.fields.MarkupField(rendered_field=True)),
                ('stable', models.BooleanField(default=False, db_index=True)),
                ('release_notes_markup_type', models.CharField(default=b'markdown', max_length=30, choices=[(b'', b'--'), (b'html', 'HTML'), (b'plain', 'Plain'), (b'markdown', 'Markdown'), (b'restructuredtext', 'Restructured Text')])),
                ('date', models.DateTimeField(default=django.utils.timezone.now, db_index=True)),
                # Rendered counterpart maintained by MarkupField.
                ('_release_notes_rendered', models.TextField(editable=False)),
            ],
            options={
                'ordering': ['-version_num'],
            },
        ),
        migrations.CreateModel(
            name='Theme',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=50)),
                ('display_name', models.CharField(max_length=50)),
                ('version', models.CharField(max_length=50)),
                ('filename', models.CharField(unique=True, max_length=100)),
                ('supported_versions', models.CharField(max_length=50)),
                ('description', models.TextField()),
                ('author', models.CharField(max_length=200)),
                ('size', models.IntegerField(default=0)),
                ('md5', models.CharField(max_length=32)),
                ('sha1', models.CharField(max_length=40)),
                ('signed', models.BooleanField(default=False)),
                ('date', models.DateTimeField(default=django.utils.timezone.now, db_index=True)),
            ],
            options={
                'ordering': ['name', 'version'],
            },
        ),
        # Added separately so both models exist before the FK is created.
        migrations.AddField(
            model_name='download',
            name='release',
            field=models.ForeignKey(to='files.Release'),
        ),
        migrations.AlterUniqueTogether(
            name='download',
            unique_together=set([('release', 'filename')]),
        ),
    ]
| StarcoderdataPython |
5171400 | <filename>topicnet/cooking_machine/recipes/exploratory_search_pipeline.py
from .recipe_wrapper import BaseRecipe
from .. import Dataset
# Selector expression for per-modality cubes: keep perplexity within 1% of
# its minimum while maximizing the modality's phi sparsity.
modality_selection_template = (
    'PerplexityScore{modality}'
    ' < 1.01 * MINIMUM(PerplexityScore{modality}) and SparsityPhiScore{modality} -> max'
)
# Same idea, but constrained on the overall (@all) perplexity.
general_selection_template = (
    'PerplexityScore@all'
    ' < 1.01 * MINIMUM(PerplexityScore@all) and SparsityPhiScore{modality} -> max'
)
# NOTE(review): the YAML body below appears to have lost its original nested
# indentation (all keys are flush-left), which would change its meaning to a
# YAML parser -- confirm against the upstream topicnet recipe before use.
exploratory_search_template = '''
# This config follows a strategy described in the article
# Multi-objective Topic Modeling for Exploratory Search in Tech News
# by <NAME>, <NAME> and <NAME>, Jan 2018
# Use .format(modality=modality, dataset_path=dataset_path,
# specific_topics=specific_topics, background_topics=background_topics)
# when loading the recipe to adjust for your dataset
topics:
# Describes number of model topics, in the actuall article 200 topics were found to be optimal
specific_topics: {{specific_topics}}
background_topics: {{background_topics}}
regularizers:
- DecorrelatorPhiRegularizer:
name: decorrelation_phi_{{modality}}
topic_names: specific_topics
tau: 1
class_ids: ['{{modality}}']
- SmoothSparsePhiRegularizer:
name: smooth_phi_{{modality}}
topic_names: specific_topics
tau: 1
class_ids: ['{{modality}}']
- SmoothSparseThetaRegularizer:
name: sparse_theta
topic_names: specific_topics
tau: 1
model:
dataset_path: {{dataset_path}}
modalities_to_use: ['{{modality}}']
main_modality: '{{modality}}'
stages:
# repeat the following two cubes for every modality in the dataset
- RegularizersModifierCube:
num_iter: 8
reg_search: mul
regularizer_parameters:
name: decorrelation_phi_{{modality}}
selection:
- {0}
strategy: PerplexityStrategy
strategy_params:
start_point: 100000
step: 10
max_len: 6
tracked_score_function: PerplexityScore@all
verbose: false
use_relative_coefficients: false
- RegularizersModifierCube:
num_iter: 8
reg_search: add
regularizer_parameters:
name: smooth_phi_{{modality}}
selection:
- {0}
strategy: PerplexityStrategy
strategy_params:
start_point: 0.25
step: 0.25
max_len: 6
tracked_score_function: PerplexityScore{{modality}}
verbose: false
use_relative_coefficients: false
#last cube is independent of modalities and can be used only once
- RegularizersModifierCube:
num_iter: 8
reg_search: add
regularizer_parameters:
name: sparse_theta
selection:
- {1}
strategy: PerplexityStrategy
strategy_params:
start_point: -0.5
step: -0.5
max_len: 6
tracked_score_function: PerplexityScore@all
verbose: false
use_relative_coefficients: false
'''.format(modality_selection_template, general_selection_template)
class SearchRecipe(BaseRecipe):
    """Recipe wrapper for exploratory-search topic models.

    Fills `exploratory_search_template` with a dataset path, a modality, and
    generated topic names, unifying the recipe interface.
    """
    def __init__(self):
        super().__init__(recipe_template=exploratory_search_template)

    def format_recipe(
            self,
            dataset_path: str,
            modality: str = None,
            topic_number: int = 20,
            background_topic_number: int = 1,
    ):
        """Fill the template and return the formatted recipe string."""
        # Default to the first modality discovered in the dataset.
        if modality is None:
            modality = list(Dataset(dataset_path).get_possible_modalities())[0]

        specific_topics = [f'topic_{i}' for i in range(topic_number)]
        # Background topic names continue the numbering after the specific ones.
        first_bcg = len(specific_topics)
        background_topics = [
            f'bcg_{i}'
            for i in range(first_bcg, first_bcg + background_topic_number)
        ]

        self._recipe = self.recipe_template.format(
            dataset_path=dataset_path,
            modality=modality,
            specific_topics=specific_topics,
            background_topics=background_topics,
        )
        return self._recipe
| StarcoderdataPython |
9682492 | import sys
# import libraries
import sqlite3
import pandas as pd
from sqlalchemy import create_engine
import nltk
#nltk.download(['punkt', 'wordnet', 'averaged_perceptron_tagger'])
import pickle
import warnings
import re
import numpy as np
import pandas as pd
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from sklearn.metrics import accuracy_score, precision_score
import os
from sklearn.multioutput import MultiOutputClassifier
from sklearn.metrics import classification_report, make_scorer, f1_score
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer, TfidfVectorizer
nltk.download(['punkt', 'wordnet', 'averaged_perceptron_tagger', 'stopwords'])
import nltk
from nltk.corpus import stopwords
#load_data()
def load_data(database_filepath):
    """Load the disaster-messages table from an SQLite database.

    Parameters
    ----------
    database_filepath : str
        Path of the SQLite database file.

    Returns
    -------
    X : pandas.Series
        Message texts (feature variable).
    Y : pandas.DataFrame
        Category indicator columns (target variables).
    category_names : pandas.Index
        Names of the target columns.
    """
    engine = create_engine('sqlite:///' + database_filepath)
    frame = pd.read_sql_table("Disasters_table", engine)
    X = frame['message']
    # Columns 0-3 are id/message/original/genre metadata; the rest are targets.
    Y = frame.iloc[:, 4:]
    return X, Y, Y.columns
def tokenize(text):
    """Normalize, tokenize, and lemmatize *text*, dropping English stop words."""
    lemmatizer = WordNetLemmatizer()
    ignore = stopwords.words("english")

    # Lower-case and replace everything that is not alphanumeric with a space.
    cleaned = re.sub(r"[^a-zA-Z0-9]", " ", text.lower())

    lemmas = []
    for token in word_tokenize(cleaned):
        if token not in ignore:
            lemmas.append(lemmatizer.lemmatize(token))
    return lemmas
def build_model():
    """Build a grid-searched text-classification model.

    The pipeline is CountVectorizer -> TF-IDF -> multi-output random forest;
    GridSearchCV tunes IDF usage and the forest size.
    """
    steps = [
        ('vect', CountVectorizer(tokenizer=tokenize)),
        ('tfidf', TfidfTransformer()),
        ('clf', MultiOutputClassifier(RandomForestClassifier())),
    ]
    search_space = {
        'tfidf__use_idf': (True, False),
        'clf__estimator__n_estimators': [10, 20],
    }
    return GridSearchCV(Pipeline(steps), param_grid=search_space)
def plot_scores(Y_test, Y_pred):
    """Print a per-category classification report and the overall accuracy.

    Y_test : pandas.DataFrame of true labels, one column per category.
    Y_pred : array of predictions whose columns align with Y_test's columns.

    Idiom fix: the manual `i = 0 ... i = i + 1` counter is replaced with
    `enumerate`; the printed output is unchanged.
    """
    for i, col in enumerate(Y_test):
        print('Feature {}: {}'.format(i + 1, col))
        print(classification_report(Y_test[col], Y_pred[:, i]))
    # Element-wise comparison averaged over every (row, category) cell.
    accuracy = (Y_pred == Y_test.values).mean()
    print('The model accuracy is {:.3f}'.format(accuracy))
def evaluate_model(model, X_test, Y_test, category_names):
    """Predict on the test set and print per-category scores.

    *category_names* is accepted for interface symmetry but not used here.
    """
    predictions = model.predict(X_test)
    plot_scores(Y_test, predictions)
def save_model(model, model_filepath):
    """Serialize the trained classifier to *model_filepath* with pickle.

    Bug fix: the original ignored *model_filepath* and always wrote a file
    named 'classifier.pkl' in the current directory; the caller-supplied
    path is now honored.
    """
    with open(model_filepath, 'wb') as f:
        pickle.dump(model, f)
def main():
    """CLI entry point: load data, train, evaluate, and pickle the classifier.

    Usage: python train_classifier.py <database_filepath> <model_filepath>
    """
    if len(sys.argv) != 3:
        # Wrong argument count: print usage help and bail out.
        print('Please provide the filepath of the disaster messages database '\
              'as the first argument and the filepath of the pickle file to '\
              'save the model to as the second argument. \n\nExample: python '\
              'train_classifier.py ../data/DisasterResponse.db classifier.pkl')
        return

    database_filepath, model_filepath = sys.argv[1:]

    print('Loading data...\n DATABASE: {}'.format(database_filepath))
    X, Y, category_names = load_data(database_filepath)
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)

    print('Building model...')
    model = build_model()

    print('Training model...')
    model.fit(X_train, Y_train)

    print('Evaluating model...')
    evaluate_model(model, X_test, Y_test, category_names)

    print('Saving model...\n MODEL: {}'.format(model_filepath))
    save_model(model, model_filepath)

    print('Trained model saved!')
# Script entry point.
if __name__ == '__main__':
    main()
11342096 | <filename>pynumdiff/optimize/__init__.py
from pynumdiff.optimize.__optimize__ import docstring as docstring
from pynumdiff.optimize import finite_difference as finite_difference
from pynumdiff.optimize import smooth_finite_difference as smooth_finite_difference
from pynumdiff.optimize import total_variation_regularization as total_variation_regularization
from pynumdiff.optimize import linear_model as linear_model
from pynumdiff.optimize import kalman_smooth as kalman_smooth | StarcoderdataPython |
1714901 | from enum import Enum
class Dot1xControlledDirectionEnum(str, Enum):
    """802.1X controlled-direction options; member values mirror their names."""
    DCD_BOTH = "DCD_BOTH"
    DCD_IN = "DCD_IN"
3448602 | <reponame>HLasse/wav2vec_finetune
import numpy as np
import math
# Demo parameters: a 20-sample ramp signal framed into 5-sample windows
# advancing 2 samples at a time (sampling_rate=1 means lengths are in samples).
sig = np.arange(0, 20)
sampling_rate = 1
frame_length = 5
frame_stride = 2
zero_padding = True
def stack_frames(
        sig,
        sampling_rate,
        frame_length,
        frame_stride,
        filter=lambda x: np.ones(
            (x,
             )),
        zero_padding=True,
        keep_short_signals=True,
        remove_zero_padding=False):
    """Frame a signal into overlapping frames.

    Args:
        sig (array): The audio signal to frame, of shape (N,).
        sampling_rate (int): The sampling frequency of the signal.
        frame_length (float): The length of each frame in seconds.
        frame_stride (float): The stride between frame starts in seconds.
        filter (callable): Given the frame length in samples, returns a
            time-domain window applied to every frame (all-ones by default).
        zero_padding (bool): If True, zero-pad the signal so the last frame
            can be generated; otherwise trailing samples are dropped.
        keep_short_signals (bool): Return the original signal as a single
            frame when it is shorter than ``frame_length``.
        remove_zero_padding (bool): Strip trailing zeros from the final
            frame. Bug fix: this used to raise, because a shorter trimmed row
            cannot be assigned back into a fixed-width 2-D array; the result
            is now returned as a list of 1-D arrays instead. Beware that any
            genuine zero samples at the end of the last frame are stripped
            as well.

    Returns:
        Array of shape (num_frames, frame_sample_length); a list of 1-D
        arrays when ``remove_zero_padding`` is True; or a (1, N) array for
        short signals with ``keep_short_signals``.
    """
    # Check dimension
    s = "Signal dimension should be of the format of (N,) but it is %s instead"
    assert sig.ndim == 1, s % str(sig.shape)

    signal_length = len(sig) / sampling_rate
    if signal_length < frame_length:
        if keep_short_signals:
            # Too short to cut even one frame: hand back the signal as one row.
            return np.expand_dims(sig, axis=0)
        else:
            raise ValueError(f"Signal is shorter than frame length {signal_length} vs {frame_length}. Set `keep_short_signal` to True to return the original signal in such cases.")

    # Frame length and stride converted from seconds to samples.
    length_signal = sig.shape[0]
    frame_sample_length = int(
        np.round(
            sampling_rate *
            frame_length))
    frame_stride = float(np.round(sampling_rate * frame_stride))

    if zero_padding:
        # Allocate space for the last frame by padding the signal with zeros.
        # NOTE(review): an alternative formula using
        # (length_signal - (frame_sample_length - frame_stride)) would yield
        # one more frame; the current one can leave up to a stride's worth of
        # trailing samples unframed -- preserved as-is to keep output shapes.
        numframes = (int(math.ceil((length_signal
                                    - frame_sample_length) / frame_stride)))
        len_sig = int(numframes * frame_stride + frame_sample_length)
        additive_zeros = np.zeros((len_sig - length_signal,))
        signal = np.concatenate((sig, additive_zeros))
    else:
        # Drop the trailing samples that do not fill a whole frame.
        numframes = int(math.floor((length_signal
                                    - frame_sample_length) / frame_stride))
        len_sig = int((numframes - 1) * frame_stride + frame_sample_length)
        signal = sig[0:len_sig]

    # Index matrix: row r holds the sample indices belonging to frame r.
    indices = np.tile(np.arange(0, frame_sample_length),
                      (numframes, 1)) \
        + np.tile(np.arange(0, numframes * frame_stride, frame_stride),
                  (frame_sample_length, 1)).T
    indices = np.array(indices, dtype=np.int32)

    # Extracting the frames based on the allocated indices.
    frames = signal[indices]

    # Apply the window function to every frame.
    window = np.tile(filter(frame_sample_length), (numframes, 1))
    extracted_frames = frames * window

    if remove_zero_padding:
        # Ragged result: trailing zeros stripped from the final frame only.
        trimmed_last = np.trim_zeros(extracted_frames[-1], trim="b")
        return list(extracted_frames[:-1]) + [trimmed_last]
    return extracted_frames
# Demo: frame the ramp signal using the parameters defined above.
l = stack_frames(sig, sampling_rate, frame_length, frame_stride, remove_zero_padding=False)
| StarcoderdataPython |
205119 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 25 21:11:45 2017
@author: hubert
"""
import numpy as np
import matplotlib.pyplot as plt
class LiveBarGraph(object):
    """Live-updating bar graph with one bar per (channel, frequency-band) pair.

    Bug fixes relative to the original:
    * ``self.n_bars`` multiplied the two name *lists* (a TypeError); it is
      now the product of their lengths.
    * A dangling ``self.x =`` statement (a SyntaxError) was removed.
    * The mutable default arguments were replaced with the None idiom.
    """
    def __init__(self, band_names=None, ch_names=None):
        """Create the figure and one zero-height bar per channel/band pair.

        Args:
            band_names: frequency-band labels
                (default: ['delta', 'theta', 'alpha', 'beta']).
            ch_names: channel labels
                (default: ['TP9', 'AF7', 'AF8', 'TP10']).
        """
        if band_names is None:
            band_names = ['delta', 'theta', 'alpha', 'beta']
        if ch_names is None:
            ch_names = ['TP9', 'AF7', 'AF8', 'TP10']
        self.band_names = band_names
        self.ch_names = ch_names
        # One bar for every (channel, band) combination.
        self.n_bars = len(self.band_names) * len(self.ch_names)

        self.fig, self.ax = plt.subplots()
        self.ax.set_ylim((0, 1))
        y = np.zeros((self.n_bars,))
        x = range(self.n_bars)
        self.rects = self.ax.bar(x, y)

    def update(self, new_y):
        """Set each bar's height from *new_y* (one value per bar)."""
        for rect, height in zip(self.rects, new_y):
            rect.set_height(height)
# Demo: animate the graph with random bar heights until interrupted.
if __name__ == '__main__':
    bar = LiveBarGraph()
    plt.show()
    # NOTE(review): update() is fed 10 random values, but the default graph
    # has 4 bands x 4 channels = 16 bars, so the trailing bars never move --
    # confirm the intended bar count.
    while True:
        bar.update(np.random.random(10))
        plt.pause(0.1)
| StarcoderdataPython |
3430251 | <gh_stars>0
# -*- coding: utf-8 -*-
"""
Helper utilities for the blog application.
:author: <NAME>
:date: 2/18/2019
"""
#
# Functions
#
def font_color_helper(background_color, light_color=None, dark_color=None):
    """Pick a readable font color for the given background color.

    Uses the perceived-brightness formula 0.299*R + 0.587*G + 0.114*B:
    a bright background (> 186) gets the dark font color, a dark background
    gets the light one.

    Args:
        background_color: hex color string such as "#1a2b3c"; a leading '#'
            and surrounding whitespace are tolerated.
        light_color: font color for dark backgrounds (default "#FFFFFF").
        dark_color: font color for light backgrounds (default "#000000").

    Returns:
        The chosen font color string.
    """
    if light_color is None:
        light_color = "#FFFFFF"
    if dark_color is None:
        dark_color = "#000000"

    hex_code = background_color.strip().strip('#')
    red = int(hex_code[:2], 16)
    green = int(hex_code[2:4], 16)
    blue = int(hex_code[4:], 16)

    brightness = red * 0.299 + green * 0.587 + blue * 0.114
    return dark_color if brightness > 186 else light_color
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.