id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1600210 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'plantillas_interfaces/interfaz_registro_datos.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims: PyQt4 can be built with or without QString / the
# UnicodeUTF8 flag, so fall back gracefully when they are absent.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 builds have no QString: strings are already unicode, pass through.
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Builds without the UnicodeUTF8 flag: translate without an encoding arg.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_DialogoRegistroDatos(object):
    """Generated UI class for the data-registration dialog.

    NOTE(review): produced by pyuic4 from interfaz_registro_datos.ui — edit
    the .ui file and regenerate instead of hand-editing this class.
    """

    def setupUi(self, DialogoRegistroDatos):
        """Create and position every widget on the given dialog."""
        DialogoRegistroDatos.setObjectName(_fromUtf8("DialogoRegistroDatos"))
        DialogoRegistroDatos.resize(677, 405)
        DialogoRegistroDatos.setToolTip(_fromUtf8(""))
        # --- "Datos Personales" group: names, surnames, id number, sex ---
        self.grupo_datos_personales = QtGui.QGroupBox(DialogoRegistroDatos)
        self.grupo_datos_personales.setGeometry(QtCore.QRect(9, 9, 666, 201))
        self.grupo_datos_personales.setObjectName(_fromUtf8("grupo_datos_personales"))
        self.input_primer_nombre = QtGui.QLineEdit(self.grupo_datos_personales)
        self.input_primer_nombre.setGeometry(QtCore.QRect(10, 50, 141, 29))
        self.input_primer_nombre.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_primer_nombre.setObjectName(_fromUtf8("input_primer_nombre"))
        self.label_primer_nombre = QtGui.QLabel(self.grupo_datos_personales)
        self.label_primer_nombre.setGeometry(QtCore.QRect(14, 30, 111, 17))
        self.label_primer_nombre.setObjectName(_fromUtf8("label_primer_nombre"))
        self.label_segundo_nombre = QtGui.QLabel(self.grupo_datos_personales)
        self.label_segundo_nombre.setGeometry(QtCore.QRect(170, 30, 111, 17))
        self.label_segundo_nombre.setObjectName(_fromUtf8("label_segundo_nombre"))
        self.input_segundo_nombre = QtGui.QLineEdit(self.grupo_datos_personales)
        self.input_segundo_nombre.setGeometry(QtCore.QRect(170, 50, 141, 29))
        self.input_segundo_nombre.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_segundo_nombre.setObjectName(_fromUtf8("input_segundo_nombre"))
        self.input_primer_apellido = QtGui.QLineEdit(self.grupo_datos_personales)
        self.input_primer_apellido.setGeometry(QtCore.QRect(330, 50, 141, 29))
        self.input_primer_apellido.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_primer_apellido.setObjectName(_fromUtf8("input_primer_apellido"))
        self.input_segundo_apellido = QtGui.QLineEdit(self.grupo_datos_personales)
        self.input_segundo_apellido.setGeometry(QtCore.QRect(490, 50, 141, 29))
        self.input_segundo_apellido.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_segundo_apellido.setObjectName(_fromUtf8("input_segundo_apellido"))
        self.label_primer_apellido = QtGui.QLabel(self.grupo_datos_personales)
        self.label_primer_apellido.setGeometry(QtCore.QRect(330, 30, 111, 17))
        self.label_primer_apellido.setObjectName(_fromUtf8("label_primer_apellido"))
        self.label_segundo_apellido = QtGui.QLabel(self.grupo_datos_personales)
        self.label_segundo_apellido.setGeometry(QtCore.QRect(490, 30, 111, 17))
        self.label_segundo_apellido.setObjectName(_fromUtf8("label_segundo_apellido"))
        self.input_cedula = QtGui.QLineEdit(self.grupo_datos_personales)
        self.input_cedula.setGeometry(QtCore.QRect(10, 120, 141, 29))
        self.input_cedula.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_cedula.setObjectName(_fromUtf8("input_cedula"))
        self.label_cedula = QtGui.QLabel(self.grupo_datos_personales)
        self.label_cedula.setGeometry(QtCore.QRect(14, 100, 111, 17))
        self.label_cedula.setObjectName(_fromUtf8("label_cedula"))
        self.select_sexo = QtGui.QComboBox(self.grupo_datos_personales)
        self.select_sexo.setGeometry(QtCore.QRect(170, 119, 141, 31))
        self.select_sexo.setObjectName(_fromUtf8("select_sexo"))
        self.label_sexo = QtGui.QLabel(self.grupo_datos_personales)
        self.label_sexo.setGeometry(QtCore.QRect(170, 100, 111, 17))
        self.label_sexo.setObjectName(_fromUtf8("label_sexo"))
        # Horizontal separator line at the bottom of the personal-data group.
        self.line = QtGui.QFrame(self.grupo_datos_personales)
        self.line.setGeometry(QtCore.QRect(7, 170, 641, 20))
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        # --- "Datos Extra" group: email, education level, job position ---
        # NOTE(review): "gurpo" is a typo for "grupo" baked into the .ui file;
        # renaming it here would desynchronize with the generated source.
        self.gurpo_datos_extras = QtGui.QGroupBox(DialogoRegistroDatos)
        self.gurpo_datos_extras.setGeometry(QtCore.QRect(10, 210, 631, 121))
        self.gurpo_datos_extras.setObjectName(_fromUtf8("gurpo_datos_extras"))
        self.input_email = QtGui.QLineEdit(self.gurpo_datos_extras)
        self.input_email.setGeometry(QtCore.QRect(10, 50, 181, 31))
        self.input_email.setStyleSheet(_fromUtf8("background-color: white;"))
        self.input_email.setObjectName(_fromUtf8("input_email"))
        self.label_email = QtGui.QLabel(self.gurpo_datos_extras)
        self.label_email.setGeometry(QtCore.QRect(10, 30, 121, 17))
        self.label_email.setObjectName(_fromUtf8("label_email"))
        self.label_cargo = QtGui.QLabel(self.gurpo_datos_extras)
        self.label_cargo.setGeometry(QtCore.QRect(410, 29, 59, 17))
        self.label_cargo.setObjectName(_fromUtf8("label_cargo"))
        self.select_cargos = QtGui.QComboBox(self.gurpo_datos_extras)
        self.select_cargos.setGeometry(QtCore.QRect(410, 50, 181, 31))
        self.select_cargos.setObjectName(_fromUtf8("select_cargos"))
        self.select_nivel_instruccion = QtGui.QComboBox(self.gurpo_datos_extras)
        self.select_nivel_instruccion.setGeometry(QtCore.QRect(210, 50, 181, 31))
        self.select_nivel_instruccion.setObjectName(_fromUtf8("select_nivel_instruccion"))
        self.label_nivel_instruccion = QtGui.QLabel(self.gurpo_datos_extras)
        self.label_nivel_instruccion.setGeometry(QtCore.QRect(210, 30, 131, 17))
        self.label_nivel_instruccion.setObjectName(_fromUtf8("label_nivel_instruccion"))
        # --- Dialog action buttons ---
        self.btn_registrar = QtGui.QPushButton(DialogoRegistroDatos)
        self.btn_registrar.setGeometry(QtCore.QRect(530, 350, 111, 31))
        self.btn_registrar.setObjectName(_fromUtf8("btn_registrar"))
        self.btn_cerrar = QtGui.QPushButton(DialogoRegistroDatos)
        self.btn_cerrar.setGeometry(QtCore.QRect(410, 350, 111, 31))
        self.btn_cerrar.setObjectName(_fromUtf8("btn_cerrar"))

        self.retranslateUi(DialogoRegistroDatos)
        QtCore.QMetaObject.connectSlotsByName(DialogoRegistroDatos)

    def retranslateUi(self, DialogoRegistroDatos):
        """Apply all user-visible (translatable) strings to the widgets."""
        DialogoRegistroDatos.setWindowTitle(_translate("DialogoRegistroDatos", "Formulario de Registro", None))
        self.grupo_datos_personales.setTitle(_translate("DialogoRegistroDatos", "Datos Personales", None))
        self.label_primer_nombre.setText(_translate("DialogoRegistroDatos", "Primer Nombre", None))
        self.label_segundo_nombre.setText(_translate("DialogoRegistroDatos", "Segundo Nombre", None))
        self.label_primer_apellido.setText(_translate("DialogoRegistroDatos", "Primer Apellido", None))
        self.label_segundo_apellido.setText(_translate("DialogoRegistroDatos", "Segundo Apellido", None))
        self.label_cedula.setText(_translate("DialogoRegistroDatos", "Cedula", None))
        self.label_sexo.setText(_translate("DialogoRegistroDatos", "Sexo", None))
        self.gurpo_datos_extras.setTitle(_translate("DialogoRegistroDatos", "Datos Extra", None))
        self.input_email.setWhatsThis(
            _translate("DialogoRegistroDatos", "<html><head/><body><p><br/></p></body></html>", None))
        self.label_email.setText(_translate("DialogoRegistroDatos", "Correo Electronico", None))
        self.label_cargo.setText(_translate("DialogoRegistroDatos", "Cargo", None))
        self.label_nivel_instruccion.setText(_translate("DialogoRegistroDatos", "Nivel de Instruccion", None))
        self.btn_registrar.setText(_translate("DialogoRegistroDatos", "&Registrar", None))
        self.btn_cerrar.setText(_translate("DialogoRegistroDatos", "&Cerrar", None))
| StarcoderdataPython |
3343181 | from struct import pack, unpack
from abc import ABC, abstractmethod
class RadarData(ABC):
    """
    Abstract radar data type stored in pbs stream
    """

    @classmethod
    @abstractmethod
    def from_proto(cls, data_pb):
        """Build an instance of the concrete type from its protobuf message."""
        pass
class ProtoStreamReader(object):
    """Iterate over a length-framed protobuf binary stream.

    Each decoded protobuf message is converted to a Python object via the
    supplied type's ``from_proto`` classmethod. Use as a context manager.
    """

    def __init__(self, data_stream_path, proto_type, python_type,
                 header_size=8):
        """
        data_stream_path -- radar data protobuf stream file path
        proto_type -- protobuf message class
        python_type -- Python class the messages are converted to
        header_size -- size of the header in the binary stream
        """
        self.data_stream_path = data_stream_path
        self.proto_type = proto_type
        self.python_type = python_type
        self.header_size = header_size
        self.data_stream = None

    def __enter__(self):
        self.data_stream = open(self.data_stream_path, 'rb')
        return self

    def __iter__(self):
        # Starting a new iteration: consume (and discard) the file header so
        # the stream is positioned at the first framed message.
        if self.header_size > 0:
            self.data_stream.read(self.header_size)
        return self

    def __next__(self):
        message = read_protobuf_message(self.data_stream, self.proto_type)
        if message is None:
            # End of stream (or truncated trailing data).
            raise StopIteration
        return self.python_type.from_proto(message)

    def __exit__(self, exc_type, exc_value, traceback):
        self.data_stream.close()
class ProtoStreamWriter(object):
    """Write length-framed protobuf messages to a binary file stream.

    The file begins with a zero-filled dummy header of ``header_size`` bytes;
    each record is written as a little-endian uint32 length prefix followed
    by its serialized bytes. Use as a context manager.
    """

    def __init__(self, output_file_path, header_size=8):
        self.output_file_path = output_file_path
        self.header_size = header_size
        self.write_stream = None

    def __enter__(self):
        self.write_stream = open(self.output_file_path, 'wb')
        # Reserve the header region with zero bytes.
        self.write_stream.write(b"\x00" * self.header_size)
        return self

    def write(self, record):
        """Append one protobuf record with its uint32 length framing."""
        payload = record.SerializeToString()
        self.write_stream.write(pack('<I', len(payload)) + payload)

    def __exit__(self, exc_type, exc_value, traceback):
        self.write_stream.close()
class MultiRadarStreamReader(object):
    """
    Takes in multiple radar streams and returns frame-id aligned radar data.
    TODO:: this class currently only handles two radars
    """

    def __init__(self, radar_data_readers):
        """radar_data_readers -- list of exactly two iterable radar readers"""
        assert isinstance(radar_data_readers, list), \
            "radar_data_readers is not a list"
        assert len(radar_data_readers) == 2, \
            "MultiRadarStreamReader currently only supports two radars"
        # Keep live iterators so __next__ can advance each stream independently.
        self.radar_data_readers = [iter(reader) for reader in radar_data_readers]

    def __iter__(self):
        return self

    def __next__(self):
        """
        Return the next frame-id aligned pair of radar data.

        Advances whichever stream is behind until both frame ids match;
        StopIteration from either underlying stream ends the iteration.
        """
        radar_data_0 = next(self.radar_data_readers[0])
        radar_data_1 = next(self.radar_data_readers[1])
        # BUGFIX: previously advanced via nonexistent attributes
        # `self.radar_data_reader_0/1`, raising AttributeError whenever the
        # streams were misaligned; index into radar_data_readers instead.
        while radar_data_0.frame_id != radar_data_1.frame_id:
            if radar_data_0.frame_id < radar_data_1.frame_id:
                radar_data_0 = next(self.radar_data_readers[0])
            else:
                radar_data_1 = next(self.radar_data_readers[1])
        return (radar_data_0, radar_data_1)
def read_protobuf_message(fp, message_type):
    """
    Read one protobuf message with uint32_t framing.

    fp -- binary stream file pointer
    message_type -- protobuf message class

    Returns the parsed message, or None at end-of-stream / on truncation.
    """
    # Little-endian uint32 length prefix.
    header = fp.read(4)
    if len(header) != 4:
        return None
    (msg_length,) = unpack('<I', header)
    # Pull the whole framed payload into memory.
    payload = fp.read(msg_length)
    if len(payload) != msg_length:
        print('Truncated data')
        return None
    msg = message_type()
    msg.ParseFromString(payload)
    return msg
| StarcoderdataPython |
5013513 | from collections.abc import Callable
from bolinette.blnt.commands import Argument
class Command:
    """A registered CLI command: a callable plus its metadata.

    Attributes mirror the constructor arguments; ``args`` always holds a
    list (never None) so callers can iterate it unconditionally.
    """

    def __init__(self, name: str, func: Callable, path: str = None,
                 summary: str = None, args: list[Argument] = None, run_init: bool = False):
        self.name = name
        self.func = func
        self.path = path
        self.summary = summary
        # `args or []` guarantees a fresh list even when None (or nothing) is passed.
        self.args = args or []
        self.run_init = run_init

    def __repr__(self) -> str:
        # Aid debugging: identify which command this object represents.
        return f'<Command "{self.name}">'
| StarcoderdataPython |
3561814 | <gh_stars>0
import requests

# rget - make a simple HTTP GET request from the shell.
# NOTE(review): `prompt` (the tokenized command line) and `spinner` (a progress
# indicator) are expected to be injected by the hosting shell -- confirm.

if len(prompt) == 1:
    # No argument given: show usage.
    print('rget: usage: rget <url|--help> [arg]')
elif len(prompt) == 2:
    if prompt[1] == "--help":
        print("""
rget - Make simple HTTP requests
----------
args:
url - The url to get
--help - Displays this message
--include-headers - Includes HTTP headers
--user-agent - Uses a custom User-Agent
""")
    else:
        # Plain GET of the given url.
        spinner.start("Getting url " + prompt[1])
        r = requests.get(prompt[1])
        spinner.stop()
        print("Response code: {}\n".format(r.status_code))
        print(r.text)
elif len(prompt) > 2:
    # Extra flags present: optionally set a custom User-Agent header.
    headers = {}
    if "--user-agent" in prompt:
        headers['User-Agent'] = input("Enter user-agent: ")
    spinner.start("Getting url " + prompt[1])
    r = requests.get(prompt[1], headers=headers)
    spinner.stop()
    print("Response code: {}\n".format(r.status_code))
    if "--include-headers" in prompt:
        # Dump the response headers before the body.
        print("Headers:")
        for h in r.headers.keys():
            print(" {0}: {1}".format(h, r.headers[h]))
        print("")
    print(r.text)
| StarcoderdataPython |
188122 | #!/usr/bin/env python3
"""Setup script for Redwall"""
import codecs
import os
import re
from setuptools import find_packages, setup
def get_long_description():
    """Read the main README.rst to get the program's long description."""
    # Built-in open() with an explicit encoding replaces the legacy
    # codecs.open() call (and additionally normalizes newlines to '\n').
    with open("README.rst", "r", encoding="utf-8") as f_readme:
        return f_readme.read()
def get_program_metadata(attribute):
    """Read program metadata from the main package's __init__.

    attribute -- bare metadata name, e.g. "title" for ``__title__``.

    Raises ValueError (instead of an opaque AttributeError on a None match)
    when the requested dunder attribute is not present.
    """
    with open(os.path.join("redwall", "__init__.py"), "r", encoding="utf-8") as f_init:
        match = re.search(
            r'^__{attr}__\s*=\s*[\'"]([^\'"]*)[\'"]'.format(attr=attribute),
            f_init.read(),
            re.MULTILINE,
        )
    if match is None:
        raise ValueError(
            "metadata attribute __{}__ not found in redwall/__init__.py".format(attribute)
        )
    return match.group(1)
# Package metadata and build configuration. Title/version/author are read
# from redwall/__init__.py so each value lives in exactly one place.
setup(
    name=get_program_metadata("title"),
    version=get_program_metadata("version"),
    description="Redwall",
    long_description=get_long_description(),
    author=get_program_metadata("author"),
    author_email="<EMAIL>",
    license="MIT",
    url="https://github.com/virtualtam/redwall",
    keywords="image reddit wallpaper",
    packages=find_packages(exclude=["tests.*", "tests"]),
    # Console entry point: the `redwall` command maps to redwall.cli:redwall.
    entry_points={"console_scripts": ["redwall = redwall.cli:redwall"]},
    # Runtime dependencies, pinned to narrow version ranges.
    install_requires=[
        "click>=8.0,<8.1",
        "Pillow>=8.3,<8.4",
        "praw>=7.3.0,<7.4",
        "requests>=2.26,<2.27",
        "screeninfo==0.6.7",
        "SQLAlchemy>=1.4,<1.5",
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
)
| StarcoderdataPython |
4841340 | <reponame>adeogliari/GeekUniversity_Python
"""
3) Leia um número real. Se o número for positivo imprima a raiz quadrada. Do contrário, imprima o número ao quadrado.
"""
import math
n1 = float(input('Digite um número: \n'))
if n1 > 0:
print(f'A raiz quadrada de {n1} é {math.sqrt(n1)}')
elif n1 == 0:
print('Você digitou o número 0')
else:
print(f'{n1} ao quadrado é {n1**2}')
| StarcoderdataPython |
6477418 | import pytest
from SupportLibraries.driver_factory import DriverFactory
@pytest.fixture(scope="session")
def get_driver(request, browser, platform, environment):
df = DriverFactory(browser, platform, environment)
driver = df.get_driver_instance()
session = request.node
for item in session.items:
cls = item.getparent(pytest.Class)
setattr(cls.obj, "driver", driver)
yield
driver.quit()
def pytest_addoption(parser):
    """Register the command-line options consumed by the driver fixtures."""
    for option, description in (
        ("--browser", "Browser Type"),
        ("--platform", "Operating System Type"),
        ("--environment", "Application Environment"),
    ):
        parser.addoption(option, help=description)
@pytest.fixture(scope="session")
def browser(request):
return request.config.getoption("--browser")
@pytest.fixture(scope="session")
def platform(request):
return request.config.getoption("--platform")
@pytest.fixture(scope="session")
def environment(request):
return request.config.getoption("--environment")
| StarcoderdataPython |
3597186 | from django.db import models
from opendata.catalog.models import UrlType, UpdateFrequency
from opendata.requests.models import Category, City, County
from opendata.fields_info import FIELDS, HELP
class Suggestion(models.Model):
    """A user-submitted suggestion for a dataset to add to the open-data catalog."""

    AGENCY_TYPES = (
        ('state', 'Statewide'),
        ('county', 'County Agency'),
        ('city', 'City/town Agency'),
    )
    # NOTE(review): FREQUENCY_TYPES is not referenced by any field below (the
    # `updates` field uses the UpdateFrequency model) -- confirm before removing.
    FREQUENCY_TYPES = (
        ('daily', 'At least once a day'),
        ('weekly', 'At least once a week'),
        ('monthly', 'At least once a month'),
        ('yearly', 'At least once a year'),
        ('never', "It is not updated after it's created."),
    )
    # Creation / modification timestamps (managed automatically by Django).
    date = models.DateTimeField(auto_now_add=True)
    date_last_modified = models.DateTimeField(auto_now=True)
    # What the suggested dataset is.
    title = models.CharField(max_length=255, help_text=HELP['title'])
    short_description = models.CharField(u'Brief description', max_length=255,
                                         help_text=HELP['short_description'])
    description = models.TextField(u'Long description', help_text=HELP['description'])
    url = models.CharField(verbose_name=FIELDS['url'], max_length=255,
                           help_text=HELP['url'])
    data_format = models.ForeignKey(UrlType, verbose_name=FIELDS['data_format'],
                                    blank=True, null=True)
    other_format = models.CharField(max_length=255, blank=True)
    # Which agency publishes the data.
    agency_name = models.CharField(verbose_name=FIELDS['agency_name'],
                                   help_text=HELP['agency_name'], max_length=255)
    agency_division = models.CharField(max_length=255,
                                       verbose_name=FIELDS['agency_division'],
                                       help_text=HELP['agency_name'])
    agency_type = models.CharField(choices=AGENCY_TYPES, max_length=16,
                                   verbose_name=FIELDS['agency_type'],
                                   help_text=HELP['agency_type'])
    city = models.ForeignKey(City, verbose_name=FIELDS['city'], blank=True,
                             related_name='suggestions', null=True,)
    county = models.ForeignKey(County, related_name='suggestions', null=True,
                               blank=True, verbose_name=FIELDS['county'])
    # Update cadence and categorization.
    last_updated = models.DateField(verbose_name=FIELDS['last_updated'],
                                    blank=True, null=True)
    updates = models.ForeignKey(UpdateFrequency, null=True, blank=True,
                                verbose_name=FIELDS['update_frequency'],
                                help_text=HELP['update_frequency'],
                                )
    categories = models.ManyToManyField(Category, related_name="suggestions",
                                        null=True, blank=True,
                                        verbose_name=FIELDS['categories'])
    other_category = models.CharField(u'Other category', max_length=255, blank=True,
                                      help_text=HELP['other'])
    # NOTE(review): CommaSeparatedIntegerField for free-text keywords looks
    # suspicious (keywords are rarely integers) -- confirm intent.
    keywords = models.CommaSeparatedIntegerField(max_length=255, blank=True,
                                                 verbose_name=FIELDS['keywords'],
                                                 help_text=HELP['keywords'])
    # Contact information for the person making the suggestion.
    contact_name = models.CharField(max_length=255)
    phone_number = models.CharField(max_length=30)
    street_address = models.CharField(max_length=100)
    contact_city = models.CharField(u'City', max_length=50)
    zipcode = models.CharField(u'Zip Code', max_length=30)
    email_address = models.CharField(max_length=100)

    def __unicode__(self):
        return self.title
52589 | <gh_stars>0
# 04. Forum Topics
line = input()
forum_dict = {}
# def unique(sequence):
# seen = set()
# return [x for x in sequence if not(x in seen or seen.add(x))]
while not line == "filter":
words = line.split(" -> ")
topic = words[0]
hashtags = words[1].split(", ")
if topic not in forum_dict.keys():
forum_dict[topic] = hashtags
else:
forum_dict[topic].extend(hashtags)
line = input()
sequence = set(input().split(', '))
for topic, hashtags in forum_dict.items():
ht_set = sorted(set(hashtags), key= hashtags.index)
if sequence.issubset(ht_set):
print(f"{topic} | {', '.join(map(lambda x: '#' + x, ht_set))}")
| StarcoderdataPython |
66690 | import json
import os
from contextlib import ExitStack
from collections import defaultdict
os.makedirs("workdata/icc", exist_ok=True)
files = defaultdict(dict)
with open("workdata/clausified.json", "r") as f:
with ExitStack() as stack:
for ds in ["eca", "emotion-stimulus", "reman", "gne", "electoral_tweets"]:
for split in ["train", "dev", "test"]:
files[ds][split] = stack.enter_context(
open(f"workdata/icc/{ds}.{split}", "w")
)
for line in f:
data = json.loads(line)
ds = data["dataset"]
split = data["split"]
for clause in data["clauses"]:
text = " ".join(token for token, tag in clause)
label = int(bool(set("BI") & {tag for token, tag in clause}))
files[ds][split].write(f"{text}\t{label}\n")
| StarcoderdataPython |
3450689 | <reponame>Kukuster/SSrehub
# standard library
from subprocess import run, PIPE
from typing import Dict, List, TypedDict
import os
import shutil
RUN_CMD_ONFAIL_EXITCODE = 22
class CMD_RETURN:
    # Result of a finished process, populated by run_cmd_rich()/run_bash_rich().
    ec: int      # process exit code
    stdout: str  # captured standard output, trailing whitespace stripped
    stderr: str  # captured standard error, trailing whitespace stripped
def run_cmd(cmd: List[str]):
    """A wrapper around subprocess.run that nicely fails on a non-zero exit code"""
    if not cmd:
        raise ValueError('cmd has to be a non-empty list')
    proc = run(cmd, stdout=PIPE, stderr=PIPE)
    if proc.returncode != 0:
        # Build a readable failure report, including stderr when present.
        message = f"command \"{cmd[0]}\" finished with exit code: {proc.returncode}"
        captured = proc.stderr.decode('utf-8')
        if captured:
            message += "\nand produced the following error message:\n" + captured
        raise ChildProcessError(message)
    return proc.stdout.decode('utf-8').rstrip()
def run_cmd_rich(cmd: List[str]) -> CMD_RETURN:
    """A wrapper around subprocess.run, returns stdout, stderr, and exit code"""
    if not cmd:
        raise ValueError('cmd has to be a non-empty list')
    proc = run(cmd, stdout=PIPE, stderr=PIPE)
    result = CMD_RETURN()
    result.ec = proc.returncode
    result.stdout = proc.stdout.decode('utf-8').rstrip()
    result.stderr = proc.stderr.decode('utf-8').rstrip()
    return result
def run_bash(bash_code: str):
    """Safely runs a given bash code, and nicely fails on a non-zero exit code"""
    # Delegates to run_cmd: bash executes the code string, and a non-zero
    # exit code raises ChildProcessError with the captured stderr.
    return run_cmd(['bash', '-c', bash_code])
def run_bash_rich(bash_code: str):
    """Runs a given bash code, returns stdout, stderr, and exit code"""
    # Delegates to run_cmd_rich; never raises on a non-zero exit code.
    return run_cmd_rich(['bash', '-c', bash_code])
def rm(file: str):
    """Remove a single file; like `rm -f`, a missing file is a no-op."""
    try:
        os.remove(file)
    except FileNotFoundError:
        pass
    return None
def rm_r(dir: str):
    """Recursively remove a directory tree; a missing directory is a no-op."""
    try:
        shutil.rmtree(dir)
    except FileNotFoundError:
        pass
    return None
def rm_rf(file_or_dir: str):
    """Remove a file or a directory tree; anything else (or missing) is a no-op."""
    if os.path.isdir(file_or_dir):
        return rm_r(file_or_dir)
    if os.path.isfile(file_or_dir):
        return rm(file_or_dir)
    return None
def mv(file_or_dir: str, new_name: str):
    """Move/rename a file or directory (like `mv`); returns None."""
    shutil.move(file_or_dir, new_name)
| StarcoderdataPython |
1618544 | from abaqusConstants import *
from .Load import Load
from ..Region.Region import Region
class InertiaRelief(Load):
    """The InertiaRelief object defines an inertia relief load.
    The InertiaRelief object is derived from the Load object.

    Attributes
    ----------
    name: str
        A String specifying the load repository key.
    localCoordinates: int
        None or a :py:class:`~abaqus.Datum.DatumCsys.DatumCsys` object specifying the local coordinate system of the rigid body
        degrees of freedom for the inertia relief load. If **localCoordinates=None**, the free
        directions are defined in the global coordinate system. When this member is queried, it
        returns an Int. The default value is None.
    region: Region
        A :py:class:`~abaqus.Region.Region.Region` object specifying the region to which the load is applied.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        import load
        mdb.models[name].loads[name]
    """

    # A String specifying the load repository key.
    name: str = ''

    # None or a DatumCsys object specifying the local coordinate system of the rigid body
    # degrees of freedom for the inertia relief load. If *localCoordinates*=None, the free
    # directions are defined in the global coordinate system. When this member is queried, it
    # returns an Int. The default value is None.
    localCoordinates: int = None

    # A Region object specifying the region to which the load is applied.
    region: Region = Region()

    def __init__(self, name: str, createStepName: str, u1: Boolean = OFF, u2: Boolean = OFF, u3: Boolean = OFF,
                 ur1: Boolean = OFF, ur2: Boolean = OFF, ur3: Boolean = OFF, referencePoint: tuple = (),
                 localCoordinates: int = None):
        """This method creates an InertiaRelief object.

        Notes
        -----
        This function can be accessed by:

        .. code-block:: python

            mdb.models[name].InertiaRelief

        Parameters
        ----------
        name
            A String specifying the load repository key.
        createStepName
            A String specifying the name of the step in which the load is created.
        u1
            A Boolean specifying the 1-direction as a free direction.Note:Although *u1*, *u2*, *u3*,
            *ur1*, *ur2*, and *ur3* are optional arguments, at least one of them must be specified.
            Further, any specified set of free directions cannot include only two rotational degrees
            of freedom.
        u2
            A Boolean specifying the 2-direction as a free direction.
        u3
            A Boolean specifying the 3-direction as a free direction.
        ur1
            A Boolean specifying the rotation about the 1–direction as a free direction.
        ur2
            A Boolean specifying the rotation about the 2–direction as a free direction.
        ur3
            A Boolean specifying the rotation about the 3–direction as a free direction.
        referencePoint
            A sequence of Floats specifying the *X*, *Y* and *Z*-coordinates of a fixed rotation
            point or a point on the rotation axis or a point on the symmetry line, about which
            rotations are defined. Such a point must be specified only for certain combinations of
            free directions.
        localCoordinates
            None or a DatumCsys object specifying the local coordinate system of the rigid body
            degrees of freedom for the inertia relief load. If *localCoordinates*=None, the free
            directions are defined in the global coordinate system. When this member is queried, it
            returns an Int. The default value is None.

        Returns
        -------
            An InertiaRelief object.
        """
        super().__init__()
        # Stub body: presumably the actual behavior is supplied by the Abaqus
        # kernel at runtime (this package documents the scripting API) -- confirm.
        pass

    def setValues(self, u1: Boolean = OFF, u2: Boolean = OFF, u3: Boolean = OFF, ur1: Boolean = OFF,
                  ur2: Boolean = OFF, ur3: Boolean = OFF, referencePoint: tuple = (),
                  localCoordinates: int = None):
        """This method modifies the data for an existing InertiaRelief object in the step where it
        is created.

        Parameters
        ----------
        u1
            A Boolean specifying the 1-direction as a free direction.Note:Although *u1*, *u2*, *u3*,
            *ur1*, *ur2*, and *ur3* are optional arguments, at least one of them must be specified.
            Further, any specified set of free directions cannot include only two rotational degrees
            of freedom.
        u2
            A Boolean specifying the 2-direction as a free direction.
        u3
            A Boolean specifying the 3-direction as a free direction.
        ur1
            A Boolean specifying the rotation about the 1–direction as a free direction.
        ur2
            A Boolean specifying the rotation about the 2–direction as a free direction.
        ur3
            A Boolean specifying the rotation about the 3–direction as a free direction.
        referencePoint
            A sequence of Floats specifying the *X*, *Y* and *Z*-coordinates of a fixed rotation
            point or a point on the rotation axis or a point on the symmetry line, about which
            rotations are defined. Such a point must be specified only for certain combinations of
            free directions.
        localCoordinates
            None or a DatumCsys object specifying the local coordinate system of the rigid body
            degrees of freedom for the inertia relief load. If *localCoordinates*=None, the free
            directions are defined in the global coordinate system. When this member is queried, it
            returns an Int. The default value is None.
        """
        # Stub body -- see note in __init__.
        pass

    def setValuesInStep(self, stepName: str, u1: Boolean = OFF, u2: Boolean = OFF, u3: Boolean = OFF,
                        ur1: Boolean = OFF, ur2: Boolean = OFF, ur3: Boolean = OFF, referencePoint: tuple = (),
                        fixed: Boolean = OFF):
        """This method modifies the propagating data for an existing InertiaRelief object in the
        specified step.

        Parameters
        ----------
        stepName
            A String specifying the name of the step in which the load is modified.
        u1
            A Boolean specifying the 1-direction as a free direction.
        u2
            A Boolean specifying the 2-direction as a free direction.
        u3
            A Boolean specifying the 3-direction as a free direction.
        ur1
            A Boolean specifying the rotation about the 1–direction as a free direction.
        ur2
            A Boolean specifying the rotation about the 2–direction as a free direction.
        ur3
            A Boolean specifying the rotation about the 3–direction as a free direction.
        referencePoint
            A sequence of Floats specifying the point about which rotations are defined. The point
            can be specified only for certain combinations of free directions. The *referencePoint*
            argument can be one of the following:

            - The *X*, *Y* and *Z*-coordinates of a fixed rotation point.
            - A point on the rotation axis.
            - A point on the symmetry line.
        fixed
            A Boolean specifying whether the inertia relief loading should remain fixed at the
            current loading at the start of the step. The default value is OFF.
        """
        # Stub body -- see note in __init__.
        pass
| StarcoderdataPython |
5075608 | """ <NAME>, University of Warwick, March 2018
Extraction and handling of routing info
"""
from urlparse import urlparse
import re
from copy import deepcopy
import match_heuristics as mh
class simple_node:
    """A single endpoint node in a route tree.

    Stores the endpoint name (plus a DOT/Graphviz-safe variant), the full
    route, decorator details (login/type/methods), and parent/child links.
    """

    def __init__(self, name='', route=None):
        self.name = name
        self.safe_name = self._make_safe(name)
        self.route = route
        self.login = None
        self.type = None
        self.methods = None
        self.id = ""      # ID of the tree this node belongs to
        self.parent = None
        self.children = []

    def set_name(self, name):
        """Update the name, keeping safe_name in sync."""
        self.name = name
        self.safe_name = self._make_safe(name)

    def set_id(self, id):
        self.id = id

    def set_route(self, route):
        self.route = route

    def set_login(self, login):
        self.login = login

    def set_type(self, type):
        self.type = type

    def set_methods(self, methods):
        self.methods = methods

    def _make_safe(self, name):
        """Return a variant of `name` that DOT and Graphviz can use."""
        # Colons are not safe in DOT identifiers; replace each with '_'.
        return '_'.join(name.split(':'))
class flask_route:
    """
    A full route, comprised of route_section objects.
    """

    def __init__(self, path=None):
        """path -- list of route_section objects (defaults to a fresh empty list)"""
        # BUGFIX: the original default `path=[]` shared one mutable list across
        # every default-constructed route; use the None sentinel instead.
        if path is None:
            path = []
        assert self._check_path(path)
        self.path = path

    def replace_path(self, new_path):
        """Replace the whole path (asserts every element is a route_section)."""
        # BUGFIX: `self` was missing from the original signature, so any call
        # raised TypeError.
        assert self._check_path(new_path)
        self.path = new_path

    def add_to_path(self, new_section):
        """Append one section; non-route_section values are silently ignored."""
        # BUGFIX: `self` was missing here as well.
        if isinstance(new_section, route_section):
            self.path.append(new_section)

    def _check_path(self, path):
        # True when every element is a route_section (trivially true when empty).
        return all(isinstance(section, route_section) for section in path)

    def __str__(self):
        # 'a/b/c/' -- every section is followed by a slash.
        return ''.join(str(item) + '/' for item in self.path)

    def str_notr(self):
        """String form without the trailing slash ('' for an empty route)."""
        return str(self)[:-1]

    def __eq__(self, other):
        # Routes are equal when they have the same length and each section
        # pair compares equal (placeholders match any section).
        if len(self.path) != len(other.path):
            return False
        return all(mine == theirs for mine, theirs in zip(self.path, other.path))

    def __ne__(self, other):
        return not self.__eq__(other)

    def pathlen(self):
        """Number of sections in the route."""
        return len(self.path)
class route_section:
    """A single, literal segment of a route.

    Serves as the base class; derived classes (e.g. flask placeholders)
    add matching power on top of the plain string value.
    """

    def __init__(self, value=''):
        self.value = value  # String

    def set_value(self, value):
        self.value = value

    def __str__(self):
        return self.value

    def __eq__(self, other):
        # A placeholder matches any section; otherwise compare literal values.
        # (Handles equality with derived types too.)
        return True if isinstance(other, flask_placeholder) else self.value == other.value

    def __ne__(self, other):
        return not self.__eq__(other)
class flask_placeholder(route_section):
    """A route segment that is a flask-style placeholder, i.e. '<type:name>'."""

    def __init__(self, value='', var='', typestr=''):
        route_section.__init__(self, value)
        self.var = var          # The placeholder name used
        self.typestr = typestr  # The flask type string

    def set_var(self, var):
        self.var = var

    def set_typestr(self, typestr):
        self.typestr = typestr

    def __str__(self):
        return self.value

    def full_str(self):
        """Raw value followed by '[var,typestr]' details."""
        return '{}[{},{}]'.format(self.value, self.var, self.typestr)

    def __eq__(self, other):
        # A placeholder can match anything, so it equals any route_section.
        return isinstance(other, route_section)

    def __ne__(self, other):
        return not self.__eq__(other)
def split_url_path(url):
    """Split a url path into its segments.

    A trailing slash is appended when missing, guaranteeing one trailing
    empty chunk after the split, which is then dropped.
    """
    if not url.endswith('/'):
        url = url + '/'
    # Slicing never raises, so the original try/except was unreachable.
    return url.split('/')[0:-1]
# Flask-style placeholder segment: '<converter:name>'. Compiled once at
# import time instead of on every call; raw string avoids the invalid
# escape-sequence warning of the original '<(\w+):(\w+)>' literal.
_PLACEHOLDER_RE = re.compile(r'<(\w+):(\w+)>')


def parse_route_segment(string):
    """Turn a string containing a route segment into
    a suitable route_section object.

    Returns a flask_placeholder when the segment looks like '<type:name>',
    otherwise a plain route_section wrapping the string.
    """
    match = _PLACEHOLDER_RE.match(string)
    if match is None:
        return route_section(string)
    # groups() cannot fail after a successful match, so the original
    # try/except fallback was unreachable.
    typestr, var = match.groups()
    return flask_placeholder(string, var, typestr)
def parse_flask_routing(url):
    """Turn a url into a flask_route object, one section per path segment."""
    sections = [parse_route_segment(part) for part in split_url_path(url)]
    return flask_route(sections)
def get_parent(route, all_routes):
    """Get parent of given route (best guess)

    Parameters:
      route -- the route to consider
      all_routes -- all possible routes to select from

    Returns the best-matching shorter route, or None for a one-segment route.
    """
    parent = None
    # A single-segment route is a root; it has no parent.
    if len(route.path) == 1:
        return None
    potential = deepcopy(route)
    all_matches = []
    # Repeatedly drop the last segment and collect every known route equal to
    # the shortened prefix (placeholder sections compare equal to anything).
    while(len(potential.path) > 0):
        potential = flask_route(potential.path[0:-1])
        for rt in all_routes:
            if potential == rt:
                all_matches.append(rt)
    # Delegate the final choice among candidates to the match heuristics.
    parent = mh.select_match(route, all_matches)
    return parent
def add_ids(node_dict, id):
    """Assign *id* to every node in *node_dict* and return the dict."""
    for node in node_dict.values():
        node.set_id(id)
    return node_dict
| StarcoderdataPython |
6421865 | <filename>dev_tests/test_multi_nnls.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import logging
import time
# Set matplotlib backend to 'Agg' (compatible when X11 is not running
# e.g., on a cluster). Note that the backend can only be set BEFORE
# matplotlib is used or even submodules are imported!
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import dynamite as dyn
def run_user_test():
    """Run the DYNAMITE multi-kinematics user test.

    Builds models from the FCC multi-ml config, then, for the first three
    models, re-solves the orbit weights with the scipy NNLS solver and
    plots them against the legacy solution.

    Returns:
        tuple: (all-models table, comparison-file path, n_max_mods).
    """
    logger = logging.getLogger()
    logger.info(f'Using DYNAMITE version: {dyn.__version__}')
    logger.info(f'Located at: {dyn.__path__}')
    # print to console anyway...
    print('Using DYNAMITE version:', dyn.__version__)
    print('Located at:', dyn.__path__)
    # read configuration; chdir to the script's directory so the relative
    # config path resolves when run from elsewhere
    if '__file__' in globals():
        file_dir = os.path.dirname(__file__)
        if file_dir:
            os.chdir(file_dir)
    fname = 'user_test_config_multi_ml_FCC.yaml'
    # fname = 'user_test_config_multi_ml.yaml' # alternate config w/ 3 kinsets
    c = dyn.config_reader.Configuration(fname, reset_logging=True)
    # Start from a clean state: remove previous orbit libraries, model
    # tables and plot files so this run's outputs are unambiguous.
    c.remove_existing_orblibs()
    c.remove_existing_all_models_file()
    plotdir = c.settings.io_settings['plot_directory']
    plotfile_ml = plotdir + 'ml_vs_iter_chi2.png'
    if os.path.isfile(plotfile_ml):
        os.remove(plotfile_ml)
    plotfile_chi2 = plotdir + 'chi2_vs_model_id.png'
    if os.path.isfile(plotfile_chi2):
        os.remove(plotfile_chi2)
    logger.info(f'{c.system.n_kin} kinematics data sets in system')
    print(f'{c.system.n_kin} kinematics data sets in system')
    # Reference chi2 file, keyed by the orbit-library sampling settings.
    compare_file = os.path.dirname(__file__) \
        + "/data/chi2_compare_ml_" \
        f"{c.settings.orblib_settings['nE']}" \
        f"{c.settings.orblib_settings['nI2']}" \
        f"{c.settings.orblib_settings['nI3']}.dat"
    # "run" the models
    t = time.perf_counter()
    smi = dyn.model_iterator.ModelIterator(c)
    delt = time.perf_counter()-t
    logger.info(f'Computation time: {delt} seconds = {delt/60} minutes')
    # print to console regardless of logging level
    print(f'Computation time: {delt} seconds = {delt/60} minutes')
    c.all_models.table.pprint(max_lines=-1, max_width=-1)
    # for one model, re-calculate solution with the new weight solver
    print('Recalculating orbit weights with scipy NNLS solver')
    fig, ax = plt.subplots(1, 3, sharey=True, figsize=(12,4))
    for i in [0,1,2]:
        mod0 = c.all_models.get_model_from_row(i)
        parset0 = c.all_models.get_parset_from_row(i)
        orblib0 = dyn.orblib.LegacyOrbitLibrary(config=c,
                                                mod_dir=mod0.directory_noml,
                                                parset=parset0)
        orblib0.read_losvd_histograms()
        # Legacy solution for comparison.
        weight_solver = mod0.get_weights()
        weights_old, chi2_tot_old, chi2_kin_old = weight_solver.solve(orblib0)
        weight_solver_new = dyn.weight_solvers.NNLS(
            config=c,
            directory_with_ml=mod0.directory,
            CRcut=True,
            nnls_solver='scipy')
        solution_new = weight_solver_new.solve(orblib0)
        weights_new = solution_new[0]
        ax[i].plot(weights_old, label='Legacy')
        ax[i].plot(weights_new, '--', label='NNLS scipy')
        ax[i].legend()
        ax[i].set_xlabel('orbit')
        ax[i].set_title(f'Model {i}')
    ax[0].set_ylabel('weight')
    fig.subplots_adjust(wspace=0)
    fig.tight_layout()
    plotfile = f'{plotdir}multikin_wsolver_compare.png'
    if os.path.isfile(plotfile):
        os.remove(plotfile)
    fig.savefig(plotfile)
    logger.info(f'Look at {plotfile}')
    print(f'Look at {plotfile}')
    return c.all_models.table, \
        compare_file, \
        c.settings.parameter_space_settings['stopping_criteria']['n_max_mods']


if __name__ == '__main__':
    run_user_test()
# end
| StarcoderdataPython |
3318490 | <reponame>touilleWoman/N-puzzle<filename>srcs/generator.py
#!/usr/bin/env python
import sys
import argparse
import random
def make_puzzle(s, solvable, iterations):
    """Build an s*s puzzle by shuffling the goal state.

    The blank tile (0) is moved *iterations* times via random legal
    moves, so the result stays solvable by construction.  When an
    unsolvable puzzle is requested, two non-blank tiles are swapped at
    the end, flipping the permutation parity (and hence solvability).
    """

    def swap_empty(p):
        # Collect the board indices the blank tile may legally move to.
        idx = p.index(0)
        poss = []
        if idx % s > 0:          # not on the left edge
            poss.append(idx - 1)
        if idx % s < s - 1:      # not on the right edge
            poss.append(idx + 1)
        # Integer arithmetic for row tests: the original used float
        # division ('idx / s'), a Python-2 leftover that only worked by
        # accident of the surrounding guards.
        if idx >= s:             # not on the top row
            poss.append(idx - s)
        if idx // s < s - 1:     # not on the bottom row
            poss.append(idx + s)
        swi = random.choice(poss)
        p[idx] = p[swi]
        p[swi] = 0

    p = make_goal(s)
    for _ in range(iterations):
        swap_empty(p)
    if not solvable:
        # Swap two non-blank tiles to flip the permutation parity.
        if p[0] == 0 or p[1] == 0:
            p[-1], p[-2] = p[-2], p[-1]
        else:
            p[0], p[1] = p[1], p[0]
    return p


def make_goal(s):
    """Return the spiral ('snail') goal state for an s*s puzzle.

    Tiles 1..s*s-1 are written clockwise from the top-left corner,
    spiralling inwards; the last cell written is the blank (0).
    """
    ts = s * s
    puzzle = [-1 for i in range(ts)]
    cur = 1
    x = 0
    ix = 1
    y = 0
    iy = 0
    while True:
        puzzle[x + y * s] = cur
        if cur == 0:
            break
        cur += 1
        # Turn clockwise whenever the next cell is off-grid or filled.
        if x + ix == s or x + ix < 0 or (ix != 0 and puzzle[x + ix + y * s] != -1):
            iy = ix
            ix = 0
        elif y + iy == s or y + iy < 0 or (iy != 0 and puzzle[x + (y + iy) * s] != -1):
            ix = -iy
            iy = 0
        x += ix
        y += iy
        if cur == s * s:
            cur = 0
    return puzzle
if __name__ == "__main__":
    # CLI entry point: parse the flags, generate a puzzle and print it.
    parser = argparse.ArgumentParser()
    parser.add_argument("size", type=int, help="Size of the puzzle's side. Must be >3.")
    parser.add_argument(
        "-s",
        "--solvable",
        action="store_true",
        default=False,
        help="Forces generation of a solvable puzzle. Overrides -u.",
    )
    parser.add_argument(
        "-u",
        "--unsolvable",
        action="store_true",
        default=False,
        help="Forces generation of an unsolvable puzzle",
    )
    parser.add_argument(
        "-i", "--iterations", type=int, default=10000, help="Number of passes"
    )
    args = parser.parse_args()
    random.seed()
    if args.solvable and args.unsolvable:
        print("Can't be both solvable AND unsolvable, dummy !")
        sys.exit(1)
    if args.size < 3:
        print(
            "Can't generate a puzzle with size lower than 2. It says so in the help. Dummy."
        )
        sys.exit(1)
    # Neither flag given: pick solvability at random.
    if not args.solvable and not args.unsolvable:
        solv = random.choice([True, False])
    elif args.solvable:
        solv = True
    elif args.unsolvable:
        solv = False
    s = args.size
    puzzle = make_puzzle(s, solvable=solv, iterations=args.iterations)
    # Right-justify every tile to the width of the largest number.
    w = len(str(s * s))
    print("# This puzzle is %s" % ("solvable" if solv else "unsolvable"))
    print("%d" % s)
    for y in range(s):
        for x in range(s):
            print("%s" % (str(puzzle[x + y * s]).rjust(w)), end=' ')
        print("\n")
| StarcoderdataPython |
# Desafio 33: read three numbers and report the smallest and largest.
print('=' * 12 + 'Desafio 33' + '=' * 12)
a = float(input('Digite o número 1: '))
b = float(input('Digite o número 2: '))
c = float(input('Digite o número 3: '))
# The builtins compute the extremes directly, replacing six chained
# comparisons; ties resolve to the same value either way.
maior = max(a, b, c)
menor = min(a, b, c)
print(f'Dentre esses números, o menor é o {menor} e o maior é o {maior}!')
| StarcoderdataPython |
273167 | <reponame>Horta/limix-qep
import os
import sys
from setuptools import setup
from setuptools import find_packages
def setup_package():
    """Configure and run setuptools for the limix_qep package.

    Temporarily chdirs into the source directory so relative paths in the
    metadata resolve, and restores sys.path and the cwd afterwards.
    """
    src_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    old_path = os.getcwd()
    os.chdir(src_path)
    sys.path.insert(0, src_path)
    # Only pull in pytest-runner when a test-related command was given.
    needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
    pytest_runner = ['pytest-runner'] if needs_pytest else []
    setup_requires = ['build_capi>=0.0.8', 'ncephes>=0.1',
                      'cffi>=1.6', 'limix_math>=0.3'] + pytest_runner
    install_requires = ['hcache', 'limix_math>=0.3',
                        'lim>=0.1', 'pytest', 'tabulate>=0.7']
    tests_require = install_requires
    metadata = dict(
        name='limix_qep',
        maintainer="Limix Developers",
        version='0.3.4',
        maintainer_email="<EMAIL>",
        packages=find_packages(),
        license="BSD",
        url='http://pmbio.github.io/limix/',
        install_requires=install_requires,
        setup_requires=setup_requires,
        tests_require=tests_require,
        zip_safe=False,
        include_package_data=True,
        cffi_modules=['moments_build.py:binomial']
    )
    # bdist_conda only exists inside conda's patched distutils; fall back
    # silently to a plain setup elsewhere.
    try:
        from distutils.command.bdist_conda import CondaDistribution
    except ImportError:
        pass
    else:
        metadata['distclass'] = CondaDistribution
        metadata['conda_buildnum'] = 1
        metadata['conda_features'] = ['mkl']
    try:
        setup(**metadata)
    finally:
        # Restore the import path and working directory even on failure.
        del sys.path[0]
        os.chdir(old_path)


if __name__ == '__main__':
    setup_package()
| StarcoderdataPython |
# Classify an integer relative to 10 and 20.
a = int(input("enter a number"))
# The original chain left a == 10 and a == 20 unclassified (they fell
# through to a dead "No Number" branch); inclusive bounds give every
# integer a bucket.
if a < 10:
    print("Entered Number is below 10")
elif a <= 20:
    print("Entered Number between 10 and 20")
else:
    print("Entered number is above 20")
| StarcoderdataPython |
11254288 | <filename>scripts/prepare_data.py
import os
import argparse
import pickle
import torch
import torch.nn.functional as F
import kaldi_io
def main():
    """Convert Kaldi VoxCeleb features into per-utterance torch tensors.

    Reads feature matrices from the Kaldi scp file given by --feat_scp,
    saves each one as '<utterance-id>.pt' under --save_dir, and writes a
    lengths.pkl mapping each saved file name to its number of frames.
    """
    parser = argparse.ArgumentParser("Configuration for data preparation")
    parser.add_argument("--feat_scp", type=str, help="Path to the VoxCeleb features generated by the Kaldi scripts")
    parser.add_argument("--save_dir", type=str, help="Directory to save the preprocessed pytorch tensors")
    config = parser.parse_args()
    # Map '<key>.pt' -> frame count; consumers use this for batching.
    id2len = {}
    for key, mat in kaldi_io.read_mat_scp(config.feat_scp):
        id2len[key + '.pt'] = len(mat)
        log_mel = torch.FloatTensor(mat)
        torch.save(log_mel, os.path.join(config.save_dir, key + '.pt'))
    # Protocol 4 handles large objects; available since Python 3.4.
    with open(os.path.join(config.save_dir, 'lengths.pkl'), 'wb') as f:
        pickle.dump(id2len, f, protocol=4)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
137035 | <reponame>onetop21/MLAppDeploy
from typing import Tuple, List
from mlad.cli import context
def init(address) -> context.Context:
    """Create (or replace) the 'default' context for *address* and select it."""
    ctx = context.add('default', address, allow_duplicate=True)
    context.use('default')
    return ctx
def set(*args) -> None:
    """Update fields of the 'default' context (thin wrapper over context.set).

    NOTE: intentionally named after the CLI verb; shadows the builtin
    'set' within this module.
    """
    return context.set('default', *args)
def get() -> context.Context:
    """Return the 'default' context object."""
    return context.get('default')
def env(unset=False) -> Tuple[List[str], str]:
    """Render the default context's environment as shell 'export' lines.

    With unset=True each variable is exported empty (to clear it).
    Returns the lines together with a usage-hint message.
    """
    ctx = get()
    lines = []
    for entry in context.get_env(ctx):
        if unset:
            key, _ = entry.split('=')
            lines.append(f'export {key}=')
        else:
            lines.append(f'export {entry}')
    msg = '# To set environment variables, run "eval $(mlad config env)"'
    return lines, msg
def get_env(dict=False):
    """Return the default context's environment.

    With dict=True, returns a {name: value} mapping; otherwise the raw
    'NAME=value' strings.  NOTE: the parameter name shadows the builtin
    'dict' but is kept for backward compatibility with keyword callers.
    """
    ctx = get()
    env = context.get_env(ctx)
    if not dict:
        return env
    # split('=', 1) keeps values that themselves contain '=' intact; the
    # previous split('=') truncated such values at the second '='.
    return {name: value for name, value in (e.split('=', 1) for e in env)}
| StarcoderdataPython |
229105 | import numpy as np
from sklearn import clone
from sklearn.base import (
BaseEstimator,
TransformerMixin,
MetaEstimatorMixin,
)
from sklearn.utils.validation import (
check_is_fitted,
check_X_y,
FLOAT_DTYPES,
)
class EstimatorTransformer(TransformerMixin, MetaEstimatorMixin, BaseEstimator):
    """
    Allows using an estimator such as a model as a transformer in an earlier step of a pipeline
    :param estimator: An instance of the estimator that should be used for the transformation
    :param predict_func: The function called on the estimator when transforming e.g. (`predict`, `predict_proba`)
    """
    def __init__(self, estimator, predict_func="predict"):
        self.estimator = estimator
        self.predict_func = predict_func
    def fit(self, X, y):
        """Fits a clone of the estimator, leaving the original untouched."""
        X, y = check_X_y(X, y, estimator=self, dtype=FLOAT_DTYPES)
        self.estimator_ = clone(self.estimator)
        self.estimator_.fit(X, y)
        return self
    def transform(self, X):
        """
        Applies the `predict_func` on the fitted estimator.
        Returns an array of shape `(X.shape[0], 1)`.
        """
        # NOTE(review): reshape(-1, 1) assumes one prediction per sample;
        # for predict_proba with shape (n_samples, n_classes) this flattens
        # class columns into extra rows -- confirm intended usage.
        check_is_fitted(self, "estimator_")
        return getattr(self.estimator_, self.predict_func)(X).reshape(-1, 1)
| StarcoderdataPython |
6628036 | """ Code to facilitate delayed archiving of FITS files in the images directory """
import os
import time
import queue
import atexit
import shutil
from contextlib import suppress
from threading import Thread
from astropy import units as u
from panoptes.utils import get_quantity_value
from panoptes.utils.time import current_time
from panoptes.pocs.base import PanBase
VALID_EXTENSIONS = (".fits", ".fits.fz")
class Archiver(PanBase):
    """ Class to watch the images directory for new files and move them to the archive directory
    after enough time has passed.

    Three worker threads cooperate: one periodically reports status, one
    scans the images directory and enqueues candidate files, and one moves
    files whose queue entry is older than `delay_interval` to the archive.
    """

    _valid_extensions = VALID_EXTENSIONS

    def __init__(self, images_directory=None, archive_directory=None, delay_interval=None,
                 sleep_interval=None, status_interval=60, *args, **kwargs):
        """
        Args:
            images_directory (str): The images directory to archive. If None (default), uses
                the directories.images config entry.
            archive_directory (str): The archive directory. If None (default), uses
                the directories.archive config entry.
            delay_interval (u.Quantity): The minimum amount of time a file must spend in the
                archive queue before it is archived. If None (default), uses the
                archiver.delay_time config entry.
            sleep_interval (u.Quantity): The amout of time to sleep in between checking for new
                files to archive. Ideally this should be longer than delay_interval. If None
                (default), uses the archiver.sleep_interval confing entry.
            status_interval (float, optional): Sleep for this long between status reports. Default
                60s.
            *args, **kwargs: Parsed to PanBase initialiser.
        """
        super().__init__(*args, **kwargs)

        if images_directory is None:
            images_directory = self.get_config("directories.images")
        self.images_directory = str(images_directory)

        if archive_directory is None:
            archive_directory = self.get_config("directories.archive")
        self.archive_directory = str(archive_directory)
        self.logger.debug(f"Archive directory: {self.archive_directory}")

        if delay_interval is None:
            delay_interval = self.get_config("archiver.delay_interval")
        self.delay_interval = get_quantity_value(delay_interval, u.minute) * u.minute

        if sleep_interval is None:
            sleep_interval = self.get_config("archiver.sleep_interval")
        self.sleep_interval = get_quantity_value(sleep_interval, u.minute) * u.minute

        self._status_interval = get_quantity_value(status_interval, u.second)

        self._n_archived = 0
        self._stop = False
        self._archive_queue = queue.Queue()

        self._status_thread = Thread(target=self._async_monitor_status)
        self._watch_thread = Thread(target=self._async_watch_directory)
        self._archive_thread = Thread(target=self._async_archive_files)
        self._threads = [self._status_thread, self._watch_thread, self._archive_thread]

        atexit.register(self.stop)  # This gets called when python is quit

    @property
    def is_running(self):
        return self.status["is_running"]

    @property
    def status(self):
        """ Return a status dictionary.
        Returns:
            dict: The status dictionary.
        """
        status = {"is_running": all([t.is_alive() for t in self._threads]),
                  "status_thread": self._status_thread.is_alive(),
                  "watch_thread": self._watch_thread.is_alive(),
                  # Bug fix: this key previously reported the *status*
                  # thread's liveness instead of the archive thread's.
                  "archive_thread": self._archive_thread.is_alive(),
                  "queued": self._archive_queue.qsize(),
                  "archived": self._n_archived}
        return status

    def start(self):
        """ Start archiving. """
        self.logger.info("Starting archiving.")
        self._stop = False
        for thread in self._threads:
            thread.start()

    def stop(self, blocking=True):
        """ Stop archiving.
        Args:
            blocking (bool, optional): If True (default), blocks until all threads have joined.
        """
        self.logger.info("Stopping archiving.")
        self._stop = True
        if blocking:
            for thread in self._threads:
                # join() raises RuntimeError for threads never started.
                with suppress(RuntimeError):
                    thread.join()

    def _async_monitor_status(self):
        """ Report the status on a regular interval. """
        self.logger.debug("Starting status thread.")
        while True:
            if self._stop:
                self.logger.debug("Stopping status thread.")
                break
            # Get the current status
            status = self.status
            self.logger.debug(f"Archiver status: {status}")
            # Sleep before reporting status again
            time.sleep(self._status_interval)

    def _async_watch_directory(self):
        """ Watch the images directory and add all valid files to the archive queue. """
        self.logger.debug("Starting watch thread.")
        while True:
            if self._stop:
                self.logger.debug("Stopping watch thread.")
                break
            # Loop over filenames and add them to the queue
            # Duplicates are taken care of later on
            for filename in self._get_filenames_to_archive():
                self._archive_queue.put([current_time(), filename])
            # Sleep before checking again
            time.sleep(self.sleep_interval.to_value(u.second))

    def _async_archive_files(self, sleep=10):
        """ Archive files that have been in the queue longer than self.delay_interval.
        Args:
            sleep (float, optional): Sleep for this long while waiting for self.delay_interval to
                expire. Default: 10s.
        """
        while True:
            # Keep draining the queue even after stop is requested so no
            # enqueued file is left behind.
            if self._stop and self._archive_queue.empty():
                self.logger.debug("Stopping archive thread.")
                break
            # Get the oldest file from the queue
            try:
                track_time, filename = self._archive_queue.get(block=True, timeout=sleep)
            except queue.Empty:
                continue
            # Archive file when it is old enough
            while current_time() - track_time < self.delay_interval:
                time.sleep(sleep)
            # The file may have been archived/deleted while it waited.
            with suppress(FileNotFoundError):
                self._archive_file(filename)
                self._n_archived += 1
            # Tell the queue we are done with this file
            self._archive_queue.task_done()

    def _get_filenames_to_archive(self):
        """ Get valid filenames in the images directory to archive.
        Returns:
            list: The list of filenames to archive.
        """
        filenames = []
        # Get all the matching filenames in the images directory
        for path, _, files in os.walk(self.images_directory):
            for name in files:
                if any([name.endswith(ext) for ext in self._valid_extensions]):
                    filenames.append(os.path.join(path, name))
        return filenames

    def _get_archive_filename(self, filename):
        """ Get the archive filename from the original filename.
        Args:
            filename (str): The filename string.
        Returns:
            str: The archived file name.
        """
        relpath = os.path.relpath(filename, self.images_directory)
        return os.path.join(self.archive_directory, relpath)

    def _archive_file(self, filename):
        """ Archive the file.
        Args:
            filename (str): The filename string.
        Raises:
            FileNotFoundError: If the file no longer exists.
        """
        if not os.path.exists(filename):  # May have already been archived or deleted
            # Bug fix: the message was an f-string with no placeholder and
            # never named the offending file.
            self.logger.warning(f"Tried to archive {filename} but it does not exist.")
            raise FileNotFoundError
        # Get the archived filename
        archive_filename = self._get_archive_filename(filename)
        # Make sure the archive directory exists
        os.makedirs(os.path.dirname(archive_filename), exist_ok=True)
        # Move the file to the archive directory
        self.logger.debug(f"Moving {filename} to {archive_filename}.")
        shutil.move(filename, archive_filename)
| StarcoderdataPython |
5140702 | <gh_stars>0
# Generated by Django 2.2.13 on 2021-02-08 04:00
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.13 (2021-02-08): adds Puzzle.puzzle_type
    # and widens the choices/help text of Puzzle.puzzle_page_type.
    # Applied migrations should not be edited by hand.

    dependencies = [
        ('huntserver', '0072_auto_20201103_1539'),
    ]

    operations = [
        migrations.AddField(
            model_name='puzzle',
            name='puzzle_type',
            field=models.CharField(choices=[('STD', 'A standard puzzle'), ('MET', 'A meta puzzle'), ('NON', 'An unscored puzzle')], default='STD', help_text='The type of puzzle.', max_length=3),
        ),
        migrations.AlterField(
            model_name='puzzle',
            name='puzzle_page_type',
            field=models.CharField(choices=[('PDF', 'Puzzle page displays a PDF'), ('LNK', 'Puzzle page links a webpage'), ('WEB', 'Puzzle page displays a webpage'), ('EMB', 'Puzzle is html embedded in the webpage')], default='WEB', help_text='The type of webpage for this puzzle.', max_length=3),
        ),
    ]
| StarcoderdataPython |
6483192 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-30 10:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9.4 (2016-03-30): drops five fields from
    # the 'scroll' model. Applied migrations should not be edited by hand.

    dependencies = [
        ('pigeon', '0003_auto_20160328_0809'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='scroll',
            name='pubDate',
        ),
        migrations.RemoveField(
            model_name='scroll',
            name='published',
        ),
        migrations.RemoveField(
            model_name='scroll',
            name='regDate',
        ),
        migrations.RemoveField(
            model_name='scroll',
            name='scrollFrom',
        ),
        migrations.RemoveField(
            model_name='scroll',
            name='scrollTo',
        ),
    ]
| StarcoderdataPython |
class InvalidCredentials(Exception):
    """Error indicating the supplied credentials were rejected."""
    pass
class NotFoundError(Exception):
    """Error indicating a requested resource was not found."""
    pass
class ServerAuthError(Exception):
    """Error indicating server-side authentication failed."""
    pass
class InternalServerError(Exception):
    """Error indicating the server reported an internal error."""
    pass
| StarcoderdataPython |
11359179 | <filename>Baixando videos do Youtube/app.py
from pytube import YouTube

# Simple interactive downloader: prompts (in Portuguese) for a video URL
# and a destination directory, prints some metadata, then downloads the
# highest-resolution progressive stream.
link = input("Digite o link do vídeo que seja baixar: ");
path = input("Digite o diretório que seja salvar o vídeo: ");
yt = YouTube(link)
print("Título: ", yt.title);
print("Número de views: ", yt.views);
print("Tamanho do vídeo: ", yt.length, "segundos");
print("Avaliação do vídeo: ", yt.rating);
ys = yt.streams.get_highest_resolution();
print("Baixando...");
ys.download(path);
print("Download Completo!");
| StarcoderdataPython |
6614042 | <filename>debile/utils/aget.py
# Copyright (c) 2012-2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debile.utils import run_command
import deb822
import StringIO
import requests
import gzip
import os
SOURCE = "dists/{suite}/{section}/source/Sources.gz"
def dget(path):
out, err, ret = run_command(["dget", "-u", path])
if ret != 0:
print ret, err
raise Exception("DAMNIT; dget fucked us")
def aget(archive, suite, section, source, version):
    """Fetch the .dsc for *source*/*version* from *archive* via dget.

    Scans the suite/section Sources.gz index for the matching source
    package, downloads it, and returns the .dsc file name.

    Raises:
        Exception: if the source/version pair is not in the index.
    """
    url = "{archive}/{path}".format(
        archive=archive,
        path=SOURCE.format(suite=suite, section=section
                           ))
    for entry in deb822.Deb822.iter_paragraphs(gzip.GzipFile(
            fileobj=StringIO.StringIO(requests.get(url).content))):
        path = entry['Directory']
        dsc = None
        # Locate the .dsc entry among the package's listed files.
        for fp in entry['Files'].splitlines():
            if fp.strip() == "":
                continue
            hash_, size, fid = fp.split()
            if fid.endswith(".dsc"):
                dsc = fid
        if entry['Package'] == source and entry['Version'] == version:
            dget("{archive}/{pool}/{dsc}".format(
                archive=archive,
                pool=path,
                dsc=dsc,
            ))
            return os.path.basename(dsc)
    # Informative failure instead of the old print "BALLS." + bare raise.
    raise Exception("source %s version %s not found in %s"
                    % (source, version, url))
def main():
    """CLI entry point: aget(archive, suite, section, source, version)."""
    import sys
    cli_args = sys.argv[1:]
    return aget(*cli_args)
| StarcoderdataPython |
216310 | <gh_stars>1-10
import os
from subprocess import Popen
import sys
import logging
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(os.path.basename(__file__))
def get_blast_file(qfasta, sfasta, out_dir=None):
    """Build the blast output name '<query>_vs_<subject>.blast'.

    out_dir=None  -> bare file name
    out_dir=True  -> absolute path alongside the query fasta
    out_dir=<dir> -> joined onto that directory

    >>> get_blast_file("/tmp/a.fasta", "b.fasta")
    'a_vs_b.blast'
    >>> get_blast_file("/tmp/a.fasta", "b.fasta", out_dir=True)
    '/tmp/a_vs_b.blast'
    """
    qbase = os.path.basename(qfasta)
    sbase = os.path.basename(sfasta)
    stem_q = qbase[:qbase.rfind(".")]
    stem_s = sbase[:sbase.rfind(".")]
    blast_file = stem_q + "_vs_" + stem_s + ".blast"
    if not out_dir:
        return blast_file
    if out_dir is True:
        qdir = os.path.dirname(qfasta)
        return os.path.abspath(os.path.join(qdir, blast_file))
    return os.path.join(out_dir, blast_file)
def is_current_file(a, b):
    """True when both files exist and *a* was modified more recently than *b*.

    >>> is_current_file(__file__, 'a.txt')
    False
    >>> is_current_file(__file__, __file__)
    False
    """
    if not os.path.exists(a) or not os.path.exists(b):
        return False
    return os.stat(a).st_mtime > os.stat(b).st_mtime
def is_same_blast_params(blast_file, cmd):
    """Compare *cmd* to the command saved in '<blast_file>.cmd'.

    blast() records the exact command in a .cmd side file; if that file
    holds the same command (and the fastas are current) the blast output
    is considered up to date and the run is skipped.
    """
    params_file = blast_file + ".cmd"
    if not os.path.exists(params_file):
        return False
    saved = open(params_file).read()
    return cmd.strip() == saved.strip()
def sh(cmd, blast_log=None):
    """Run *cmd* in the shell, optionally redirecting stderr to *blast_log*."""
    if blast_log is not None:
        cmd += " 2>%s" % blast_log
    log.debug(cmd)
    proc = Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, shell=True)
    return proc.communicate()
def add_dash(params):
    """Prefix option names with '-' (single char) or '--' (longer names).

    Names that already start with '-' are left untouched.

    >>> add_dash({'p': 'F', 'save': 'T'})
    {'-p': 'F', '--save': 'T'}
    """
    dashed = {}
    for name, value in params.items():
        if name.startswith("-"):
            dashed[name] = value
        elif len(name) > 1:
            dashed["--" + name] = value
        else:
            dashed["-" + name] = value
    return dashed
def is_protein_db(blast_cfg):
    """True when the configured blast program searches a protein database.

    >>> is_protein_db({'p': 'blastx'})
    True
    """
    program = blast_cfg["p"]
    return program == "blastx" or program == "blastp"
def rm(f):
    """Delete *f* if it exists; silently do nothing otherwise."""
    if os.path.exists(f):
        os.unlink(f)
def blast(_blast_cfg, full_name=False, blast_log=None):
    """Run formatdb (when stale) and then blastall for the given config.

    The output name is derived from the fastas; a '.cmd' side file records
    the exact command so unchanged, up-to-date runs are skipped.

    >>> blast({'i': 'tests/a.fasta', 'd': 'tests/a.fasta'})
    """
    # Work on a copy so the caller's dict is not mutated.
    blast_cfg = _blast_cfg.copy()
    check_args(blast_cfg)
    q_fasta = blast_cfg["i"]
    s_fasta = blast_cfg["d"]
    blastall = blast_cfg["b"]
    blast_cfg.pop('b')
    # formatdb lives next to blastall; -p T for protein databases.
    format_db = blastall.replace("blastall", "formatdb")
    protein = "T" if is_protein_db(blast_cfg) else "F"
    cmd = "%(format_db)s -i %(s_fasta)s -p %(protein)s" % locals()
    ext = ".pin" if protein == "T" else ".nin"
    assert os.path.exists(s_fasta), "%s does not exist!" % s_fasta
    # Only (re)build the database index if it is older than the fasta.
    if not is_current_file(s_fasta + ext, s_fasta):
        try:
            sh(cmd)
        except KeyboardInterrupt:
            # Remove half-written index files before propagating.
            import glob
            for f in glob.glob(s_fasta + ".*"): rm(f)
            raise
    else:
        log.warn("NOT running cmd:\n%s\n because %s.nin is up to date" % (cmd, s_fasta))
    blast_file = ""
    to_query_dir = blast_cfg.get("o", "F").upper() != "F"
    if blast_cfg.get("o", "F").upper() not in ("T", "F"):
        # -o is a real path: if it ends with .blast use the file, otherwise
        # treat it as a directory and derive the name.
        blast_file = blast_cfg["o"]
        if blast_file.endswith(".blast"):
            log.error("using file past in on -o: %s" % blast_cfg["o"])
        else:
            blast_file = get_blast_file(q_fasta, s_fasta, blast_cfg["o"])
            log.error("using directory passed in on -o with exiting file: %s" %
                      blast_file)
    else:
        if full_name:
            # NOTE(review): this computed name appears to be overwritten by
            # the assignment below -- confirm intended order.  Also
            # rstrip(".blast") strips *characters* from the set
            # {'.','b','l','a','s','t'}, not the suffix.
            blast_file = blast_file.rstrip(".blast") \
                + "_params__" \
                + "__".join(["%s_%s" % p for p in sorted(blast_cfg.items())
                             if not p[0] in ("i", "d")]) \
                + ".blast"
        blast_file = get_blast_file(q_fasta, s_fasta, to_query_dir)
    if blast_log is None:
        blast_log = blast_file + ".log"
    blast_cfg.update({"o": blast_file})
    params = add_dash(blast_cfg)
    params = ["%s %s" % (p, v) for p, v in sorted(params.items())]
    cmd = blastall + " " + " ".join(params)
    # Skip the run when the output is newer than both fastas AND the
    # recorded command is identical.
    if not (is_current_file(blast_file, q_fasta) \
            and is_current_file(blast_file, s_fasta) \
            and is_same_blast_params(blast_file, cmd)):
        # Record the exact command for the up-to-date check next time.
        fh = open(blast_file + ".cmd", "w")
        fh.write(cmd)
        fh.close()
        try:
            sh(cmd, blast_log=blast_log)
        except:
            # Remove partial output so a later run is not mistaken as current.
            rm(blast_file)
            rm(blast_file + ".cmd")
            raise
        if os.path.exists(blast_file):
            lines = sum(1 for line in open(blast_file))
            log.debug("\n\n%s lines of blast output sent to %s" % (lines, blast_file))
        else:
            log.error("\n\nERROR: blast not run")
            if not blast_log is None:
                log.error(open(blast_log).read())
    else:
        log.error("NOT running cmd:\n%s\n because %s is up to date" % (cmd, blast_file))
def check_args(args):
    """Fill in defaults (-p blastn, -a 4) and require -i/-d, in place.

    >>> args = {'i': 'a.fasta', 'd': 'b.fasta'}
    >>> check_args(args)
    >>> args
    {'i': 'a.fasta', 'p': 'blastn', 'd': 'b.fasta', 'a': '4'}
    """
    args.setdefault("p", "blastn")
    assert "i" in args, "need to specify a query fasta"
    assert "d" in args, "need to specify a query fasta"
    args.setdefault("a", "4")
def handle_temps(args):
    """allow the query (-i) and subject (-d) to be specified as e.g.:
        a.fasta['chr1'][200:500]
    this will create a temporary file of just that chromosome and/or region
    and blast it
    >>> args = {'i': 'tests/a.fasta', 'd': 'tests/a.fasta'}
    >>> handle_temps(args)
    >>> args['i']
    'tests/a.fasta'
    >>> args['d']
    'tests/a.fasta'
    >>> args['i'] = 'tests/a.fasta["chr2"]'
    >>> try: handle_temps(args) #doctest: +ELLIPSIS
    ... except Exception, e:
    ...     assert 'no fasta with name' in str(e)
    >>> args['i'] = 'tests/a.fasta[chr1]'
    >>> handle_temps(args)
    >>> args['i']
    'tests/a.chr1.fasta'
    >>> args['d']
    'tests/a.fasta'
    >>> args['d'] = 'tests/a.fasta[chr1][20:25]'
    >>> handle_temps(args)
    >>> args['d']
    'tests/a.chr1_20_25.fasta'
    >>> open(args['d']).read() #doctest: +NORMALIZE_WHITESPACE
    '>chr1\\nGGGGGG\\n'
    >>> rm(args['d'])
    >>> rm(args['i'])
    """
    def _h(fname):
        # Pass plain paths straight through; only bracketed specs get the
        # sub-fasta treatment.
        if not "[" in fname: return fname
        start = None
        fname = fname.split("[")
        d = os.path.dirname(fname[0])
        if len(fname) == 3:
            # path[seqid][start:stop] form.
            fa, seqid, start_stop = [x.rstrip(']').strip("'\"") for x in fname]
            start, stop = [int(x) for x in start_stop.split(":")]
            out_name = os.path.basename(os.path.splitext(fa)[0]) + \
                (".%s_%s_%s" % (seqid, start, stop)) + ".fasta"
        else:
            # path[seqid] form: the whole named sequence.
            fa, seqid = [x.rstrip(']').strip("'\"") for x in fname]
            out_name = os.path.basename(os.path.splitext(fa)[0]) + \
                (".%s" % (seqid, )) + ".fasta"
        assert os.path.exists(fa), fa
        out_name = os.path.join(d, out_name)
        # Reuse a previously generated sub-file if it is still current.
        if os.path.exists(out_name) and is_current_file(out_name, fa):
            return out_name
        fh = open(fa, 'rb')
        log.debug('creating sub-file %s' % out_name)
        out = open(out_name, 'wb')
        header = None
        seq = ""
        try:
            for line in fh:
                if header is None:
                    # Scan until the requested sequence's header is found.
                    if line[1:].strip() != seqid: continue
                    header = line[1:].strip()
                    print >>out, '>%s' % header
                    continue
                elif line[0] == '>': break
                if start is None:
                    # Whole sequence requested: stream lines straight out.
                    print >>out, line,
                elif len(seq) <= (stop - start):
                    # just hold the entire thing in memory and
                    # snip it at the end.
                    seq += line.rstrip()
            if header is None:
                # Requested sequence never appeared: clean up and fail.
                out.close()
                try:
                    os.unlink(out_name)
                except: pass
                raise Exception("no fasta with name %s containing seq %s"
                                % (fa, seqid))
            if start is not None:
                # 1-based inclusive start, clamped at the sequence origin.
                print >>out, seq[max(0, start - 1):stop]
            fh.close()
            out.close()
            return out_name
        except:
            # Never leave a partial sub-file behind.
            if os.path.exists(out_name):
                os.unlink(out_name)
            raise
    args['i'] = _h(args['i'])
    args['d'] = _h(args['d'])
if __name__ == "__main__":
    # With no args (or -h/--help) show blastall's own usage plus this
    # wrapper's extra notes, then exit.
    if len(sys.argv) == 1 or sys.argv[1] in ("-h", "--help"):
        sh("/usr/bin/blastall")
        print """\
this script will generally do exactly the same as blastall
except it will attempt to create the name of the output blast
file from the input fasta files, and the blast parameters.
it will also run formatdb with the correct -p parameter based
on the type of blast requested.
it also saves a file: a_vs_b.blast.cmd that stores the exact
blast command used to generate a_vs_b.blast
in addition, if a.fasta an b.fasta are older than a_vs_b.blast
and the params have not changed, the blast will not run as
the current blast file is up to date.
additional args provided by this script are:
    -o T
or
    -o F
in the former case the blast output file will be created
from the names of the input fasta files and placed in the
directory of the query fasta
in the latter case, the blast file will go to the current
directory
    --full_name T
if specified, this will include the blast params in the name
of the output blast file. e.g.: a__vs_b__params__m_8__W_15.blast
for -m -8 -W 15
        """
        sys.exit()
    # Parse '-flag value' pairs into a dict; dashes are stripped from keys.
    args = dict((sys.argv[i].lstrip("-") , sys.argv[i + 1].rstrip()) \
            for i in range(1, len(sys.argv), 2))
    # --full_name is handled here, not passed through to blastall.
    try:
        f = args.pop("full_name")
        full_name = not f.lower() in ("f", "0")
    except:
        full_name = False
    if not "i" in args:
        print "need to specify a query fasta (-i)"
        sys.exit()
    if not "d" in args:
        print "need to specify a subject fasta (-d)"
        sys.exit()
    # Expand any fasta[seqid][start:stop] specs into temp sub-files.
    handle_temps(args)
    blast(args, full_name=full_name)
| StarcoderdataPython |
4926324 | <reponame>benhawk1/447-Project---Submit-System
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 9 21:33:09 2020
@author: benhawk1
"""
import pandas as pd
import hashlib
# Tool for hashing given passwords for a specified user in the userbase csv file
def encode(password, row, df,
           out_path="C:/Users/benha/Documents/CMSC 447 Project/Users.csv"):
    """Hash *password* with PBKDF2-HMAC-SHA256, store it in *df*, and save.

    The derived key is written into column 'Key' of row *row*, then the
    whole frame is written as CSV to *out_path*.  The path is now a
    parameter (defaulting to the original hard-coded location for
    backward compatibility) instead of being baked into the function.
    """
    # Fixed salt: must stay byte-identical so previously stored keys match.
    salt = b'1\xcc\xf09V\x1b\xed\xf5\x87\x13p\xe7/3ZA\x80\xdfN\t\xd1P\xa1\xf9\x95\xc7T\xfe\x19\xa0\xd4\x0b'
    key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, 100000, dklen=128)
    df.at[row, 'Key'] = key
    df.to_csv(out_path, mode='w', index=False)
| StarcoderdataPython |
6406888 | import os
import unittest
import requests_mock
from datahub_emails import api
class TestStatusPage(unittest.TestCase):
    """Tests for datahub_emails.api against a mocked statuspage.io API."""

    @requests_mock.mock()
    def test_on_incident_returns_none_if_comonent_not_exists(self, m):
        # No components registered -> helper should bail out with None.
        m.get('https://api.statuspage.io/v1/pages/test/components', json={})
        res = api.on_incident('Test Incident', 'testing')
        self.assertIsNone(res)

    @requests_mock.mock()
    def test_on_incident_works(self, m):
        # Component exists -> the incident POST's JSON is passed through.
        m.get('https://api.statuspage.io/v1/pages/test/components', json=[{'name': 'test', 'id': 'test'}])
        m.post('https://api.statuspage.io/v1/pages/test/incidents', json={'success': True})
        res = api.on_incident('Test Incident', 'test', 'errors')
        self.assertDictEqual(res, {'success': True})

    @requests_mock.mock()
    def test_subscribe_user_returns_none_if_comonent_not_exists(self, m):
        # Unknown component -> no subscription attempt, None returned.
        m.get('https://api.statuspage.io/v1/pages/test/components', json={})
        res = api.subscribe_user('test', '<EMAIL>')
        self.assertIsNone(res)

    @requests_mock.mock()
    def test_subscribe_user_works(self, m):
        # Component exists -> the subscriber POST's JSON is passed through.
        m.get('https://api.statuspage.io/v1/pages/test/components', json=[{'name': 'test', 'id': 'test'}])
        m.post('https://api.statuspage.io/v1/pages/test/subscribers', json={'success': True})
        res = api.subscribe_user('test', '<EMAIL>')
        self.assertDictEqual(res, {'success': True})
| StarcoderdataPython |
12837252 | <reponame>CiscoDevNet/ydk-py<filename>ietf/ydk/models/ietf/iana_if_type.py<gh_stars>100-1000
""" iana_if_type
This YANG module defines YANG identities for IANA\-registered
interface types.
This YANG module is maintained by IANA and reflects the
'ifType definitions' registry.
The latest revision of this YANG module can be obtained from
the IANA web site.
Requests for new values should be made to IANA via
email (<EMAIL>).
Copyright (c) 2014 IETF Trust and the persons identified as
authors of the code. All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, is permitted pursuant to, and subject
to the license terms contained in, the Simplified BSD License
set forth in Section 4.c of the IETF Trust's Legal Provisions
Relating to IETF Documents
(http\://trustee.ietf.org/license\-info).
The initial version of this YANG module is part of RFC 7224;
see the RFC itself for full legal notices.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
from ydk.models.ietf.ietf_interfaces import InterfaceType
class IanaInterfaceType(InterfaceType):
    """
    This identity is used as a base for all interface types
    defined in the 'ifType definitions' registry.
    """
    # YANG module prefix and revision date of iana-if-type
    # (machine-generated ydk-py binding; regenerate rather than hand-edit).
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iana-interface-type"):
        super(IanaInterfaceType, self).__init__(ns, pref, tag)
# NOTE: the classes below are machine-generated YANG identity bindings —
# one class per IANA ifType value, all stamped from the same template.
# Do not hand-edit; regenerate from the iana-if-type YANG module.
class Other(IanaInterfaceType):
    """
    Identity for the IANA ifType 'other'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:other"):
        super(Other, self).__init__(ns, pref, tag)
class Regular1822(IanaInterfaceType):
    """
    Identity for the IANA ifType 'regular1822'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:regular1822"):
        super(Regular1822, self).__init__(ns, pref, tag)
class Hdh1822(IanaInterfaceType):
    """
    Identity for the IANA ifType 'hdh1822'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hdh1822"):
        super(Hdh1822, self).__init__(ns, pref, tag)
class DdnX25(IanaInterfaceType):
    """
    Identity for the IANA ifType 'ddnX25'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ddnX25"):
        super(DdnX25, self).__init__(ns, pref, tag)
class Rfc877x25(IanaInterfaceType):
    """
    Identity for the IANA ifType 'rfc877x25'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:rfc877x25"):
        super(Rfc877x25, self).__init__(ns, pref, tag)
class EthernetCsmacd(IanaInterfaceType):
    """
    For all Ethernet\-like interfaces, regardless of speed,
    as per RFC 3635.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ethernetCsmacd"):
        super(EthernetCsmacd, self).__init__(ns, pref, tag)
class Iso88023Csmacd(IanaInterfaceType):
    """
    Deprecated via RFC 3635.
    Use ethernetCsmacd(6) instead.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88023Csmacd"):
        super(Iso88023Csmacd, self).__init__(ns, pref, tag)
class Iso88024TokenBus(IanaInterfaceType):
    """
    Identity for the IANA ifType 'iso88024TokenBus'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88024TokenBus"):
        super(Iso88024TokenBus, self).__init__(ns, pref, tag)
class Iso88025TokenRing(IanaInterfaceType):
    """
    Identity for the IANA ifType 'iso88025TokenRing'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88025TokenRing"):
        super(Iso88025TokenRing, self).__init__(ns, pref, tag)
class Iso88026Man(IanaInterfaceType):
    """
    Identity for the IANA ifType 'iso88026Man'.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88026Man"):
        super(Iso88026Man, self).__init__(ns, pref, tag)
class StarLan(IanaInterfaceType):
    """
    Deprecated via RFC 3635.
    Use ethernetCsmacd(6) instead.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:starLan"):
        super(StarLan, self).__init__(ns, pref, tag)
class Proteon10Mbit(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:proteon10Mbit"):
super(Proteon10Mbit, self).__init__(ns, pref, tag)
class Proteon80Mbit(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:proteon80Mbit"):
super(Proteon80Mbit, self).__init__(ns, pref, tag)
class Hyperchannel(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hyperchannel"):
super(Hyperchannel, self).__init__(ns, pref, tag)
class Fddi(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fddi"):
super(Fddi, self).__init__(ns, pref, tag)
class Lapb(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:lapb"):
super(Lapb, self).__init__(ns, pref, tag)
class Sdlc(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sdlc"):
super(Sdlc, self).__init__(ns, pref, tag)
class Ds1(IanaInterfaceType):
"""
DS1\-MIB.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ds1"):
super(Ds1, self).__init__(ns, pref, tag)
class E1(IanaInterfaceType):
"""
Obsolete; see DS1\-MIB.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:e1"):
super(E1, self).__init__(ns, pref, tag)
class BasicISDN(IanaInterfaceType):
"""
No longer used. See also RFC 2127.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:basicISDN"):
super(BasicISDN, self).__init__(ns, pref, tag)
class PrimaryISDN(IanaInterfaceType):
"""
No longer used. See also RFC 2127.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:primaryISDN"):
super(PrimaryISDN, self).__init__(ns, pref, tag)
class PropPointToPointSerial(IanaInterfaceType):
"""
Proprietary serial.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propPointToPointSerial"):
super(PropPointToPointSerial, self).__init__(ns, pref, tag)
class Ppp(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ppp"):
super(Ppp, self).__init__(ns, pref, tag)
class SoftwareLoopback(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:softwareLoopback"):
super(SoftwareLoopback, self).__init__(ns, pref, tag)
class Eon(IanaInterfaceType):
"""
CLNP over IP.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:eon"):
super(Eon, self).__init__(ns, pref, tag)
class Ethernet3Mbit(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ethernet3Mbit"):
super(Ethernet3Mbit, self).__init__(ns, pref, tag)
class Nsip(IanaInterfaceType):
"""
XNS over IP.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:nsip"):
super(Nsip, self).__init__(ns, pref, tag)
class Slip(IanaInterfaceType):
"""
Generic SLIP.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:slip"):
super(Slip, self).__init__(ns, pref, tag)
class Ultra(IanaInterfaceType):
"""
Ultra Technologies.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ultra"):
super(Ultra, self).__init__(ns, pref, tag)
class Ds3(IanaInterfaceType):
"""
DS3\-MIB.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ds3"):
super(Ds3, self).__init__(ns, pref, tag)
class Sip(IanaInterfaceType):
"""
SMDS, coffee.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sip"):
super(Sip, self).__init__(ns, pref, tag)
class FrameRelay(IanaInterfaceType):
"""
DTE only.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frameRelay"):
super(FrameRelay, self).__init__(ns, pref, tag)
class Rs232(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:rs232"):
super(Rs232, self).__init__(ns, pref, tag)
class Para(IanaInterfaceType):
"""
Parallel\-port.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:para"):
super(Para, self).__init__(ns, pref, tag)
class Arcnet(IanaInterfaceType):
"""
ARCnet.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:arcnet"):
super(Arcnet, self).__init__(ns, pref, tag)
class ArcnetPlus(IanaInterfaceType):
"""
ARCnet Plus.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:arcnetPlus"):
super(ArcnetPlus, self).__init__(ns, pref, tag)
class Atm(IanaInterfaceType):
"""
ATM cells.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atm"):
super(Atm, self).__init__(ns, pref, tag)
class Miox25(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:miox25"):
super(Miox25, self).__init__(ns, pref, tag)
class Sonet(IanaInterfaceType):
"""
SONET or SDH.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sonet"):
super(Sonet, self).__init__(ns, pref, tag)
class X25ple(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:x25ple"):
super(X25ple, self).__init__(ns, pref, tag)
class Iso88022llc(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88022llc"):
super(Iso88022llc, self).__init__(ns, pref, tag)
class LocalTalk(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:localTalk"):
super(LocalTalk, self).__init__(ns, pref, tag)
class SmdsDxi(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:smdsDxi"):
super(SmdsDxi, self).__init__(ns, pref, tag)
class FrameRelayService(IanaInterfaceType):
"""
FRNETSERV\-MIB.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frameRelayService"):
super(FrameRelayService, self).__init__(ns, pref, tag)
class V35(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:v35"):
super(V35, self).__init__(ns, pref, tag)
class Hssi(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hssi"):
super(Hssi, self).__init__(ns, pref, tag)
class Hippi(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hippi"):
super(Hippi, self).__init__(ns, pref, tag)
class Modem(IanaInterfaceType):
"""
Generic modem.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:modem"):
super(Modem, self).__init__(ns, pref, tag)
class Aal5(IanaInterfaceType):
"""
AAL5 over ATM.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aal5"):
super(Aal5, self).__init__(ns, pref, tag)
class SonetPath(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sonetPath"):
super(SonetPath, self).__init__(ns, pref, tag)
class SonetVT(IanaInterfaceType):
"""
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sonetVT"):
super(SonetVT, self).__init__(ns, pref, tag)
class SmdsIcip(IanaInterfaceType):
"""
SMDS InterCarrier Interface.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:smdsIcip"):
super(SmdsIcip, self).__init__(ns, pref, tag)
class PropVirtual(IanaInterfaceType):
"""
Proprietary virtual/internal.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propVirtual"):
super(PropVirtual, self).__init__(ns, pref, tag)
class PropMultiplexor(IanaInterfaceType):
"""
Proprietary multiplexing.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propMultiplexor"):
super(PropMultiplexor, self).__init__(ns, pref, tag)
class Ieee80212(IanaInterfaceType):
"""
100BaseVG.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee80212"):
super(Ieee80212, self).__init__(ns, pref, tag)
class FibreChannel(IanaInterfaceType):
"""
Fibre Channel.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fibreChannel"):
super(FibreChannel, self).__init__(ns, pref, tag)
class HippiInterface(IanaInterfaceType):
"""
HIPPI interfaces.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hippiInterface"):
super(HippiInterface, self).__init__(ns, pref, tag)
class FrameRelayInterconnect(IanaInterfaceType):
"""
Obsolete; use either
frameRelay(32) or frameRelayService(44).
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frameRelayInterconnect"):
super(FrameRelayInterconnect, self).__init__(ns, pref, tag)
class Aflane8023(IanaInterfaceType):
"""
ATM Emulated LAN for 802.3.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aflane8023"):
super(Aflane8023, self).__init__(ns, pref, tag)
class Aflane8025(IanaInterfaceType):
"""
ATM Emulated LAN for 802.5.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aflane8025"):
super(Aflane8025, self).__init__(ns, pref, tag)
class CctEmul(IanaInterfaceType):
"""
ATM Emulated circuit.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:cctEmul"):
super(CctEmul, self).__init__(ns, pref, tag)
class FastEther(IanaInterfaceType):
"""
Obsoleted via RFC 3635.
ethernetCsmacd(6) should be used instead.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fastEther"):
super(FastEther, self).__init__(ns, pref, tag)
class Isdn(IanaInterfaceType):
"""
ISDN and X.25.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:isdn"):
super(Isdn, self).__init__(ns, pref, tag)
class V11(IanaInterfaceType):
"""
CCITT V.11/X.21.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:v11"):
super(V11, self).__init__(ns, pref, tag)
class V36(IanaInterfaceType):
"""
CCITT V.36.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:v36"):
super(V36, self).__init__(ns, pref, tag)
class G703at64k(IanaInterfaceType):
"""
CCITT G703 at 64Kbps.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:g703at64k"):
super(G703at64k, self).__init__(ns, pref, tag)
class G703at2mb(IanaInterfaceType):
"""
Obsolete; see DS1\-MIB.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:g703at2mb"):
super(G703at2mb, self).__init__(ns, pref, tag)
class Qllc(IanaInterfaceType):
"""
SNA QLLC.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:qllc"):
super(Qllc, self).__init__(ns, pref, tag)
class FastEtherFX(IanaInterfaceType):
"""
Obsoleted via RFC 3635.
ethernetCsmacd(6) should be used instead.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fastEtherFX"):
super(FastEtherFX, self).__init__(ns, pref, tag)
class Channel(IanaInterfaceType):
"""
Channel.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:channel"):
super(Channel, self).__init__(ns, pref, tag)
class Ieee80211(IanaInterfaceType):
"""
Radio spread spectrum.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee80211"):
super(Ieee80211, self).__init__(ns, pref, tag)
class Ibm370parChan(IanaInterfaceType):
"""
IBM System 360/370 OEMI Channel.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ibm370parChan"):
super(Ibm370parChan, self).__init__(ns, pref, tag)
class Escon(IanaInterfaceType):
"""
IBM Enterprise Systems Connection.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:escon"):
super(Escon, self).__init__(ns, pref, tag)
class Dlsw(IanaInterfaceType):
"""
Data Link Switching.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dlsw"):
super(Dlsw, self).__init__(ns, pref, tag)
class Isdns(IanaInterfaceType):
"""
ISDN S/T interface.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:isdns"):
super(Isdns, self).__init__(ns, pref, tag)
class Isdnu(IanaInterfaceType):
"""
ISDN U interface.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:isdnu"):
super(Isdnu, self).__init__(ns, pref, tag)
class Lapd(IanaInterfaceType):
"""
Link Access Protocol D.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:lapd"):
super(Lapd, self).__init__(ns, pref, tag)
class IpSwitch(IanaInterfaceType):
"""
IP Switching Objects.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ipSwitch"):
super(IpSwitch, self).__init__(ns, pref, tag)
class Rsrb(IanaInterfaceType):
"""
Remote Source Route Bridging.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:rsrb"):
super(Rsrb, self).__init__(ns, pref, tag)
class AtmLogical(IanaInterfaceType):
"""
ATM Logical Port.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmLogical"):
super(AtmLogical, self).__init__(ns, pref, tag)
class Ds0(IanaInterfaceType):
"""
Digital Signal Level 0.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ds0"):
super(Ds0, self).__init__(ns, pref, tag)
class Ds0Bundle(IanaInterfaceType):
"""
Group of ds0s on the same ds1.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ds0Bundle"):
super(Ds0Bundle, self).__init__(ns, pref, tag)
class Bsc(IanaInterfaceType):
"""
Bisynchronous Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:bsc"):
super(Bsc, self).__init__(ns, pref, tag)
class Async(IanaInterfaceType):
"""
Asynchronous Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:async"):
super(Async, self).__init__(ns, pref, tag)
class Cnr(IanaInterfaceType):
"""
Combat Net Radio.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:cnr"):
super(Cnr, self).__init__(ns, pref, tag)
class Iso88025Dtr(IanaInterfaceType):
"""
ISO 802.5r DTR.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88025Dtr"):
super(Iso88025Dtr, self).__init__(ns, pref, tag)
class Eplrs(IanaInterfaceType):
"""
Ext Pos Loc Report Sys.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:eplrs"):
super(Eplrs, self).__init__(ns, pref, tag)
class Arap(IanaInterfaceType):
"""
Appletalk Remote Access Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:arap"):
super(Arap, self).__init__(ns, pref, tag)
class PropCnls(IanaInterfaceType):
"""
Proprietary Connectionless Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propCnls"):
super(PropCnls, self).__init__(ns, pref, tag)
class HostPad(IanaInterfaceType):
"""
CCITT\-ITU X.29 PAD Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hostPad"):
super(HostPad, self).__init__(ns, pref, tag)
class TermPad(IanaInterfaceType):
"""
CCITT\-ITU X.3 PAD Facility.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:termPad"):
super(TermPad, self).__init__(ns, pref, tag)
class FrameRelayMPI(IanaInterfaceType):
"""
Multiproto Interconnect over FR.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frameRelayMPI"):
super(FrameRelayMPI, self).__init__(ns, pref, tag)
class X213(IanaInterfaceType):
"""
CCITT\-ITU X213.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:x213"):
super(X213, self).__init__(ns, pref, tag)
class Adsl(IanaInterfaceType):
"""
Asymmetric Digital Subscriber Loop.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:adsl"):
super(Adsl, self).__init__(ns, pref, tag)
class Radsl(IanaInterfaceType):
"""
Rate\-Adapt. Digital Subscriber Loop.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:radsl"):
super(Radsl, self).__init__(ns, pref, tag)
class Sdsl(IanaInterfaceType):
"""
Symmetric Digital Subscriber Loop.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sdsl"):
super(Sdsl, self).__init__(ns, pref, tag)
class Vdsl(IanaInterfaceType):
"""
Very H\-Speed Digital Subscrib. Loop.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:vdsl"):
super(Vdsl, self).__init__(ns, pref, tag)
class Iso88025CRFPInt(IanaInterfaceType):
"""
ISO 802.5 CRFP.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88025CRFPInt"):
super(Iso88025CRFPInt, self).__init__(ns, pref, tag)
class Myrinet(IanaInterfaceType):
"""
Myricom Myrinet.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:myrinet"):
super(Myrinet, self).__init__(ns, pref, tag)
class VoiceEM(IanaInterfaceType):
"""
Voice recEive and transMit.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceEM"):
super(VoiceEM, self).__init__(ns, pref, tag)
class VoiceFXO(IanaInterfaceType):
"""
Voice Foreign Exchange Office.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceFXO"):
super(VoiceFXO, self).__init__(ns, pref, tag)
class VoiceFXS(IanaInterfaceType):
"""
Voice Foreign Exchange Station.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceFXS"):
super(VoiceFXS, self).__init__(ns, pref, tag)
class VoiceEncap(IanaInterfaceType):
"""
Voice encapsulation.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceEncap"):
super(VoiceEncap, self).__init__(ns, pref, tag)
class VoiceOverIp(IanaInterfaceType):
"""
Voice over IP encapsulation.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceOverIp"):
super(VoiceOverIp, self).__init__(ns, pref, tag)
# The classes below are uniform identity bindings for the IANA
# "iana-if-type" YANG module (revision 2014-05-08): each subclass fixes
# the XML namespace, prefix, and qualified tag defaults for one
# interface type; the docstring carries the IANA description verbatim.
# NOTE(review): layout appears machine-generated — keep edits minimal.
class AtmDxi(IanaInterfaceType):
    """
    ATM DXI.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmDxi"):
        super(AtmDxi, self).__init__(ns, pref, tag)
class AtmFuni(IanaInterfaceType):
    """
    ATM FUNI.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmFuni"):
        super(AtmFuni, self).__init__(ns, pref, tag)
class AtmIma(IanaInterfaceType):
    """
    ATM IMA.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmIma"):
        super(AtmIma, self).__init__(ns, pref, tag)
class PppMultilinkBundle(IanaInterfaceType):
    """
    PPP Multilink Bundle.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pppMultilinkBundle"):
        super(PppMultilinkBundle, self).__init__(ns, pref, tag)
class IpOverCdlc(IanaInterfaceType):
    """
    IBM ipOverCdlc.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ipOverCdlc"):
        super(IpOverCdlc, self).__init__(ns, pref, tag)
class IpOverClaw(IanaInterfaceType):
    """
    IBM Common Link Access to Workstn.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ipOverClaw"):
        super(IpOverClaw, self).__init__(ns, pref, tag)
class StackToStack(IanaInterfaceType):
    """
    IBM stackToStack.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:stackToStack"):
        super(StackToStack, self).__init__(ns, pref, tag)
class VirtualIpAddress(IanaInterfaceType):
    """
    IBM VIPA.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:virtualIpAddress"):
        super(VirtualIpAddress, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "mpc" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class Mpc(IanaInterfaceType):
    """
    IBM multi-protocol channel support.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mpc"):
        super(Mpc, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type. Docstrings are the IANA descriptions.
class IpOverAtm(IanaInterfaceType):
    """
    IBM ipOverAtm.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ipOverAtm"):
        super(IpOverAtm, self).__init__(ns, pref, tag)
class Iso88025Fiber(IanaInterfaceType):
    """
    ISO 802.5j Fiber Token Ring.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:iso88025Fiber"):
        super(Iso88025Fiber, self).__init__(ns, pref, tag)
class Tdlc(IanaInterfaceType):
    """
    IBM twinaxial data link control.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:tdlc"):
        super(Tdlc, self).__init__(ns, pref, tag)
class GigabitEthernet(IanaInterfaceType):
    """
    Obsoleted via RFC 3635.
    ethernetCsmacd(6) should be used instead.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gigabitEthernet"):
        super(GigabitEthernet, self).__init__(ns, pref, tag)
class Hdlc(IanaInterfaceType):
    """
    HDLC.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hdlc"):
        super(Hdlc, self).__init__(ns, pref, tag)
class Lapf(IanaInterfaceType):
    """
    LAP F.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:lapf"):
        super(Lapf, self).__init__(ns, pref, tag)
class V37(IanaInterfaceType):
    """
    V.37.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:v37"):
        super(V37, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "x25mlp" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class X25mlp(IanaInterfaceType):
    """
    Multi-Link Protocol.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:x25mlp"):
        super(X25mlp, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type. Docstrings are the IANA descriptions.
class X25huntGroup(IanaInterfaceType):
    """
    X25 Hunt Group.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:x25huntGroup"):
        super(X25huntGroup, self).__init__(ns, pref, tag)
class TranspHdlc(IanaInterfaceType):
    """
    Transp HDLC.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:transpHdlc"):
        super(TranspHdlc, self).__init__(ns, pref, tag)
class Interleave(IanaInterfaceType):
    """
    Interleave channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:interleave"):
        super(Interleave, self).__init__(ns, pref, tag)
class Fast(IanaInterfaceType):
    """
    Fast channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fast"):
        super(Fast, self).__init__(ns, pref, tag)
class Ip(IanaInterfaceType):
    """
    IP (for APPN HPR in IP networks).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ip"):
        super(Ip, self).__init__(ns, pref, tag)
class DocsCableMaclayer(IanaInterfaceType):
    """
    CATV Mac Layer.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableMaclayer"):
        super(DocsCableMaclayer, self).__init__(ns, pref, tag)
class DocsCableDownstream(IanaInterfaceType):
    """
    CATV Downstream interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableDownstream"):
        super(DocsCableDownstream, self).__init__(ns, pref, tag)
class DocsCableUpstream(IanaInterfaceType):
    """
    CATV Upstream interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableUpstream"):
        super(DocsCableUpstream, self).__init__(ns, pref, tag)
class A12MppSwitch(IanaInterfaceType):
    """
    Avalon Parallel Processor.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:a12MppSwitch"):
        super(A12MppSwitch, self).__init__(ns, pref, tag)
class Tunnel(IanaInterfaceType):
    """
    Encapsulation interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:tunnel"):
        super(Tunnel, self).__init__(ns, pref, tag)
class Coffee(IanaInterfaceType):
    """
    Coffee pot.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:coffee"):
        super(Coffee, self).__init__(ns, pref, tag)
class Ces(IanaInterfaceType):
    """
    Circuit Emulation Service.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ces"):
        super(Ces, self).__init__(ns, pref, tag)
class AtmSubInterface(IanaInterfaceType):
    """
    ATM Sub Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmSubInterface"):
        super(AtmSubInterface, self).__init__(ns, pref, tag)
class L2vlan(IanaInterfaceType):
    """
    Layer 2 Virtual LAN using 802.1Q.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:l2vlan"):
        super(L2vlan, self).__init__(ns, pref, tag)
class L3ipvlan(IanaInterfaceType):
    """
    Layer 3 Virtual LAN using IP.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:l3ipvlan"):
        super(L3ipvlan, self).__init__(ns, pref, tag)
class L3ipxvlan(IanaInterfaceType):
    """
    Layer 3 Virtual LAN using IPX.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:l3ipxvlan"):
        super(L3ipxvlan, self).__init__(ns, pref, tag)
class DigitalPowerline(IanaInterfaceType):
    """
    IP over Power Lines.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:digitalPowerline"):
        super(DigitalPowerline, self).__init__(ns, pref, tag)
class MediaMailOverIp(IanaInterfaceType):
    """
    Multimedia Mail over IP.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mediaMailOverIp"):
        super(MediaMailOverIp, self).__init__(ns, pref, tag)
class Dtm(IanaInterfaceType):
    """
    Dynamic synchronous Transfer Mode.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dtm"):
        super(Dtm, self).__init__(ns, pref, tag)
class Dcn(IanaInterfaceType):
    """
    Data Communications Network.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dcn"):
        super(Dcn, self).__init__(ns, pref, tag)
class IpForward(IanaInterfaceType):
    """
    IP Forwarding Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ipForward"):
        super(IpForward, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "msdsl" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class Msdsl(IanaInterfaceType):
    """
    Multi-rate Symmetric DSL.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:msdsl"):
        super(Msdsl, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "ieee1394" (iana-if-type, rev. 2014-05-08);
# fixes the namespace/prefix/tag defaults for this interface type.
class Ieee1394(IanaInterfaceType):
    """
    IEEE1394 High Performance Serial Bus.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee1394"):
        super(Ieee1394, self).__init__(ns, pref, tag)
# Identity bindings for IANA ifTypes "if-gsn" and the DVB-RCC family
# (iana-if-type, rev. 2014-05-08). Fix: removed the stray backslashes
# before '-' in the docstrings — "\-" is an invalid string escape
# (SyntaxWarning on modern Python).
class IfGsn(IanaInterfaceType):
    """
    HIPPI-6400.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:if-gsn"):
        super(IfGsn, self).__init__(ns, pref, tag)
class DvbRccMacLayer(IanaInterfaceType):
    """
    DVB-RCC MAC Layer.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbRccMacLayer"):
        super(DvbRccMacLayer, self).__init__(ns, pref, tag)
class DvbRccDownstream(IanaInterfaceType):
    """
    DVB-RCC Downstream Channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbRccDownstream"):
        super(DvbRccDownstream, self).__init__(ns, pref, tag)
class DvbRccUpstream(IanaInterfaceType):
    """
    DVB-RCC Upstream Channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbRccUpstream"):
        super(DvbRccUpstream, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type. Docstrings are the IANA descriptions.
class AtmVirtual(IanaInterfaceType):
    """
    ATM Virtual Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmVirtual"):
        super(AtmVirtual, self).__init__(ns, pref, tag)
class MplsTunnel(IanaInterfaceType):
    """
    MPLS Tunnel Virtual Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mplsTunnel"):
        super(MplsTunnel, self).__init__(ns, pref, tag)
class Srp(IanaInterfaceType):
    """
    Spatial Reuse Protocol.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:srp"):
        super(Srp, self).__init__(ns, pref, tag)
class VoiceOverAtm(IanaInterfaceType):
    """
    Voice over ATM.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceOverAtm"):
        super(VoiceOverAtm, self).__init__(ns, pref, tag)
class VoiceOverFrameRelay(IanaInterfaceType):
    """
    Voice Over Frame Relay.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceOverFrameRelay"):
        super(VoiceOverFrameRelay, self).__init__(ns, pref, tag)
class Idsl(IanaInterfaceType):
    """
    Digital Subscriber Loop over ISDN.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:idsl"):
        super(Idsl, self).__init__(ns, pref, tag)
class CompositeLink(IanaInterfaceType):
    """
    Avici Composite Link Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:compositeLink"):
        super(CompositeLink, self).__init__(ns, pref, tag)
class Ss7SigLink(IanaInterfaceType):
    """
    SS7 Signaling Link.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ss7SigLink"):
        super(Ss7SigLink, self).__init__(ns, pref, tag)
class PropWirelessP2P(IanaInterfaceType):
    """
    Prop. P2P wireless interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propWirelessP2P"):
        super(PropWirelessP2P, self).__init__(ns, pref, tag)
class FrForward(IanaInterfaceType):
    """
    Frame Forward Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frForward"):
        super(FrForward, self).__init__(ns, pref, tag)
class Rfc1483(IanaInterfaceType):
    """
    Multiprotocol over ATM AAL5.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:rfc1483"):
        super(Rfc1483, self).__init__(ns, pref, tag)
class Usb(IanaInterfaceType):
    """
    USB Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:usb"):
        super(Usb, self).__init__(ns, pref, tag)
class Ieee8023adLag(IanaInterfaceType):
    """
    IEEE 802.3ad Link Aggregate.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee8023adLag"):
        super(Ieee8023adLag, self).__init__(ns, pref, tag)
class Bgppolicyaccounting(IanaInterfaceType):
    """
    BGP Policy Accounting.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:bgppolicyaccounting"):
        super(Bgppolicyaccounting, self).__init__(ns, pref, tag)
class Frf16MfrBundle(IanaInterfaceType):
    """
    FRF.16 Multilink Frame Relay.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frf16MfrBundle"):
        super(Frf16MfrBundle, self).__init__(ns, pref, tag)
class H323Gatekeeper(IanaInterfaceType):
    """
    H323 Gatekeeper.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:h323Gatekeeper"):
        super(H323Gatekeeper, self).__init__(ns, pref, tag)
class H323Proxy(IanaInterfaceType):
    """
    H323 Voice and Video Proxy.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:h323Proxy"):
        super(H323Proxy, self).__init__(ns, pref, tag)
class Mpls(IanaInterfaceType):
    """
    MPLS.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mpls"):
        super(Mpls, self).__init__(ns, pref, tag)
# Identity bindings for IANA ifTypes "mfSigLink" and "hdsl2"
# (iana-if-type, rev. 2014-05-08). Fix: removed the stray backslashes
# before '-' in the docstrings — "\-" is an invalid string escape
# (SyntaxWarning on modern Python).
class MfSigLink(IanaInterfaceType):
    """
    Multi-frequency signaling link.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mfSigLink"):
        super(MfSigLink, self).__init__(ns, pref, tag)
class Hdsl2(IanaInterfaceType):
    """
    High Bit-Rate DSL - 2nd generation.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hdsl2"):
        super(Hdsl2, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type.
class Shdsl(IanaInterfaceType):
    """
    Multirate HDSL2.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:shdsl"):
        super(Shdsl, self).__init__(ns, pref, tag)
class Ds1FDL(IanaInterfaceType):
    """
    Facility Data Link (4Kbps) on a DS1.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ds1FDL"):
        super(Ds1FDL, self).__init__(ns, pref, tag)
class Pos(IanaInterfaceType):
    """
    Packet over SONET/SDH Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pos"):
        super(Pos, self).__init__(ns, pref, tag)
# Identity bindings for IANA ifTypes "dvbAsiIn" and "dvbAsiOut"
# (iana-if-type, rev. 2014-05-08). Fix: removed the stray backslashes
# before '-' in the docstrings — "\-" is an invalid string escape
# (SyntaxWarning on modern Python).
class DvbAsiIn(IanaInterfaceType):
    """
    DVB-ASI Input.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbAsiIn"):
        super(DvbAsiIn, self).__init__(ns, pref, tag)
class DvbAsiOut(IanaInterfaceType):
    """
    DVB-ASI Output.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbAsiOut"):
        super(DvbAsiOut, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "plc" (iana-if-type, rev. 2014-05-08);
# fixes the namespace/prefix/tag defaults for this interface type.
class Plc(IanaInterfaceType):
    """
    Power Line Communications.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:plc"):
        super(Plc, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "nfas" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class Nfas(IanaInterfaceType):
    """
    Non-Facility Associated Signaling.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:nfas"):
        super(Nfas, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type. Docstrings are the IANA descriptions.
class Tr008(IanaInterfaceType):
    """
    TR008.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:tr008"):
        super(Tr008, self).__init__(ns, pref, tag)
class Gr303RDT(IanaInterfaceType):
    """
    Remote Digital Terminal.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gr303RDT"):
        super(Gr303RDT, self).__init__(ns, pref, tag)
class Gr303IDT(IanaInterfaceType):
    """
    Integrated Digital Terminal.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gr303IDT"):
        super(Gr303IDT, self).__init__(ns, pref, tag)
class Isup(IanaInterfaceType):
    """
    ISUP.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:isup"):
        super(Isup, self).__init__(ns, pref, tag)
class PropDocsWirelessMaclayer(IanaInterfaceType):
    """
    Cisco proprietary Maclayer.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propDocsWirelessMaclayer"):
        super(PropDocsWirelessMaclayer, self).__init__(ns, pref, tag)
class PropDocsWirelessDownstream(IanaInterfaceType):
    """
    Cisco proprietary Downstream.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propDocsWirelessDownstream"):
        super(PropDocsWirelessDownstream, self).__init__(ns, pref, tag)
class PropDocsWirelessUpstream(IanaInterfaceType):
    """
    Cisco proprietary Upstream.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propDocsWirelessUpstream"):
        super(PropDocsWirelessUpstream, self).__init__(ns, pref, tag)
class Hiperlan2(IanaInterfaceType):
    """
    HIPERLAN Type 2 Radio Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:hiperlan2"):
        super(Hiperlan2, self).__init__(ns, pref, tag)
class PropBWAp2Mp(IanaInterfaceType):
    """
    PropBroadbandWirelessAccesspt2Multipt (use of this value
    for IEEE 802.16 WMAN interfaces as per IEEE Std 802.16f
    is deprecated, and ieee80216WMAN(237) should be used
    instead).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propBWAp2Mp"):
        super(PropBWAp2Mp, self).__init__(ns, pref, tag)
class SonetOverheadChannel(IanaInterfaceType):
    """
    SONET Overhead Channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sonetOverheadChannel"):
        super(SonetOverheadChannel, self).__init__(ns, pref, tag)
class DigitalWrapperOverheadChannel(IanaInterfaceType):
    """
    Digital Wrapper.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:digitalWrapperOverheadChannel"):
        super(DigitalWrapperOverheadChannel, self).__init__(ns, pref, tag)
class Aal2(IanaInterfaceType):
    """
    ATM adaptation layer 2.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aal2"):
        super(Aal2, self).__init__(ns, pref, tag)
class RadioMAC(IanaInterfaceType):
    """
    MAC layer over radio links.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:radioMAC"):
        super(RadioMAC, self).__init__(ns, pref, tag)
class AtmRadio(IanaInterfaceType):
    """
    ATM over radio links.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmRadio"):
        super(AtmRadio, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "imt" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class Imt(IanaInterfaceType):
    """
    Inter-Machine Trunks.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:imt"):
        super(Imt, self).__init__(ns, pref, tag)
# Uniform identity bindings for the IANA "iana-if-type" YANG module
# (revision 2014-05-08); each class fixes the namespace/prefix/tag
# defaults for one interface type. Docstrings are the IANA descriptions.
class Mvl(IanaInterfaceType):
    """
    Multiple Virtual Lines DSL.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mvl"):
        super(Mvl, self).__init__(ns, pref, tag)
class ReachDSL(IanaInterfaceType):
    """
    Long Reach DSL.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:reachDSL"):
        super(ReachDSL, self).__init__(ns, pref, tag)
class FrDlciEndPt(IanaInterfaceType):
    """
    Frame Relay DLCI End Point.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:frDlciEndPt"):
        super(FrDlciEndPt, self).__init__(ns, pref, tag)
class AtmVciEndPt(IanaInterfaceType):
    """
    ATM VCI End Point.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmVciEndPt"):
        super(AtmVciEndPt, self).__init__(ns, pref, tag)
class OpticalChannel(IanaInterfaceType):
    """
    Optical Channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:opticalChannel"):
        super(OpticalChannel, self).__init__(ns, pref, tag)
class OpticalTransport(IanaInterfaceType):
    """
    Optical Transport.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:opticalTransport"):
        super(OpticalTransport, self).__init__(ns, pref, tag)
class PropAtm(IanaInterfaceType):
    """
    Proprietary ATM.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:propAtm"):
        super(PropAtm, self).__init__(ns, pref, tag)
class VoiceOverCable(IanaInterfaceType):
    """
    Voice Over Cable Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceOverCable"):
        super(VoiceOverCable, self).__init__(ns, pref, tag)
class Infiniband(IanaInterfaceType):
    """
    Infiniband.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:infiniband"):
        super(Infiniband, self).__init__(ns, pref, tag)
class TeLink(IanaInterfaceType):
    """
    TE Link.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:teLink"):
        super(TeLink, self).__init__(ns, pref, tag)
class Q2931(IanaInterfaceType):
    """
    Q.2931.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:q2931"):
        super(Q2931, self).__init__(ns, pref, tag)
class VirtualTg(IanaInterfaceType):
    """
    Virtual Trunk Group.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:virtualTg"):
        super(VirtualTg, self).__init__(ns, pref, tag)
class SipTg(IanaInterfaceType):
    """
    SIP Trunk Group.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sipTg"):
        super(SipTg, self).__init__(ns, pref, tag)
class SipSig(IanaInterfaceType):
    """
    SIP Signaling.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sipSig"):
        super(SipSig, self).__init__(ns, pref, tag)
class DocsCableUpstreamChannel(IanaInterfaceType):
    """
    CATV Upstream Channel.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableUpstreamChannel"):
        super(DocsCableUpstreamChannel, self).__init__(ns, pref, tag)
class Econet(IanaInterfaceType):
    """
    Acorn Econet.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:econet"):
        super(Econet, self).__init__(ns, pref, tag)
class Pon155(IanaInterfaceType):
    """
    FSAN 155Mb Symetrical PON interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pon155"):
        super(Pon155, self).__init__(ns, pref, tag)
class Pon622(IanaInterfaceType):
    """
    FSAN 622Mb Symetrical PON interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pon622"):
        super(Pon622, self).__init__(ns, pref, tag)
class Bridge(IanaInterfaceType):
    """
    Transparent bridge interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:bridge"):
        super(Bridge, self).__init__(ns, pref, tag)
class Linegroup(IanaInterfaceType):
    """
    Interface common to multiple lines.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:linegroup"):
        super(Linegroup, self).__init__(ns, pref, tag)
class VoiceEMFGD(IanaInterfaceType):
    """
    Voice E&M Feature Group D.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceEMFGD"):
        super(VoiceEMFGD, self).__init__(ns, pref, tag)
class VoiceFGDEANA(IanaInterfaceType):
    """
    Voice FGD Exchange Access North American.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceFGDEANA"):
        super(VoiceFGDEANA, self).__init__(ns, pref, tag)
class VoiceDID(IanaInterfaceType):
    """
    Voice Direct Inward Dialing.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceDID"):
        super(VoiceDID, self).__init__(ns, pref, tag)
class MpegTransport(IanaInterfaceType):
    """
    MPEG transport interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mpegTransport"):
        super(MpegTransport, self).__init__(ns, pref, tag)
class SixToFour(IanaInterfaceType):
    """
    6to4 interface (DEPRECATED).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:sixToFour"):
        super(SixToFour, self).__init__(ns, pref, tag)
class Gtp(IanaInterfaceType):
    """
    GTP (GPRS Tunneling Protocol).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gtp"):
        super(Gtp, self).__init__(ns, pref, tag)
class PdnEtherLoop1(IanaInterfaceType):
    """
    Paradyne EtherLoop 1.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pdnEtherLoop1"):
        super(PdnEtherLoop1, self).__init__(ns, pref, tag)
class PdnEtherLoop2(IanaInterfaceType):
    """
    Paradyne EtherLoop 2.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pdnEtherLoop2"):
        super(PdnEtherLoop2, self).__init__(ns, pref, tag)
class OpticalChannelGroup(IanaInterfaceType):
    """
    Optical Channel Group.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:opticalChannelGroup"):
        super(OpticalChannelGroup, self).__init__(ns, pref, tag)
# Identity binding for IANA ifType "homepna" (iana-if-type, rev. 2014-05-08).
# Fix: removed the stray backslash before '-' in the docstring — "\-" is
# an invalid string escape (SyntaxWarning on modern Python).
class Homepna(IanaInterfaceType):
    """
    HomePNA ITU-T G.989.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:homepna"):
        super(Homepna, self).__init__(ns, pref, tag)
class Gfp(IanaInterfaceType):
"""
Generic Framing Procedure (GFP).
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gfp"):
super(Gfp, self).__init__(ns, pref, tag)
class CiscoISLvlan(IanaInterfaceType):
"""
Layer 2 Virtual LAN using Cisco ISL.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ciscoISLvlan"):
super(CiscoISLvlan, self).__init__(ns, pref, tag)
class ActelisMetaLOOP(IanaInterfaceType):
"""
Acteleis proprietary MetaLOOP High Speed Link.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:actelisMetaLOOP"):
super(ActelisMetaLOOP, self).__init__(ns, pref, tag)
class FcipLink(IanaInterfaceType):
"""
FCIP Link.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:fcipLink"):
super(FcipLink, self).__init__(ns, pref, tag)
class Rpr(IanaInterfaceType):
"""
Resilient Packet Ring Interface Type.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:rpr"):
super(Rpr, self).__init__(ns, pref, tag)
class Qam(IanaInterfaceType):
"""
RF Qam Interface.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:qam"):
super(Qam, self).__init__(ns, pref, tag)
class Lmp(IanaInterfaceType):
"""
Link Management Protocol.
"""
_prefix = 'ianaift'
_revision = '2014-05-08'
def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:lmp"):
super(Lmp, self).__init__(ns, pref, tag)
# IANA ifType identity wrappers (iana-if-type YANG, revision 2014-05-08);
# invalid '\-' docstring escapes normalised to plain '-'.
class CblVectaStar(IanaInterfaceType):
    """
    Cambridge Broadband Networks Limited VectaStar.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:cblVectaStar"):
        super(CblVectaStar, self).__init__(ns, pref, tag)
class DocsCableMCmtsDownstream(IanaInterfaceType):
    """
    CATV Modular CMTS Downstream Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableMCmtsDownstream"):
        super(DocsCableMCmtsDownstream, self).__init__(ns, pref, tag)
class Adsl2(IanaInterfaceType):
    """
    Asymmetric Digital Subscriber Loop Version 2
    (DEPRECATED/OBSOLETED - please use adsl2plus(238)
    instead).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:adsl2"):
        super(Adsl2, self).__init__(ns, pref, tag)
class MacSecControlledIF(IanaInterfaceType):
    """
    MACSecControlled.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:macSecControlledIF"):
        super(MacSecControlledIF, self).__init__(ns, pref, tag)
class MacSecUncontrolledIF(IanaInterfaceType):
    """
    MACSecUncontrolled.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:macSecUncontrolledIF"):
        super(MacSecUncontrolledIF, self).__init__(ns, pref, tag)
class AviciOpticalEther(IanaInterfaceType):
    """
    Avici Optical Ethernet Aggregate.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aviciOpticalEther"):
        super(AviciOpticalEther, self).__init__(ns, pref, tag)
class Atmbond(IanaInterfaceType):
    """
    atmbond.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:atmbond"):
        super(Atmbond, self).__init__(ns, pref, tag)
class VoiceFGDOS(IanaInterfaceType):
    """
    Voice FGD Operator Services.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceFGDOS"):
        super(VoiceFGDOS, self).__init__(ns, pref, tag)
# IANA ifType identity wrappers (iana-if-type YANG, revision 2014-05-08);
# invalid '\-' docstring escapes normalised to plain '-'.
class MocaVersion1(IanaInterfaceType):
    """
    MultiMedia over Coax Alliance (MoCA) Interface
    as documented in information provided privately to IANA.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:mocaVersion1"):
        super(MocaVersion1, self).__init__(ns, pref, tag)
class Ieee80216WMAN(IanaInterfaceType):
    """
    IEEE 802.16 WMAN interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee80216WMAN"):
        super(Ieee80216WMAN, self).__init__(ns, pref, tag)
class Adsl2plus(IanaInterfaceType):
    """
    Asymmetric Digital Subscriber Loop Version 2 -
    Version 2 Plus and all variants.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:adsl2plus"):
        super(Adsl2plus, self).__init__(ns, pref, tag)
class DvbRcsMacLayer(IanaInterfaceType):
    """
    DVB-RCS MAC Layer.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbRcsMacLayer"):
        super(DvbRcsMacLayer, self).__init__(ns, pref, tag)
class DvbTdm(IanaInterfaceType):
    """
    DVB Satellite TDM.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbTdm"):
        super(DvbTdm, self).__init__(ns, pref, tag)
class DvbRcsTdma(IanaInterfaceType):
    """
    DVB-RCS TDMA.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:dvbRcsTdma"):
        super(DvbRcsTdma, self).__init__(ns, pref, tag)
class X86Laps(IanaInterfaceType):
    """
    LAPS based on ITU-T X.86/Y.1323.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:x86Laps"):
        super(X86Laps, self).__init__(ns, pref, tag)
class WwanPP(IanaInterfaceType):
    """
    3GPP WWAN.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:wwanPP"):
        super(WwanPP, self).__init__(ns, pref, tag)
class WwanPP2(IanaInterfaceType):
    """
    3GPP2 WWAN.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:wwanPP2"):
        super(WwanPP2, self).__init__(ns, pref, tag)
# IANA ifType identity wrappers (iana-if-type YANG, revision 2014-05-08);
# invalid '\-' docstring escapes normalised to plain '-'.
class VoiceEBS(IanaInterfaceType):
    """
    Voice P-phone EBS physical interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:voiceEBS"):
        super(VoiceEBS, self).__init__(ns, pref, tag)
class IfPwType(IanaInterfaceType):
    """
    Pseudowire interface type.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ifPwType"):
        super(IfPwType, self).__init__(ns, pref, tag)
class Ilan(IanaInterfaceType):
    """
    Internal LAN on a bridge per IEEE 802.1ap.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ilan"):
        super(Ilan, self).__init__(ns, pref, tag)
class Pip(IanaInterfaceType):
    """
    Provider Instance Port on a bridge per IEEE 802.1ah PBB.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:pip"):
        super(Pip, self).__init__(ns, pref, tag)
class AluELP(IanaInterfaceType):
    """
    Alcatel-Lucent Ethernet Link Protection.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluELP"):
        super(AluELP, self).__init__(ns, pref, tag)
class Gpon(IanaInterfaceType):
    """
    Gigabit-capable passive optical networks (G-PON) as per
    ITU-T G.948.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:gpon"):
        super(Gpon, self).__init__(ns, pref, tag)
class Vdsl2(IanaInterfaceType):
    """
    Very high speed digital subscriber line Version 2
    (as per ITU-T Recommendation G.993.2).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:vdsl2"):
        super(Vdsl2, self).__init__(ns, pref, tag)
class CapwapDot11Profile(IanaInterfaceType):
    """
    WLAN Profile Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:capwapDot11Profile"):
        super(CapwapDot11Profile, self).__init__(ns, pref, tag)
class CapwapDot11Bss(IanaInterfaceType):
    """
    WLAN BSS Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:capwapDot11Bss"):
        super(CapwapDot11Bss, self).__init__(ns, pref, tag)
class CapwapWtpVirtualRadio(IanaInterfaceType):
    """
    WTP Virtual Radio Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:capwapWtpVirtualRadio"):
        super(CapwapWtpVirtualRadio, self).__init__(ns, pref, tag)
# IANA ifType identity wrappers (iana-if-type YANG, revision 2014-05-08).
class Bits(IanaInterfaceType):
    """
    bitsport.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:bits"):
        super(Bits, self).__init__(ns, pref, tag)
class DocsCableUpstreamRfPort(IanaInterfaceType):
    """
    DOCSIS CATV Upstream RF Port.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:docsCableUpstreamRfPort"):
        super(DocsCableUpstreamRfPort, self).__init__(ns, pref, tag)
class CableDownstreamRfPort(IanaInterfaceType):
    """
    CATV downstream RF Port.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:cableDownstreamRfPort"):
        super(CableDownstreamRfPort, self).__init__(ns, pref, tag)
class VmwareVirtualNic(IanaInterfaceType):
    """
    VMware Virtual Network Interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:vmwareVirtualNic"):
        super(VmwareVirtualNic, self).__init__(ns, pref, tag)
class Ieee802154(IanaInterfaceType):
    """
    IEEE 802.15.4 WPAN interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ieee802154"):
        super(Ieee802154, self).__init__(ns, pref, tag)
class OtnOdu(IanaInterfaceType):
    """
    OTN Optical Data Unit.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:otnOdu"):
        super(OtnOdu, self).__init__(ns, pref, tag)
class OtnOtu(IanaInterfaceType):
    """
    OTN Optical channel Transport Unit.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:otnOtu"):
        super(OtnOtu, self).__init__(ns, pref, tag)
class IfVfiType(IanaInterfaceType):
    """
    VPLS Forwarding Instance Interface Type.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:ifVfiType"):
        super(IfVfiType, self).__init__(ns, pref, tag)
class G9981(IanaInterfaceType):
    """
    G.998.1 bonded interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:g9981"):
        super(G9981, self).__init__(ns, pref, tag)
# IANA ifType identity wrappers (iana-if-type YANG, revision 2014-05-08);
# invalid '\-' docstring escapes normalised to plain '-'.
class G9982(IanaInterfaceType):
    """
    G.998.2 bonded interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:g9982"):
        super(G9982, self).__init__(ns, pref, tag)
class G9983(IanaInterfaceType):
    """
    G.998.3 bonded interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:g9983"):
        super(G9983, self).__init__(ns, pref, tag)
class AluEpon(IanaInterfaceType):
    """
    Ethernet Passive Optical Networks (E-PON).
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluEpon"):
        super(AluEpon, self).__init__(ns, pref, tag)
class AluEponOnu(IanaInterfaceType):
    """
    EPON Optical Network Unit.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluEponOnu"):
        super(AluEponOnu, self).__init__(ns, pref, tag)
class AluEponPhysicalUni(IanaInterfaceType):
    """
    EPON physical User to Network interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluEponPhysicalUni"):
        super(AluEponPhysicalUni, self).__init__(ns, pref, tag)
class AluEponLogicalLink(IanaInterfaceType):
    """
    The emulation of a point-to-point link over the EPON
    layer.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluEponLogicalLink"):
        super(AluEponLogicalLink, self).__init__(ns, pref, tag)
class AluGponOnu(IanaInterfaceType):
    """
    GPON Optical Network Unit.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluGponOnu"):
        super(AluGponOnu, self).__init__(ns, pref, tag)
class AluGponPhysicalUni(IanaInterfaceType):
    """
    GPON physical User to Network interface.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:aluGponPhysicalUni"):
        super(AluGponPhysicalUni, self).__init__(ns, pref, tag)
class VmwareNicTeam(IanaInterfaceType):
    """
    VMware NIC Team.
    """
    _prefix = 'ianaift'
    _revision = '2014-05-08'
    def __init__(self, ns="urn:ietf:params:xml:ns:yang:iana-if-type", pref="iana-if-type", tag="iana-if-type:vmwareNicTeam"):
        super(VmwareNicTeam, self).__init__(ns, pref, tag)
| StarcoderdataPython |
1629476 | import unittest
"""
1
/ \
2 3
/ \
4 5
"""
# DFS post-order 4 5 2 3 1 (Left-Right-Root), recursive version.
def is_balancedRecurive(tree_root):
    """Print the post-order (left, right, root) traversal of the tree.

    NOTE(review): despite the name, this does not check balance — it
    always returns True after printing the traversal.
    """
    def _visit(node):
        if node is not None:
            _visit(node.left)
            _visit(node.right)
            print(node.value, end=' ')
    _visit(tree_root)
    return True
def postOrderHack(tree_root):
    """Print the post-order values by reversing a root-right-left walk."""
    collected = []
    pending = [tree_root]
    while pending:
        current = pending.pop()
        if current:
            collected.append(current.value)
            # Left is pushed first so the right subtree is popped (and
            # recorded) before the left one; reversing then yields
            # left-right-root order.
            pending.append(current.left)
            pending.append(current.right)
    print(collected[::-1])
    return True
# DFS post-order 4 5 2 3 1 (Left-Right-Root), iterative version.
def postOrder(tree_root):
    """Print post-order values iteratively using (node, expanded) markers."""
    pending = [(tree_root, False)]
    while pending:
        current, expanded = pending.pop()
        if current is None:
            continue
        if expanded:
            # Both subtrees were already scheduled below this entry.
            print(current.value, end=' ')
        else:
            # Re-push the node marked as expanded, then its children so
            # they are emitted first (left on top of the stack).
            pending.append((current, True))
            pending.append((current.right, False))
            pending.append((current.left, False))
    return True
# Tests
class Test(unittest.TestCase):
    """Smoke test: build a small tree and run the iterative traversal."""
    class BinaryTreeNode(object):
        """Minimal binary-tree node used to build the fixture tree."""
        def __init__(self, value):
            self.value = value
            self.left = None   # left child node, or None
            self.right = None  # right child node, or None
        def insert_left(self, value):
            # Attach a new left child and return it so calls can chain.
            self.left = Test.BinaryTreeNode(value)
            return self.left
        def insert_right(self, value):
            # Attach a new right child and return it so calls can chain.
            self.right = Test.BinaryTreeNode(value)
            return self.right
    def test_traversal(self):
        # Fixture tree matching the module docstring:
        #       1
        #      / \
        #     2   3
        #    / \
        #   4   5
        tree = Test.BinaryTreeNode(1)
        left = tree.insert_left(2)
        tree.insert_right(3)
        left.insert_left(4)
        left.insert_right(5)
        # postOrder only ever returns True; this just checks it runs.
        result = postOrder(tree)
        self.assertTrue(result)
# BUG FIX: guard the runner so importing this module no longer executes
# (and exits via) unittest.main as an import side effect.
if __name__ == "__main__":
    unittest.main(verbosity=2)
6659158 | from django import template
register = template.Library()
@register.filter
def chat_with(obj, user):
    """Django template filter delegating to ``obj.chat_with(user)``.

    Lets templates write ``{{ obj|chat_with:user }}``; the actual lookup
    logic lives on the model's ``chat_with`` method.
    """
    return obj.chat_with(user)
| StarcoderdataPython |
1944149 | from tkinter import Tk, Canvas, Message, Label, Entry, Button, StringVar
from tkinter.messagebox import showinfo
from lib_interaction import *
# Window layout constants (pixels) and the entry-field font.
lborder = 16
tborder = 16
entry_col = 130
button_col = 320
font = "roboto-mono 12"
# Key symbols allowed (besides digits) in the array entry field.
arr_sym_tuple = ('BackSpace', 'space', 'minus')
# BUG FIX: ('BackSpace') without a trailing comma is just a parenthesised
# string, so ``event.keysym in syms`` in press() degenerated to a substring
# test — e.g. 'a' in 'BackSpace' is True, letting letters through.  The
# trailing comma makes it a real one-element tuple.
step_sym_tuple = ('BackSpace',)
def rotate_arr(lib: CDLL, arr: list, step: int, txt: StringVar) -> bool:
    """Left-rotate *arr* by *step* positions via the shared C library.

    The rotated values are published to *txt* (a Tk StringVar) for display.
    Returns False for an empty array, True otherwise.
    NOTE(review): the C return code ``rc`` is never inspected; lrotate is
    assumed to mutate the array in place — confirm against lib_interaction.
    """
    if len(arr) == 0:
        return False
    # Marshal the Python ints into a ctypes int array for the FFI call.
    c_arr = (c_int * len(arr))(*arr)
    rc = lib.lrotate(
        c_arr,
        c_size_t(len(c_arr)),
        c_size_t(sizeof(c_int)),
        c_size_t(step)
    )
    # The C routine mutated c_arr in place; show the result in the UI.
    txt.set([i for i in c_arr])
    return True
def copyfullsrqs(lib: CDLL, arr: list[any], txt: StringVar) -> bool:
    """Copy the perfect-square elements of *arr* to a second array via C.

    ``lib.cpybycond`` copies elements satisfying the ``isfullsqr`` callback;
    the destination is pre-filled with -1 sentinels, so the non-negative
    survivors shown in *txt* are exactly the copied squares.  Returns False
    for an empty array.
    NOTE(review): ``condtition_f`` (sic) and ``isfullsqr`` come from the
    star import of lib_interaction; their exact contract is assumed here.
    """
    if len(arr) == 0:
        return False
    arr_a = (c_int * len(arr))(*arr)
    # Destination buffer initialised to -1 sentinels.
    arr_b = (c_int * len(arr))(*[-1] * len(arr))
    rc = lib.cpybycond(
        (arr_a),
        len(arr_a),
        (arr_b),
        len(arr_b),
        c_size_t(sizeof(c_int)),
        condtition_f(isfullsqr)
    )
    # Drop untouched sentinel slots before publishing to the UI.
    txt.set([i for i in arr_b if i >= 0])
    return True
def cmd_wrapper(cmd: callable, args: list) -> None:
    """Invoke *cmd* with *args*, reporting failures to the user.

    A falsy return value from *cmd* pops an information dialog (the
    command functions return False on invalid input); a TypeError — e.g.
    from a malformed argument list — is logged instead of crashing the UI.
    """
    try:
        if not cmd(*args):
            # Fixed typo in the user-facing message ("you" -> "your").
            showinfo(message="Please, check your input data")
    except TypeError as error:
        print(error)
def press(event, syms: tuple[str]):
    """Key handler: allow digits and the listed keysyms, block the rest.

    Returning the string 'break' stops Tk from processing the event, so
    the character never reaches the entry widget.
    """
    allowed = event.char.isdigit() or event.keysym in syms
    if not allowed:
        return 'break'
def config(root: Tk, lib: CDLL):
    """Build the demo window.

    Lays out two validated entry fields (array elements and rotation
    step), a result label backed by a StringVar, and two buttons wired to
    the shared-library operations via cmd_wrapper.
    """
    root.geometry("560x360")
    root.title("Shared lib interaction demo")
    root.resizable(False, False)
    # "Array elems" entry: digits plus BackSpace/space/minus only.
    Label(
        root,
        text="Array elems",
        font=font
    ).place(x=lborder, y=tborder)
    entry_array = Entry(root)
    entry_array.bind(
        '<KeyPress>',
        lambda event: press(event, arr_sym_tuple)
    )
    entry_array.place(height=22, x=entry_col, y=tborder)
    # "Step" entry: digits plus BackSpace only.
    Label(
        root,
        text="Step",
        font=font
    ).place(x=lborder, y=48)
    entry_step = Entry(root)
    entry_step.bind(
        '<KeyPress>',
        lambda event: press(event, step_sym_tuple)
    )
    entry_step.place(height=22, x=entry_col, y=48)
    # Result area; callbacks write into this StringVar.
    Label(
        root,
        text="Result",
        font=font
    ).place(x=lborder, y=80)
    txt = StringVar()
    Label(
        root,
        textvariable=txt,
        font=font
    ).place(x=lborder, y=100)
    # Buttons parse the entry contents lazily, at click time.
    Button(
        text="Rotate array",
        font=font,
        pady="4",
        command=lambda:
        cmd_wrapper(
            rotate_arr,
            [
                lib,
                list(map(int, entry_array.get().split())),
                0 if not list(map(int, entry_step.get().split()))
                else list(map(int, entry_step.get().split()))[0],
                txt
            ]
        )
    ).place(
        x=button_col,
        y=12
    )
    Button(
        text="Copy full sqr to 2nd arr",
        font=font,
        pady="4",
        command=lambda:
        cmd_wrapper(
            copyfullsrqs,
            [
                lib,
                list(map(int, entry_array.get().split())),
                txt
            ]
        )
    ).place(
        x=button_col,
        y=44
    )
| StarcoderdataPython |
3376682 | from random import random
class Synapse:
    """A weighted, directed connection between two neurons.

    The weight is initialised uniformly at random in [0, 1).
    """
    def __init__(self, from_neuron, to_neuron):
        self.from_neuron = from_neuron  # source endpoint of the connection
        self.to_neuron = to_neuron      # destination endpoint
        self.weight = random()          # uniform random weight in [0, 1)
    def __repr__(self):
        # Added for debuggability; backward-compatible extension.
        return "Synapse(from_neuron=%r, to_neuron=%r, weight=%r)" % (
            self.from_neuron, self.to_neuron, self.weight)
1676904 | <gh_stars>0
from collections import defaultdict
from pycocotools.coco import COCO
from pycocoevalcap.eval import COCOEvalCap
import json
from json import encoder
encoder.FLOAT_REPR = lambda o: format(o, '.3f')
import random
from scipy.io import loadmat
import pandas as pd
def load_pascal_triplets(filepath='experimental_data/consensus_pascal.mat'):
    """Load the PASCAL-50S consensus triplets from a MATLAB .mat file.

    Each triplet is (A, B, C, winner, bucket): A is a human reference
    sentence, B and C are the two candidate captions and ``winner`` encodes
    the human preference.  Triplets sharing a (B, C) pair are grouped under
    the key B + C; once a group reaches 48 entries, overflow entries go to
    numbered bucket keys (B + C + str(bucket)).

    Returns:
        (triplets, sent_to_index): the grouped triplets, and a mapping
        from group key to its first-seen insertion index.

    NOTE(review): the nested indexing below assumes the field layout of
    consensus_pascal.mat as distributed with PASCAL-50S — not visible here.
    """
    consensus = loadmat(filepath)
    triplets = {}
    sent_to_index = {}
    index = 0
    for item in consensus['triplets'][0]:
        # Unwrap the MATLAB cell nesting into plain ASCII strings.
        A = str(item[0][0][0][0]).encode('ascii', 'ignore').decode()
        B = str(item[1][0][0][0]).encode('ascii', 'ignore').decode()
        C = str(item[2][0][0][0]).encode('ascii', 'ignore').decode()
        winner = item[3][0][0]
        key = B + C
        triplets[key] = triplets.get(key, [])
        bucket = ''
        if len(triplets[key]) == 48:
            # Base group is full: find the first non-full numbered bucket.
            bucket = 1
            while len(triplets.get(key + str(bucket), [])) == 48:
                bucket += 1
            key = key + str(bucket)
            triplets[key] = triplets.get(key, [])
        triplets[key].append((A, B, C, winner, bucket))
        if sent_to_index.get(key, None) is None:
            sent_to_index[key] = index
            index += 1
    return triplets, sent_to_index
def compute_metrics(annFile, resFile):
    """Run the COCO caption evaluation toolkit and collect per-image scores.

    Args:
        annFile: path to COCO-format reference annotations (JSON).
        resFile: path to COCO-format candidate captions (JSON).

    Returns:
        dict: image_id -> {'Bleu_4', 'METEOR', 'ROUGE_L', 'CIDEr',
        'CIDEr-R', 'SPICE'} per-image scores (SPICE uses the overall
        F-score).
    """
    # create coco object and cocoRes object
    coco = COCO(annFile)
    cocoRes = coco.loadRes(resFile)
    # create cocoEval object by taking coco and cocoRes
    cocoEval = COCOEvalCap(coco, cocoRes)
    # evaluate on a subset of images by setting
    # cocoEval.params['image_id'] = cocoRes.getImgIds()
    # please remove this line when evaluating the full validation set
    cocoEval.params['image_id'] = cocoRes.getImgIds()
    # evaluate results
    # SPICE will take a few minutes the first time, but speeds up due to caching
    cocoEval.evaluate()
    results = {}
    for item in cocoEval.evalImgs:
        image_id = item['image_id']
        Bleu_4 = item['Bleu_4']
        METEOR = item['METEOR']
        ROUGE_L = item['ROUGE_L']
        CIDEr = item['CIDEr']
        CIDEr_R = item['CIDEr-R']
        SPICE = item['SPICE']['All']['f']
        results[image_id] = {'Bleu_4': Bleu_4,
                             'METEOR': METEOR,
                             'ROUGE_L': ROUGE_L,
                             'CIDEr': CIDEr,
                             'CIDEr-R': CIDEr_R,
                             'SPICE': SPICE}
    return results
def compute_accuracy(results_B, results_C, winners):
    """Fraction of human-decided pairs on which each metric agrees.

    Args:
        results_B: dict image_id -> per-metric scores for candidate B.
        results_C: dict image_id -> per-metric scores for candidate C.
        winners: dict image_id -> summed human votes; > 0 means B won,
            < 0 means C won, 0 is a tie (ties are excluded).

    Returns:
        dict: metric name -> accuracy in [0, 1] over the non-tied items;
        all zeros when every item is a tie (the original code raised
        ZeroDivisionError in that case).
    """
    metrics = ('Bleu_4', 'METEOR', 'ROUGE_L', 'CIDEr', 'CIDEr-R', 'SPICE')
    counters = {metric: 0 for metric in metrics}
    print('computing accuracy...', len(winners), 'elements')
    win_count = 0
    for img, winner in winners.items():
        if winner == 0:
            continue  # tie: humans preferred neither caption
        win_count += 1
        for metric in metrics:
            b_score = results_B[img][metric]
            c_score = results_C[img][metric]
            # A metric "agrees" when it ranks the pair the same way the
            # human votes did; a metric tie counts as a miss.
            if (b_score > c_score and winner > 0) or \
                    (b_score < c_score and winner < 0):
                counters[metric] += 1
    if win_count == 0:
        # BUG FIX: guard the all-ties case instead of dividing by zero.
        return counters
    return {metric: counters[metric] / win_count for metric in metrics}
def get_class(pair):
    """Map a PASCAL-50S candidate-source pair to its comparison category.

    Sources 1-5 are treated as one kind, 6 and 7 as two others; the
    categories returned are 'MM', 'HM', 'HC' and 'HI'.  Any combination
    not covered below yields None.
    """
    first, second = pair[0], pair[1]
    if first <= 5:
        if second <= 5:
            return 'MM'
        if second == 6:
            return 'HM'
        return None
    if first == 6:
        if second <= 5:
            return 'HM'
        if second == 6:
            return 'HC'
        if second == 7:
            return 'HI'
    return None
def exp_5_references_pascal_50s(triplets, sent_to_index, pairs):
    """PASCAL-50S experiment with 5 references per pair.

    Scores each candidate pair (B, C) against up to 5 randomly sampled
    human references, then reports per-metric accuracy versus the human
    votes, overall and split by pair category (HC/HI/HM/MM).  Uses
    references.json / captions_B.json / captions_C.json as COCO-format
    scratch files and writes results_pascal_50S.json.

    NOTE(review): evaluation is flushed in batches of 500 items; a final
    partial batch is never flushed, which is only safe when the number of
    groups is a multiple of 500 (PASCAL-50S has 4000 pairs) — verify.
    """
    n_ref = 5
    # COCO-format skeleton; the license/info boilerplate is required by
    # the pycocotools loader.
    ref_data = {'images': [],
                "licenses": [{"url": "http://creativecommons.org/licenses/by-nc-sa/2.0/", "id": 1,
                              "name": "Attribution-NonCommercial-ShareAlike License"},
                             {"url": "http://creativecommons.org/licenses/by-nc/2.0/", "id": 2,
                              "name": "Attribution-NonCommercial License"},
                             {"url": "http://creativecommons.org/licenses/by-nc-nd/2.0/", "id": 3,
                              "name": "Attribution-NonCommercial-NoDerivs License"},
                             {"url": "http://creativecommons.org/licenses/by/2.0/", "id": 4,
                              "name": "Attribution License"},
                             {"url": "http://creativecommons.org/licenses/by-sa/2.0/", "id": 5,
                              "name": "Attribution-ShareAlike License"},
                             {"url": "http://creativecommons.org/licenses/by-nd/2.0/", "id": 6,
                              "name": "Attribution-NoDerivs License"},
                             {"url": "http://flickr.com/commons/usage/", "id": 7,
                              "name": "No known copyright restrictions"},
                             {"url": "http://www.usa.gov/copyright.shtml", "id": 8,
                              "name": "United States Government Work"}],
                'type': 'captions',
                'info': {"description": "This is stable 1.0 version of the 2014 MS COCO dataset.",
                         "url": "http://mscoco.org",
                         "version": "1.0",
                         "year": 2014,
                         "contributor": "Microsoft COCO group", "date_created": "2015-01-27 09:11:52.357475"},
                'annotations': []}
    cand_b = []
    cand_c = []
    winners = {}
    img_to_index = {}
    results_B = {}
    results_C = {}
    for i, refs in enumerate(triplets.values()):
        ref_data['images'].append({'id': i})
        A, B, C, winner, bucket = refs[0]
        cand_b.append({"image_id": i, "caption": B})
        cand_c.append({"image_id": i, "caption": C})
        if n_ref <= len(refs):
            refs = random.sample(refs, n_ref)
        for ref in refs:
            A, B, C, winner, bucket = ref
            ref_data['annotations'].append({"image_id": i, "id": i, "caption": A})
            # Human votes accumulate across the sampled references.
            winners[i] = winners.get(i, 0) + winner
            img_to_index[i] = sent_to_index[B + C + str(bucket)]
        if i % 500 == 499:
            # Flush the current 500-item batch through the COCO evaluator.
            with open('references.json', 'w') as file:
                json.dump(ref_data, file)
            with open('captions_B.json', 'w') as file:
                json.dump(cand_b, file)
            with open('captions_C.json', 'w') as file:
                json.dump(cand_c, file)
            annFile = 'references.json'
            resFile = 'captions_B.json'
            results_B_aux = compute_metrics(annFile, resFile)
            results_B.update(results_B_aux)
            resFile = 'captions_C.json'
            results_C_aux = compute_metrics(annFile, resFile)
            results_C.update(results_C_aux)
            ref_data['images'] = []
            ref_data['annotations'] = []
            cand_b = []
            cand_c = []
    accuracies = compute_accuracy(results_B, results_C, winners)
    print(accuracies)
    # Partition the per-image results by pair category before re-scoring.
    HC = {'B': {img: value for img, value in results_B.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HC'},
          'C': {img: value for img, value in results_C.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HC'},
          'winners': {img: value for img, value in winners.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HC'}}
    HI = {
        'B': {img: value for img, value in results_B.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HI'},
        'C': {img: value for img, value in results_C.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HI'},
        'winners': {img: value for img, value in winners.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HI'}}
    HM = {
        'B': {img: value for img, value in results_B.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HM'},
        'C': {img: value for img, value in results_C.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HM'},
        'winners': {img: value for img, value in winners.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'HM'}}
    MM = {'B': {img: value for img, value in results_B.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'MM'},
          'C': {img: value for img, value in results_C.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'MM'},
          'winners': {img: value for img, value in winners.items() if get_class(pairs['new_data'][img_to_index[img]]) == 'MM'}}
    HC_accuracies = compute_accuracy(HC['B'], HC['C'], HC['winners'])
    HI_accuracies = compute_accuracy(HI['B'], HI['C'], HI['winners'])
    HM_accuracies = compute_accuracy(HM['B'], HM['C'], HM['winners'])
    MM_accuracies = compute_accuracy(MM['B'], MM['C'], MM['winners'])
    with open('results_pascal_50S.json', 'w') as file:
        json.dump({'HC': HC_accuracies,
                   'HI': HI_accuracies,
                   'HM': HM_accuracies,
                   'MM': MM_accuracies, }, file)
def exp_pascal_varying_n_refs(triplets, imgfile, csvfile, sent_to_index, pairs=None, only_MM=False):
    """PASCAL-50S accuracy as a function of the number of references.

    For n_ref = 1..48, incrementally adds one new random reference per
    group (references accumulate across iterations in ``all_refs``),
    re-evaluates, and records per-metric accuracy.  Saves a plot to
    *imgfile* and the table to *csvfile*.  When *only_MM* is True, only
    machine-machine pairs (per get_class on *pairs*) are scored.

    NOTE(review): like exp_5_references_pascal_50s, batches of 500 are
    flushed and any final partial batch is dropped — safe only when the
    group count is a multiple of 500.
    """
    results = {'n_ref': [],
               'Bleu_4': [],
               'METEOR': [],
               'ROUGE_L': [],
               'CIDEr': [],
               'CIDEr-R': [],
               'SPICE': []}
    all_refs = {}
    for n_ref in range(1, 49):
        # Fresh COCO-format skeleton for this n_ref round.
        ref_data = {'images': [],
                    "licenses": [{"url": "http://creativecommons.org/licenses/by-nc-sa/2.0/", "id": 1,
                                  "name": "Attribution-NonCommercial-ShareAlike License"},
                                 {"url": "http://creativecommons.org/licenses/by-nc/2.0/", "id": 2,
                                  "name": "Attribution-NonCommercial License"},
                                 {"url": "http://creativecommons.org/licenses/by-nc-nd/2.0/", "id": 3,
                                  "name": "Attribution-NonCommercial-NoDerivs License"},
                                 {"url": "http://creativecommons.org/licenses/by/2.0/", "id": 4,
                                  "name": "Attribution License"},
                                 {"url": "http://creativecommons.org/licenses/by-sa/2.0/", "id": 5,
                                  "name": "Attribution-ShareAlike License"},
                                 {"url": "http://creativecommons.org/licenses/by-nd/2.0/", "id": 6,
                                  "name": "Attribution-NoDerivs License"},
                                 {"url": "http://flickr.com/commons/usage/", "id": 7,
                                  "name": "No known copyright restrictions"},
                                 {"url": "http://www.usa.gov/copyright.shtml", "id": 8,
                                  "name": "United States Government Work"}],
                    'type': 'captions',
                    'info': {"description": "This is stable 1.0 version of the 2014 MS COCO dataset.",
                             "url": "http://mscoco.org",
                             "version": "1.0",
                             "year": 2014,
                             "contributor": "Microsoft COCO group", "date_created": "2015-01-27 09:11:52.357475"},
                    'annotations': []}
        cand_b = []
        cand_c = []
        winners = {}
        results_B = {}
        results_C = {}
        img_to_index = {}
        for i, refs in enumerate(triplets.values()):
            ref_data['images'].append({'id': i})
            A, B, C, winner, _ = refs[0]
            cand_b.append({"image_id": i, "caption": B})
            cand_c.append({"image_id": i, "caption": C})
            all_refs[i] = all_refs.get(i, [])
            if n_ref <= len(refs) and len(list(set(refs) - set(all_refs[i]))) > 0:
                # Grow this group's reference pool by one unseen reference.
                ref = random.choice(list(set(refs) - set(all_refs[i])))
                all_refs[i].append(ref)
            for ref in all_refs[i]:
                A, B, C, winner, bucket = ref
                ref_data['annotations'].append({"image_id": i, "id": i, "caption": A})
                winners[i] = winners.get(i, 0) + winner
                img_to_index[i] = sent_to_index[B + C + str(bucket)]
            if i % 500 == 499:
                # Flush the current 500-item batch through the evaluator.
                with open('references.json', 'w') as file:
                    json.dump(ref_data, file)
                with open('captions_B.json', 'w') as file:
                    json.dump(cand_b, file)
                with open('captions_C.json', 'w') as file:
                    json.dump(cand_c, file)
                annFile = 'references.json'
                resFile = 'captions_B.json'
                results_B_aux = compute_metrics(annFile, resFile)
                results_B.update(results_B_aux)
                resFile = 'captions_C.json'
                results_C_aux = compute_metrics(annFile, resFile)
                results_C.update(results_C_aux)
                ref_data['images'] = []
                ref_data['annotations'] = []
                cand_b = []
                cand_c = []
        if only_MM:
            MM = {'B': {img: value for img, value in results_B.items() if
                        get_class(pairs['new_data'][img_to_index[img]]) == 'MM'},
                  'C': {img: value for img, value in results_C.items() if
                        get_class(pairs['new_data'][img_to_index[img]]) == 'MM'},
                  'winners': {img: value for img, value in winners.items() if
                              get_class(pairs['new_data'][img_to_index[img]]) == 'MM'}}
            accuracies = compute_accuracy(MM['B'], MM['C'], MM['winners'])
        else:
            accuracies = compute_accuracy(results_B, results_C, winners)
        results['n_ref'].append(n_ref)
        results['Bleu_4'].append(accuracies['Bleu_4'])
        results['METEOR'].append(accuracies['METEOR'])
        results['ROUGE_L'].append(accuracies['ROUGE_L'])
        results['CIDEr'].append(accuracies['CIDEr'])
        results['CIDEr-R'].append(accuracies['CIDEr-R'])
        results['SPICE'].append(accuracies['SPICE'])
    df_results = pd.DataFrame(results)
    plot = df_results.plot(x='n_ref')
    fig = plot.get_figure()
    fig.savefig(imgfile)
    df_results.to_csv(csvfile)
def exp_mscoco_varying_n_refs(captions, imgfile, csvfile):
    """MS COCO accuracy as a function of the number of references.

    Same protocol as exp_pascal_varying_n_refs but over COCO captions:
    for n_ref = 1..4, incrementally adds one random reference per image,
    re-evaluates in batches of 500, and records per-metric accuracy.
    Saves a plot to *imgfile* and the table to *csvfile*.

    NOTE(review): the post-loop flush below only fires when
    ``count % 500 == 499``, so for most dataset sizes the final partial
    batch is still dropped — confirm the intended behaviour.
    """
    results = {'n_ref': [],
               'Bleu_4': [],
               'METEOR': [],
               'ROUGE_L': [],
               'CIDEr': [],
               'CIDEr-R': [],
               'SPICE': []}
    all_refs = defaultdict(lambda: [])
    for n_ref in range(1, 5):
        # Fresh COCO-format skeleton for this n_ref round.
        ref_data = {'images': [],
                    "licenses": [{"url": "http://creativecommons.org/licenses/by-nc-sa/2.0/", "id": 1,
                                  "name": "Attribution-NonCommercial-ShareAlike License"},
                                 {"url": "http://creativecommons.org/licenses/by-nc/2.0/", "id": 2,
                                  "name": "Attribution-NonCommercial License"},
                                 {"url": "http://creativecommons.org/licenses/by-nc-nd/2.0/", "id": 3,
                                  "name": "Attribution-NonCommercial-NoDerivs License"},
                                 {"url": "http://creativecommons.org/licenses/by/2.0/", "id": 4,
                                  "name": "Attribution License"},
                                 {"url": "http://creativecommons.org/licenses/by-sa/2.0/", "id": 5,
                                  "name": "Attribution-ShareAlike License"},
                                 {"url": "http://creativecommons.org/licenses/by-nd/2.0/", "id": 6,
                                  "name": "Attribution-NoDerivs License"},
                                 {"url": "http://flickr.com/commons/usage/", "id": 7,
                                  "name": "No known copyright restrictions"},
                                 {"url": "http://www.usa.gov/copyright.shtml", "id": 8,
                                  "name": "United States Government Work"}],
                    'type': 'captions',
                    'info': {"description": "This is stable 1.0 version of the 2014 MS COCO dataset.",
                             "url": "http://mscoco.org",
                             "version": "1.0",
                             "year": 2014,
                             "contributor": "Microsoft COCO group", "date_created": "2015-01-27 09:11:52.357475"},
                    'annotations': []}
        cand_b = []
        cand_c = []
        winners = {}
        results_B = {}
        results_C = {}
        count = 0
        for img, triplets in captions.items():
            ref_data['images'].append({'id': img})
            refs, B, C, winner = triplets
            cand_b.append({"image_id": img, "caption": B})
            cand_c.append({"image_id": img, "caption": C})
            if n_ref <= len(refs):
                # Grow this image's reference pool by one unseen reference.
                ref = random.choice(list(set(refs) - set(all_refs[img])))
                all_refs[img].append(ref)
            for ref in all_refs[img]:
                ref_data['annotations'].append({"image_id": img, "id": img, "caption": ref})
                winners[img] = winners.get(img, 0) + winner
            if count % 500 == 499:
                # Flush the current 500-image batch through the evaluator.
                with open('references.json', 'w') as file:
                    json.dump(ref_data, file)
                with open('captions_B.json', 'w') as file:
                    json.dump(cand_b, file)
                with open('captions_C.json', 'w') as file:
                    json.dump(cand_c, file)
                annFile = 'references.json'
                resFile = 'captions_B.json'
                results_B_aux = compute_metrics(annFile, resFile)
                results_B.update(results_B_aux)
                resFile = 'captions_C.json'
                results_C_aux = compute_metrics(annFile, resFile)
                results_C.update(results_C_aux)
                ref_data['images'] = []
                ref_data['annotations'] = []
                cand_b = []
                cand_c = []
            count += 1
        if count % 500 == 499:
            # Attempted flush of any leftover items after the loop.
            with open('references.json', 'w') as file:
                json.dump(ref_data, file)
            with open('captions_B.json', 'w') as file:
                json.dump(cand_b, file)
            with open('captions_C.json', 'w') as file:
                json.dump(cand_c, file)
            annFile = 'references.json'
            resFile = 'captions_B.json'
            results_B_aux = compute_metrics(annFile, resFile)
            results_B.update(results_B_aux)
            resFile = 'captions_C.json'
            results_C_aux = compute_metrics(annFile, resFile)
            results_C.update(results_C_aux)
        accuracies = compute_accuracy(results_B, results_C, winners)
        results['n_ref'].append(n_ref)
        results['Bleu_4'].append(accuracies['Bleu_4'])
        results['METEOR'].append(accuracies['METEOR'])
        results['ROUGE_L'].append(accuracies['ROUGE_L'])
        results['CIDEr'].append(accuracies['CIDEr'])
        results['CIDEr-R'].append(accuracies['CIDEr-R'])
        results['SPICE'].append(accuracies['SPICE'])
    df_results = pd.DataFrame(results)
    plot = df_results.plot(x='n_ref')
    fig = plot.get_figure()
    fig.savefig(imgfile)
    df_results.to_csv(csvfile)
def compute_accuracy_pracegover(data):
    """Score the B and C candidate captions of every triplet against reference A.

    *data* maps a key (stringified int) to a triplet ``(A, B, C, winner)`` where
    A is the human reference caption, B and C are the two competing candidates
    and ``winner`` records the human judgement. Items are scored in batches of
    up to 500 by dumping temporary COCO-format JSON files ('references.json',
    'captions_B.json', 'captions_C.json') and calling compute_metrics on them.
    Returns compute_accuracy(results_B, results_C, winners).
    """
    # COCO-style skeleton expected by the pycocoevalcap-style scorer; the
    # licenses/info boilerplate is static and only 'images'/'annotations' vary.
    ref_data = {'images': [],
                "licenses": [{"url": "http://creativecommons.org/licenses/by-nc-sa/2.0/", "id": 1,
                              "name": "Attribution-NonCommercial-ShareAlike License"},
                             {"url": "http://creativecommons.org/licenses/by-nc/2.0/", "id": 2,
                              "name": "Attribution-NonCommercial License"},
                             {"url": "http://creativecommons.org/licenses/by-nc-nd/2.0/", "id": 3,
                              "name": "Attribution-NonCommercial-NoDerivs License"},
                             {"url": "http://creativecommons.org/licenses/by/2.0/", "id": 4,
                              "name": "Attribution License"},
                             {"url": "http://creativecommons.org/licenses/by-sa/2.0/", "id": 5,
                              "name": "Attribution-ShareAlike License"},
                             {"url": "http://creativecommons.org/licenses/by-nd/2.0/", "id": 6,
                              "name": "Attribution-NoDerivs License"},
                             {"url": "http://flickr.com/commons/usage/", "id": 7,
                              "name": "No known copyright restrictions"},
                             {"url": "http://www.usa.gov/copyright.shtml", "id": 8,
                              "name": "United States Government Work"}],
                'type': 'captions',
                'info': {"description": "This is stable 1.0 version of the 2014 MS COCO dataset.",
                         "url": "http://mscoco.org",
                         "version": "1.0",
                         "year": 2014,
                         "contributor": "Microsoft COCO group", "date_created": "2015-01-27 09:11:52.357475"},
                'annotations': []}
    cand_b = []
    cand_c = []
    winners = {}
    results_B = {}
    results_C = {}
    for k, triplet in data.items():
        k = int(k)
        A, B, C, winner = triplet
        ref_data['images'].append({'id': k})
        cand_b.append({"image_id": k, "caption": B})
        cand_c.append({"image_id": k, "caption": C})
        ref_data['annotations'].append({"image_id": k, "id": k, "caption": A})
        winners[k] = winner
        # Flush and score a batch every 500 items.
        # NOTE(review): batching on `k % 500 == 499` assumes the integer keys
        # are dense/sequential — confirm this holds for the input data.
        if k % 500 == 499:
            with open('references.json', 'w') as file:
                json.dump(ref_data, file)
            with open('captions_B.json', 'w') as file:
                json.dump(cand_b, file)
            with open('captions_C.json', 'w') as file:
                json.dump(cand_c, file)
            annFile = 'references.json'
            resFile = 'captions_B.json'
            results_B_aux = compute_metrics(annFile, resFile)
            results_B.update(results_B_aux)
            resFile = 'captions_C.json'
            results_C_aux = compute_metrics(annFile, resFile)
            results_C.update(results_C_aux)
            # Reset the per-batch buffers.
            ref_data['images'] = []
            ref_data['annotations'] = []
            cand_b = []
            cand_c = []
    # Score whatever remains after the last full batch.
    if len(cand_b) > 0:
        with open('references.json', 'w') as file:
            json.dump(ref_data, file)
        with open('captions_B.json', 'w') as file:
            json.dump(cand_b, file)
        with open('captions_C.json', 'w') as file:
            json.dump(cand_c, file)
        annFile = 'references.json'
        resFile = 'captions_B.json'
        results_B_aux = compute_metrics(annFile, resFile)
        results_B.update(results_B_aux)
        resFile = 'captions_C.json'
        results_C_aux = compute_metrics(annFile, resFile)
        results_C.update(results_C_aux)
    return compute_accuracy(results_B, results_C, winners)
def exp_pracegover(output_file, filepath='experimental_data/pracegover_triplets_complete.json'):
    """Run the #PraCegoVer accuracy experiment on the HCI and HII subsets.

    Loads the triplet file, scores each subset with
    compute_accuracy_pracegover, echoes intermediate results to stdout and
    writes the final accuracies to *output_file* as JSON.
    """
    with open(filepath) as source:
        triplets_by_subset = json.load(source)
    accuracies = {}
    for subset in ('HCI', 'HII'):
        accuracies[subset] = compute_accuracy_pracegover(triplets_by_subset[subset])
        print(subset, accuracies)
    with open(output_file, 'w') as sink:
        json.dump(accuracies, sink)
def compute_score_for_toy_candidate_classification():
    """Score the toy candidate-classification set and dump the results as CSV.

    Every caption metric is computed for the correct candidates (CC) and the
    incorrect candidates (IC) against the shared references; one row per key
    is written to 'toy_candidate_classification_results.csv'.
    """
    annFile = 'experimental_data/toy_candidate_classification/toy_candidate_classification_references.json'
    resFile = 'experimental_data/toy_candidate_classification/toy_candidate_classification_correct_candidates.json'
    results_correct = compute_metrics(annFile, resFile)
    resFile = 'experimental_data/toy_candidate_classification/toy_candidate_classification_incorrect_candidates.json'
    results_incorrect = compute_metrics(annFile, resFile)
    # (CSV column prefix, metric key as returned by compute_metrics)
    columns = [
        ('CIDEr-R', 'CIDEr-R'),
        ('CIDEr-D', 'CIDEr'),
        ('SPICE', 'SPICE'),
        ('METEOR', 'METEOR'),
        ('ROUGE', 'ROUGE_L'),
        ('BLEU4', 'Bleu_4'),
    ]
    results = {'key': []}
    for name, _ in columns:
        results['{} CC'.format(name)] = []
        results['{} IC'.format(name)] = []
    for key, cc_scores in results_correct.items():
        ic_scores = results_incorrect[key]
        results['key'].append(key)
        for name, metric in columns:
            results['{} CC'.format(name)].append(100 * cc_scores[metric])
            results['{} IC'.format(name)].append(100 * ic_scores[metric])
    df = pd.DataFrame(results)
    df.to_csv('toy_candidate_classification_results.csv')
if __name__ == '__main__':
    # Entry points of earlier experiments are kept below, commented out,
    # so they can be re-enabled selectively.
    # triplets, sent_to_index = load_pascal_triplets()
    # pairs = loadmat('experimental_data/pair_pascal.mat')
    # exp_5_references_pascal_50s(triplets, sent_to_index, pairs=pairs)
    #
    # exp_varying_n_refs(triplets, sent_to_index=sent_to_index, imgfile='pascal_50S.png', csvfile='pascal_50S.csv')
    # exp_varying_n_refs(triplets, sent_to_index=sent_to_index, imgfile='pascal_50S_only_MM.png', csvfile='pascal_50S_only_MM.csv',
    #                    only_MM=True, pairs=pairs)
    # exp_pracegover(output_file='results_pracegover_complete.json')
    # with open('experimental_data/mscoco_triplets_complete.json') as file:
    #     data = json.load(file)
    #
    # exp_mscoco_varying_n_refs(data['HCI'], imgfile='mscoco_HCI.png', csvfile='mscoco_HCI.csv')
    # exp_mscoco_varying_n_refs(data['HII'], imgfile='mscoco_HII.png', csvfile='mscoco_HII.csv')
    compute_score_for_toy_candidate_classification()
6401518 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/grid/messages/success_resp_message.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# syft absolute
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
from syft.proto.core.io import address_pb2 as proto_dot_core_dot_io_dot_address__pb2
# Register the serialized .proto definition with the default descriptor pool;
# the builder calls below synthesise the message classes into this module.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
    b'\n.proto/grid/messages/success_resp_message.proto\x12\x12syft.grid.messages\x1a%proto/core/common/common_object.proto\x1a\x1bproto/core/io/address.proto"y\n\x16SuccessResponseMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x10\n\x08resp_msg\x18\x03 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x04 \x01(\x0b\x32\x15.syft.core.io.Address"w\n\x14\x45rrorResponseMessage\x12%\n\x06msg_id\x18\x01 \x01(\x0b\x32\x15.syft.core.common.UID\x12\x10\n\x08resp_msg\x18\x02 \x01(\t\x12&\n\x07\x61\x64\x64ress\x18\x03 \x01(\x0b\x32\x15.syft.core.io.Addressb\x06proto3'
)
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(
    DESCRIPTOR, "proto.grid.messages.success_resp_message_pb2", globals()
)
# With pure-Python descriptors, the byte offsets of each message inside the
# serialized file must be patched manually (the C extension computes them).
if _descriptor._USE_C_DESCRIPTORS == False:
    DESCRIPTOR._options = None
    _SUCCESSRESPONSEMESSAGE._serialized_start = 138
    _SUCCESSRESPONSEMESSAGE._serialized_end = 259
    _ERRORRESPONSEMESSAGE._serialized_start = 261
    _ERRORRESPONSEMESSAGE._serialized_end = 380
# @@protoc_insertion_point(module_scope)
| StarcoderdataPython |
# TODO: someday: I think these should probably be refactored to be in an external file; yaml, maybe?
SAMPLE_ID_SELECTOR = 'samples.sample_id'


def get_sample_id_filter(sample_name):
    """Return a Mongo filter matching documents for exactly one sample id."""
    return {SAMPLE_ID_SELECTOR: sample_name}


def get_any_of_sample_ids_filter(sample_names_list):
    """Return a Mongo filter matching documents for any of the given sample ids."""
    return {SAMPLE_ID_SELECTOR: {'$in': sample_names_list}}
def make_de_novo_variants_filter(proband, ancestor1, ancestor2):
    """Build the filter used for de novo variant analysis.

    Selects documents that belong to the proband sample and to neither of the
    two ancestor samples. Works on multisample files or on data coming from a
    collection of files; occurrence of a variant in a sample is defined by the
    upstream pipeline as allele frequencies greater than [0, 0] ([REF, ALT]).
    """
    absent_from_ancestors = {
        "$and": [
            {SAMPLE_ID_SELECTOR: {"$ne": ancestor1}},
            {SAMPLE_ID_SELECTOR: {"$ne": ancestor2}},
        ]
    }
    return {"$and": [get_sample_id_filter(proband), absent_from_ancestors]}
def make_deleterious_compound_heterozygous_variants_filter(sample_ids_list=None):
    """Filter for compound-heterozygous variants with CADD phred >= 10.

    When *sample_ids_list* is given, the match is restricted to those samples.
    """
    clauses = [
        {"genotype_subclass_by_class.heterozygous": "compound"},
        {"cadd.phred": {"$gte": 10}},
    ]
    return _append_sample_id_constraint_if_needed(clauses, sample_ids_list)
def make_known_disease_variants_filter(sample_ids_list=None):
    """Filter for known disease variants: non-benign ClinVar entries or COSMIC hits.

    When *sample_ids_list* is given, the match is additionally restricted to
    those samples.
    """
    clinvar_pathogenic = {
        "$and": [
            {"clinvar.rcv.accession": {"$exists": True}},
            {"clinvar.rcv.clinical_significance": {"$nin": ["Benign", "Likely benign"]}},
        ]
    }
    in_cosmic = {"cosmic.cosmic_id": {"$exists": True}}
    result = {"$or": [clinvar_pathogenic, in_cosmic]}
    if sample_ids_list is not None:
        result = _append_sample_id_constraint_if_needed([result], sample_ids_list)
    return result
def make_rare_deleterious_variants_filter(sample_ids_list=None):
    """Filter for rare (AF < 5.1%), non-synonymous, coding, deleterious variants."""
    rare_or_absent_in_esp = {
        "$or": [
            {"cadd.esp.af": {"$lt": 0.051}},
            {"cadd.esp.af": {"$exists": False}},
        ]
    }
    in_coding_region = {
        "$or": [
            {"func_knowngene": "exonic"},
            {"func_knowngene": "splicing"},
        ]
    }
    clauses = [
        rare_or_absent_in_esp,
        in_coding_region,
        {"cadd.phred": {"$gte": 10}},
        {"exonicfunc_knowngene": {"$ne": "synonymous SNV"}},
        {"1000g2015aug_all": {"$lt": 0.051}},
    ]
    return _append_sample_id_constraint_if_needed(clauses, sample_ids_list)
def _append_sample_id_constraint_if_needed(and_list, sample_ids_list):
    """Wrap *and_list* in an ``$and`` clause, adding a sample-id constraint
    first when *sample_ids_list* is given.

    Note: mutates *and_list* in place when a constraint is added.
    """
    if sample_ids_list is None:
        return {"$and": and_list}
    and_list.append(get_any_of_sample_ids_filter(sample_ids_list))
    return {"$and": and_list}
| StarcoderdataPython |
6497579 | print('This is created from home!')
| StarcoderdataPython |
4934124 |
import os
from datetime import datetime
from whotracksme.website.utils import print_progress
from whotracksme.website.templates import render_template, get_template
def parse_blogpost(filepath):
    """Parse a blog post file with a '+++'-separated metadata header.

    The header consists of exactly eight `key: value` lines (title, subtitle,
    author, type, publish, date, tags, header) followed by '+++' and the body.

    Args:
        filepath: Path to the post file; the basename (minus '.md') becomes
            the post's "filename" entry.

    Returns:
        Dict with the parsed metadata, a bool "publish" flag, the tag list
        and the raw "body" text.
    """
    with open(filepath) as source:
        text = source.read()
    # Split on the FIRST '+++' only, so the body may itself contain '+++'
    # (the original unlimited split crashed on such posts).
    meta, body = text.split('+++', 1)
    title, subtitle, author, post_type, publish, date, tags, header, _ = meta.split("\n")

    def _value(line):
        # Split on the first ':' only, so values such as URLs or titles that
        # contain colons (e.g. the header image "http://...") survive intact.
        return line.split(":", 1)[1].strip()

    return {
        "filename": filepath.split("/")[-1].replace(".md", ""),
        "title": _value(title),
        "subtitle": _value(subtitle),
        "author": _value(author),
        "type": _value(post_type),
        "publish": _value(publish) == "True",
        "date": _value(date),
        "tags": tags.split(":", 1)[1].split(","),
        "header_img": _value(header),
        "body": body,
    }
def load_blog_posts(blog_dir="blog"):
    """Load and parse every blog post in *blog_dir*, newest first.

    Args:
        blog_dir: Directory containing the post files. Defaults to "blog",
            matching the previously hard-coded location, so existing callers
            are unaffected.

    Returns:
        List of parsed post dicts sorted by descending 'date' (Y-m-d).
    """
    blog_posts = [
        parse_blogpost(os.path.join(blog_dir, name))
        for name in os.listdir(blog_dir)
    ]
    blog_posts.sort(
        key=lambda post: datetime.strptime(post['date'], '%Y-%m-%d'),
        reverse=True,
    )
    return blog_posts
def build_blogpost_list(data, blog_posts):
    """Render the blog index page, listing only the published posts."""
    published = [post for post in blog_posts if post['publish']]
    template = get_template(data, "blog.html")
    with open('_site/blog.html', 'w') as output:
        output.write(render_template(template=template, blog_posts=published))
    print_progress(text="Generate blog list")
def build_blogpost_pages(data, blog_posts):
    """Render one HTML page per blog post under _site/blog/."""
    # One shared template; markdown rendering enabled because post bodies
    # are written in markdown.
    template = get_template(
        data,
        "blog-page.html",
        render_markdown=True,
        path_to_root='..',
    )
    for post in blog_posts:
        with open(f'_site/blog/{post.get("filename")}.html', 'w') as output:
            output.write(
                render_template(
                    path_to_root='..',
                    template=template,
                    blog_post=post,
                )
            )
    print_progress(text="Generate blog posts")
| StarcoderdataPython |
9694839 | <reponame>aloknnikhil/orion-server
import socket
import time
import statsd
class MetricsClient(object):
    """
    Abstractions over statsd metrics emissions.
    """

    def __init__(self, addr, prefix):
        """
        Create a client instance.

        :param addr: 'ip:port' of the statsd server; a falsy value disables
            emissions entirely by installing a no-op backend.
        :param prefix: String prefix for all emitted metrics.
        """
        self.hostname = socket.gethostname()
        if not addr:
            self.backend = NoopStatsdClient()
        else:
            host, port = addr.split(':')
            self.backend = statsd.StatsClient(host, int(port), prefix=prefix)

    @property
    def _default_tags(self):
        """
        Tags automatically included with every metrics emission.

        :return: Dictionary of default tags.
        """
        return {'host': self.hostname}

    @staticmethod
    def _format_metric(metric, tags, tag_delimiter='='):
        """
        Render a metric name with InfluxDB-style inline tags.

        :param metric: Metric name.
        :param tags: Dictionary of tags; an empty dict leaves the name as-is.
        :param tag_delimiter: Tag key-value delimiter; defaults to '=' for
            InfluxDB-style metrics. Use ':' for Datadog-style metrics.
        :return: Formatted metric name.
        """
        if not tags:
            return metric
        rendered = ','.join(
            '{}{}{}'.format(key, tag_delimiter, value)
            for key, value in tags.items()
        )
        return '{},{}'.format(metric, rendered)
class NoopStatsdClient(object):
    """
    Null object implementing the statsd client interface; every metrics
    emission is silently discarded.
    """

    def gauge(self, *args, **kwargs):
        """Discard a gauge emission."""

    def incr(self, *args, **kwargs):
        """Discard a counter emission."""

    def timing(self, *args, **kwargs):
        """Discard a timing emission."""
class EventMetricsClient(MetricsClient):
    """
    Metrics client that provides imperative APIs for emitting metrics when events occur.
    """

    def emit_event(self, metric, tags=None):
        """
        Emit a record of an event occurrence. Semantically, the value of this
        event is monotonically increasing.

        :param metric: Metric name.
        :param tags: Optional dictionary of additional tags to include.
        """
        # `None` default instead of the previous mutable `{}` default
        # argument (classic Python pitfall); behavior is unchanged.
        self.backend.incr(self._format_metric(
            metric='event.{}'.format(metric),
            tags=dict(self._default_tags, **(tags or {})),
        ))
class LatencyMetricsClient(MetricsClient):
    """
    Metrics client that provides APIs for measuring latency of operations.
    """

    def profile(self, metric, tags=None):
        """
        Create a context manager that emits a timing metric describing the
        latency of the wrapped operation.

        :param metric: Metric name.
        :param tags: Optional dictionary of additional tags to include.
        :return: Context manager measuring execution duration and emitting metrics.
        """
        # `None` default instead of the previous mutable `{}` default argument.
        # The tag dict is loop-invariant, so build it once up front.
        merged_tags = dict(self._default_tags, **(tags or {}))

        def emission_proxy(duration):
            # Invoked by ExecutionTimer with the body duration in milliseconds.
            self.backend.timing(
                stat=self._format_metric(
                    metric='latency.{}'.format(metric),
                    tags=merged_tags,
                ),
                delta=duration,
            )

        return ExecutionTimer(emission_proxy)
class ExecutionTimer(object):
    """
    Context manager for timing an execution duration.
    """

    def __init__(self, duration_cb):
        """
        Create a context manager instance.

        :param duration_cb: Callback invoked with the duration, in
            milliseconds, of the context manager body when it completes
            (also on exceptions, which still propagate).
        """
        self.duration_cb = duration_cb

    def __enter__(self):
        # perf_counter is monotonic, so the measured duration cannot go
        # negative when the wall clock is adjusted mid-measurement.
        self.start_ms = 1000.0 * time.perf_counter()
        # Return self so `with ExecutionTimer(cb) as timer:` binds the timer
        # (previously __enter__ returned None, making the `as` form useless).
        return self

    def __exit__(self, *args, **kwargs):
        end_ms = 1000.0 * time.perf_counter()
        self.duration_cb(end_ms - self.start_ms)
| StarcoderdataPython |
12838596 | import numpy as np
import matplotlib.pyplot as pl
# --- model constants (NOTE(review): values appear to be cgs units — confirm) ---
yearInSec = 365.0*24.0*3600.0
solarMassPerYear = 1.99e33 / yearInSec  # one solar mass per year expressed in g/s
RStar = 4e13
TStar = 2330.0
MStar = 0.8 * 1.99e33
R0 = 1.2 * RStar
Rc = 5 * RStar
Rw = 20.0 * RStar
vexp = 14.5 * 1e5
vturb = 1.0
MLoss = 2e-5 * solarMassPerYear
G = 6.67259e-8
k = 1.381e-16
mg = 2.3 * 1.6605402e-24
alpha = 0.55
nStar = 1.8e14
gamma = 0.89
pc = 3.0857e18
# NOTE(review): vturb, MLoss, G, k, mg, alpha, nStar, gamma and pc (and the
# matplotlib import) are never used below — possibly leftovers from a larger
# script.
# Load the radial input model; columns used here: 0=r, 1=n(H2), 2=Tk, 4=v,
# 5=Tdust, 11=SO abundance.
inputModel = np.loadtxt('rpfit_iktau.dat', skiprows=3)
n = inputModel.shape[0]
B = np.ones(n) * 1.0  # uniform field value of 1 at every shell ("1G" in the filenames)
r = inputModel[:,0]
nH2 = inputModel[:,1]
SOAbundance = inputModel[:,11]
Tk = inputModel[:,2]
TDust = inputModel[:,5]
v = inputModel[:,4]
# Write the expanding-envelope atmosphere file.
f = open('model1G.atmos', 'w')
f.write("r [cm] n[cm^-3] A(mol) Tk [K] Tdust[K] v[km/s] B[G]\n")
f.write("{0}\n".format(n))
for i in range(n):
    f.write("{0:10.3e} {1:10.3e} {2:10.3e} {3:10.3f} {4:10.3f} {5:10.3f} {6:10.3f}\n".format(r[i], nH2[i], SOAbundance[i], Tk[i], TDust[i], v[i], B[i]))
f.close()
# Same model but with the gas at rest: every velocity forced to zero.
v = inputModel[:,4] * 0.0
f = open('model1G_rest.atmos', 'w')
f.write("r [cm] n[cm^-3] A(mol) Tk [K] Tdust[K] v[km/s] B[G]\n")
f.write("{0}\n".format(n))
for i in range(n):
    f.write("{0:10.3e} {1:10.3e} {2:10.3e} {3:10.3f} {4:10.3f} {5:10.3f} {6:10.3f}\n".format(r[i], nH2[i], SOAbundance[i], Tk[i], TDust[i], v[i], B[i]))
f.close()
9718500 | """test query operations"""
import pytest
from aiodb import Model, Field
from aiodb.model.query import QueryTable
from aiodb.model.query import _find_foreign_key_reference
from aiodb.model.query import _find_primary_key_reference
from aiodb.model.query import _pair
class A(Model): # pylint: disable=invalid-name
    """test model with an explicit table name and a primary key"""
    TABLENAME = "yikes"
    id = Field(is_primary=True)
class B(Model): # pylint: disable=invalid-name
    """test model with foreign keys to both A and C"""
    TABLENAME = "yeah"
    id = Field(is_primary=True)
    # foreign keys are expressed as dotted import paths into this test module
    a_id = Field(foreign='tests.test_query.A')
    c_id = Field(foreign='tests.test_query.C')
class C(Model): # pylint: disable=invalid-name
    """test model without TABLENAME: the implicit table name ('c') is exercised below"""
    id = Field(is_primary=True)
    a_id = Field(foreign='tests.test_query.A')
class D(Model): # pylint: disable=invalid-name
    """test model with raw-SQL expression fields"""
    TABLENAME = "d"
    a = Field()
    # expression fields render verbatim SQL; {Q} appears to expand to the
    # quote character passed to _prepare (see test_expression below)
    b = Field(expression='NOW()')
    c = Field(expression='FN({Q}z{Q})')
def test_expression():
    """Field expressions must be rendered verbatim into the SELECT clause."""
    expected = (
        "SELECT 'd'.'a' AS 0_a, NOW() AS 0_b, FN('z') AS 0_c FROM 'd' AS 'd'"
    )
    actual = D.query._prepare(  # pylint: disable=protected-access
        False, None, None, None, "'")
    assert actual == expected
def test_table_name():
    """The {TABLE.<name>} placeholder must resolve to the model's alias."""
    query = A.query.where('{TABLE.A}.id=10')
    statement = query._prepare(  # pylint: disable=protected-access
        False, None, None, None, "'")
    expected = (
        "SELECT 'a'.'id' AS 0_id FROM 'yikes' AS 'a'"
        " WHERE 'a'.id=10"
    )
    assert statement == expected
def test_table_names():
    """Placeholders must resolve for the base table and an aliased join."""
    query = A.query.join(B, alias='FOO').where('{TABLE.A}.id={TABLE.FOO}.a')
    statement = query._prepare(  # pylint: disable=protected-access
        False, None, None, None, "'")
    expected = (
        "SELECT 'a'.'id' AS 0_id,"
        " 'FOO'.'id' AS 1_id,"
        " 'FOO'.'a_id' AS 1_a_id,"
        " 'FOO'.'c_id' AS 1_c_id"
        " FROM 'yikes' AS 'a' JOIN 'yeah' AS 'FOO'"
        " ON 'FOO'.'a_id' = 'a'.'id' WHERE 'a'.id='FOO'.a"
    )
    assert statement == expected
def test_duplicate():
    """Joining the same table twice is rejected unless an alias is supplied."""
    with pytest.raises(ValueError) as excinfo:
        A.query.join(C).join(C)
    assert excinfo.value.args[0] == "duplicate table 'c'"
    # An explicit alias disambiguates the second join.
    A.query.join(C).join(C, alias='CC')
@pytest.mark.parametrize(
    'table,tables,is_none,rtable,rfield', (
        (B, (A,), False, A, 'a_id'),
        (A, (B,), True, 0, 0),
    ),
)
def test_find_foreign(table, tables, is_none, rtable, rfield):
    """Verify lookup of a foreign key on *table* referencing one of *tables*."""
    result = _find_foreign_key_reference(table, tables)
    if is_none:
        # rtable/rfield are unused placeholders (0) in this case.
        assert result is None
    else:
        table, field = result
        assert table == rtable
        assert field == rfield
def test_find_foreign_multiple():
    """A join that matches more than one foreign key is ambiguous."""
    with pytest.raises(TypeError) as excinfo:
        _find_foreign_key_reference(B, (A, C))
    assert excinfo.value.args[0] == "'B' has multiple foreign keys that match"
def test_find_primary():
    """A's primary key is referenced by B's foreign key, but not vice versa."""
    matched_table, matched_field = _find_primary_key_reference(A, (B,))
    assert matched_table == B
    assert matched_field == 'a_id'
    # B's primary key is not referenced by any field of A.
    assert _find_primary_key_reference(B, (A,)) is None
def test_find_primary_multiple():
    """test multiple primary key matches"""
    # A's primary key is referenced by both B and C, which is ambiguous.
    with pytest.raises(TypeError):
        _find_primary_key_reference(A, (B, C))
@pytest.mark.parametrize(
    'tab1,tab2,limit,match_col1,match_tab,match_col2', (
        (A, B, None, 'a_id', 'a', 'id'),
        (B, A, None, 'id', 'b', 'a_id'),
        ((A, B), C, None, 'a_id', 'a', 'id'),
        ((A, C), B, 'a', 'a_id', 'a', 'id'),
        ((A, C), B, A, 'a_id', 'a', 'id'),
    ) # pylint: disable=too-many-arguments
)
def test_pair(tab1, tab2, limit, match_col1, match_tab, match_col2):
    """Verify _pair resolves the join columns between *tab2* and the query tables."""
    # Normalise the single-table case so both forms share one code path.
    if not isinstance(tab1, tuple):
        tab1 = (tab1,)
    query = [QueryTable(tab) for tab in tab1]
    # _pair returns (column on tab2, matched table name, column on that table);
    # `limit` may be a table name or a model class restricting the match.
    column1, table2, column2 = _pair(tab2, query, limit)
    assert column1 == match_col1
    assert table2 == match_tab
    assert column2 == match_col2
8012900 | <reponame>MuhammadAlzamily/MyKivyApp<filename>pregnancy-app/pregnancy_app.py
from kivymd.app import MDApp
from kivymd.uix.screen import Screen
from kivy.lang.builder import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
from kivymd.uix.label import MDLabel
from kivymd.uix.button import MDFillRoundFlatButton, MDIconButton, MDFlatButton
from kivymd.uix.toolbar import MDToolbar
from kivy.core.window import Window
from kivymd.uix.dialog import MDDialog
Window.size = (300, 500)
app = """
ScreenManager:
StartScreen:
GenderScreen:
TestScreen:
<StartScreen>:
name:"start"
BoxLayout:
orientation:"vertical"
MDToolbar:
title:"The Pregnancy App"
elevation:10
MDLabel:
text:"Click to get started"
halign:"center"
MDFillRoundFlatButton:
text:"Get Started"
pos_hint:{'center_x':0.5,'center_y':0.3}
on_press:root.manager.current="genderscreen"
MDIconButton:
icon:"discord"
pos_hint:{'center_x':0.5,'center_y':0.1}
user_font_size:"48sp"
<GenderScreen>:
name:"genderscreen"
BoxLayout:
orientation:"vertical"
MDToolbar:
title:"Select A Gender"
MDLabel:
text:"Firstly, pick a gender"
halign:"center"
MDFillRoundFlatButton:
text:"Male"
pos_hint:{'center_x':0.2,'center_y':0.3}
on_press:root.manager.current="testscreen"
MDFillRoundFlatButton:
text:"Female"
pos_hint:{'center_x':0.8,'center_y':0.3}
on_press:root.manager.current="testscreen"
MDFillRoundFlatButton:
text:"Back"
pos_hint:{'center_x':0.5,'center_y':0.1}
on_press:root.manager.current="start"
<TestScreen>:
name:"testscreen"
BoxLayout:
orientation:"vertical"
MDToolbar:
title:"Test Results"
MDLabel:
text:"Piss on the screen then press the button below"
halign:"center"
MDFillRoundFlatButton:
text:"Test"
pos_hint:{'center_x':0.5,'center_y':0.3}
on_press:app.show_results()
MDFillRoundFlatButton:
text:"Back"
pos_hint:{'center_x':0.5,'center_y':0.1}
on_press:root.manager.current="genderscreen"
"""
class StartScreen(Screen):
    """Landing screen ("start") with the Get Started button."""
    pass
class GenderScreen(Screen):
    """Second screen ("genderscreen") where a gender is picked."""
    pass
class TestScreen(Screen):
    """Final screen ("testscreen") showing the joke test results."""
    pass
# NOTE(review): this ScreenManager is built but never attached to the app —
# mainApp.build() creates its own widget tree from the KV string — so these
# four lines look like dead code; confirm before removing.
ss = ScreenManager()
ss.add_widget(StartScreen(name="start"))
ss.add_widget(GenderScreen(name="genderscreen"))
ss.add_widget(TestScreen(name="testscreen"))
class mainApp(MDApp):
    """KivyMD application; the UI comes entirely from the `app` KV string."""
    def build(self):
        """Build the root widget: a Screen wrapping the KV-defined tree."""
        self.theme_cls.primary_palette = "Pink"
        # `Screen` here is kivy.uix.screenmanager.Screen — the second import
        # at the top of the file shadows kivymd's Screen.
        screen = Screen()
        myapp = Builder.load_string(app)
        screen.add_widget(myapp)
        return screen
    def go_back_to_start(self):
        # NOTE(review): unused stub — nothing in the KV string calls it.
        pass
    def go_back_to_gender(self):
        # NOTE(review): unused stub — nothing in the KV string calls it.
        pass
    def show_results(self):
        """Open the joke results dialog (bound to the Test button in KV)."""
        self.msg = MDDialog(title="Test Results",
                            text="You're not ready to be a parent since you just pissed on a phone screen to find out if you're going to be one", size_hint=(0.8, 1), buttons=[MDFlatButton(text="Close", on_release=self.close_dialog)])
        self.msg.open()
    def close_dialog(self, obj):
        """Dismiss the results dialog (Close-button callback)."""
        self.msg.dismiss()
mainApp().run()
| StarcoderdataPython |
4936626 | # Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for robot_wrist_ft_sensor."""
from absl.testing import absltest
from dm_robotics.moma.models.end_effectors.wrist_sensors import robotiq_fts300
from dm_robotics.moma.sensors import robot_wrist_ft_sensor
from dm_robotics.moma.sensors import wrench_observations
class RobotWristFTSensorTest(absltest.TestCase):
    """Unit tests for robot_wrist_ft_sensor.RobotWristFTSensor."""
    def test_sensor_has_all_observables(self):
        """Every wrench observation enum member must map to a sensor observable."""
        wrist_ft_sensor = robotiq_fts300.RobotiqFTS300()
        sensor = robot_wrist_ft_sensor.RobotWristFTSensor(
            wrist_ft_sensor=wrist_ft_sensor, name='ft_sensor')
        # get_obs_key translates an Observations member into the key under
        # which the sensor registers the corresponding observable.
        for obs in wrench_observations.Observations:
            self.assertIn(sensor.get_obs_key(obs), sensor.observables)
if __name__ == '__main__':
    absltest.main()
| StarcoderdataPython |
6595754 | #!/usr/bin/env python3
import math
# Move the triple quotes downward to uncover each segment of code.
# (The whole tutorial is parked inside one big string literal so that it
# stays inert until a segment is exposed above the opening quotes.)
"""
# The 'for' loop is one of the most common loop constructs you will use
# Note the indentation of the print(i) statement
# Code 'inside' loops must be indented
# Note that Python starts counting from 0 not 1
for i in range(3):
    print(i)
print('-')
# The above is really a shortcut for the following code
for i in range(0, 3):
    print(i)
print('--')
# But the second construct allows you to set where the loop starts
for i in range(1, 3):
    print(i)
print('---')
# And also the steps between iterations
for i in range(1, 10, 3):
    print(i)
print('----')
# You can iterate over the characters of a string
s = 'ACGT'
for c in s:
    print(c)
print('-----')
# An alternate way to do the same thing
# It's absolutely critical you understand this code!
for i in range(len(s)):
    print(i, s[i])
print('------')
# Everything that is tabbed-over is within the same loop
# Try removing the tab in front of i += 1 below
i = 0
for c in s:
    print(i, c)
    i += 1
print('-------')
# The real power begins with loops inside of loops
for i in range(0, 4):
    for j in range(i, 4):
        print(i, j)
print('--------')
# Just some fun with loops and math!
precision = 10
e = 0
for n in range(0, precision):
    e += 1/math.factorial(n)
print(e, math.e - e)
"""
| StarcoderdataPython |
1831935 | from django.contrib.sitemaps import Sitemap
from blog.models import Post
from django.urls import reverse
class PostSitemap(Sitemap):
    """Sitemap section covering every blog post."""
    changefreq = "weekly"
    priority = 0.9
    def items(self):
        # One sitemap URL per post.
        return Post.objects.all()
    def lastmod(self, obj):
        # Last-modified is the original publication timestamp.
        return obj.date_posted
# location() omitted: Django falls back to each object's get_absolute_url().
class RoadmapSitemap(Sitemap):
    """Sitemap section for the single static roadmap page."""
    changefreq = "daily"
    priority = 0.7
    def items(self):
        # A single named URL pattern rather than a queryset.
        return ["blog-roadmap"]
    def location(self, item):
        # Resolve the URL name to its path.
        return reverse(item)
| StarcoderdataPython |
6552880 | #! /bin/env python3
# -*- coding: utf-8 -*-
################################################################################
#
# This file is part of PYJUNK.
#
# Copyright © 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the “Software”),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# You should have received a copy of the MIT License
# along with PYJUNK. If not, see <https://mit-license.org/>.
#
################################################################################
"""
Developp.py rassemble la définition des classes:
Developp2D
Developp(Developp2D)
"""
from __future__ import annotations
import sys
import pathlib
import math
from datetime import datetime
import Direction as di
#----- exit status constants used throughout the module
NORMAL_TERMINATION = 0
ABNORMAL_TERMINATION = 1
#----- colour table (French colour name -> pen/colour index) for the plots
# NOTE(review): index 7 is unassigned here — presumably reserved (cyan?);
# confirm against the plotting backend.
couleur = {
    "blanc": 0,
    "rouge": 1,
    "jaune": 2,
    "vert": 3,
    "magenta": 4,
    "bleu": 5,
    "violet": 6,
    "gris": 8
}
#----- Classe représentant le modèle pour le calcul du développé
class Developp2D:
"""
Classe Developp2D
=================
La classe Developp2D calcule et stocke la représentation du développé, 2D par définition
:datas:
self.dictDevelopp2D: dict
self.numPanneau: int
self.lendroit2DMil: list
self.lendroit2DHaut: list
self.lendroit2DBas: list
self.lendroit2DHautChainette: list
self.lendroit2DBasChainette: list
self.lendroit2DHautCouture: list
self.endroit2DMil: Endroit2D
self.endroit2DHaut: Endroit2D
self.endroit2DBas: Endroit2D
:Example:
>>> a = Developp2D({"numPanneau": 0})
>>> print(a)
--> Developp2D :
<BLANKLINE>
.. seealso::
.. warning::
.. note::
.. todo::
"""
#-----
def __init__(self, dictDevelopp2D: dict) -> None:
self.dictDevelopp2D = dictDevelopp2D
if "numPanneau" in self.dictDevelopp2D and isinstance(self.dictDevelopp2D["numPanneau"], int):
self.numPanneau = self.dictDevelopp2D["numPanneau"]
else:
print(f'< !!!! > dictionnaire incorrect pour dictDevelopp2D')
print(f'program aborted')
sys.exit(ABNORMAL_TERMINATION)
# les listes de points 2D qui seront placés dans le dxf
self.lendroit2DMil = []
self.lendroit2DHaut = []
self.lendroit2DBas = []
self.lendroit2DHautChainette = []
self.lendroit2DBasChainette = []
self.lendroit2DHautCouture = []
# les points 2D précédents
self.endroit2DMil = None
self.endroit2DHaut = None
self.endroit2DBas = None
#-----
@staticmethod
def calc(dictCalc: dict) -> tuple:
"""
soit 2 cercles (x-a)²+(y-b)²=r0² et (x-c)²+(y-d)²=r1², on cherche les points d'intersection
la Distance entre les centres est D = sqrt[(c-a)²+(d-b)²]
la condition pour qu'il y ait une intersection :
D < r0+r1 et D > abs(r0-r1)
les solutions sont données par :
avec δ = 1/4*sqrt((D+r0+r1)(D+r0-r1)(D-r0+r1)(-D+r0+r1))
x1,2 = (a+c)/2 + (c-a)(r0²-r1²)/(2D²) +- 2δ(b-d)/D²
y1,2 = (b+d)/2 + (d-b)(r0²-r1²)/(2D²) -+ 2δ(a-c)/D²
"""
a = dictCalc["c0"]["x"]
b = dictCalc["c0"]["y"]
c = dictCalc["c1"]["x"]
d = dictCalc["c1"]["y"]
r0 = dictCalc["r0"]
r1 = dictCalc["r1"]
dD = math.hypot((c-a), (d-b))
if not (dD < (r0+r1) and dD > math.fabs(r0-r1)):
print(f'pas de solutions')
print(f'a -> {a} b -> {b} c -> {c} d -> {d} r0 -> {r0} r1 -> {r1}')
print(f' --> Arrêt du programme')
sys.exit(ABNORMAL_TERMINATION)
part1X = (a+c)/2.
part1Y = (b+d)/2.
part2 = (r0*r0-r1*r1)/(2.*dD*dD)
part2X = (c-a)*part2
part2Y = (d-b)*part2
delta = math.sqrt((dD+r0+r1)*(dD+r0-r1)*(dD-r0+r1)*(-dD+r0+r1))/(2.*dD*dD)
deltaX = (b-d)*delta
deltaY = (a-c)*delta
x = part1X + part2X
x1 = x + deltaX
x2 = x - deltaX
if x1 > x2:
return (x1, part1Y + part2Y - deltaY)
return (x2, part1Y + part2Y + deltaY)
#-----
    @staticmethod
    def couture(dictCouture: dict) -> tuple:
        """Compute the seam-allowance points along the top edge of the layout.

        From two consecutive chainette points (hence a straight segment),
        two offset points are derived, shifted by ``fCouture`` and forming
        an interior angle ``angleR`` with the segment.

        :param dictCouture: must contain ``fCouture`` (float seam width) and
            the segment endpoints ``endroitDeb`` / ``endroitFin``
            (di.Endroit2D). Any malformed entry aborts the program.
        :return: (x_deb, y_deb, x_fin, y_fin) of the two seam points.
        """
        if "fCouture" in dictCouture and isinstance(dictCouture["fCouture"], float):
            fCouture = dictCouture["fCouture"]
        else:
            print(f'< !!!! > dictionnaire incorrect pour dictCouture')
            print(f'program aborted')
            sys.exit(ABNORMAL_TERMINATION)
        # interior seam angle between the offset edge and the chainette
        # segment (the author warns against 90°, where tan() misbehaves)
        angleR = math.radians(60.) # don't try 90°
        if "endroitDeb" in dictCouture and isinstance(dictCouture["endroitDeb"], di.Endroit2D):
            endroitDeb = dictCouture["endroitDeb"]
        else:
            print(f'< !!!! > dictionnaire incorrect pour dictCouture')
            print(f'program aborted')
            sys.exit(ABNORMAL_TERMINATION)
        if "endroitFin" in dictCouture and isinstance(dictCouture["endroitFin"], di.Endroit2D):
            endroitFin = dictCouture["endroitFin"]
        else:
            print(f'< !!!! > dictionnaire incorrect pour dictCouture')
            print(f'program aborted')
            sys.exit(ABNORMAL_TERMINATION)
        # Start point: rotate the local (dx, +fCouture) offset by the
        # segment's direction angle.
        angleChainette = di.Direction2D(endroitFin - endroitDeb).angle2D()
        direction2DDeb = di.Direction2D({"vect2D": {"x": fCouture / math.tan(angleR) , "y": fCouture}})
        endroit2DCoutureDeb = endroitDeb + di.Direction2D(direction2DDeb.rot2d(angleChainette))
        # End point: same construction with the segment direction reversed
        # and the perpendicular offset mirrored.
        angleChainette = di.Direction2D(endroitDeb - endroitFin).angle2D()
        direction2DFin = di.Direction2D({"vect2D": {"x": fCouture / math.tan(angleR) , "y": -fCouture}})
        endroit2DCoutureFin = endroitFin + di.Direction2D(direction2DFin.rot2d(angleChainette))
        return (endroit2DCoutureDeb["point2D"]["x"], endroit2DCoutureDeb["point2D"]["y"], \
                endroit2DCoutureFin["point2D"]["x"], endroit2DCoutureFin["point2D"]["y"] \
                )
#-----
def comp(self, dictDevelopp2D: dict) -> None:
"""
Dans l'espace 2D le calcul a
"""
if dictDevelopp2D["index"] == 0:
endroit2DMil = di.Endroit2D({"point2D": {"x": 0., "y": 0.}})
self.lendroit2DMil.append(endroit2DMil)
fdist3DMilHaut = dictDevelopp2D["fdist3DMilHaut"]
endroit2DHaut = di.Endroit2D({"point2D": {"x": 0., "y": fdist3DMilHaut}})
self.lendroit2DHaut.append(endroit2DHaut)
fdist3DMilBas = dictDevelopp2D["fdist3DMilBas"]
endroit2DBas = di.Endroit2D({"point2D": {"x": 0., "y": -fdist3DMilBas}})
self.lendroit2DBas.append(endroit2DBas)
fdist3DMilHautChainette = dictDevelopp2D["fdist3DMilHautChainette"]
endroit2DHautChainette = di.Endroit2D({"point2D": {"x": 0., "y": fdist3DMilHautChainette}})
self.lendroit2DHautChainette.append(endroit2DHautChainette)
fdist3DMilBasChainette = dictDevelopp2D["fdist3DMilBasChainette"]
endroit2DBasChainette = di.Endroit2D({"point2D": {"x": 0., "y": -fdist3DMilBasChainette}})
self.lendroit2DBasChainette.append(endroit2DBasChainette)
self.lendroit2DHautCouture.append(endroit2DHautChainette)
else:
dictCalc = {}
dictCalc['c0'] = self.endroit2DMil.p2ddict.getDict()
dictCalc["r0"] = dictDevelopp2D["fdist3DMilMil"]
dictCalc['c1'] = self.endroit2DHaut.p2ddict.getDict()
dictCalc["r1"] = dictDevelopp2D["fdist3DHautMil"]
(x, y) = Developp2D.calc(dictCalc=dictCalc)
endroit2DMil = di.Endroit2D({"point2D": {"x": x, "y": y}})
self.lendroit2DMil.append(endroit2DMil)
dictCalc['c0'] = self.endroit2DMil.p2ddict.getDict()
dictCalc["r0"] = dictDevelopp2D["fdist3DMilHaut"]
dictCalc['c1'] = self.endroit2DHaut.p2ddict.getDict()
dictCalc["r1"] = dictDevelopp2D["fdist3DHautHaut"]
(x, y) = Developp2D.calc(dictCalc=dictCalc)
endroit2DHaut = di.Endroit2D({"point2D": {"x": x, "y": y}})
self.lendroit2DHaut.append(endroit2DHaut)
dictCalc['c0'] = self.endroit2DMil.p2ddict.getDict()
dictCalc["r0"] = dictDevelopp2D["fdist3DMilBas"]
dictCalc['c1'] = self.endroit2DBas.p2ddict.getDict()
dictCalc["r1"] = dictDevelopp2D["fdist3DBasBas"]
(x, y) = Developp2D.calc(dictCalc=dictCalc)
endroit2DBas = di.Endroit2D({"point2D": {"x": x, "y": y}})
self.lendroit2DBas.append(endroit2DBas)
dictCalc['c0'] = self.endroit2DMil.p2ddict.getDict()
dictCalc["r0"] = dictDevelopp2D["fdist3DMilHautChainette"]
dictCalc['c1'] = self.endroit2DHaut.p2ddict.getDict()
dictCalc["r1"] = dictDevelopp2D["fdist3DHautHautChainette"]
(x, y) = Developp2D.calc(dictCalc=dictCalc)
endroit2DHautChainette = di.Endroit2D({"point2D": {"x": x, "y": y}})
self.lendroit2DHautChainette.append(endroit2DHautChainette)
dictCalc['c0'] = self.endroit2DMil.p2ddict.getDict()
dictCalc["r0"] = dictDevelopp2D["fdist3DMilBasChainette"]
dictCalc['c1'] = self.endroit2DBas.p2ddict.getDict()
dictCalc["r1"] = dictDevelopp2D["fdist3DBasBasChainette"]
(x, y) = Developp2D.calc(dictCalc=dictCalc)
endroit2DBasChainette = di.Endroit2D({"point2D": {"x": x, "y": y}})
self.lendroit2DBasChainette.append(endroit2DBasChainette)
dictCouture = {}
dictCouture["endroitDeb"] = self.lendroit2DHautChainette[-2]
dictCouture["endroitFin"] = self.lendroit2DHautChainette[-1]
dictCouture["fCouture"] = dictDevelopp2D["fCouture"]
(x1, y1, x2, y2) = Developp2D.couture(dictCouture=dictCouture)
endroit2DHautCouture = di.Endroit2D({"point2D": {"x": x1, "y": y1}})
self.lendroit2DHautCouture.append(endroit2DHautCouture)
endroit2DHautCouture = di.Endroit2D({"point2D": {"x": x2, "y": y2}})
self.lendroit2DHautCouture.append(endroit2DHautCouture)
#self.lendroit2DHautCouture.append(self.lendroit2DHautChainette[-1])
self.endroit2DMil = self.lendroit2DMil[-1]
self.endroit2DHaut = self.lendroit2DHaut[-1]
self.endroit2DBas = self.lendroit2DBas[-1]
#-----
def horiz(self) -> None:
"""
tout les points du panneau sont tournés pour être mis
à "l'horizontale" définie par l'axe du millieu du panneau
"""
alpha = di.Direction2D(self.lendroit2DMil[-1] - self.lendroit2DMil[0]).angle2D()
lendroit2DMil = []
lendroit2DHaut = []
lendroit2DBas = []
lendroit2DHautChainette = []
lendroit2DBasChainette = []
lendroit2DHautCouture = []
for i in self.lendroit2DMil:
lendroit2DMil.append(i.rot2d(fAth=-alpha))
for i in self.lendroit2DHaut:
lendroit2DHaut.append(i.rot2d(fAth=-alpha))
for i in self.lendroit2DBas:
lendroit2DBas.append(i.rot2d(fAth=-alpha))
for i in self.lendroit2DHautChainette:
lendroit2DHautChainette.append(i.rot2d(fAth=-alpha))
for i in self.lendroit2DBasChainette:
lendroit2DBasChainette.append(i.rot2d(fAth=-alpha))
for i in self.lendroit2DHautCouture:
lendroit2DHautCouture.append(i.rot2d(fAth=-alpha))
self.lendroit2DMil = lendroit2DMil
self.lendroit2DHaut = lendroit2DHaut
self.lendroit2DBas = lendroit2DBas
self.lendroit2DHautChainette = lendroit2DHautChainette
self.lendroit2DBasChainette = lendroit2DBasChainette
self.lendroit2DHautCouture = lendroit2DHautCouture
#-----
def createDxf(self, block) -> None:
"""
la mise en place du dxf
"""
# la ligne millieu en pointillé
polyLineMil = block.add_lwpolyline([], dxfattribs={'color': couleur["jaune"], 'linetype': 'DOT2'})
for i in self.lendroit2DMil:
polyLineMil.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# la ligne du haut en pointillé
polyLineHaut = block.add_lwpolyline([], dxfattribs={'color': couleur["jaune"], 'linetype': 'DOT2'})
for i in self.lendroit2DHaut:
polyLineHaut.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# la ligne du haut de chainette en plein
polyLineHautChainette = block.add_lwpolyline([], dxfattribs={'color': couleur["bleu"]})
for i in self.lendroit2DHautChainette:
polyLineHautChainette.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# la ligne du bas en pointillé
polyLineBas = block.add_lwpolyline([], dxfattribs={'color': couleur["jaune"], 'linetype': 'DOT2'})
for i in self.lendroit2DBas:
polyLineBas.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# la ligne du bas de chainette en plein
polyLineBasChainette = block.add_lwpolyline([], dxfattribs={'color': couleur["bleu"]})
for i in self.lendroit2DBasChainette:
polyLineBasChainette.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# la ligne de la couture en plein
polyLineHautCouture = block.add_lwpolyline([], dxfattribs={'color': couleur["bleu"]})
for i in self.lendroit2DHautCouture:
polyLineHautCouture.append_points(points=[(i["point2D"]["x"], \
i["point2D"]["y"])], \
format='xy')
# les lignes de section (la première et la dernière sont différentes)
for i in range(len(self.lendroit2DBasChainette)):
if i == 0 or i == len(self.lendroit2DBasChainette)-1:
polyLineSection = block.add_lwpolyline([], dxfattribs={'color': couleur["bleu"]})
else:
polyLineSection = block.add_lwpolyline([], dxfattribs={'color': couleur["rouge"], 'lineweight': 20})
polyLineSection.append_points(points=[(self.lendroit2DBasChainette[i]["point2D"]["x"], \
self.lendroit2DBasChainette[i]["point2D"]["y"])], \
format='xy')
polyLineSection.append_points(points=[(self.lendroit2DHautChainette[i]["point2D"]["x"], \
self.lendroit2DHautChainette[i]["point2D"]["y"])], \
format='xy')
# une inscription du numéro de panneau
endroit2DDeb = di.Endroit2D(self.lendroit2DHaut[0])
endroit2DFin = di.Endroit2D(self.lendroit2DHaut[-1])
intHautText = di.Endroit2D(endroit2DDeb.lin2d(k=0.97, endroit2D=endroit2DFin))
endroit2DDeb = di.Endroit2D(self.lendroit2DBas[0])
endroit2DFin = di.Endroit2D(self.lendroit2DBas[-1])
intBasText = di.Endroit2D(endroit2DDeb.lin2d(k=0.97, endroit2D=endroit2DFin))
debText = intHautText.lin2d(k=0.55, endroit2D=intBasText)
finText = intHautText.lin2d(k=0.45, endroit2D=intBasText)
panneauNum = f'<-- bas Panneau numéro : {self.numPanneau} (chute) haut -->'
block.add_text(panneauNum, \
dxfattribs={'style': 'OpenSansCondensed-Bold'} \
).set_pos([debText["point2D"]["x"], debText["point2D"]["y"]], \
[finText["point2D"]["x"], finText["point2D"]["y"]], \
align='ALIGNED')
# une inscription sur la chute
endroit2DDeb = di.Endroit2D(self.lendroit2DMil[0])
endroit2DFin = di.Endroit2D(self.lendroit2DMil[-1])
debText = endroit2DDeb.lin2d(k=0.10, endroit2D=endroit2DFin)
finText = endroit2DDeb.lin2d(k=0.15, endroit2D=endroit2DFin)
copyRight = f'Créé par Pyjunk le {datetime.utcnow():%c} UTC±00:00'
block.add_text(copyRight, \
dxfattribs={'style': 'OpenSansCondensed-Bold'} \
).set_pos([debText["point2D"]["x"], debText["point2D"]["y"]], \
[finText["point2D"]["x"], finText["point2D"]["y"]], \
align='ALIGNED')
#-----
    def __str__(self) -> str:
        """Return a short human readable description of the instance."""
        strMsg = f'--> Developp2D :\n'
        return strMsg
#----- Classe représentant le développé d'un panneau
class Developp(Developp2D):
    """
    Classe Developp
    ===============
    3D part of the flattening (développé) computation; the inherited
    Developp2D class performs the 2D part.
    :datas:
        self.dictDevelopp: dict
        self.endroit3DMil: Endroit3D
        self.endroit3DHaut: Endroit3D
        self.endroit3DBas: Endroit3D
    :Example:
    >>> a = Developp({"numPanneau": 0})
    >>> print(a)
    --> Developp :
    <BLANKLINE>
    .. seealso::
    .. warning::
    .. note::
    .. todo::
    """
    #-----
    def __init__(self, dictDevelopp: dict) -> None:
        self.dictDevelopp = dictDevelopp
        # 3D points of the previous section (none yet at construction time)
        self.endroit3DMil = None
        self.endroit3DHaut = None
        self.endroit3DBas = None
        # the 2D side of the computation is initialized by the base class
        super().__init__(dictDevelopp2D=self.dictDevelopp)
    #-----
    def comp(self, dictDevelopp: dict) -> None:
        """
        Compute one section of the flattened panel.

        Strategy: in 3D space, measure the distances between the wanted point
        and two reference points; report those distances in 2D from two known
        2D points to locate the point on the flattened panel (the 2D part is
        done by the inherited Developp2D.comp).
        """
        def _require(key, expectedType):
            # Abort the whole program when a mandatory entry is missing or
            # has the wrong type (same policy as the rest of the module).
            if key in dictDevelopp and isinstance(dictDevelopp[key], expectedType):
                return dictDevelopp[key]
            print('< !!!! > dictionnaire incorrect pour dictDevelopp')
            print('program aborted')
            sys.exit(ABNORMAL_TERMINATION)
        endroit3DBas = di.Endroit3D(_require("dictBas", dict))
        endroit3DHaut = di.Endroit3D(_require("dictHaut", dict))
        endroit3DMil = di.Endroit3D(_require("dictMil", dict))
        frac = _require("frac", float)
        index = _require("index", int)
        fCouture = _require("fCouture", float)
        # build the dictionary consumed by the 2D computation
        dictDevelopp2D = {"index": index, "fCouture": fCouture}
        if index == 0:
            # first section: mil is (0, 0) by definition, only the distances
            # from mil to haut, bas and the two chainette points are needed
            dictDevelopp2D["fdist3DMilHaut"] = endroit3DMil.dist3d(endroit3DHaut)
            dictDevelopp2D["fdist3DMilBas"] = endroit3DMil.dist3d(endroit3DBas)
            endroit3DHautChainette = di.Endroit3D(endroit3DMil.lin3d(k=frac, endroit3D=endroit3DHaut))
            dictDevelopp2D["fdist3DMilHautChainette"] = endroit3DMil.dist3d(endroit3DHautChainette)
            endroit3DBasChainette = di.Endroit3D(endroit3DMil.lin3d(k=frac, endroit3D=endroit3DBas))
            dictDevelopp2D["fdist3DMilBasChainette"] = endroit3DMil.dist3d(endroit3DBasChainette)
        else:
            # following sections: distances from the previous section's points
            # (mil, haut, bas) to the new mil, haut, bas and chainette points
            dictDevelopp2D["fdist3DMilMil"] = self.endroit3DMil.dist3d(endroit3DMil)
            dictDevelopp2D["fdist3DHautMil"] = self.endroit3DHaut.dist3d(endroit3DMil)
            dictDevelopp2D["fdist3DMilHaut"] = self.endroit3DMil.dist3d(endroit3DHaut)
            dictDevelopp2D["fdist3DHautHaut"] = self.endroit3DHaut.dist3d(endroit3DHaut)
            dictDevelopp2D["fdist3DMilBas"] = self.endroit3DMil.dist3d(endroit3DBas)
            dictDevelopp2D["fdist3DBasBas"] = self.endroit3DBas.dist3d(endroit3DBas)
            endroit3DHautChainette = di.Endroit3D(endroit3DMil.lin3d(k=frac, endroit3D=endroit3DHaut))
            dictDevelopp2D["fdist3DMilHautChainette"] = self.endroit3DMil.dist3d(endroit3DHautChainette)
            dictDevelopp2D["fdist3DHautHautChainette"] = self.endroit3DHaut.dist3d(endroit3DHautChainette)
            endroit3DBasChainette = di.Endroit3D(endroit3DMil.lin3d(k=frac, endroit3D=endroit3DBas))
            dictDevelopp2D["fdist3DMilBasChainette"] = self.endroit3DMil.dist3d(endroit3DBasChainette)
            dictDevelopp2D["fdist3DBasBasChainette"] = self.endroit3DBas.dist3d(endroit3DBasChainette)
        # run the computation in the 2D space
        Developp2D.comp(self, dictDevelopp2D=dictDevelopp2D)
        # keep the 3D points as references for the next section
        self.endroit3DMil = endroit3DMil
        self.endroit3DHaut = endroit3DHaut
        self.endroit3DBas = endroit3DBas
    #-----
    def __str__(self) -> str:
        """Return a short human readable description of the instance."""
        return '--> Developp :\n'
#----- start here
if __name__ == '__main__':
    # Run the doctests of the module and report the result.
    import doctest
    failureCount, testCount = doctest.testmod(verbose=False)
    print(f"nombre de tests : {testCount:>3d}, nombre d'erreurs : {failureCount:>3d}", end='')
    scriptPath = pathlib.Path(__file__)
    if failureCount:
        print(f' --> Arrêt du programme {scriptPath}')
        sys.exit(ABNORMAL_TERMINATION)
    print(f' --> All Ok {scriptPath}')
    sys.exit(NORMAL_TERMINATION)
| StarcoderdataPython |
11239245 | import numpy as np
import matplotlib.pyplot as plt
import cv2
# With jupyter notebook uncomment below line
# %matplotlib inline
# This plots figures inside the notebook
def point_operation(img, K, L):
    """
    Apply the linear point operation ``out = img * K + L`` to a grayscale
    image and clip the result to the valid 8-bit range [0, 255].

    Args:
        img: array-like grayscale image.
        K: multiplicative gain.
        L: additive offset.

    Returns:
        The transformed image as an integer ndarray.
    """
    # np.float / np.int were deprecated aliases removed in NumPy 1.24; the
    # builtin types give the same dtypes (float64 and the default int).
    img = np.asarray(img, dtype=float)
    img = img * K + L
    # clip to the displayable 8-bit range
    img[img > 255] = 255
    img[img < 0] = 0
    return np.asarray(img, dtype=int)
def main():
    """Load a test image, apply three point operations and show the results
    side by side (requires OpenCV and matplotlib at runtime)."""
    # read an image
    img = cv2.imread('../figures/flower.png')
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # k = 0.5, l = 0 (darker)
    out1 = point_operation(gray, 0.5, 0)
    # k = 1., l = 10 (slightly brighter)
    out2 = point_operation(gray, 1., 10)
    # k = 0.7, l = 25 (the old comment said 0.8/15, which did not match the code)
    out3 = point_operation(gray, 0.7, 25)
    # stack the original and the three results horizontally for comparison
    res = np.hstack([gray,out1, out2, out3])
    plt.imshow(res, cmap='gray')
    plt.axis('off')
    plt.show()
if __name__ == '__main__':
main() | StarcoderdataPython |
5054627 | <gh_stars>1-10
"""This module tests the real component.py, just mocking the client"""
import pytest
import conftest
import time
import suricate.services
import suricate.component
from suricate.configuration import config
from suricate.errors import CannotGetComponentError
COMP_NAME = 'TestNamespace/Positioner00'
CONT_NAME = 'PositionerContainer'
suricate.services.is_container_online = lambda x: True
def test_manager_offline():
    """Component() raises CannotGetComponentError when the ACS manager is
    offline."""
    # suricate.component has been moked, reload the original one
    # NOTE(review): bare `reload` is a Python 2 builtin; under Python 3 this
    # module would need `from importlib import reload` -- confirm the target
    # interpreter before changing it.
    reload(suricate.component)
    try:
        suricate.services.is_manager_online = lambda: False
        with pytest.raises(CannotGetComponentError):
            suricate.component.Component(COMP_NAME, CONT_NAME)
    finally:
        # restore the default mock used by the rest of the suite
        suricate.services.is_manager_online = lambda: True
def test_component_unavailable():
    """Component() refuses components listed in Component.unavailables."""
    # suricate.component has been moked, reload the original one
    reload(suricate.component)
    try:
        suricate.component.Component.unavailables.append('TestNamespace/Positioner00')
        with pytest.raises(CannotGetComponentError):
            suricate.component.Component(COMP_NAME, CONT_NAME)
    finally:
        # leave the class-level blacklist clean for the other tests
        suricate.component.Component.unavailables = []
def test_container_offline():
    """The container is offline, raise CannotGetComponentError"""
    # suricate.component has been moked, reload the original one
    reload(suricate.component)
    try:
        # simulate an offline container
        suricate.services.is_container_online = lambda x: False
        with pytest.raises(CannotGetComponentError):
            suricate.component.Component(COMP_NAME, CONT_NAME)
    finally:
        # restore the default mock (container online)
        suricate.services.is_container_online = lambda x: True
def test_get_component():
    """A CannotGetComponent error from the client maps to
    CannotGetComponentError with a 'not available' message."""
    # suricate.component has been moked, reload the original one
    # Real component, mocked client
    reload(suricate.component)
    # Bind the real client class *before* entering the try block: if the
    # lookup itself failed inside the try, the finally clause would raise
    # NameError instead of restoring it.
    Client = suricate.services.get_client_class()
    try:
        conftest.MockACSClient.set_exc_name('CannotGetComponent')
        suricate.services.get_client_class = lambda: conftest.MockACSClient
        with pytest.raises(CannotGetComponentError) as exc:
            suricate.component.Component(COMP_NAME, CONT_NAME)
        expected = 'component %s not available' % COMP_NAME
        assert expected in str(exc.value)
    finally:
        # always restore the real client and reset the mock
        suricate.services.get_client_class = lambda: Client
        conftest.MockACSClient.set_exc_name('')
def test_no_permission_ex():
    """A NoPermissionEx error from the client maps to
    CannotGetComponentError with a 'not available' message."""
    # suricate.component has been moked, reload the original one
    # Real component, mocked client
    reload(suricate.component)
    # Bind the real client class before the try block so the finally clause
    # can always restore it (no NameError if the lookup fails).
    Client = suricate.services.get_client_class()
    try:
        conftest.MockACSClient.set_exc_name('NoPermissionEx')
        suricate.services.get_client_class = lambda: conftest.MockACSClient
        with pytest.raises(CannotGetComponentError) as exc:
            suricate.component.Component(COMP_NAME, CONT_NAME)
        expected = 'component %s not available' % COMP_NAME
        assert expected in str(exc.value)
    finally:
        # always restore the real client and reset the mock
        suricate.services.get_client_class = lambda: Client
        conftest.MockACSClient.set_exc_name('')
def test_comm_failure_manager_online():
    """A COMM_FAILURE while the manager is online reports a communication
    problem with the component."""
    # suricate.component has been moked, reload the original one
    # Real component, mocked client
    reload(suricate.component)
    # Bind the real client class before the try block so the finally clause
    # can always restore it (no NameError if the lookup fails).
    Client = suricate.services.get_client_class()
    try:
        conftest.MockACSClient.set_exc_name('COMM_FAILURE')
        suricate.services.get_client_class = lambda: conftest.MockACSClient
        with pytest.raises(CannotGetComponentError) as exc:
            suricate.component.Component(COMP_NAME, CONT_NAME)
        expected = 'cannot communicate with component'
        assert expected in str(exc.value)
    finally:
        # always restore the real client and reset the mock
        suricate.services.get_client_class = lambda: Client
        conftest.MockACSClient.set_exc_name('')
def test_comm_failure_manager_offline():
    """A COMM_FAILURE while the manager is offline reports 'ACS not
    running'."""
    # suricate.component has been moked, reload the original one
    # Real component, mocked client
    reload(suricate.component)
    # Bind the real client class before the try block so the finally clause
    # can always restore it (no NameError if the lookup fails).
    Client = suricate.services.get_client_class()
    try:
        conftest.MockACSClient.set_exc_name('COMM_FAILURE')
        suricate.services.get_client_class = lambda: conftest.MockACSClient
        suricate.services.is_manager_online = lambda: False
        with pytest.raises(CannotGetComponentError) as exc:
            suricate.component.Component(COMP_NAME, CONT_NAME)
        expected = 'ACS not running'
        assert expected in str(exc.value)
    finally:
        # restore the real client and the default mocks
        suricate.services.get_client_class = lambda: Client
        suricate.services.is_manager_online = lambda: True
        conftest.MockACSClient.set_exc_name('')
def test_unexpected_exception_manager_online():
    """Any unexpected client error maps to CannotGetComponentError with a
    'not available' message."""
    # suricate.component has been moked, reload the original one
    # Real component, mocked client
    reload(suricate.component)
    # Bind the real client class before the try block so the finally clause
    # can always restore it (no NameError if the lookup fails).
    Client = suricate.services.get_client_class()
    try:
        conftest.MockACSClient.set_exc_name('unexpected')
        suricate.services.get_client_class = lambda: conftest.MockACSClient
        with pytest.raises(CannotGetComponentError) as exc:
            suricate.component.Component(COMP_NAME, CONT_NAME)
        expected = 'component %s not available' % COMP_NAME
        assert expected in str(exc.value)
    finally:
        # always restore the real client and reset the mock
        suricate.services.get_client_class = lambda: Client
        conftest.MockACSClient.set_exc_name('')
def test_proxy_attribute():
    """Attribute access on a Proxy is forwarded to the proxied value."""
    # suricate.component has been moked, reload the original one
    reload(suricate.component)
    proxied = 'a string'
    proxy = suricate.component.Proxy('text', proxied)
    assert proxy.upper() == 'TEXT'
def test_proxy_call():
    """Calling a Proxy raises CannotGetComponentError (broken reference)."""
    # suricate.component has been moked, reload the original one
    reload(suricate.component)
    proxy = suricate.component.Proxy('text', 'a string')
    with pytest.raises(CannotGetComponentError) as excinfo:
        proxy()
    assert 'broken reference' in str(excinfo.value)
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main()
| StarcoderdataPython |
9627350 | <gh_stars>0
#!/usr/bin/env python3
import datetime
import pathlib
import sys
from typing import List, Optional
import orjson
import pydantic
import us
from vaccine_feed_ingest_schema import location
from vaccine_feed_ingest.utils.log import getLogger
logger = getLogger(__file__)
SOURCE_NAME = "getmyvax_org"
LOCATIONS_URL = "https://getmyvax.org/api/edge/locations.ndjson"
VACCINE_MAPPING = {
"moderna": location.VaccineType.MODERNA,
"pfizer": location.VaccineType.PFIZER_BIONTECH,
"jj": location.VaccineType.JOHNSON_JOHNSON_JANSSEN,
}
PROVIDER_MAPPING = {
"albertsons_acme": location.VaccineProvider.ACME,
"albertsons_amigos": location.VaccineProvider.AMIGOS,
"albertsons_carrs": location.VaccineProvider.CARRS,
"albertsons_haggen": location.VaccineProvider.HAGGEN,
"albertsons_jewelosco": location.VaccineProvider.JEWEL_OSCO,
"albertsons_market_street": location.VaccineProvider.MARKET_STREET,
"albertsons_market": location.VaccineProvider.ALBERTSONS_MARKET,
"albertsons_pak_n_save": location.VaccineProvider.PAK_N_SAVE,
"albertsons_pavilions": location.VaccineProvider.PAVILIONS,
"albertsons_randalls": location.VaccineProvider.RANDALLS,
"albertsons_safeway": location.VaccineProvider.SAFEWAY,
"albertsons_shaws": location.VaccineProvider.SHAWS,
"albertsons_star_market": location.VaccineProvider.STAR_MARKET,
"albertsons_tom_thumb": location.VaccineProvider.TOM_THUMB,
"albertsons_united": location.VaccineProvider.UNITED_SUPERMARKET,
"albertsons_vons": location.VaccineProvider.VONS,
"albertsons": location.VaccineProvider.ALBERTSONS,
"alliancerx_walgreens_prime": location.VaccineProvider.WALGREENS,
"community_a_walgreens_pharmacy": location.VaccineProvider.WALGREENS,
"costco": location.VaccineProvider.COSTCO,
"cvs": location.VaccineProvider.CVS,
"fresco_y_mas": location.VaccineProvider.FRESCO_Y_MAS,
"harveys": location.VaccineProvider.HARVEYS,
"health_mart_health_mart": location.VaccineProvider.HEALTH_MART,
"health_mart": location.VaccineProvider.HEALTH_MART,
"heb": location.VaccineProvider.HEB,
"hyvee": location.VaccineProvider.HY_VEE,
"kroger_bakers": location.VaccineProvider.BAKERS,
"kroger_citymarket": location.VaccineProvider.CITY_MARKET,
"kroger_covid": location.VaccineProvider.KROGER,
"kroger_dillons": location.VaccineProvider.DILLONS,
"kroger_fred": location.VaccineProvider.FRED_MEYER,
"kroger_frys": location.VaccineProvider.FRYS,
"kroger_gerbes": location.VaccineProvider.GERBES,
"kroger_hart": location.VaccineProvider.HART,
"kroger_jayc": location.VaccineProvider.JAYC,
"kroger_kingsoopers": location.VaccineProvider.KING_SOOPERS,
"kroger_marianos": location.VaccineProvider.MARIANOS,
"kroger_metro_market": location.VaccineProvider.METRO_MARKET,
"kroger_payless": location.VaccineProvider.PAY_LESS,
"kroger_pick_n_save": location.VaccineProvider.PICK_N_SAVE,
"kroger_qfc": location.VaccineProvider.QFC,
"kroger_ralphs": location.VaccineProvider.RALPHS,
"kroger_smiths": location.VaccineProvider.SMITHS,
"kroger_the_little_clinic": location.VaccineProvider.LITTLE_CLINIC,
"kroger": location.VaccineProvider.KROGER,
"kta_super_stores": location.VaccineProvider.KTA_SUPER_STORES,
"pharmaca": location.VaccineProvider.PHARMACA,
"price_chopper_market_32": location.VaccineProvider.MARKET_32,
"price_chopper_market_bistro": location.VaccineProvider.MARKET_BISTRO,
"price_chopper": location.VaccineProvider.PRICE_CHOPPER,
"publix": location.VaccineProvider.PUBLIX,
"rite_aid": location.VaccineProvider.RITE_AID,
"riteaid": location.VaccineProvider.RITE_AID,
"safeway": location.VaccineProvider.SAFEWAY,
"sams_club_sams_club": location.VaccineProvider.SAMS,
"sams_club": location.VaccineProvider.SAMS,
"southeastern_grocers_fresco_y_mas": location.VaccineProvider.FRESCO_Y_MAS,
"southeastern_grocers_harveys": location.VaccineProvider.HARVEYS,
"southeastern_grocers_winn_dixie": location.VaccineProvider.WINN_DIXIE,
"thrifty_white": location.VaccineProvider.THRIFTY_WHITE,
"walgreens_duane_reade": location.VaccineProvider.DUANE_READE,
"walgreens_specialty_pharmacy_of_puerto_rico": location.VaccineProvider.WALGREENS,
"walgreens_specialty_pharmacy": location.VaccineProvider.WALGREENS,
"walgreens": location.VaccineProvider.WALGREENS,
"walmart_walmart": location.VaccineProvider.WALMART,
"walmart": location.VaccineProvider.WALMART,
"wegmans": location.VaccineProvider.WEGMANS,
"weis_weis": location.VaccineProvider.WEIS,
"weis": location.VaccineProvider.WEIS,
"winn_dixie": location.VaccineProvider.WINN_DIXIE,
}
class BaseModel(pydantic.BaseModel):
    """BaseModel for all schema to inherit from.

    Subclasses get strict validation (unknown attributes rejected) and
    enums serialized as their string values.
    """
    class Config:
        # Fail if an attribute that doesn't exist is added.
        # This helps reduce typos.
        extra = "forbid"
        # Store enums as string values.
        # This helps when using exporting models with enums
        use_enum_values = True
class CapacityItem(BaseModel):
    """Per-day appointment capacity entry as reported by the upstream feed."""
    date: datetime.date
    dose: Optional[str]
    products: Optional[List[str]]
    # availability flag string -- presumably "YES"/"NO"/"UNKNOWN" like
    # Availability.available; TODO confirm against the getmyvax API docs
    available: str
    available_count: Optional[int]
    unavailable_count: Optional[int]
class Availability(BaseModel):
    """Appointment availability snapshot for one location."""
    source: str
    valid_at: datetime.datetime
    checked_at: datetime.datetime
    # "UNKNOWN" is treated as "no data" by _get_availability
    available: str
    available_count: Optional[int]
    capacity: Optional[List[CapacityItem]]
    # product codes, looked up in VACCINE_MAPPING (e.g. "pfizer", "jj")
    products: Optional[List[str]]
    doses: Optional[List[str]]
class Position(BaseModel):
    """Geographic coordinates -- presumably WGS84; confirm upstream CRS."""
    latitude: float
    longitude: float
class GMVLocation(BaseModel):
    """Raw location record as emitted by the getmyvax.org locations feed."""
    id: str
    # pharmacy chain key, looked up in PROVIDER_MAPPING (e.g. "cvs")
    provider: str
    location_type: str
    name: str
    address_lines: List[str]
    city: Optional[str]
    state: str
    postal_code: Optional[str]
    county: Optional[str]
    position: Optional[Position]
    info_phone: Optional[str]
    info_url: Optional[str]
    booking_phone: Optional[str]
    booking_url: Optional[str]
    eligibility: Optional[str]
    description: Optional[str]
    requires_waitlist: bool
    meta: Optional[dict]
    created_at: datetime.datetime
    # used as Source.published_at in the normalized output
    updated_at: datetime.datetime
    availability: Optional[Availability]
    # mapping of provider_id -> store_id; translated by _get_links
    external_ids: Optional[dict]
def process_line(line: bytes, timestamp: datetime.datetime) -> bytes:
    """Parse one raw ndjson line, normalize it and re-serialize it as ndjson."""
    raw_location = GMVLocation.parse_obj(orjson.loads(line))
    normalized = normalize_location(raw_location, timestamp)
    return orjson.dumps(
        normalized.dict(exclude_none=True), option=orjson.OPT_APPEND_NEWLINE
    )
def _get_address(loc: GMVLocation) -> Optional[location.Address]:
    """Build a normalized Address from the source fields, or None when the
    record carries no address information at all."""
    if not (loc.address_lines or loc.city or loc.state or loc.postal_code):
        logger.info("No address for location %s (%s)", loc.id, loc.name)
        return None
    street1 = loc.address_lines[0] if loc.address_lines else None
    # any extra address lines are folded into street2
    street2 = ", ".join(loc.address_lines[1:]) if len(loc.address_lines) > 1 else None
    state_abbr = None
    if loc.state:
        state = us.states.lookup(loc.state)
        if state:
            state_abbr = state.abbr
        else:
            logger.warning("Invalid state %s for %s (%s)", loc.state, loc.id, loc.name)
    # Handle invalid postal codes that are less than 5 digits
    postal_code = None
    if loc.postal_code:
        if len(loc.postal_code) >= 5:
            postal_code = loc.postal_code
        else:
            logger.warning(
                "Invalid postal code %s for %s (%s)", loc.postal_code, loc.id, loc.name
            )
    return location.Address(
        street1=street1,
        street2=street2,
        city=loc.city,
        state=state_abbr,
        zip=postal_code,
    )
def _get_lat_lng(loc: GMVLocation) -> Optional[location.LatLng]:
    """Extract a LatLng from the source position, or None when absent/invalid."""
    if not loc.position:
        logger.debug("No lat-lng for location %s (%s)", loc.id, loc.name)
        return None
    # Skip positions that are missing a value
    # NOTE(review): the falsy test also discards a legitimate 0.0 coordinate
    # (equator / prime meridian); compare against None if that matters.
    if not loc.position.latitude or not loc.position.longitude:
        logger.warning("Skipping position with missing coordinates")
        return None
    return location.LatLng(
        latitude=loc.position.latitude,
        longitude=loc.position.longitude,
    )
def _get_contacts(loc: GMVLocation) -> Optional[List[location.Contact]]:
    """Collect the general/booking phone and website contacts, or None when
    the record has none of them."""
    # (contact_type, Contact field name, source value) for each contact slot
    contact_specs = [
        (location.ContactType.GENERAL, "phone", loc.info_phone),
        (location.ContactType.GENERAL, "website", loc.info_url),
        (location.ContactType.BOOKING, "phone", loc.booking_phone),
        (location.ContactType.BOOKING, "website", loc.booking_url),
    ]
    contacts = [
        location.Contact(contact_type=contact_type, **{field: value})
        for contact_type, field, value in contact_specs
        if value
    ]
    return contacts or None
def _get_availability(loc: GMVLocation) -> Optional[location.Availability]:
    """Map the source availability flag onto the normalized model."""
    # NOTE(review): any value other than "UNKNOWN" yields appointments=True,
    # including a potential "NO" -- confirm the feed's value set.
    if not loc.availability or loc.availability.available == "UNKNOWN":
        return None
    return location.Availability(appointments=True)
def _get_inventory(loc: GMVLocation) -> Optional[List[location.Vaccine]]:
    """Translate the source product codes into normalized Vaccine entries."""
    if not loc.availability:
        return None
    vaccines = set()
    for product in loc.availability.products or []:
        vaccine_type = VACCINE_MAPPING.get(product)
        if vaccine_type is None:
            logger.info("Unrecognized vaccine for product %s", product)
        else:
            vaccines.add(vaccine_type)
    if not vaccines:
        return None
    return [location.Vaccine(vaccine=vaccine) for vaccine in vaccines]
def _get_parent_organization(loc: GMVLocation) -> Optional[location.Organization]:
    """Resolve the source provider key to a normalized Organization, if known."""
    provider = PROVIDER_MAPPING.get(loc.provider)
    return location.Organization(id=provider) if provider else None
def _get_links(loc: GMVLocation) -> Optional[List[location.Link]]:
    """Translate the source external_ids mapping into normalized Links.

    Several provider ids get special handling (skipped or remapped); the
    order of the elif chain matters, as the generic PROVIDER_MAPPING lookup
    is only the final fallback.
    """
    if not loc.external_ids:
        return None
    links = []
    for provider_id, store_id in loc.external_ids.items():
        if not store_id:
            logger.info("Missing value for external_id %s", provider_id)
            continue
        authority = None
        if provider_id == "vaccinespotter":
            authority = "vaccinespotter_org"
        elif provider_id == "vtrcks":
            authority = location.LocationAuthority.VTRCKS
        elif provider_id.startswith("univaf_"):
            # Skip versioned univaf ids until we figure out what to do with them.
            continue
        elif provider_id in ("njiis_covid", "comassvax"):
            # Skip because their ids are just addresses
            continue
        elif provider_id in ("centura_driveup_event", "denver_ball_arena"):
            # Skip because I am not sure if the ids are of good quality or useful
            continue
        elif (
            provider_id.endswith("_powered_by_walgreens")
            or provider_id.endswith("_a_walgreens_pharmacy")
            or provider_id.endswith("_a_walgreens_rx")
        ):
            # These stores keep their custom names, but have walgreens store ids
            authority = location.VaccineProvider.WALGREENS
        else:
            # generic fallback: look the provider up in the mapping table
            authority = PROVIDER_MAPPING.get(provider_id)
        if not authority:
            logger.info(
                "Unrecognized provider for external_id %s:%s", provider_id, store_id
            )
            continue
        links.append(location.Link(authority=authority, id=str(store_id)))
    if not links:
        return None
    return links
def normalize_location(
    loc: GMVLocation, timestamp: datetime.datetime
) -> location.NormalizedLocation:
    """Map a raw GMVLocation onto the vaccine-feed NormalizedLocation schema.

    *timestamp* is recorded as Source.fetched_at; fields with no counterpart
    in the feed (languages, opening dates/hours, access, notes, active) are
    left unset.
    """
    return location.NormalizedLocation(
        id=f"{SOURCE_NAME}:{loc.id}",
        name=loc.name,
        address=_get_address(loc),
        location=_get_lat_lng(loc),
        contact=_get_contacts(loc),
        languages=None,
        opening_dates=None,
        opening_hours=None,
        availability=_get_availability(loc),
        inventory=_get_inventory(loc),
        access=None,
        parent_organization=_get_parent_organization(loc),
        links=_get_links(loc),
        notes=None,
        active=None,
        source=location.Source(
            source=SOURCE_NAME,
            id=loc.id,
            fetched_from_uri=LOCATIONS_URL,
            fetched_at=timestamp,
            published_at=loc.updated_at,
            data=loc.dict(exclude_none=True),
        ),
    )
# Script entry (runs at import time; this module is meant to be executed):
# normalize every *.parsed.ndjson file from input_dir into output_dir.
output_dir = pathlib.Path(sys.argv[1]) if len(sys.argv) >= 2 else None
if output_dir is None:
    logger.error("Must pass an output_dir as first argument")
    sys.exit(1)
input_dir = pathlib.Path(sys.argv[2]) if len(sys.argv) >= 3 else None
if input_dir is None:
    logger.error("Must pass an input_dir as second argument")
    sys.exit(1)
for in_filepath in input_dir.iterdir():
    # only the parsed stage output is normalized
    if not in_filepath.name.endswith(".parsed.ndjson"):
        continue
    logger.info(f"Normalizing locations in {in_filepath.name}")
    # one fetched_at timestamp per input file
    timestamp = datetime.datetime.now()
    with in_filepath.open("rb") as in_file:
        filepath_stem = in_filepath.name[: -len(".parsed.ndjson")]
        out_filepath = output_dir / f"{filepath_stem}.normalized.ndjson"
        with out_filepath.open("wb") as out_file:
            for line in in_file:
                out_loc_ndjson = process_line(line, timestamp)
                out_file.write(out_loc_ndjson)
91219 | <filename>test/test.py
from pinnacle import pinnacle
import pytest
import pathlib
import os
PATH = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
def test_gen_dir_name():
    """Generate (and sanity-check) a random 10-character directory name.

    Despite the ``test_`` prefix this also acts as a helper: the generated
    name is returned.  The original version asserted nothing; it now checks
    the length and the alphabet of the generated name.
    """
    import secrets
    import string
    alphabet = string.ascii_letters + string.digits
    new_dir = ''.join(secrets.choice(alphabet) for _ in range(10))
    assert len(new_dir) == 10
    assert all(char in alphabet for char in new_dir)
    return new_dir
def test_tst1():
    """Sanity check: basic integer arithmetic works."""
    expected = 2
    assert 1 + 1 == expected
| StarcoderdataPython |
1753906 | from __future__ import absolute_import, division, print_function
from builtins import super, range, zip, round, map
import logging
import os
from glob import glob
import argparse
logger = logging.getLogger(__name__)
def main():
    """Command-line entry point for pruning accumulated ``*.log`` files.

    With no arguments every folder under ``./logs/`` is cleaned recursively;
    each ``-f <folder>`` argument restricts cleaning to that folder tree,
    e.g.::

        $ python clean_logs.py
        $ python clean_logs.py -f ./logs/reader/opendss
        $ python clean_logs.py -f ./logs/writer

    Names of removed files are logged as they are deleted.

    .. todo:: Add time capabilities like 'delete all log files older than
        yesterday'.
    """
    parser = argparse.ArgumentParser()
    # Repeatable folder option; empty list means "clean everything".
    parser.add_argument('-f', action='append', dest='folder_list', default=[])
    args = parser.parse_args()
    targets = args.folder_list or ['./logs/']
    for folder in targets:
        remove_log_and_return_subfolders(folder)
def remove_log_and_return_subfolders(path):
    """Recursively delete every ``*.log`` file under *path*.

    Strategy: remove the ``*.log`` files directly inside *path*, then
    recurse into each subdirectory.

    :param path: root folder to clean; relative or absolute.
    :returns: ``None`` for a leaf folder, otherwise a nested list of the
        recursive calls' return values -- callers rely only on the side
        effect of the files being removed.
    """
    # NOTE: the previous implementation used ``path.strip('/')``, which also
    # removed the *leading* slash and silently broke absolute paths.
    for log_file in glob(os.path.join(path, '*.log')):
        os.remove(log_file)
        logger.debug('-->cleaned:: {}'.format(log_file))
    # Recurse into real subdirectories only; the old ``'.' not in x`` filter
    # crashed on extension-less files and skipped folders containing dots.
    subfolders = [os.path.join(path, entry) for entry in os.listdir(path)
                  if os.path.isdir(os.path.join(path, entry))]
    if not subfolders:
        return
    return [remove_log_and_return_subfolders(folder) for folder in subfolders]
# Allow running this module directly from the command line.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
1920139 | from handlers.uploading import upload_handler
def test_upload_handler():
    """The upload handler should report success for a plain file name."""
    result = upload_handler("test.py")
    # ``status`` is expected to be the boolean True; ``is True`` avoids the
    # sloppy ``== True`` comparison (flake8 E712) that also matched 1/1.0.
    assert result.status is True
import argparse

parser = argparse.ArgumentParser(description='Unaligned > aligned raw.')
parser.add_argument('directory', type=str)
args = parser.parse_args()
print(args.directory)
try:
    import HTPA32x32d
except ImportError as exc:
    # Chain the original failure instead of the old bare ``except:`` that
    # hid the real cause (and caught even KeyboardInterrupt).
    raise Exception("Can't import HTPA32x32d") from exc
HTPA32x32d.dataset.VERBOSE = True
import os
# Directory that contains raw .TXT files from HTPA32x32d, all named
# YYYYMMDD_HHmm_ID{id}.TXT
raw_dir = args.directory
import json
config_f = "align_config.json"
# Read the existing alignment config, point it at the input directory, and
# derive the output directory name "<raw_dir>_aligned".
with open(config_f, "r") as a_file:
    json_object = json.load(a_file)
json_object["raw_input_dir"] = raw_dir
# NOTE(review): joining the parent dir with the *full* raw_dir path looks
# suspicious -- verify the intended destination directory.
json_object["processed_destination_dir"] = os.path.join(os.path.split(raw_dir)[0], raw_dir + "_aligned")
with open(config_f, "w") as a_file:
    json.dump(json_object, a_file)
preparer = HTPA32x32d.dataset.TPA_RGB_Preparer()
preparer.config(config_f)
HTPA32x32d.dataset.SYNCHRONIZATION_MAX_ERROR = 5
preparer.prepare()  # now fill labels and make_config.json.
| StarcoderdataPython |
1702483 | <filename>mlplaygrounds/datasets/tests/test_trainers.py
from unittest import TestCase
import pandas as pd
from mlplaygrounds.datasets.trainers.base import Trainer, FeatureTypeError
class TestTrainer(TestCase):
    """Unit tests for ``Trainer`` (mlplaygrounds.datasets.trainers.base)."""

    def setUp(self):
        # Five records; 'val' is the target column, 'x_three' is constant and
        # excluded from training, and row 3 has a missing 'x_two' value.
        self.X = [
            {'x_one': 1, 'x_two': 4, 'x_three': 1, 'val': 5},
            {'x_one': 2, 'x_two': 6, 'x_three': 1, 'val': 8},
            {'x_one': 3, 'x_two': 7, 'x_three': 1, 'val': 10},
            {'x_one': 4, 'x_two': None, 'x_three': 1, 'val': 9},
            {'x_one': 5, 'x_two': 7, 'x_three': 1, 'val': 12}
        ]
        self.exclude_features = ['x_three']
        self.alg = 'linear regression'
        self.trainer = Trainer(self.X, 'val', self.exclude_features, self.alg)

    def test_valid_features_to_exclude(self):
        # Excluding an existing column is accepted.
        res = self.trainer.features_to_exclude_are_valid(self.exclude_features)
        self.assertEqual(res, True)

    def test_invalid_features_to_exclude(self):
        # 'x_four' is not a column of X, so validation must raise.
        with self.assertRaises(KeyError):
            self.trainer.features_to_exclude_are_valid(['x_four'])

    def test_valid_algorithm(self):
        res = self.trainer.algorithm_is_valid(self.alg)
        self.assertEqual(res, True)

    def test_invalid_algorithm(self):
        # Unknown algorithm names are rejected.
        with self.assertRaises(ValueError):
            self.trainer.algorithm_is_valid('invalid_alg')

    def test_valid_features_to_use_at_training(self):
        res = self.trainer.features_to_use_in_training_are_valid()
        self.assertEqual(res, True)

    def test_invalid_features_to_use_at_training(self):
        # Injecting a non-numeric column must make validation fail.
        self.trainer.prepared_X['example'] = ['a', 'b', 'c', 'd', 'e']
        with self.assertRaises(FeatureTypeError):
            self.trainer.features_to_use_in_training_are_valid()

    def test_drop_features(self):
        # drop_features should remove the excluded column and keep NaN->None.
        expected = pd.DataFrame(self.X)
        expected = expected.drop('x_three', axis=1)
        expected = expected.where(pd.notnull(expected), None)
        expected = expected.to_dict('records')
        self.trainer.drop_features()
        self.assertListEqual(self.get_prepared_X_dict_from_trainer(), expected)

    def test_drop_no_features(self):
        # With nothing to exclude, the data is returned unchanged.
        self.trainer.exclude_features = []
        self.trainer.drop_features()
        self.assertListEqual(self.get_prepared_X_dict_from_trainer(), self.X)

    def test_transform_nan_values(self):
        # NOTE(review): ``self.X.copy()`` is shallow, so this also mutates the
        # shared row dict; presumably harmless because the trainer holds its
        # own prepared copy -- confirm if setUp semantics ever change.
        # Missing 'x_two' (row 3) is expected to be imputed with 6.
        expected = self.X.copy()
        expected[3]['x_two'] = 6
        self.trainer.transform_nan_values()
        self.assertListEqual(self.get_prepared_X_dict_from_trainer(), expected)

    def test_train(self):
        # Expected regression coefficients for the fixture data.
        expected_coef = [0.5, 2.5]
        trained_model = self.trainer.train()
        coefficients = trained_model.coefficients()
        for i, coef in enumerate(expected_coef):
            self.assertAlmostEqual(coefficients[i], coef)

    def get_prepared_X_dict_from_trainer(self):
        # Helper: normalize the trainer's prepared DataFrame to a list of
        # dicts with NaN replaced by None for comparison.
        prepared_X = self.trainer.prepared_X
        prepared_X = prepared_X.where(pd.notnull(prepared_X), None)
        return prepared_X.to_dict('records')
| StarcoderdataPython |
12854179 | #!/bin/python
# coding=utf-8
import schedule
import time
from subprocess import call
# Referências:
# https://pypi.org/project/schedule/
# https://stackoverflow.com/questions/373335/how-do-i-get-a-cron-like-scheduler-in-python
# https://www.geeksforgeeks.org/python-schedule-library/
def postgres_backup_00_h():
    """Run the daily (00:00) PostgreSQL backup shell script.

    Invokes ``/scripts/postgres_backup.sh`` via ``sh``; any failure to launch
    the script is caught and printed so the scheduler loop keeps running.
    """
    print("#################### inicio postgres_backup_00_h ####################")
    # The old messages said "postgres_backup_60_min" -- a copy/paste leftover
    # from another job; they now match this function's name.
    print("postgres_backup_00_h : {}".format(time.ctime()))
    try:
        call(['sh', '/scripts/postgres_backup.sh'])
    except Exception as e:
        print('problema ao executar postgres_backup.sh')
        print(e)
    print("#################### fim postgres_backup_00_h ####################")
# Script entry point: register the daily backup job and poll forever.
if __name__ == "__main__":
    print("#################### tasks.py iniciado ####################")
    # Run postgres_backup_00_h() every day at 00:00.
    schedule.every().day.at("00:00").do(postgres_backup_00_h)
    # Polling scheduler loop; sleep 1s between checks to keep CPU usage low.
    while True:
        schedule.run_pending()
        time.sleep(1)
| StarcoderdataPython |
5067483 | <gh_stars>10-100
import functools
import inspect
from typing import Any, Callable, List, Literal, cast, get_args
from koreanbots.typing import CORO
def strict_literal(argument_names: List[str]) -> Callable[[CORO], CORO]:
    """Decorator factory that validates ``Literal``-annotated arguments at call time.

    :param argument_names: names of parameters whose annotations are
        ``typing.Literal[...]``; on every call the supplied value must be one
        of the allowed literals.
    :raises ValueError: when a checked argument is not a permitted literal.
    """
    def decorator(f: CORO) -> CORO:
        @functools.wraps(f)
        async def decorated_function(*args: Any, **kwargs: Any) -> Any:
            # Inspect the wrapped function's signature and annotations.
            full_arg_spec = inspect.getfullargspec(f)
            for argument_name in argument_names:
                annotation = full_arg_spec.annotations[argument_name]
                # Only Literal annotations are validated; getattr avoids an
                # AttributeError for plain annotations without __origin__.
                if getattr(annotation, "__origin__", None) is not Literal:
                    continue
                literal_list = list(get_args(annotation))
                arg_index = full_arg_spec.args.index(argument_name)
                if arg_index < len(args):
                    value = args[arg_index]
                elif argument_name in kwargs:
                    # NOTE: the previous ``kwargs.get(name) and ...`` test
                    # skipped validation entirely for falsy values (0, "",
                    # None); membership testing closes that hole. It also
                    # duplicated the "not in literal_list" check.
                    value = kwargs[argument_name]
                else:
                    continue
                if value not in literal_list:
                    raise ValueError(
                        f"Arguments do not match. Expected: {literal_list}"
                    )
            return await f(*args, **kwargs)
        return cast(CORO, decorated_function)
    return decorator
| StarcoderdataPython |
66742 | import asyncio
import collections
import hashlib
import json
import os
import sys
import time
from collections import OrderedDict
from datetime import datetime
from functools import reduce
from pathlib import Path
from urllib.parse import quote_plus
import aiohttp
import nexussdk as nxs
import pandas as pd
import progressbar
from colorama import Fore
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import JsonLdLexer
import numpy as np
from nexuscli.config import _DEFAULT_ORGANISATION_KEY_, _DEFAULT_PROJECT_KEY_, _URL_KEY_, _TOKEN_KEY_, _SELECTED_KEY_
def error(message: str):
    """Print *message* in red and terminate the CLI with exit code 101."""
    colored = Fore.RED + message
    print(colored)
    sys.exit(101)
def warn(message: str):
    """Print *message* in yellow without interrupting execution."""
    text = Fore.YELLOW + message
    print(text)
def success(message: str):
    """Print *message* in green to signal a successful operation."""
    text = Fore.GREEN + message
    print(text)
def print_json(data: dict, colorize: bool=False):
    """Pretty-print *data* as 2-space-indented JSON.

    :param data: the json payload to print
    :param colorize: when True, write syntax-highlighted JSON-LD to stdout
    """
    rendered = json.dumps(data, indent=2)
    if not colorize:
        print(rendered)
    else:
        sys.stdout.write(highlight(rendered, JsonLdLexer(), TerminalFormatter()))
        sys.stdout.flush()
def datetime_from_utc_to_local(utc_datetime: datetime) -> datetime:
    """Convert a naive UTC datetime into local time using the current offset.

    The previous signature annotated *utc_datetime* as ``int``, but the
    function adds a ``timedelta`` to it, so it must be a ``datetime``.
    """
    now_timestamp = time.time()
    # Derive the local/UTC offset from the same instant so DST is respected.
    offset = datetime.fromtimestamp(now_timestamp) - datetime.utcfromtimestamp(now_timestamp)
    return utc_datetime + offset
def print_time(seconds: int):
    """Format a duration in seconds as a compact string like ``1d 2h 3m 4s``.

    Leading zero components are omitted; negative durations get a '-' prefix.
    """
    sign = '-' if seconds < 0 else ''
    seconds = abs(int(seconds))
    days, rem = divmod(seconds, 86400)
    hours, rem = divmod(rem, 3600)
    minutes, secs = divmod(rem, 60)
    if days:
        return '{}{}d {}h {}m {}s'.format(sign, days, hours, minutes, secs)
    if hours:
        return '{}{}h {}m {}s'.format(sign, hours, minutes, secs)
    if minutes:
        return '{}{}m {}s'.format(sign, minutes, secs)
    return '{}{}s'.format(sign, secs)
def get_nexus_client():
    """Return the nexussdk module configured for the selected profile.

    Aborts via :func:`error` when no profile is selected; emits a warning
    when the selected profile carries no token.
    """
    key, cfg = get_selected_deployment_config()
    if cfg is None:
        error("You must select a profile.")
    nxs.config.set_environment(cfg[_URL_KEY_])
    if _TOKEN_KEY_ in cfg:
        nxs.config.set_token(cfg[_TOKEN_KEY_])
    else:
        # Fixed the user-facing typo: "haven not" -> "have not".
        warn("WARNING - you have not set a token in your profile, use the 'auth set-token' command to do it.")
    return nxs
def pretty_filesize(num: int, suffix: str='B'):
    """Render a byte count as a human-readable string, e.g. ``1.5KB``."""
    value = num
    for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, unit, suffix)
        value /= 1024.0
    # Anything past zetta falls through to the yobi suffix.
    return "%.1f %s%s" % (value, 'Yi', suffix)
def remove_nexus_metadata(d: dict):
    """Copy *d*, dropping every top-level key that starts with an underscore
    (the convention for Nexus metadata fields)."""
    return {key: value for key, value in d.items() if not key.startswith('_')}
def remove_nexus_added_context(d: dict):
    """Strip the contexts Nexus injects into payloads from ``d["@context"]``.

    ``@context`` is normalised to a list and the two well-known Nexus context
    URLs are removed. *d* is mutated in place and returned.
    (The old docstring was copy-pasted from ``remove_nexus_metadata`` and
    described the wrong behaviour.)
    """
    if "@context" in d:
        context = d["@context"]
        context = context if isinstance(context, list) else [context]
        # Single loop replaces the previously duplicated if/remove pairs.
        for url in ('https://bluebrain.github.io/nexus/contexts/shacl-20170720.json',
                    'https://bluebrain.github.io/nexus/contexts/resource.json'):
            if url in context:
                context.remove(url)
        d["@context"] = context
    return d
def sort_dictionary(od):
    """Return an ``OrderedDict`` copy of *od* with keys sorted recursively.

    Nested dicts are sorted the same way; dicts found inside lists are also
    sorted, while the list order itself is preserved.
    """
    result = OrderedDict()
    for key in sorted(od):
        value = od[key]
        if isinstance(value, dict):
            result[key] = sort_dictionary(value)
        elif isinstance(value, list):
            result[key] = [sort_dictionary(item) if isinstance(item, dict) else item
                           for item in value]
        else:
            result[key] = value
    return result
def generate_nexus_payload_checksum(payload: dict, debug: bool=False):
    """MD5 checksum of a Nexus payload, ignoring metadata and injected contexts.

    Metadata keys and Nexus-added contexts are stripped, keys are sorted
    recursively, and the MD5 of the resulting indented JSON is returned.
    """
    cleaned = remove_nexus_added_context(remove_nexus_metadata(payload))
    ordered = sort_dictionary(cleaned)
    if debug:
        print("JSON to checksum:")
        print_json(ordered, colorize=True)
    checksum = hashlib.md5(json.dumps(ordered, indent=2).encode('utf-8')).hexdigest()
    if debug:
        print("Checksum: %s" % checksum)
    return checksum
def format_json_field(payload: dict, field: str):
    """Return a display string for ``payload[field]``.

    Strings are returned as-is, sequences of strings are joined with
    newlines, a missing field yields ``""``; any other type triggers a
    warning and is returned unchanged.
    """
    # ``collections.Sequence`` was removed in Python 3.10; use the abc module.
    from collections.abc import Sequence

    formatted = ""
    if field in payload:
        value = payload[field]
        if isinstance(value, str):
            formatted = value
        elif isinstance(value, Sequence):
            formatted = "\n".join(value)
        else:
            # The old code concatenated ``str + type`` here, which raised
            # TypeError instead of emitting the warning.
            warn("Unsupported type: {}".format(type(value)))
            formatted = value
    return formatted
#######################
# CLI CONFIG
def get_cli_config_dir():
    """Return the absolute path of the CLI config directory, creating it on first use."""
    cfg_dir = str(Path.home()) + '/.nexus-cli'
    if not os.path.exists(cfg_dir):
        print("Creating CLI config directory: " + cfg_dir)
        os.makedirs(cfg_dir)
    return cfg_dir
def get_cli_config_file():
    """Return the full path of the CLI's ``config.json`` file."""
    return '{}/config.json'.format(get_cli_config_dir())
def get_cli_config():
    """Load the CLI config as a dict; create an empty config file when absent."""
    cfg_file = get_cli_config_file()
    if not os.path.isfile(cfg_file):
        data = {}
        save_cli_config(data)
        return data
    with open(cfg_file, 'r') as fp:
        return json.load(fp)
def save_cli_config(dict_cfg: dict):
    """Persist *dict_cfg* to the CLI config file as sorted, indented JSON."""
    with open(get_cli_config_file(), 'w') as fp:
        json.dump(dict_cfg, fp, sort_keys=True, indent=4)
def get_selected_deployment_config(config: dict=None):
    """Find the currently selected nexus profile.

    :param config: optional pre-loaded config dict; loaded from disk if None
    :returns: ``(name, profile_dict)`` of the selected profile, or
        ``(None, None)`` when no profile is marked as selected.
    """
    if config is None:
        config = get_cli_config()
    for name, profile in config.items():
        if profile.get(_SELECTED_KEY_) is True:
            return name, profile
    return None, None
def get_default_organization():
    """Return the selected profile's default organization, or None if unset."""
    profile, selected_config = get_selected_deployment_config(get_cli_config())
    if selected_config is None:
        error("You must first select a profile using the 'profiles' command")
    return selected_config.get(_DEFAULT_ORGANISATION_KEY_)
def set_default_organization(org_label: str):
    """Store *org_label* as the selected profile's default organization."""
    cfg = get_cli_config()
    profile_name, selected = get_selected_deployment_config(cfg)
    if selected is None:
        error("You must first select a profile using the 'profiles' command")
    cfg[profile_name][_DEFAULT_ORGANISATION_KEY_] = org_label
    save_cli_config(cfg)
def get_organization_label(given_org_label: str):
    """Resolve the organization label: explicit value or the profile default."""
    label = given_org_label if given_org_label is not None else get_default_organization()
    if label is None:
        error("No organization specified, either set default using the 'orgs' command or pass it as a "
              "parameter using --org")
    return label
def get_default_project():
    """Return the selected profile's default project, or None if unset."""
    profile, selected_config = get_selected_deployment_config(get_cli_config())
    if selected_config is None:
        error("You must first select a profile using the 'profiles' command")
    return selected_config.get(_DEFAULT_PROJECT_KEY_)
def set_default_project(project_label: str):
    """Store *project_label* as the selected profile's default project."""
    cfg = get_cli_config()
    profile_name, selected = get_selected_deployment_config(cfg)
    if selected is None:
        error("You must first select a profile using the 'profiles' command")
    cfg[profile_name][_DEFAULT_PROJECT_KEY_] = project_label
    save_cli_config(cfg)
def get_project_label(given_project_label: str):
    """Resolve the project label: explicit value or the profile default."""
    label = given_project_label if given_project_label is not None else get_default_project()
    if label is None:
        error("No project specified, either set default using the 'projects' command or pass it as a "
              "parameter using --project")
    return label
def create_in_nexus(data_model, reader, max_connections):
    """Asynchronously POST every row of *reader* as a resource to Nexus.

    :param data_model: dict with the target ``_org_label``/``_prj_label``/
        ``schema`` plus optional ``rdf_type``/``id``/``id_namespace`` used to
        decorate each row with ``@type``/``@id``
    :param reader: sized iterable of JSON-serialisable row dicts
    :param max_connections: cap on concurrent HTTP requests
    Rows that do not return HTTP 201 are written to ``errors.log`` and the
    CLI aborts via :func:`error`.
    """
    key, cfg = get_selected_deployment_config()
    env = cfg[_URL_KEY_]
    headers = {}
    if _TOKEN_KEY_ in cfg:
        headers["Authorization"] = "Bearer {}".format(cfg[_TOKEN_KEY_])
    headers["Content-Type"] = "application/json"
    # Build the resource-creation endpoint URL; every path part is escaped.
    org = quote_plus(data_model["_org_label"])
    project = quote_plus(data_model["_prj_label"])
    schema = quote_plus(data_model["schema"])
    path = "resources/" + org + "/" + project + "/" + schema
    url = env + "/" + path
    counter = 0
    failures = []
    loop = asyncio.get_event_loop()
    bar = progressbar.ProgressBar(max_value=len(reader))
    async def post(session, url, row):
        # One POST per row; only HTTP 201 counts as a success.
        async with session.post(url, data=json.dumps(row)) as response:
            if response.status == 201:
                nonlocal counter
                counter += 1
                bar.update(counter)
            else:
                failures.append((response.status, row))
    async def bound_post(semaphore, session, url, row):
        # The semaphore caps the number of in-flight requests.
        async with semaphore:
            await post(session, url, row)
    async def send():
        futures = []
        semaphore = asyncio.Semaphore(max_connections)
        async with aiohttp.ClientSession(headers=headers) as session:
            for row in reader:
                # Optionally decorate each row with its RDF type and @id.
                if "rdf_type" in data_model:
                    row["@type"] = data_model["rdf_type"]
                id_namespace = ""
                if "id_namespace" in data_model:
                    id_namespace = data_model["id_namespace"]
                elif "rdf_type" in data_model:
                    # Fall back to "<rdf_type>_" as the id prefix.
                    id_namespace = "".join([data_model["rdf_type"],"_"])
                if "id" in data_model:
                    row["@id"] = "".join([id_namespace,str(row[data_model["id"]])])
                request = asyncio.ensure_future(bound_post(semaphore, session, url, row))
                futures.append(request)
            await asyncio.gather(*futures)
    loop.run_until_complete(send())
    if len(failures) > 0:
        with open("errors.log", "w") as file:
            for (status_code, row) in failures:
                file.write("code={} body={}\n".format(status_code, row))
        error("\nFailed to ingest {} documents. See 'errors.log' for details.".format(len(failures)))
def merge_csv(file_paths, on):
    """Read each CSV in *file_paths* and outer-merge them on column(s) *on*."""
    def outer_join(left, right):
        return pd.merge(left, right, on=on, how='outer')
    frames = (pd.read_csv(path, keep_default_na=False) for path in file_paths)
    return reduce(outer_join, frames)
def load_csv(_org_label, _prj_label, schema, file_path, merge_with=None, merge_on=None, _type=None, id_colum=None, id_namespace=None, aggreg_column=None, max_connections=50):
    """Load a CSV (optionally merged with other CSVs) and push its rows to Nexus.

    :param file_path: main CSV file to ingest
    :param merge_with: optional extra CSV paths outer-joined on *merge_on*
    :param _type: RDF type added to every row as ``@type``
    :param id_colum: column used to build each row's ``@id``
    :param id_namespace: prefix for generated ``@id`` values
    :param aggreg_column: columns whose values are collapsed into lists,
        grouping by all remaining columns
    :param max_connections: concurrency limit passed to ``create_in_nexus``
    """
    try:
        if merge_with:
            merge_with = list(merge_with)
            merge_with.append(file_path)
            reader = merge_csv(merge_with, merge_on)
        else:
            reader = pd.read_csv(file_path, keep_default_na=False)
        reader.drop_duplicates(inplace=True)
        reader.fillna('')  # NOTE(review): result is discarded (no inplace=True) -- confirm intent
        if aggreg_column:
            aggreg_column = list(aggreg_column)
            # Group by every column that is not being aggregated.
            grouby_columns = [column for column in reader.columns if column not in aggreg_column]
            # "nan" placeholder keeps rows with missing group keys in groupby.
            reader.fillna("nan",inplace=True)
            reader_unique = reader.groupby(by=grouby_columns).agg(lambda x: list(x))
            reader = reader_unique.reset_index()
            # Turn the "nan" placeholders (scalar or single-item list) back
            # into real NaN values.
            for column in reader.columns:
                m = [v == ['nan'] or v=="nan" for v in reader[column]]
                reader.loc[m, column] = np.nan
        # Round-trip through JSON per row to drop NaN entries from records.
        reader = (reader.apply(lambda x: x.dropna(), axis=1).to_json(orient='records'))
        reader = json.loads(reader)
        print("Loading {} resources...".format(len(reader)))
        data_model = dict()
        if id_colum:
            data_model["id"] = id_colum
        if id_namespace:
            data_model["id_namespace"] = id_namespace
        if _type:
            data_model["rdf_type"] = _type
        data_model["_org_label"] = _org_label
        data_model["_prj_label"] = _prj_label
        data_model["schema"] = schema
        create_in_nexus(data_model, reader, max_connections)
    except Exception as e:
        # NOTE(review): ``raise Exception from e`` raises the bare Exception
        # class with no message -- consider Exception(str(e)).
        raise Exception from e
| StarcoderdataPython |
5001814 | import datetime
import os
import shutil
import sys
import arrow
import pytest
import virtool.utils
@pytest.fixture
def fake_dir(tmpdir):
    """Temporary directory pre-populated with two small text files."""
    hello = tmpdir.join("hello.txt")
    world = tmpdir.join("world.txt")
    hello.write("hello world")
    world.write("this is a test file")
    return tmpdir
@pytest.fixture(scope="session")
def alphanumeric():
    """Lowercase letters plus digits; the alphabet for generated IDs."""
    return "abcdefghijklmnopqrstuvwxyz" + "1234567890"
@pytest.fixture(scope="function")
def randomizer():
    """Callable that pops predetermined pseudo-random IDs from a fixed pool."""
    ids = ["abc123", "jkl932", "90r2ja", "87e9wa", "skk342", "skl1qq"]
    def draw():
        return ids.pop()
    return draw
@pytest.fixture(scope="function")
def collection():
    """Three sample documents with sequential ids."""
    names = ["lambert", "winston", "stuart"]
    return [{"id": index, "name": name} for index, name in enumerate(names)]
def test_decompress_tgz(tmpdir):
    """decompress_tgz should unpack the bundled release archive in place."""
    path = str(tmpdir)
    # Copy the fixture archive from the repo's test files into the temp dir.
    src_path = os.path.join(sys.path[0], "tests", "test_files", "virtool.tar.gz")
    shutil.copy(src_path, path)
    virtool.utils.decompress_tgz(os.path.join(path, "virtool.tar.gz"), os.path.join(path, "de"))
    # The archive itself must remain; the contents land under de/virtool.
    assert set(os.listdir(path)) == {"virtool.tar.gz", "de"}
    assert os.listdir(os.path.join(path, "de")) == ["virtool"]
    assert set(os.listdir(os.path.join(path, "de", "virtool"))) == {"run", "client", "VERSION", "install.sh"}
class TestRm:
    """Tests for ``virtool.utils.rm`` (file and directory removal helper)."""

    def test_rm_file(self, fake_dir):
        # Removing one file leaves its sibling untouched.
        assert set(os.listdir(str(fake_dir))) == {"hello.txt", "world.txt"}
        path = os.path.join(str(fake_dir), "world.txt")
        virtool.utils.rm(path)
        assert set(os.listdir(str(fake_dir))) == {"hello.txt"}

    def test_rm_folder(self, fake_dir):
        # Without recursive=True, a directory must be refused and kept.
        fake_dir.mkdir("dummy")
        assert set(os.listdir(str(fake_dir))) == {"hello.txt", "world.txt", "dummy"}
        path = os.path.join(str(fake_dir), "dummy")
        with pytest.raises(IsADirectoryError):
            virtool.utils.rm(path)
        assert set(os.listdir(str(fake_dir))) == {"hello.txt", "world.txt", "dummy"}

    def test_rm_folder_recursive(self, fake_dir):
        # recursive=True removes the whole directory tree.
        fake_dir.mkdir("dummy_recursive")
        assert set(os.listdir(str(fake_dir))) == {"hello.txt", "world.txt", "dummy_recursive"}
        path = os.path.join(str(fake_dir), "dummy_recursive")
        virtool.utils.rm(path, recursive=True)
        assert set(os.listdir(str(fake_dir))) == {"hello.txt", "world.txt"}
def test_timestamp(mocker):
    """
    Test that the timestamp util returns a datetime object with the last 3 digits of the microsecond frame set to
    zero.
    """
    # Freeze arrow.utcnow at a fixed instant whose microseconds are 612304.
    m = mocker.Mock(return_value=arrow.Arrow(2017, 10, 6, 20, 0, 0, 612304))
    mocker.patch("arrow.utcnow", new=m)
    timestamp = virtool.utils.timestamp()
    assert isinstance(timestamp, datetime.datetime)
    # 612304 -> 612000: microseconds truncated to millisecond precision.
    assert timestamp == arrow.arrow.Arrow(2017, 10, 6, 20, 0, 0, 612000).naive
class TestRandomAlphanumeric:
    """Tests for ``virtool.utils.random_alphanumeric``."""

    def test_default(self, alphanumeric):
        # Default length is 6; every character must come from the alphabet.
        for _ in range(0, 10):
            an = virtool.utils.random_alphanumeric()
            assert len(an) == 6
            assert all(l in alphanumeric for l in an)

    def test_length(self, alphanumeric):
        # Explicit lengths are honoured exactly.
        for length in [7, 10, 25, 12, 4, 22, 17, 30, 8, 14, 19]:
            an = virtool.utils.random_alphanumeric(length)
            assert len(an) == length
            assert all(l in alphanumeric for l in an)

    def test_excluded(self, alphanumeric):
        # Values listed in ``excluded`` must never be returned.
        for _ in range(0, 5):
            an = virtool.utils.random_alphanumeric(excluded=["87e9wa"])
            assert an != "87e9wa"
            assert len(an) == 6
            assert all(l in alphanumeric for l in an)
class TestAverageList:
    """Tests for ``virtool.utils.average_list`` (element-wise mean of two lists)."""

    def test_default(self):
        # Element-wise average of two equal-length numeric lists.
        list1 = [2, 5, 6, 10, 14, 20]
        list2 = [-1, 3, 0, 22, 12, 11]
        expected = [0.5, 4, 3, 16, 13, 15.5]
        assert virtool.utils.average_list(list1, list2) == expected

    def test_mismatched(self):
        # Lists of different length are rejected.
        with pytest.raises(TypeError):
            virtool.utils.average_list([1, 3, 2, 4], [2, 3, 7])

    def test_wrong_item_type(self):
        # Non-numeric items are rejected.
        with pytest.raises(TypeError):
            virtool.utils.average_list([2, 5, 6], [8, "a", 5])

    def test_wrong_arg_type(self):
        # A non-list second argument is rejected.
        with pytest.raises(TypeError):
            virtool.utils.average_list([2, 5, 6], "a")
| StarcoderdataPython |
4907789 | import bpy
class ahs_maincurve_volume_down(bpy.types.Operator):
    # Blender operator that strips taper/bevel datablocks from selected curves.
    bl_idname = 'object.ahs_maincurve_volume_down'
    bl_label = "Remove Taper/Bevel"
    bl_description = "Remove Taper/Bevel from selected Curve"
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # Enabled only when at least one selected curve has a taper or bevel
        # object assigned.
        try:
            for ob in context.selected_objects:
                if ob.type != 'CURVE':
                    continue
                if ob.data.taper_object or ob.data.bevel_object:
                    break
            else:
                # Loop completed without finding a candidate curve.
                return False
        except:
            # Any attribute/context error simply disables the operator.
            return False
        return True

    def execute(self, context):
        # Delete the taper and bevel curve datablocks of every selected curve.
        for ob in context.selected_objects:
            if ob.type != 'CURVE':
                continue
            if ob.data.taper_object:
                context.blend_data.curves.remove(ob.data.taper_object.data, do_unlink=True)
            if ob.data.bevel_object:
                context.blend_data.curves.remove(ob.data.bevel_object.data, do_unlink=True)
        # Ask all editor areas to redraw so the change is visible immediately.
        for area in context.screen.areas:
            area.tag_redraw()
        return {'FINISHED'}
| StarcoderdataPython |
9700579 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-25 05:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: redefine Farm.profile as an optional FK to
    # session.Profile with the reverse accessor 'farms'.

    dependencies = [
        ('farms', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='farm',
            name='profile',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='farms', to='session.Profile'),
        ),
    ]
| StarcoderdataPython |
3529228 | <filename>codesync/auth/handler.py
import shelve
import socket
import subprocess
import time
from threading import Thread
import requests
from codesync.auth.server import start_server
from codesync.constants import CACHE_FILE_PATH
from codesync.settings import IS_DEV
class AuthServerHandler(object):
    """Manages the lifecycle of the local CodeSync auth callback server.

    The chosen port is persisted in a shelve cache (CACHE_FILE_PATH) so
    other processes can discover an already-running instance.
    """

    @staticmethod
    def get_open_port():
        # Bind to port 0 so the OS picks a free port, then release it.
        # NOTE(review): inherently racy -- the port may be re-taken before
        # it is actually used.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("", 0))
        s.listen(1)
        port = s.getsockname()[1]
        s.close()
        return port

    @staticmethod
    def get_port_from_shelve():
        # Read the previously persisted server port (None when absent).
        d = shelve.open(CACHE_FILE_PATH)
        port = d.get('port')
        d.close()
        return port

    @staticmethod
    def clean_up_shelve():
        # Clean up shelve: drop the cached access token if present.
        d = shelve.open(CACHE_FILE_PATH)
        for key in ['access_token']:
            if d.get(key):
                del d[key]
        d.close()

    @staticmethod
    def start_thread(port):
        # Run the auth server in a daemon thread so it dies with the process.
        thread = Thread(target=start_server, args=(port,))
        thread.setDaemon(True)
        thread.start()

    def check_server_running(self):
        """Return the cached port when a server answers on it, else None."""
        # See if saved port is working
        port = self.get_port_from_shelve()
        if port:
            plugin_server = f"http://localhost:{port}"
            try:
                response = requests.get(plugin_server)
                # TODO: Improve this by verifying specific response
                server_is_up = response.ok
                if server_is_up:
                    self.clean_up_shelve()
                    return port
            except Exception:
                # Connection failure means no server; fall through to None.
                pass

    def trigger_from_cli(self):
        """Spawn the auth server as a detached CLI subprocess; return its port."""
        # Get port and set in shelve
        port = self.get_open_port()
        d = shelve.open(CACHE_FILE_PATH)
        d['port'] = port
        d.close()
        command = f"python3 -m codesync.cli --run-auth-server -p {port}" if IS_DEV else \
            f"codesync --run-auth-server -p {port}"
        try:
            subprocess.Popen(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, shell=True)
        except Exception:
            print("Failed to start auth-server")
            raise
        # Sleep added to wait for the server to start
        time.sleep(3)
        return port

    def restart(self):
        """Reuse a live server's port, or persist a fresh port and start a thread."""
        port = self.check_server_running()
        if not port:
            self.clean_up_shelve()
            port = self.get_open_port()
            d = shelve.open(CACHE_FILE_PATH)
            d['port'] = port
            d.close()
            self.start_thread(port)
        return port
| StarcoderdataPython |
12861604 | <filename>pygears_vivado/vivmod.py<gh_stars>1-10
import os
from pygears.hdl.sv import SVModuleInst
from .ip_resolver import IPResolver
class SVVivModuleInst(SVModuleInst):
    """Module instance backed by a Vivado IP, resolved through IPResolver."""

    def __init__(self, node, lang=None):
        # The resolver determines the IP's actual HDL language; the ``lang``
        # argument is intentionally ignored in favour of it.
        resolver = IPResolver(node)
        super().__init__(node, resolver.lang, resolver)

    @property
    def is_generated(self):
        # Vivado IP wrappers are always generated artifacts.
        return True

    @property
    def include(self):
        # HDL sources of the packaged IP live under <ipdir>/hdl.
        return [os.path.join(self.ipdir, 'hdl')]

    def get_wrap_portmap(self, parent_lang):
        """Build ``(port_map, sig_map)`` for wrapping this IP in *parent_lang*.

        For a SystemVerilog parent the tvalid/tready/tdata signals map onto
        interface members (``name.valid``/...); for a Verilog parent they map
        onto flat ``name_valid``/... nets. When the IP itself is 'sv' (or the
        parent language is unknown) ports are passed through by name.
        """
        sig_map = {}
        for s in self.node.meta_kwds['signals']:
            sig_map[s.name] = s.name
        port_map = {}
        for p in self.node.in_ports + self.node.out_ports:
            name = p.basename
            if self.lang == 'sv':
                port_map[name] = name
            elif parent_lang == 'sv':
                sig_map[f'{name}_tvalid'] = f'{name}.valid'
                sig_map[f'{name}_tready'] = f'{name}.ready'
                sig_map[f'{name}_tdata'] = f'{name}.data'
            elif parent_lang == 'v':
                sig_map[f'{name}_tvalid'] = f'{name}_valid'
                sig_map[f'{name}_tready'] = f'{name}_ready'
                sig_map[f'{name}_tdata'] = f'{name}_data'
            else:
                port_map[name] = name
        return port_map, sig_map
| StarcoderdataPython |
4880779 | <reponame>DanielDaCosta/dwh-redshift
import boto3
from botocore.exceptions import ClientError
import logging
import json
import configparser
# Read warehouse settings from the local dwh.cfg file (section [DWH]).
config = configparser.ConfigParser()
config.read('dwh.cfg')

# Cluster sizing, identity and database credentials, loaded once at import.
DWH_CLUSTER_TYPE = config.get("DWH","DWH_CLUSTER_TYPE")
DWH_NUM_NODES = config.get("DWH","DWH_NUM_NODES")
DWH_NODE_TYPE = config.get("DWH","DWH_NODE_TYPE")
DWH_CLUSTER_IDENTIFIER = config.get("DWH","DWH_CLUSTER_IDENTIFIER")
DWH_DB = config.get("DWH","DWH_DB")
DWH_DB_USER = config.get("DWH","DWH_DB_USER")
DWH_DB_PASSWORD = config.get("DWH","DWH_DB_PASSWORD")
DWH_PORT = config.get("DWH","DWH_PORT")
def create_redshift_cluster(role_arn: str) -> dict:
    """Creates a Redshift cluster based on the DWH config file, attaching
    *role_arn* for S3 access.

    Args:
        role_arn (str): redshift iam role arn

    Returns:
        dict: redshift create_cluster response, or None when creation failed
        (the error is logged and swallowed so the script can continue).
    """
    try:
        redshift = boto3.client('redshift')
        response = redshift.create_cluster(
            # Hardware
            ClusterType=DWH_CLUSTER_TYPE,
            NodeType=DWH_NODE_TYPE,
            NumberOfNodes=int(DWH_NUM_NODES),
            # Identifiers & Credentials
            DBName=DWH_DB,
            ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
            MasterUsername=DWH_DB_USER,
            # The "<PASSWORD>" anonymisation placeholder was a syntax error;
            # the password comes from the [DWH] config section.
            MasterUserPassword=DWH_DB_PASSWORD,
            # Roles (for s3 access)
            IamRoles=[role_arn]
        )
        return response
    except Exception as err:
        logging.error(err)
        return None
def open_incoming_tcp(vpc_id: str):
    """Open the warehouse port for incoming TCP on the VPC's default
    security group.

    Args:
        vpc_id (str): id of the VPC the Redshift cluster runs in
    """
    try:
        ec2 = boto3.resource('ec2')
        vpc = ec2.Vpc(id=vpc_id)
        # First security group of the VPC is assumed to be the default one.
        default_sg = list(vpc.security_groups.all())[0]
        # (Removed a leftover debug ``print`` of the security group object.)
        default_sg.authorize_ingress(
            GroupName=default_sg.group_name,
            # NOTE(review): 0.0.0.0/0 opens the port to the whole internet --
            # confirm this is intended.
            CidrIp='0.0.0.0/0',
            IpProtocol='TCP',
            FromPort=int(DWH_PORT),
            ToPort=int(DWH_PORT)
        )
    except Exception as err:
        # Best-effort: log and continue, matching the rest of the script.
        logging.error(err)
if __name__ == '__main__':
    # Resolve the ARN of the pre-created Redshift IAM role from the config.
    iam = boto3.client('iam')
    DWH_IAM_ROLE_NAME = config.get("DWH", "DWH_IAM_ROLE_NAME")
    role_arn = iam.get_role(RoleName=DWH_IAM_ROLE_NAME)['Role']['Arn']
    response = create_redshift_cluster(role_arn)
    ## Check redshift creation
    # NOTE(review): the create response above is immediately overwritten, and
    # a freshly created cluster is normally not 'available' yet -- this block
    # probably only opens the port on a later re-run. Confirm the intended
    # workflow.
    redshift = boto3.client('redshift')
    response = redshift.describe_clusters(ClusterIdentifier=DWH_CLUSTER_IDENTIFIER)['Clusters'][0]
    if response['ClusterStatus'] == 'available':
        DWH_ENDPOINT = response['Endpoint']['Address']
        DWH_ROLE_ARN = role_arn
        VPC_ID = response['VpcId']
        open_incoming_tcp(VPC_ID)
| StarcoderdataPython |
4823611 | from random import randint
from bomb import Bomb
class User:
    """Player avatar: tracks board position, score and remaining lives."""

    DEFAULT_BOMB_CLASS = Bomb
    START_POINTS = 0
    START_LIFE = 3

    def __init__(self, board, life, points, bomb_class):
        self._set_up_user(board)
        self.step = 1
        self.points = points
        self.life = life
        self.bomb_class = bomb_class

    def _set_up_user(self, board):
        """Place the user on a random unoccupied cell of the board."""
        while True:
            row = randint(0, 10)
            col = randint(0, 16)
            if not board.is_field_occupied(row, col):
                self.y = row
                self.x = col
                return

    def draw(self, graphic_buffer, _context):
        """Mark the user's cell with 'U' in the render buffer."""
        graphic_buffer[self.y][self.x] = 'U'

    def is_brick_there(self, bricks, y, x):
        """Return True when any brick occupies cell (y, x)."""
        return any((y, x) == (brick.y, brick.x) for brick in bricks)

    def make_step(self, context):
        """Read one key (w/a/s/d/space) and either move or drop a bomb."""
        deltas = {'w': (-1, 0), 's': (1, 0), 'a': (0, -1), 'd': (0, 1)}
        choice = input('w/a/s/d/space')
        if choice == ' ':
            context.bombs.append(self.bomb_class(context.user))
        elif choice in deltas:
            dy, dx = deltas[choice]
            new_y = self.y + dy * self.step
            new_x = self.x + dx * self.step
            # Only move onto cells that are neither occupied nor bricked.
            if not context.board.is_field_occupied(new_y, new_x) \
                    and not self.is_brick_there(context.bricks, new_y, new_x):
                self.y = new_y
                self.x = new_x
| StarcoderdataPython |
11216454 | <reponame>loads/loads-broker
from string import Template
from tornado.web import StaticFileHandler
class GrafanaHandler(StaticFileHandler):
    """Grafana page handler"""

    def __init__(self, application, request, **kw):
        super(GrafanaHandler, self).__init__(application, request, **kw)
        # Keep a handle on the broker so requests can look up runs.
        self.broker = application.broker

    def _get_run(self, run_id):
        # Imported lazily -- presumably to avoid import cycles; confirm.
        from loadsbroker.db import Run
        from sqlalchemy.orm.exc import NoResultFound
        session = self.broker.db.session()
        try:
            run = session.query(Run).filter(Run.uuid == run_id).one()
        except NoResultFound:
            run = None
        # Returns (run-or-None, session); the caller owns the session.
        return run, session

    async def get(self, path, include_body=True):
        """Serve Grafana assets; ``config.js`` is templated per run."""
        run_id, path = self.path_args
        if not path:
            path = "index.html"
            include_body = True
        run, _ = self._get_run(run_id)
        mgr = self.broker._runs[run.uuid]
        influxdb_options = mgr.influxdb_options
        if not influxdb_options:
            # XXX: guard against not ready yet
            pass
        absolute_path = self.get_absolute_path(self.root, path)
        if absolute_path.endswith("config.js"):
            # Substitute InfluxDB connection details into the dashboard config.
            opts = dict(INFLUX_HOST=influxdb_options.host,
                        INFLUX_USER=influxdb_options.user or "",
                        INFLUX_PASSWORD=influxdb_options.password or "",
                        RUN_ID=run_id)
            with open(absolute_path) as f:
                tmpl = Template(f.read())
            content = tmpl.substitute(opts)
            self.set_status(200)
            self.set_header("Content-Type", "application/json")
            self.set_header("Content-Length", len(content))
            self.write(content)
            await self.flush()
        else:
            # Plain static file serving for every other asset.
            await StaticFileHandler.get(self, path, include_body)
| StarcoderdataPython |
5074880 | <filename>testitems/apps.py
from django.apps import AppConfig
class TestitemsConfig(AppConfig):
    """Django application configuration for the ``testitems`` app."""
    name = 'testitems'
| StarcoderdataPython |
8015283 | # -*- coding: utf-8 -*-
"""Keithley.py: A pyVISA wrapper for Keithley devices
__author__ = "<NAME>"
__copyright__ = "Copyright 2016, <NAME>"
__license__ = "MIT"
__email__ = "<EMAIL>"
"""
import visa
class M2308():
    """Driver for a Keithley 2308 battery-simulator power supply over GPIB.

    Wraps a pyVISA resource; only channel 1 is driven. SCPI command strings
    are kept verbatim from the original implementation.
    """

    def __init__(self, address=16):
        """Open the GPIB resource (default address 16) and reset the unit."""
        self._instr = visa.ResourceManager().open_resource('GPIB0::%s' % address)  # Default GPIB address is 16
        self._result = 0
        self.reset()

    def reset(self):
        """Reset the instrument to power-on defaults (LCD reset separately)."""
        self._instr.write('*RST')  # Reset to power-on defaults
        self._instr.write('DISPlay:TEXT:STATe 0')  # LCD display must be separately reset

    def vset(self, vset):
        """Program the output voltage. 1mV resolution, 0 ~ 15V range."""
        self._instr.write('SOURce1:VOLTage %.3f' % vset)

    def ilim(self, ilim):
        """Program the current limit. 100uA resolution, 6mA ~ 5A range."""
        self._instr.write('SOURce1:CURRent:LIMit:VALue %.4f' % ilim)

    def vlim(self, vlim):
        """Program the voltage protection clamp. 1V resolution, 0 ~ 8V range."""
        self._instr.write('SOURce1:VOLTage:PROTection %i' % vlim)
        self._instr.write('SOURce1:VOLTage:PROTection:CLAMp 0')  # Enable clamp

    def enable(self):
        """Enable Ch1 output."""
        self._instr.write('OUTPut1:STATe 1')

    def disable(self):
        """Disable Ch1 output."""
        self._instr.write('OUTPut1:STATe 0')

    def vmeas(self, smp_avgcount=5, smp_nplc=0.5, noise_floor=1e-3):
        """Measure the output voltage; readings below *noise_floor* return 0.0."""
        # BUG FIX: clamp to the documented 1 ~ 10 averaging range. The
        # original tested `smp_avgcount < 0`, letting 0 through, while the
        # comment and the sibling imeas() both use a lower bound of 1.
        if smp_avgcount < 1:  # 1 ~ 10 sample averaging range
            smp_avgcount = 1
        elif smp_avgcount > 10:
            smp_avgcount = 10
        if smp_nplc < 0.002:  # 0.002 ~ 10 NPLC sampling (33 us ~ 167 ms)
            smp_nplc = 0.002
        elif smp_nplc > 10:
            smp_nplc = 10
        self._instr.write('SENSe:FUNCtion "VOLTage"')  # Set voltage sensing mode
        self._instr.write('SENSe:AVERage %s' % smp_avgcount)  # Set sample averaging
        self._instr.write('SENSe:NPLCycles %s' % smp_nplc)  # Set sampling frequency
        self._result = round(float(self._instr.query('READ?').strip('\n')), 3)  # Read and format response
        if self._result < noise_floor:  # Zero sub mV values
            self._result = float(0)
        return self._result

    def imeas(self, smp_avgcount=5, smp_nplc=0.5, noise_floor=100e-6):
        """Measure the output current; |readings| below *noise_floor* return 0.0."""
        if smp_avgcount < 1:  # 1 ~ 10 sample averaging range
            smp_avgcount = 1
        elif smp_avgcount > 10:
            smp_avgcount = 10
        if smp_nplc < 0.002:  # 0.002 ~ 10 NPLC sampling (33 us ~ 167 ms)
            smp_nplc = 0.002
        elif smp_nplc > 10:
            smp_nplc = 10
        self._instr.write('SENSe:FUNCtion "CURRent"')  # Set current sensing mode
        self._instr.write('SENSe:AVERage %s' % smp_avgcount)  # Set sample averaging
        self._instr.write('SENSe:NPLCycles %s' % smp_nplc)  # Set sampling frequency
        self._instr.write('SENSe:CURRent:RANGe:AUTO 1')  # Enable auto range-finding
        self._result = round(float(self._instr.query('READ?').strip('\n')), 4)  # Read and format response
        if (self._result < noise_floor) and (self._result > -noise_floor):  # Zero sub 100uA values
            self._result = float(0)
        return self._result

    def msgon(self, msg='TEST IN PROGRESS!!!!!!!!!!!!!!!!'):
        """Show *msg* on the instrument LCD (string inputs only)."""
        try:
            msg.isalnum()  # duck-type check: only str-like objects have isalnum
        except AttributeError:
            # BUG FIX: was a Python 2 `print` statement (syntax error on
            # Python 3) guarded by a bare `except`; now catches only the
            # AttributeError that a non-string raises.
            print('Input message is not a string. Please try again.')
        else:
            self._instr.write('DISPlay:TEXT:DATA "%s"' % msg)  # Write string
            self._instr.write('DISPlay:TEXT:STATe 1')  # Enable text display mode

    def msgoff(self, msg=' ' * 32):
        """Blank the LCD text and leave text display mode."""
        self._instr.write('DISPlay:TEXT:DATA "%s"' % msg)  # Restore default text
        self._instr.write('DISPlay:TEXT:STATe 0')  # Disable text display mode

    def dispon(self):
        """Enable the LCD."""
        self._instr.write('DISPlay:ENABle 1')

    def dispoff(self):
        """Disable the LCD."""
        self._instr.write('DISPlay:ENABle 0')
1994299 | <gh_stars>1-10
"""
This module search for GUI controls by sending TAB button events
and comparing the image with the original.
.. note::
It does not work if the GUI changes during the scan.
(e.g. blinking cursor)
"""
import logging
from time import sleep
from PIL import ImageChops, ImageFilter, ImageStat
from pykeyboard import PyKeyboard
from discogui.imglog import img_log, img_log_rects
from discogui.imgutil import focus_wnd, getbbox, grab
from discogui.screenrect import ScreenRect
log = logging.getLogger(__name__)
def darker(im1, im2, box):
    """
    Compare total pixel brightness of *im1* and *im2* inside *box*.

    Returns 0 when im1's pixel sum is the larger one, else 1.
    NOTE(review): the original docstring said "im1 darker -> 0", which looks
    inverted relative to the code; behavior is preserved exactly as-is.
    """
    # box is an 'inside box'; crop expects an 'outside box', so pad by 1 px
    box = box.add_border(1)
    totals = [sum(ImageStat.Stat(image.crop(box)).sum) for image in (im1, im2)]
    assert totals[0] != totals[1], box
    return 0 if totals[0] > totals[1] else 1
def tab_rectangles():
    """
    Return rectangles found by sending TAB button events.
    Does not work if other parts of screen are changing (e.g. blinking cursor)
    :rtype: rectangles list
    """
    ls = []
    img_orig = focus_wnd()
    im1 = img_orig
    k = PyKeyboard()
    while 1:
        k.tap_key(k.tab_key)
        sleep(0.1)  # give the GUI time to redraw the focus highlight
        im2 = grab()
        img_log(im1, "im1")
        img_log(im2, "im2")
        boxes = tab_rect_pair(im1, im2)
        if not boxes:
            # no visible change after TAB: nothing focusable found
            return []
        if len(ls):
            if len(boxes) == 2:
                # the widget losing focus must be the one found last time
                assert boxes[0] == ls[-1]
            if boxes[-1] in ls:
                # focus wrapped around to an already-seen widget: scan done
                break
            ls += [boxes[-1]]
        else:
            # first iteration: keep both the lost- and gained-focus boxes
            ls += boxes
        im1 = im2
    img_log_rects(img_orig, ls, "img_orig")
    log.debug("rectangles found:%s", ls)
    return ls
def tab_rect_pair(img_orig, im_next):
    """
    Locate the widgets that lost and gained focus between two screenshots.

    Returns (lost_box, gained_box) when the diff splits into two regions,
    [single_box] when only one region changed (newer widget styles),
    or None when the screenshots are identical.
    """
    img_diff = ImageChops.difference(img_orig, im_next)
    img_log(img_diff, "img_diff")
    # can be dotted -> filter + enhance color
    img_diff_filtered = img_diff.filter(ImageFilter.MaxFilter(5))
    img_diff_filtered = img_diff_filtered.point(lambda x: 255 * bool(x))
    img_log(img_diff_filtered, "img_diff_filtered")
    bbox = getbbox(img_diff)
    if not bbox:
        return None
    def check_edges(horiz):
        # Scan across the bbox looking for a blank column (horiz=1) or row
        # (horiz=0) that splits the changed area into two rectangles.
        if horiz:
            r1 = bbox.left
            r2 = bbox.right
        else:
            r1 = bbox.top
            r2 = bbox.bottom
        ls = []
        for c in range(int(r1), int(r2)):
            if horiz:
                p1 = (c, bbox.top)
                p2 = (c, bbox.bottom)
            else:
                p1 = (bbox.left, c)
                p2 = (bbox.right, c)
            color1 = sum(img_diff_filtered.getpixel(p1))
            color2 = sum(img_diff_filtered.getpixel(p2))
            # 1 where either edge pixel changed, 0 where both are blank
            ls += [int(bool(color1 + color2))]
        if not 0 in ls:
            log.debug("split pos not found")
            return
        i = ls.index(0)
        if i == 0:
            # blank run starts at the edge: use its far end instead
            ls.reverse()
            i = ls.index(0)
            i = len(ls) - i - 1
        pos = i + r1
        log.debug("split pos found:%s" % pos)
        if horiz:
            rsegment1 = ScreenRect(0, 0, pos, img_orig.size[1])
            rsegment2 = ScreenRect(pos, 0, img_orig.size[0], img_orig.size[1])
        else:
            rsegment1 = ScreenRect(0, 0, img_orig.size[0], pos)
            rsegment2 = ScreenRect(0, pos, img_orig.size[0], img_orig.size[1])
        box1 = getbbox(img_diff.crop(rsegment1))
        box1.move(rsegment1.topleft)
        box2 = getbbox(img_diff.crop(rsegment2))
        box2.move(rsegment2.topleft)
        return box1, box2
    r = check_edges(0)
    if r is None:
        r = check_edges(1)
    if r is None:
        # in new styles the textbox is not changing
        return [bbox]
    box1, box2 = r
    # order the pair so the previously-focused (now "darker") box comes first
    d1 = darker(img_orig, im_next, box1)
    d2 = darker(img_orig, im_next, box2)
    if d1 == d2:
        log.warning("d1 == d2 %s %s %s %s", d1, d2, box1, box2)
    if d1 == 1:
        boxes = (box1, box2)
    else:
        boxes = (box2, box1)
    return boxes
| StarcoderdataPython |
73237 | import json
import requests
url = 'http://localhost:5000'  # base URL of the publipost service
def test(document_name: str, output_name: str, _type: str, data: dict):
    """POST one publipost job to the service and print the raw response."""
    payload = {
        'data': data,
        'template_name': document_name,
        'filename': output_name,
        'type': _type,
    }
    response = requests.post(url + '/publipost', json=payload)
    print(response.text)
# Sample payload: dotted keys address nested fields in the template model.
data = {
    'mission.projectManager.student.firstName': "Paul",
    'mission.projectManager.student.lastName': 'Leveau',
    "mission.documentReference(\"DDE\")": "DAT REF",
}
test('ppt1', 'jeb/test.pptx', 'phoenix', data)
# test('ndf', 'jeb/test.xlsx', 'mission', {
#     'date': 'OUAIPS'
# })
| StarcoderdataPython |
34249 | <reponame>Odin-SMR/odin-api
import attr
from typing import List, Any, Dict, Union
import datetime as dt
from enum import Enum, unique, auto
from dateutil.relativedelta import relativedelta
import numpy as np # type: ignore
DATEFMT = "%Y-%m-%dT%H:%M:%SZ"  # ISO-8601 UTC format used for all timestamps
# Global attributes shared by every generated L2 netCDF file.
COMMON_FILE_HEADER_DATA = {
    "creator_name": '<NAME>',
    "creator_url": 'odin.rss.chalmers.se',
    "creator_email": '<EMAIL>',
    "address": '412 96 Gothenburg, Sweden',
    "institution": 'Chalmers University of Technology',
    "platform": 'Odin',
    "sensor": 'SMR',
    "version_l1b": "8",
    "version_l2": "3.0.0"
}
@unique
class L2Type(Enum):
    """Discriminates which level2 payload a parameter belongs to."""
    l2 = auto()
    l2i = auto()
    l2anc = auto()
@unique
class L2ancDesc(Enum):
    """Ancillary (L2anc) variable descriptions; member name == variable name."""
    LST = "Mean local solar time for the scan."
    Orbit = "Odin/SMR orbit number."
    SZA1D = (
        "Mean solar zenith angle of the observations used in the retrieval "
        "process.")
    SZA = (
        "Approximate solar zenith angle corresponding to each retrieval"
        " value.")
    Theta = "Estimate of the potential temperature profile."
    @property
    def l2type(self) -> L2Type:
        """All ancillary descriptions belong to the L2anc payload."""
        return L2Type.l2anc
@unique
class L2Desc(Enum):
    """Main L2 variable descriptions; member name == variable name."""
    Altitude = "Altitude of retrieved values."
    Apriori = "A priori profile used in the inversion algorithm."
    AVK = "Averaging kernel matrix."
    ErrorNoise = (
        "Error due to measurement thermal noise (square root of the "
        "diagonal elements of the corresponding error matrix).")
    ErrorTotal = (
        "Total retrieval error, corresponding to the error due to thermal"
        " noise and all interfering smoothing errors (square root of the"
        " diagonal elements of the corresponding error matrix).")
    InvMode = "Inversion mode."
    Lat1D = "A scalar representative latitude of the retrieval."
    Latitude = "Approximate latitude of each retrieval value."
    Lon1D = "A scalar representative longitude of the retrieval."
    Longitude = "Approximate longitude of each retrieval value."
    MeasResponse = (
        "Measurement response, defined as the row sum of the averaging"
        " kernel matrix.")
    Pressure = "Pressure grid of the retrieved profile."
    Profile = "Retrieved temperature or volume mixing ratio profile."
    Quality = "Quality flag."
    ScanID = "Satellite time word scan identifier."
    Temperature = (
        "Estimate of the temperature profile (corresponding to the"
        " ZPT input data).")
    Time = "Mean time of the scan."
    VMR = "Volume mixing ratio or retrieved profile."
    @property
    def l2type(self) -> L2Type:
        """All members belong to the main L2 payload."""
        return L2Type.l2
@unique
class L2iDesc(Enum):
    """Diagnostic (L2i) variable descriptions; member name == variable name."""
    GenerationTime = "Processing date."
    Residual = (
        "The difference between the spectra matching retrieved state and used "
        "measurement spectra"
    )
    MinLmFactor = (
        "The minimum value of the Levenberg - Marquardt factor during "
        "the OEM iterations"
    )
    FreqMode = "Odin/SMR observation frequency mode."
    @property
    def l2type(self) -> L2Type:
        """All diagnostic descriptions belong to the L2i payload."""
        return L2Type.l2i
@unique
class DType(Enum):
    """Storage type codes used when writing netCDF variables."""
    i8 = "i8"
    f4 = "f4"
    double = "double"
@unique
class Dimension(Enum):
    """netCDF dimension signatures: per-scan scalar, profile, or matrix."""
    d1 = ["time"]
    d2 = ["time", "level"]
    d3 = ["time", "level", "level"]
@unique
class Unit(Enum):
    """Physical units; ``product`` is a placeholder resolved per product."""
    time = "days since 1858-11-17 00:00"
    altitude = "m"
    lat = "degrees north"
    lon = "degrees east"
    hours = "hours"
    unitless = "-"
    pressure = "Pa"
    temperature = "K"
    degrees = "degrees"
    koverk = "K/K"
    poverp = "%/%"
    product = "product"
@attr.s
class Parameter:
    """Metadata for one variable written to the L2 netCDF file."""
    description = attr.ib(type=Union[L2Desc, L2ancDesc, L2iDesc])
    unit = attr.ib(type=Unit)
    dtype = attr.ib(type=DType)
    dimension = attr.ib(type=Dimension)
    @property
    def name(self) -> str:
        """Variable name, taken from the description enum member."""
        return self.description.name
    @property
    def l2type(self) -> L2Type:
        """Which level2 payload this parameter is read from."""
        return self.description.l2type
    def get_description(self, istemperature: bool) -> str:
        """Return the description text; ``Profile`` wording depends on product."""
        if self.description == L2Desc.Profile:
            return (
                "Retrieved temperature profile."
                if istemperature
                else "Retrieved volume mixing ratio."
            )
        return self.description.value
    def get_unit(self, istemperature: bool) -> Unit:
        """Resolve product-dependent units (AVK and ``Unit.product`` members)."""
        if self.description == L2Desc.AVK:
            return Unit.koverk if istemperature else Unit.poverp
        elif self.unit != Unit.product:
            return self.unit
        else:
            return (
                Unit.temperature if istemperature
                else Unit.unitless
            )
@attr.s
class L2File:
    """Ordered list of parameters written to one L2 netCDF file."""
    parameters = attr.ib(type=List[Parameter])
@attr.s
class Filter:
    """Quality-filter thresholds applied to L2i diagnostics."""
    residual = attr.ib(type=float)
    minlmfactor = attr.ib(type=float)
@attr.s
class L2anc:
    """Ancillary per-scan data (see :class:`L2ancDesc` for field meanings)."""
    LST = attr.ib(type=float)
    Orbit = attr.ib(type=int)
    SZA1D = attr.ib(type=float)
    SZA = attr.ib(type=List[float])
    Theta = attr.ib(type=List[float])
@attr.s
class L2:
    """Main level2 retrieval record for one scan (see :class:`L2Desc`)."""
    InvMode = attr.ib(type=str)
    ScanID = attr.ib(type=int)
    Time = attr.ib(type=dt.datetime)
    Lat1D = attr.ib(type=float)
    Lon1D = attr.ib(type=float)
    Quality = attr.ib(type=float)
    Altitude = attr.ib(type=List[float])
    Pressure = attr.ib(type=List[float])
    Profile = attr.ib(type=List[float])
    Latitude = attr.ib(type=List[float])
    Longitude = attr.ib(type=List[float])
    Temperature = attr.ib(type=List[float])
    ErrorTotal = attr.ib(type=List[float])
    ErrorNoise = attr.ib(type=List[float])
    MeasResponse = attr.ib(type=List[float])
    Apriori = attr.ib(type=List[float])
    VMR = attr.ib(type=List[float])
    AVK = attr.ib(type=List[List[float]])
@attr.s
class L2i:
    """Level2 processing diagnostics with quality filtering."""
    GenerationTime = attr.ib(type=dt.datetime)
    Residual = attr.ib(type=float)
    MinLmFactor = attr.ib(type=float)
    FreqMode = attr.ib(type=int)
    @property
    def filter(self) -> Filter:
        """Thresholds; frequency modes 8/13/19 tolerate a larger LM factor."""
        return Filter(
            residual=1.5,
            minlmfactor=10. if self.FreqMode in [8., 13., 19.] else 2.
        )
    def isvalid(self) -> bool:
        """True when both diagnostics are finite and within the thresholds."""
        return (
            np.isfinite(self.Residual)
            and np.isfinite(self.MinLmFactor)
            and self.Residual <= self.filter.residual
            and self.MinLmFactor <= self.filter.minlmfactor
        )
@attr.s
class L2Full:
    """Complete level2 record: diagnostics (l2i), ancillary (l2anc), data (l2)."""
    l2i = attr.ib(type=L2i)
    l2anc = attr.ib(type=L2anc)
    l2 = attr.ib(type=L2)
    # validators connect this class to L2iDesc, L2ancDesc, L2Desc, and Parameter
    @l2i.validator
    def _check_includes_all_l2idesc_attributes(self, attribute, value):
        assert all([hasattr(self.l2i, v.name) for v in L2iDesc])
    @l2anc.validator
    def _check_includes_all_l2ancdesc_attributes(self, attribute, value):
        assert all([hasattr(self.l2anc, v.name) for v in L2ancDesc])
    @l2.validator
    def _check_includes_all_l2desc_attributes(self, attribute, value):
        assert all([hasattr(self.l2, v.name) for v in L2Desc])
    def get_data(self, parameter: Parameter):
        """Fetch the value for *parameter* from the sub-record it belongs to."""
        if parameter.l2type is L2Type.l2i:
            return getattr(self.l2i, parameter.name)
        elif parameter.l2type is L2Type.l2anc:
            return getattr(self.l2anc, parameter.name)
        return getattr(self.l2, parameter.name)
def get_file_header_data(
        freqmode: int,
        invmode: str,
        product: str,
        time_coverage_start: dt.datetime,
        time_coverage_end: dt.datetime
) -> Dict[str, str]:
    """Assemble the netCDF global attributes for one L2 product file.

    Combines per-product fields with the shared COMMON_FILE_HEADER_DATA.
    """
    return {
        "observation_frequency_mode": str(freqmode),
        "inversion_mode": invmode,
        "level2_product_name": product,
        "date_created": dt.datetime.utcnow().strftime(DATEFMT),
        "time_coverage_start": time_coverage_start.strftime(DATEFMT),
        "time_coverage_end": time_coverage_end.strftime(DATEFMT),
        **COMMON_FILE_HEADER_DATA,
    }
def to_l2(l2: Dict[str, Any], product: str) -> L2:
    """Build an :class:`L2` record from a raw level2 dict.

    ``Profile`` mirrors ``Temperature`` for temperature products and ``VMR``
    otherwise; ``Time`` converts MJD days from the MJD epoch to a datetime.
    """
    mjd_epoch = dt.datetime(1858, 11, 17)
    return L2(
        InvMode=l2["InvMode"],
        ScanID=l2["ScanID"],
        Time=mjd_epoch + relativedelta(days=l2["MJD"]),
        Lat1D=l2["Lat1D"],
        Lon1D=l2["Lon1D"],
        Quality=l2["Quality"],
        Altitude=l2["Altitude"],
        Pressure=l2["Pressure"],
        Profile=l2["Temperature"] if is_temperature(product) else l2["VMR"],
        Latitude=l2["Latitude"],
        Longitude=l2["Longitude"],
        Temperature=l2["Temperature"],
        ErrorTotal=l2["ErrorTotal"],
        ErrorNoise=l2["ErrorNoise"],
        MeasResponse=l2["MeasResponse"],
        Apriori=l2["Apriori"],
        VMR=l2["VMR"],
        AVK=l2["AVK"],
    )
def to_l2anc(l2: Dict[str, Any]) -> L2anc:
    """Build an :class:`L2anc` record by copying the ancillary fields."""
    fields = ("LST", "Orbit", "SZA1D", "SZA", "Theta")
    return L2anc(**{name: l2[name] for name in fields})
def to_l2i(l2: Dict[str, Any]) -> L2i:
    """Build an :class:`L2i` record, parsing the generation timestamp."""
    generated = dt.datetime.strptime(l2["GenerationTime"], DATEFMT)
    return L2i(
        GenerationTime=generated,
        Residual=l2["Residual"],
        MinLmFactor=l2["MinLmFactor"],
        FreqMode=l2["FreqMode"],
    )
def generate_filename(
        project: str, product: str, date_start: dt.datetime) -> str:
    """Build the monthly L2 netCDF file name for *project*/*product*.

    Separators in the product name (" / ", " - ", " ") are normalised to
    single dashes; the month is zero-padded to two digits.
    """
    slug = product.replace(" / ", "-").replace(" - ", "-").replace(" ", "-")
    return f"Odin-SMR_L2_{project}_{slug}_{date_start.year}-{date_start.month:02}.nc"
def is_temperature(product: str) -> bool:
    """Return True when *product* names a temperature product."""
    return product.count("Temperature") > 0
# Canonical, ordered definition of every variable written to an L2 file.
L2FILE = L2File([
    Parameter(
        L2iDesc.GenerationTime, Unit.time, DType.f4, Dimension.d1
    ),
    Parameter(
        L2Desc.Altitude, Unit.altitude, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Apriori, Unit.product, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.AVK, Unit.product, DType.f4, Dimension.d3
    ),
    Parameter(
        L2Desc.ErrorNoise, Unit.product, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.ErrorTotal, Unit.product, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Lat1D, Unit.lat, DType.f4, Dimension.d1
    ),
    Parameter(
        L2Desc.Latitude, Unit.lat, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Lon1D, Unit.lon, DType.f4, Dimension.d1
    ),
    Parameter(
        L2Desc.Longitude, Unit.lon, DType.f4, Dimension.d2
    ),
    Parameter(
        L2ancDesc.LST, Unit.hours, DType.f4, Dimension.d1
    ),
    Parameter(
        L2Desc.MeasResponse, Unit.unitless, DType.f4, Dimension.d2
    ),
    Parameter(
        L2ancDesc.Orbit, Unit.unitless, DType.f4, Dimension.d1
    ),
    Parameter(
        L2Desc.Pressure, Unit.pressure, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Profile, Unit.product, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.ScanID, Unit.unitless, DType.i8, Dimension.d1
    ),
    Parameter(
        L2ancDesc.SZA1D, Unit.degrees, DType.f4, Dimension.d1
    ),
    Parameter(
        L2ancDesc.SZA, Unit.degrees, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Temperature, Unit.temperature, DType.f4, Dimension.d2
    ),
    Parameter(
        L2ancDesc.Theta, Unit.temperature, DType.f4, Dimension.d2
    ),
    Parameter(
        L2Desc.Time, Unit.time, DType.double, Dimension.d1
    )
])
| StarcoderdataPython |
3433161 | <reponame>Iamlegend-Imani/airbnb-plotly-dash-app
'''
Used for creating marks for the input bathroom slider
in prediction.py
'''
# Slider marks for 0 to 8 bathrooms in half-bath steps. Whole counts stay
# ints (label "3", not "3.0"); half counts are floats (label "3.5").
bathroom_marks = {
    mark: str(mark)
    for mark in (i // 2 if i % 2 == 0 else i / 2 for i in range(17))
}
157804 | from gamechangerml.src.search.sent_transformer.finetune import STFinetuner
from gamechangerml.configs.config import EmbedderConfig
from gamechangerml.api.utils.pathselect import get_model_paths
from gamechangerml.api.utils.logger import logger
import argparse
import os
from datetime import datetime
model_path_dict = get_model_paths()
LOCAL_TRANSFORMERS_DIR = model_path_dict["transformers"]  # local model cache dir
BASE_MODEL_NAME = EmbedderConfig.BASE_MODEL  # default base sentence-transformer
def main(data_path, model_load_path, model_save_path):
    """Fine-tune the sentence transformer on the CSV at *data_path*.

    Loads the model from *model_load_path*, trains with the hyperparameters in
    ``EmbedderConfig.FINETUNE``, saves to *model_save_path*, and returns
    whatever ``STFinetuner.retrain`` returns.
    """
    tuner = STFinetuner(model_load_path=model_load_path, model_save_path=model_save_path, **EmbedderConfig.FINETUNE)
    return tuner.retrain(data_path)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Finetuning the sentence transformer model")
    parser.add_argument(
        "--data-path", "-d",
        dest="data_path",
        required=True,
        help="path to csv with finetuning data"
    )
    parser.add_argument(
        "--model-load-path", "-m",
        dest="model_load_path",
        required=False,
        help="path to load model for fine-tuning"
    )
    parser.add_argument(
        "--model-save-path", "-s",
        dest="model_save_path",
        required=False,
        help="path to save model after fine-tuning"
    )
    args = parser.parse_args()
    ## getting default paths
    # default load path: the base model inside the local transformers dir
    if args.model_load_path:
        model_load_path = args.model_load_path
    else:
        model_load_path = os.path.join(LOCAL_TRANSFORMERS_DIR, BASE_MODEL_NAME)
    # default save path: load path suffixed with today's date (YYYYMMDD)
    if args.model_save_path:
        model_save_path = args.model_save_path
    else:
        model_save_path = model_load_path + str(datetime.now().strftime("%Y%m%d"))
    data_path = args.data_path
    logger.info("\n|---------------------Beginning to finetune model-----------------------|")
    main(data_path, model_load_path, model_save_path)
    logger.info("|------------------------Done finetuning model--------------------------|\n")
3218764 | <reponame>shareablee/XlsxWriter
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, <NAME>, <EMAIL>
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWritePageSetup(unittest.TestCase):
    """
    Test the Worksheet _write_page_setup() method.
    """
    def setUp(self):
        # Capture the worksheet's XML output in an in-memory buffer.
        self.fh = StringIO()
        self.worksheet = Worksheet()
        self.worksheet._set_filehandle(self.fh)
    def test_write_page_setup_none(self):
        """Test the _write_page_setup() method. Without any page setup"""
        self.worksheet._write_page_setup()
        # No page setup configured: no <pageSetup> element should be written.
        exp = ''
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
    def test_write_page_setup_landscape(self):
        """Test the _write_page_setup() method. With set_landscape()"""
        self.worksheet.set_landscape()
        self.worksheet._write_page_setup()
        exp = """<pageSetup orientation="landscape"/>"""
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
    def test_write_page_setup_portrait(self):
        """Test the _write_page_setup() method. With set_portrait()"""
        self.worksheet.set_portrait()
        self.worksheet._write_page_setup()
        exp = """<pageSetup orientation="portrait"/>"""
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
    def test_write_page_setup_paper(self):
        """Test the _write_page_setup() method. With set_paper()"""
        self.worksheet.set_paper(9)  # paper size 9 = A4
        self.worksheet._write_page_setup()
        exp = """<pageSetup paperSize="9" orientation="portrait"/>"""
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
    def test_write_page_setup_print_across(self):
        """Test the _write_page_setup() method. With print_across()"""
        self.worksheet.print_across()
        self.worksheet._write_page_setup()
        exp = """<pageSetup pageOrder="overThenDown" orientation="portrait"/>"""
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
8084179 | <reponame>orctom/hao
# -*- coding: utf-8 -*-
import functools
import os
import socket
import traceback
import typing
import yaml
from . import paths, singleton
ENV = os.environ.get("env")  # deployment environment name, e.g. "prod"
HOSTNAME = socket.gethostname()  # used to select host-specific config files
class Config(object, metaclass=singleton.Multiton):
    """Layered YAML configuration loaded from the project's config directory.

    One instance per config name (Multiton metaclass). File lookup prefers an
    environment-specific file when $env is set, otherwise a host-specific one.
    """
    def __init__(self, config_name='config') -> None:
        super().__init__()
        self.config_name = config_name or 'config'  # force not none
        self.config_dir = get_config_dir() or os.getcwd()
        self.conf = self.read_conf()
    def read_conf(self):
        """Locate and parse the most specific YAML file; None when not found."""
        try:
            if self.config_name.endswith('.yml'):
                # explicit file name: use it verbatim
                return self._conf_from(self.config_name)
            if ENV is not None:
                # prefer "<name>-<env>.yml", fall back to "<name>.yml"
                for config_file_name in [f"{self.config_name}-{ENV}.yml", f"{self.config_name}.yml"]:
                    conf = self._conf_from(config_file_name)
                    if conf is not None:
                        return conf
            else:
                # no env set: prefer a host-specific file
                for config_file_name in [f"{self.config_name}-{HOSTNAME}.yml", f"{self.config_name}.yml"]:
                    conf = self._conf_from(config_file_name)
                    if conf is not None:
                        return conf
            return None
        except ModuleNotFoundError:
            print(f"[config] expecting a 'conf' module in current directory")
        except Exception as e:
            print(f"[config] {e}")
            traceback.print_exc()
    def _conf_from(self, config_file):
        """Parse one YAML file; returns dict, {} on parse error, None if absent."""
        if config_file[0] in ('/', '~', '$'):
            # absolute / home / env-var path: expand rather than join
            config_file = paths.expand(config_file)
        else:
            config_file = os.path.join(self.config_dir, config_file)
        if not os.path.exists(config_file):
            print(f"[config] from: {config_file}, not exist")
            return None
        with open(config_file, 'r') as stream:
            try:
                conf = yaml.safe_load(stream)
                print(f"[config] from: {config_file}, loaded")
                return conf or {}
            except yaml.YAMLError as e:
                print(f"[config] failed to load from: {config_file}, due to: {e}")
                traceback.print_exc()
                return {}
    def get(self, name, default_value=None):
        """Dotted-path lookup (e.g. "db.host"); default when any step is missing."""
        if name is None:
            return default_value
        cfg = self.conf
        if cfg is None:
            return default_value
        for _key in name.split('.'):
            if isinstance(cfg, str):
                # cannot descend into a scalar value
                return default_value
            cfg = cfg.get(_key)
            if cfg is None:
                return default_value
        return cfg
    def get_path(self, name, default_value=None):
        """Like :meth:`get` but expands the result into a filesystem path."""
        if name is None:
            return default_value
        cfg = self.conf
        if cfg is None:
            return paths.get_path(default_value) if default_value else None
        for _key in name.split('.'):
            if isinstance(cfg, str):
                return paths.get_path(default_value) if default_value else None
            cfg = cfg.get(_key)
            if cfg is None:
                return default_value
        return paths.get_path(cfg)
def is_production():
    """True when the $env environment variable is set to "prod"."""
    return ENV == 'prod'
def is_not_production():
    """Convenience negation of :func:`is_production`."""
    return not is_production()
def get_config_dir():
    """Resolve the directory holding YAML config files.

    Order: $CONFIG_DIR (if it exists) -> "<project root>/conf" ->
    "<cwd>/conf". Side effect: exports $program_name from $_ when available.
    """
    config_dir = os.environ.get("CONFIG_DIR")
    if config_dir is not None:
        if not os.path.exists(config_dir):
            print(f'[config] CONFIG_DIR: {config_dir} DOES NOT EXIST, trying from default path')
        else:
            return config_dir
    root_path = paths.project_root_path()
    if root_path is None:
        root_path = os.getcwd()
    program_path = os.environ.get('_')
    if program_path:
        # remember the invoking program's basename for downstream templating
        os.environ['program_name'] = os.path.basename(program_path)
    return os.path.join(root_path, 'conf')
def config_from(config_file_name):
    """Back-compat alias for :func:`get_config` with a file/config name."""
    return get_config(config_file_name)
def get_config(config: typing.Optional[typing.Union[str, Config]] = None):
    """Coerce *config* (None, a config name, or a Config) into a Config.

    Raises ValueError for any other type.
    """
    if config is None:
        return Config()
    if isinstance(config, str):
        return Config(config)
    if isinstance(config, Config):
        return config
    raise ValueError(f'Unsupported cfg type: {type(config)}')
def check_configured(silent=False):
    """Decorator factory: ensure a Config is loadable before calling *func*.

    With silent=True a missing config makes the wrapper return the caller's
    default value instead of raising.
    """
    def decorator(func):
        @functools.wraps(func)
        def check(*args, **kwargs):
            # assumes the wrapped signature is (name, default_value, config),
            # so the config is positional arg #3 — TODO confirm for new uses
            config = args[2] if len(args) >= 3 else kwargs.get('config')
            cfg = get_config(config)
            if cfg is None:
                if silent:
                    # NOTE(review): returns args[0] (the name) when >=2
                    # positional args are given — looks like it should be
                    # args[1] (default_value); confirm before changing.
                    return args[0] if len(args) >= 2 else kwargs.get('default_value')
                raise ValueError('Failed to configure from "config.yml" in "conf" package')
            return func(*args, **kwargs)
        return check
    return decorator
@check_configured(silent=True)
def get(name, default_value=None, config: typing.Optional[typing.Union[str, Config]] = None):
    """Module-level convenience wrapper around :meth:`Config.get`."""
    return get_config(config).get(name, default_value)
@check_configured(silent=True)
def get_path(name, default_value=None, config: typing.Optional[typing.Union[str, Config]] = None):
    """Module-level convenience wrapper around :meth:`Config.get_path`."""
    return get_config(config).get_path(name, default_value)
| StarcoderdataPython |
6644047 | <filename>hashtable_module.py
#!/usr/bin/env python3
# shebang for linux
"""
simple hashtable
hashtable implementation using trivial hash function
data is stored as a dictionary of key value pairs
REFERENCE: - https://en.wikipedia.org/wiki/Hash_function#Hash_function_algorithms
"""
# import std lib
import sys
import random
# import 3rd party lib
# import usr lib
# global var
# start debugging
#import pdb
#pdb.set_trace()
DEBUG = 0;  # set to 1 to run the doctest suite when executed as a script
def hash_init(size):
    """
    initialises new hash table
    INPUTS: size of 1D hash table, int
    size should be greater than length of data * 2 in order to reduce conflicts
    OUTPUTS: hash table (list of single-entry buckets)
    EXAMPLES:
    >>> table = hash_init(8);
    """
    # BUG FIX: the original created ONE placeholder dict and appended the
    # same object to every bucket, so mutating any bucket's entry mutated
    # all of them. Create a fresh placeholder per bucket instead.
    return [[{"key": "", "value": None}] for _ in range(size)];
def hash_example(hash):
    """
    fills initialised hash table with random data
    INPUTS: - hash table as returned by hash_init
    OUTPUTS: the same table; every entry keyed "test_name" with a random value
    EXAMPLES:
    >>> hash_example(hash_init(8));
    """
    # BUG FIX (docs): the original docstring claimed an `item` input and an
    # index output, which this function never had. Also replaced index-based
    # loops with direct iteration over buckets/entries.
    for bucket in hash:
        for entry in bucket:
            entry["key"] = "test_name";
            entry["value"] = random.random();
    return hash;
def hash_add(hash, item):
    """
    adds key value pair to hashtable
    INPUTS: - hashtable variable
            - item in format {"key": "string", "value": any}
    OUTPUTS: bucket index of the new item
    EXAMPLES:
    >>> hash_add(hash, item);
    """
    # trivial hash: concatenate the decimal ordinals of the key's characters
    digits = "".join(str(ord(ch)) for ch in item["key"]);
    index = int(digits) % len(hash);
    if hash[index][0]["key"] == "":
        # bucket unused: overwrite the placeholder entry
        hash[index][0] = item;
    else:
        # collision: chain the item onto the bucket
        hash[index].append(item);
    return index;
def hash_retrieve(hash, key):
    """
    retrieves the value stored for a key
    INPUTS: - hashtable variable
            - key, string
    OUTPUTS: stored value
    RAISES: IndexError when the key is not present
    EXAMPLES:
    >>> hash_retrieve(hash, "name");
    """
    # same trivial hash as hash_add: concatenated character ordinals
    digits = "".join(str(ord(ch)) for ch in key);
    index = int(digits) % len(hash);
    # BUG FIX: the original while loop with manual index bookkeeping crashed
    # with an opaque "list index out of range" when the key was missing; scan
    # the bucket directly and raise an explicit error instead (same type).
    for entry in hash[index]:
        if entry["key"] == key:
            return entry["value"];
    raise IndexError("key not found: %s" % key);
def main(argv):
    """
    tests simple hashtable library
    INPUTS: argv, list of command line arguments (unused)
    OUTPUTS: return 0 on success
    EXAMPLES:
    >>> main([]);
    0
    """
    table = hash_init(100);
    table = hash_example(table);
    entry = {"key": "new_name", "value": 7};
    hash_add(table, entry);
    # round-trip check: the stored value must come back unchanged
    if hash_retrieve(table, "new_name") != entry["value"]:
        raise UserWarning("hash not working");
    return 0;
# script autorun
if __name__ == "__main__":
    #run program
    try:
        main(sys.argv);
    except UserWarning as err:
        # self-test failure: report on stderr and exit non-zero
        print("%s" % (err), file=sys.stderr);
        exit(1);
    if DEBUG == 1:
        # unit test: doctests only run when the DEBUG flag is enabled
        import doctest;
        doctest.testmod();
8047550 | #!/usr/bin/python
# Copyright: (c) 2019, DellEMC
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils import dellemc_ansible_utils as utils
import logging
__metaclass__ = type  # Python 2/3 compat: force new-style classes
# Ansible module maturity / support-channel metadata.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'
                    }
DOCUMENTATION = r'''
---
module: dellemc_powermax_srdf
version_added: '2.6'
short_description: Manage SRDF pair on PowerMax/VMAX Storage
System
description:
- Managing SRDF link on PowerMax Storage System includes creating SRDF pair for
a storage group, modify SRDF mode, modify SRDF state of an existing
SRDF pair and delete SRDF pair. All create and modify calls are asynchronous
by default.
extends_documentation_fragment:
- dellemc.dellemc_powermax
author:
- <NAME> (<EMAIL>)
options:
sg_name:
description:
- Name of Storage Group. SRDF Pairings are managed at a storage group level.
- Required to identify the SRDF link.
type: string
default: None
serial_no:
description:
- The serial number will refer to the source (R1) PowerMax/VMAX array when
protecting a storage group. However srdf_state operations may be issued
from R1 or R2 array.
type: string
default: None
remote_serial_no:
description:
- Integer 12 Digit Serial Number of remote PowerMAX or VMAX array (R2).
- Required while creating an SRDF link.
type: string
default: None
rdfg_no:
description:
- The RDF group number.
- Optional parameter for each call. For create, if specified, the array
will reuse the RDF group, otherwise return error. For modify and delete
     operations, if the RDF group number is not specified, and the storage
group is protected by multiple RDF Groups, then an error will be raised.
type: number
default: None
state:
description:
- Define whether the SRDF pairing should exist or not.
- present indicates that the SRDF pairing should exist in system.
- absent indicates that the SRDF pairing should not exist in system.
required: true
choices: [absent, present]
srdf_mode:
description:
- The replication mode of the SRDF pair.
- Required when creating SRDF pair.
- Can be modified by providing required value.
choices: [Active, Adaptive Copy, Synchronous, Asynchronous]
type: string
default: None
srdf_state:
description:
- Desired state of the SRDF pairing. While creating a new SRDF pair, allowed
values are 'Establish' and 'Suspend'. If state is not specified, the pair
will be created in 'Suspended' state. When modifying the state, only
certain changes are allowed.
choices: [Establish, Resume, Restore, Suspend, Swap, Split, Failback,
Failover, Setbias]
new_rdf_group:
description:
- Overrides the SRDF Group selection functionality and forces the creation
of a new SRDF Group.
default: false
type: bool
wait_for_completion:
description:
- Flag to indicate if the operation should be run synchronously or
asynchronously. True signifies synchronous execution. By default, all
create and update operations will be run asynchronously.
default: False
type: bool
job_id:
description:
- Job ID of an Asynchronous task. Can be used to get details of a job.
default: None
type: str
witness:
description:
- Flag to specify use of Witness for a Metro configuration. Setting to True
signifies to use Witness, setting it to False signifies to use Bias. It
is recommended to configure a witness for SRDF Metro in a production
     environment; this is configured via the Unisphere for PowerMax UI or REST.
- The flag can be set only for modifying srdf_state to either Establish,
Suspend or Restore.
- While creating a Metro configuration, witness flag must be set to True.
default: None
type: bool
'''
EXAMPLES = r'''
- name: Create and establish storagegroup SRDF/a pairing
register: Job_details_body
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
remote_serial_no: "{{remote_serial_no}}"
srdf_mode: 'Asynchronous'
srdf_state: 'Establish'
state: 'present'
- name: Create storagegroup SRDF/s pair in default suspended mode as an
Synchronous task
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name2}}"
remote_serial_no: "{{remote_serial_no}}"
state: 'present'
srdf_mode: 'Synchronous'
wait_for_completion: True
- name: Create storagegroup Metro SRDF pair with Witness for resiliency
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
    password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
remote_serial_no: "{{remote_serial_no}}"
state: 'present'
srdf_mode: 'Active'
wait_for_completion: True
srdf_state: 'Establish'
- name: Suspend storagegroup Metro SRDF pair
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
remote_serial_no: "{{remote_serial_no}}"
state: 'present'
srdf_state: 'Suspend'
- name: Establish link for storagegroup Metro SRDF pair and use Bias for
resiliency
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
remote_serial_no: "{{remote_serial_no}}"
state: 'present'
wait_for_completion: False
srdf_state: 'Establish'
witness: False
- name: Get SRDF details
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
state: 'present'
- name: Modify SRDF mode
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
srdf_mode: 'Synchronous'
state: 'present'
- name: Failover SRDF link
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
srdf_state: 'Failover'
state: 'present'
- name: Get SRDF Job status
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
    password: "{{password}}"
serial_no: "{{serial_no}}"
job_id: "{{Job_details_body.Job_details.jobId}}"
state: 'present'
- name: Establish SRDF link
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name2}}"
srdf_state: 'Establish'
state: 'present'
- name: Suspend SRDF link
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name2}}"
srdf_state: 'Suspend'
state: 'present'
- name: Delete SRDF link
dellemc_powermax_srdf:
unispherehost: "{{unispherehost}}"
universion: "{{universion}}"
verifycert: "{{verifycert}}"
user: "{{user}}"
password: "{{password}}"
serial_no: "{{serial_no}}"
sg_name: "{{sg_name}}"
state: 'absent'
'''
RETURN = r'''
changed: [localhost] => {
"Job_details": {
"completed_date_milliseconds": 0,
"jobId": "1570622921504",
"last_modified_date": "Oct-09-2019 08:08:41.505",
"last_modified_date_milliseconds": 1570622921505,
"name": "Protect Storage Group - SRDF Ansible_Test_SRDF2",
"resourceLink": "https://xxx:8443/univmax/restapi/90/replication
/symmetrix/xx/storagegroup/x/rdf_group/x",
"result": "Started job execution on Wed 9 Oct 2019 08:08:43 EDT",
"status": "RUNNING",
"task": [
{
"description": "SRDF protect Storage Group Ansible_Test_SRDF2
to remote array xx, mode = Synchronous, establish = false,
remote Storage Group = Ansible_Test_SRDF2",
"execution_order": 1
}
],
"username": "C:xxx\\********"
},
"SRDF_link_details": {
"hop2Modes": [],
"hop2Rdfgs": [],
"hop2States": [],
"largerRdfSides": [
"Equal"
],
"localR1InvalidTracksHop1": 0,
"localR2InvalidTracksHop1": 0,
"modes": [
"Asynchronous"
],
"rdfGroupNumber": 25,
"remoteR1InvalidTracksHop1": 0,
"remoteR2InvalidTracksHop1": 0,
"states": [
"Consistent"
],
"storageGroupName": "Ansible_Test_SRDF",
"symmetrixId": "xxx",
"totalTracks": 8205,
"volumeRdfTypes": [
"R1"
]
},
"changed": true,
"invocation": {
"module_args": {
"wait_for_completion": true,
"new_rdf_group": false,
"job_id": null,
"password": "<PASSWORD>",
"rdfg_no": null,
"remote_serial_no": "xx",
"serial_no": "xx",
"sg_name": "Ansible_Test_SRDF",
"srdf_mode": "Asynchronous",
"srdf_state": "Establish",
"state": "present",
"unispherehost": "xx",
"universion": 90,
"user": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
"verifycert": false
}
}
}
'''
# Module-level logger; 'log_devel' is the parameter name expected by the
# dellemc utils helper (sic) and sets the logging level.
LOG = utils.get_logger(
    module_name='dellemc_powermax_srdf',
    log_devel=logging.INFO)
# True when the PyU4V SDK is importable; checked in PowerMax_SRDF.__init__
HAS_PYU4V = utils.has_pyu4v_sdk()
# None when the installed PyU4V version is supported, otherwise an error
# message string that the module fails with
PYU4V_VERSION_CHECK = utils.pyu4v_version_check()
# Application Type
# identifier sent to Unisphere so requests can be attributed to this module
APPLICATION_TYPE = 'ansible_v1.1'
class PowerMax_SRDF(object):
    '''Class with srdf operations.

    Wraps the PyU4V replication API to create, inspect, modify and delete
    SRDF pairings for a storage group, and to query asynchronous jobs.
    Results are accumulated in self.result and returned via exit_json.
    '''

    def __init__(self):
        '''Define all parameters required by this module and connect.'''
        self.module_params = utils.get_powermax_management_host_parameters()
        self.module_params.update(self.get_powermax_srdf_pair_parameters())
        # initialize the ansible module
        self.module = AnsibleModule(
            argument_spec=self.module_params,
            supports_check_mode=False
        )
        # result is a dictionary that contains changed status, srdf_link
        # and job details
        self.result = {
            "changed": False,
            "SRDF_link_details": {},
            "Job_details": {}}
        if HAS_PYU4V is False:
            self.module.fail_json(msg="Ansible modules for PowerMax require "
                                      "the PyU4V python library to be "
                                      "installed. Please install the library "
                                      "before using these modules.")
        if PYU4V_VERSION_CHECK is not None:
            # Bug fix: log before failing -- fail_json() exits the module,
            # so the original LOG.error() placed after it was unreachable.
            LOG.error(PYU4V_VERSION_CHECK)
            self.module.fail_json(msg=PYU4V_VERSION_CHECK)
        universion_details = utils.universion_check(
            self.module.params['universion'])
        LOG.info("universion_details: {0}".format(universion_details))
        if not universion_details['is_valid_universion']:
            self.module.fail_json(msg=universion_details['user_message'])
        self.u4v_conn = utils.get_U4V_connection(
            self.module.params, application_type=APPLICATION_TYPE)
        self.replication = self.u4v_conn.replication
        LOG.info('Got PyU4V instance for replication on PowerMax ')
        # current SRDF state -> srdf_state actions that are no-ops in that
        # state, for non-Metro modes; used by the idempotency check
        self.idempotency_dict = {
            'Synchronized': ['Establish', 'Resume'],
            'Consistent': ['Establish', 'Resume'],
            'Suspended': ['Suspend', 'Failover'],
            'Failed Over': ['Suspend', 'Failover'],
            'SyncInProg': ['Establish', 'Resume'],
        }
        # same mapping for Metro (Active mode) configurations
        self.idempotency_dict_metro = {
            'Suspended': ['Suspend'],
            'SyncInProg': ['Establish'],
            'ActiveActive': ['Establish'],
            'ActiveBias': ['Establish']
        }

    def get_powermax_srdf_pair_parameters(self):
        '''Return the module argument spec specific to SRDF pairings.'''
        return dict(
            sg_name=dict(required=False, type='str'),
            remote_serial_no=dict(required=False, type='str'),
            state=dict(required=True, type='str', choices=['present',
                                                           'absent']),
            srdf_state=dict(required=False, type='str', choices=['Establish',
                                                                 'Resume',
                                                                 'Restore',
                                                                 'Suspend',
                                                                 'Swap',
                                                                 'Split',
                                                                 'Failback',
                                                                 'Failover',
                                                                 'Setbias']),
            srdf_mode=dict(required=False, type='str', choices=['Active',
                                                                'Adaptive Copy',
                                                                'Synchronous',
                                                                'Asynchronous']),
            rdfg_no=dict(type='int', required=False, default=None),
            wait_for_completion=dict(type='bool', required=False, default=False),
            new_rdf_group=dict(type='bool', required=False, default=False),
            witness=dict(type='bool', required=False, default=None),
            job_id=dict(type='str', required=False, default=None))

    def get_srdf_link(self, sg_name):
        '''
        Get details of a given srdf_link.

        Returns the pairing details dict, or None when no RDF group or
        pairing exists (or the lookup fails). Fails the module when the
        storage group is protected by multiple RDF groups and no rdfg_no
        was supplied, or when the RDF group's remote array does not match
        remote_serial_no (Concurrent SRDF is unsupported in v1.1).
        '''
        rdfg_number = self.module.params['rdfg_no']
        if not rdfg_number:
            # no explicit RDF group: it must be unambiguous for this SG
            rdfg_list = self.replication.get_storagegroup_srdfg_list(sg_name)
            if len(rdfg_list) == 0:
                error_msg = 'No RDF group exists for the given storage group'
                LOG.info(error_msg)
                return None
            elif len(rdfg_list) > 1:
                error_msg = ("Multiple RDF groups exists for the given storage"
                             " group. Please specify RDF number")
                LOG.error(error_msg)
                self.module.fail_json(msg=error_msg)
            else:
                rdfg_number = rdfg_list[0]
        try:
            # check for Concurrent/star configuration,
            if self.module.params['remote_serial_no']:
                remote_serial_no = self.module.params['remote_serial_no']
                try:
                    rdfg_details = self.replication.get_rdf_group(rdfg_number)
                    if rdfg_details['remoteSymmetrix'] != remote_serial_no:
                        error_msg = (
                            "Remote array for the RDF group number {0} does"
                            " not match with the given Remote array {1}. Please"
                            " specify RDF group you want to use. Also note, Ansible"
                            " modules v1.1 do not support Concurrent SRDF"
                            " configurations.".format(
                                rdfg_number, remote_serial_no))
                        LOG.error(error_msg)
                        self.module.fail_json(msg=error_msg)
                except Exception as e:
                    error_msg = (
                        "Got error {0} while getting RDF group details for "
                        "rdfg number {1}" .format(str(e), rdfg_number))
                    LOG.error(error_msg)
                    self.module.fail_json(msg=error_msg)
            LOG.info(
                "Getting srdf details for storage group {0} with rdfg number"
                "{1}".format(
                    sg_name, rdfg_number))
            srdf_linkFromGet = self.replication.get_storagegroup_srdf_details(
                storagegroup_id=sg_name, rdfg_num=rdfg_number)
            if srdf_linkFromGet:
                LOG.info('SRDF link details fetched are: {0}'.format(
                    srdf_linkFromGet))
                return srdf_linkFromGet
        except Exception as e:
            # treat lookup failures as "no pairing"; the caller decides
            LOG.error(
                "Got error {0} while getting SRDF details for storage group "
                "{1} with rdfg number {2}" .format(
                    str(e), sg_name, rdfg_number))
            return None

    def create_srdf_link(self):
        '''
        Create srdf_link for given storagegroup_id group and remote array.

        Requires remote_serial_no and srdf_mode. Stores the response in
        self.result (Job_details when asynchronous, SRDF_link_details when
        synchronous) and returns True on success.
        '''
        sg_name = self.module.params['sg_name']
        remote_serial_no = self.module.params['remote_serial_no']
        srdf_mode = self.module.params['srdf_mode']
        if (remote_serial_no is None or srdf_mode is None):
            error_msg = (
                "Mandatory parameters not found. Required parameters "
                "for creating an SRDF link are remote array serial number "
                "and SRDF mode")
            LOG.error(error_msg)
            self.module.fail_json(msg=error_msg)
        try:
            establish_flag = self._compute_required_establish_flag(
                self.module.params['srdf_state'])
            rdfg_number = self.module.params['rdfg_no']
            forceNewRdfGroup = self.module.params['new_rdf_group']
            async_flag = not(self.module.params['wait_for_completion'])
            witness = self.module.params['witness']
            if witness is False:
                # witness=False means "use Bias", which this module version
                # does not allow at creation time
                errorMsg = ("Create SRDF link operation failed as Ansible"
                            " modules v1.1 does not allow creation of SRDF"
                            " links using Bias for resiliency.")
                LOG.error(errorMsg)
                self.module.fail_json(msg=errorMsg)
            msg = (
                "Creating srdf_link with parameters:sg_name={0}, "
                "remote_serial_no={1}, srdfmode={2}, establish_flag={3}, "
                "rdfgroup_no={4}, new_rdf_group={5}, async_flag={6}")
            LOG.info(
                msg.format(
                    sg_name,
                    remote_serial_no,
                    srdf_mode,
                    establish_flag,
                    rdfg_number,
                    forceNewRdfGroup,
                    async_flag))
            resp = self.replication.create_storagegroup_srdf_pairings(
                storagegroup_id=sg_name,
                remote_sid=remote_serial_no,
                srdfmode=srdf_mode,
                establish=establish_flag,
                forceNewRdfGroup=forceNewRdfGroup,
                rdfg_number=rdfg_number,
                _async=async_flag)
            LOG.info('Response from create SRDF link call {0}'.format(resp))
            if async_flag:
                self.result['Job_details'] = resp
                self.result['SRDF_link_details'] = None
            else:
                self.result['SRDF_link_details'] = resp
                self.result['Job_details'] = None
            return True
        except Exception as e:
            errorMsg = 'Create srdf_link for sg {0} failed with error {1}'.format(
                sg_name, str(e))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def _compute_required_establish_flag(self, srdf_state):
        '''Translate the creation-time srdf_state into the establish flag.

        Only Establish and Suspend (or None, treated as Suspend) are valid
        at creation time; anything else fails the module.
        '''
        if (srdf_state is None or srdf_state == 'Suspend'):
            return False
        elif srdf_state == 'Establish':
            return True
        else:
            errorMsg = (
                "Creation of SRDF link failed. Allowed states while "
                "creating SRDF link are only Establish or Suspend. Got {0}".format(srdf_state))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def modify_srdf_mode(self, srdf_mode):
        '''Change the replication mode of the existing SRDF pairing.'''
        async_flag = not(self.module.params['wait_for_completion'])
        srdf_link = self.result['SRDF_link_details']
        if srdf_mode == 'Adaptive Copy':
            # the REST API expects the internal name for Adaptive Copy mode
            srdf_mode = 'AdaptiveCopyDisk'
        try:
            resp = self.replication.modify_storagegroup_srdf(
                storagegroup_id=srdf_link['storageGroupName'],
                rdfg=srdf_link['rdfGroupNumber'],
                action='SetMode',
                options={
                    'setMode': {
                        'mode': srdf_mode}},
                _async=async_flag)
            if async_flag:
                self.result['Job_details'] = resp
                self.result['SRDF_link_details'] = None
            else:
                self.result['SRDF_link_details'] = resp
                self.result['Job_details'] = None
            return True
        except Exception as e:
            errorMsg = ("Modifying SRDF mode of srdf_link from {0} to {1} for "
                        "SG {2} failed with error {3}".format(
                            srdf_link['modes'][0], srdf_mode,
                            srdf_link['storageGroupName'], str(e)))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def modify_srdf_state(self, action):
        '''Apply the given srdf_state action to the existing pairing.

        Validates the witness flag (Metro-only, and only for Establish,
        Restore and Suspend) before issuing the call.
        '''
        modify_body = {}
        async_flag = not(self.module.params['wait_for_completion'])
        srdf_link = self.result['SRDF_link_details']
        modify_body['storagegroup_id'] = srdf_link['storageGroupName']
        modify_body['rdfg'] = srdf_link['rdfGroupNumber']
        modify_body['action'] = action
        modify_body['_async'] = async_flag
        if self.module.params['witness'] is not None:
            if srdf_link['modes'][0] != 'Active':
                errorMsg = ("witness flag can not be used for non-Metro "
                            "configurations.")
                LOG.error(errorMsg)
                self.module.fail_json(msg=errorMsg)
            elif action not in ['Establish', 'Restore', 'Suspend']:
                errorMsg = ("witness flag can be used only for 3 actions:"
                            " Establish, Restore and Suspend")
                LOG.error(errorMsg)
                self.module.fail_json(msg=errorMsg)
            else:
                # the REST payload takes metroBias: witness=True means
                # "do not use bias", hence the negation
                modify_body['options'] = {
                    action.lower(): {
                        'metroBias': not(self.module.params['witness'])}}
        try:
            LOG.info('The modify_body is {0}:'.format(modify_body))
            resp = self.replication.modify_storagegroup_srdf(**modify_body)
            if async_flag:
                self.result['Job_details'] = resp
                self.result['SRDF_link_details'] = None
            else:
                self.result['SRDF_link_details'] = resp
                self.result['Job_details'] = None
            return True
        except Exception as e:
            errorMsg = ("Modifying SRDF state of srdf_link for storage group "
                        "{0} failed with error {1}".format(
                            srdf_link['storageGroupName'], str(e)))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def _check_for_SRDF_state_modification(self, new_operation):
        '''Apply new_operation unless the current SRDF state makes it a
        no-op (per the idempotency tables built in __init__).'''
        srdf_link = self.result['SRDF_link_details']
        current_state = srdf_link['states'][0]
        changed = False
        if (srdf_link['modes'][0] == 'Active' and
                current_state in self.idempotency_dict_metro and
                new_operation in self.idempotency_dict_metro[current_state]
                ):
            LOG.info('Modification of SRDF state not required')
            changed = False
        elif (srdf_link['modes'][0] != 'Active' and
              current_state in self.idempotency_dict and
              new_operation in self.idempotency_dict[current_state]):
            LOG.info('Modification of SRDF state not required')
            changed = False
        else:
            LOG.info('Modifying SRDF state from {0} to {1}'.format(
                current_state, new_operation))
            changed = self.modify_srdf_state(new_operation)
        return changed

    def delete_srdf_link(self):
        '''
        Delete srdf_link from system
        '''
        srdf_link = self.result['SRDF_link_details']
        try:
            self.replication.delete_storagegroup_srdf(
                srdf_link['storageGroupName'], int(
                    srdf_link['rdfGroupNumber']))
            self.result['SRDF_link_details'] = {}
            return True
        except Exception as e:
            errorMsg = ('Delete srdf_link {0} failed with error {1}'.format(
                srdf_link['storageGroupName'], str(e)))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def get_job_details(self, job_id):
        '''Fetch details of an asynchronous job into self.result.'''
        try:
            self.result['Job_details'] = self.u4v_conn.common.get_job_by_id(
                job_id)
        except Exception as e:
            errorMsg = (
                'Get Job details for job_id {0} failed with error {1}'.format(
                    job_id, str(e)))
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)

    def perform_module_operation(self):
        '''
        Perform different actions on srdf_link based on user parameter
        chosen in playbook
        '''
        state = self.module.params['state']
        sg_name = self.module.params['sg_name']
        srdf_mode = self.module.params['srdf_mode']
        srdf_state = self.module.params['srdf_state']
        job_id = self.module.params['job_id']
        changed = False
        # exactly one of job_id / sg_name must be provided per task
        if (job_id and sg_name) or (not job_id and not sg_name):
            errorMsg = 'Please specify either job ID or SG name in one Ansible task'
            LOG.error(errorMsg)
            self.module.fail_json(msg=errorMsg)
        if job_id:
            if state == 'present':
                LOG.info('Getting details of the Job {0}'.format(job_id))
                self.get_job_details(job_id)
            else:
                errorMsg = 'Set state=present for getting Job status'
                LOG.error(errorMsg)
                self.module.fail_json(msg=errorMsg)
        else:
            srdf_link = self.get_srdf_link(sg_name)
            self.result['SRDF_link_details'] = srdf_link
            if state == 'present' and not self.result['SRDF_link_details']:
                changed = self.create_srdf_link()
            elif state == 'present' and self.result['SRDF_link_details']:
                # mode change first (only when requested and different) ...
                if (srdf_mode !=
                        self.result['SRDF_link_details']['modes'][0] and srdf_mode):
                    LOG.info('Modifying SRDF mode from {0} to {1}'.format(
                        self.result['SRDF_link_details']['modes'][0], srdf_mode))
                    changed = self.modify_srdf_mode(srdf_mode) or changed
                # ... then the (idempotency-checked) state change
                if srdf_state is not None:
                    changed = self._check_for_SRDF_state_modification(
                        srdf_state) or changed
            elif state == 'absent' and self.result['SRDF_link_details']:
                LOG.info('Deleting srdf_link with SG {0} '.format(sg_name))
                changed = self.delete_srdf_link() or changed
        # Update the module's final state
        LOG.info('changed {0}'.format(changed))
        self.result['changed'] = changed
        self.module.exit_json(**self.result)
def main():
    '''Entry point: build a PowerMax_SRDF object and run the operation
    requested by the playbook parameters.'''
    srdf_module = PowerMax_SRDF()
    srdf_module.perform_module_operation()


if __name__ == '__main__':
    main()
| StarcoderdataPython |
230052 | ''' Import all modules in this package '''
from .credits_screen import *
from .end_screen import *
from .pause_screen import *
from .start_screen import *
from .game_screen import *
| StarcoderdataPython |
9680082 | import argparse
import os
import glob
import random
import re
import librosa
import pandas as pd
from tqdm import tqdm
# Matches runs of whitespace and digits; used to detect transcripts that
# are effectively empty after punctuation removal.
SPACES_PATTERN = re.compile('[\t\r\n\s0-9]+')
# Matches ASCII punctuation characters; stripped from transcripts.
PUNCTUATION = re.compile('[!"#$%&\'()*+,-./:;<=>?@\]\[\\^_`{|}~]')
def get_duration(filename):
    """Return the duration, in seconds, of the audio file at *filename*."""
    signal, sample_rate = librosa.load(filename)
    return librosa.get_duration(signal, sample_rate)
def clean_text(transcript):
    """Return *transcript* with all ASCII punctuation removed."""
    without_punctuation = PUNCTUATION.sub('', transcript)
    return without_punctuation
def process_common_voice(path, tsv_file):
    """Read a Common Voice TSV manifest and return (audio_path, transcript)
    tuples.

    Rows whose transcript is empty after cleaning are skipped (with a
    message). Audio paths get a '.wav' suffix appended to the clip name.
    """
    frame = pd.read_csv(os.path.join(path, tsv_file), sep='\t')
    pairs = []
    for _, row in frame.iterrows():
        clip_path = os.path.join(path, os.path.join('clips', row['path']))
        transcript = clean_text(row['sentence'].lower()).strip()
        if len(SPACES_PATTERN.sub('', transcript)) == 0:
            print(f'Skipping CV {clip_path} from {tsv_file}')
            continue
        pairs.append((f'{clip_path}.wav', transcript))
    return pairs
def process_alcaim(alcaim_path, random_seed, max_test_people=20, max_test_utterances=200, compute_duration=False):
    """Split the CETUC/alcaim corpus into train and test sets.

    Up to *max_test_people* speaker folders (chosen via *random_seed*) are
    reserved for test, capped at *max_test_utterances* utterances each;
    everything else goes to train.

    Returns (train, test, train_duration, test_duration), where each list
    holds (audio_path, transcript) tuples. Durations stay 0 unless
    *compute_duration* is True.
    """
    print('Processing alcaim')
    # NOTE(review): f.path from os.scandir already contains alcaim_path;
    # this join is only harmless when alcaim_path is absolute -- confirm.
    folders = [os.path.join(alcaim_path, f.path) for f in os.scandir(alcaim_path) if f.is_dir()]
    _random = random.Random(random_seed)
    _random.shuffle(folders)
    test_folders = folders[:max_test_people]
    train, test = [], []
    train_duration = 0
    test_duration = 0
    for folder in tqdm(folders, total=len(folders)):
        is_eval_folder = folder in test_folders
        test_utterances = []
        for transcript_path in tqdm(glob.glob(f'{folder}/*.txt')):
            with open(transcript_path) as f:
                transcript = f.read().lower().strip()
            audio_filename = transcript_path.replace('.txt', '.wav')
            duration = 0
            if compute_duration:
                duration = get_duration(audio_filename)
            if is_eval_folder and len(test_utterances) < max_test_utterances:
                test_utterances.append((audio_filename, transcript))
                test_duration += duration
                continue
            train.append((audio_filename, transcript))
            # Bug fix: accumulate this file's duration; the original added
            # train_duration to itself, which always left it at 0.
            train_duration += duration
        test += test_utterances
    return train, test, train_duration, test_duration
def process_generic(generic_path, compute_duration=False):
    """Collect (audio_path, transcript) tuples from a generic corpus layout.

    Expects one sub-folder per speaker containing paired .txt/.wav files.
    Returns (data, duration); duration stays 0 unless *compute_duration*.
    """
    print('Processing generic')
    speaker_dirs = [os.path.join(generic_path, entry.path) for entry in os.scandir(generic_path) if entry.is_dir()]
    pairs = []
    total_duration = 0
    for speaker_dir in tqdm(speaker_dirs, total=len(speaker_dirs)):
        for transcript_path in glob.glob(f'{speaker_dir}/*.txt'):
            audio_filename = transcript_path.replace('.txt', '.wav')
            with open(transcript_path) as f:
                transcript = f.read().lower().strip()
            pairs.append((audio_filename, transcript))
            if compute_duration:
                total_duration += get_duration(audio_filename)
    return pairs, total_duration
def process_sid(sid_path, compute_duration=False):
    """Collect (audio_path, transcript) tuples from the SID corpus.

    Each speaker folder holds a prompts.txt of '<index>=<text>' lines plus
    wav files whose position in sorted order maps to prompt index (i + 1).
    Prompts that are empty after cleaning are dropped, and wavs without a
    usable prompt are skipped with a message.
    Returns (data, duration); duration stays 0 unless *compute_duration*.
    """
    print('Processing SID')
    folders = [os.path.join(sid_path, f.path) for f in os.scandir(sid_path) if f.is_dir()]
    data = []
    duration = 0
    for folder in tqdm(folders, total=len(folders)):
        prompts = {}
        with open(f'{folder}/prompts.txt') as f:
            for l in f:
                # '<index>=<text>'; any further '=' in the text are turned
                # into spaces by the re-join below
                parts = l.strip().split('=')
                idx = int(parts[0])
                transcript = clean_text(' '.join(parts[1:]).lower())
                if len(SPACES_PATTERN.sub('', transcript)) == 0:
                    continue
                prompts[idx] = transcript
        files = sorted(glob.glob(f'{folder}/*.wav'))
        for i, audio_filename in enumerate(files):
            # prompts are 1-indexed; sorted wav order is assumed to align
            # with prompt numbering -- TODO confirm for this corpus
            transcript = prompts.get(i + 1)
            if transcript is None:
                print(f'Sid: Missing | empty {audio_filename}')
                continue
            data.append((audio_filename, transcript))
            if compute_duration:
                duration += get_duration(audio_filename)
    return data, duration
def process_voxforge(voxforge_path, compute_duration):
    """Collect (audio_path, transcript) tuples from a VoxForge-style tree.

    Handles two layouts per folder: 'etc/PROMPTS' with wavs under 'wav/',
    or a PROMPTS file and wavs directly in the folder. PROMPTS lines are
    '<path> <words...>'; the first token's basename names the wav file.
    Wavs without a non-empty prompt entry are skipped with a message.
    Returns (train, duration); duration stays 0 unless *compute_duration*.
    """
    print('Processing VoxForge')
    folders = [os.path.join(voxforge_path, f.path) for f in os.scandir(voxforge_path) if f.is_dir()]
    train = []
    duration = 0
    for folder in tqdm(folders, total=len(folders)):
        # layout detection: presence of an etc/ dir selects the nested form
        has_etc = os.path.exists(os.path.join(folder, 'etc'))
        prompt_file = os.path.join(folder, f'{"etc/" if has_etc else ""}PROMPTS')
        prompts = {}
        path_prefix = f'{folder}/{"wav/" if has_etc else ""}'
        with open(prompt_file) as f:
            for l in f:
                parts = l.strip().split(' ')
                file_index = parts[0].split('/')[-1]
                transcript = ' '.join(parts[1:]).lower()
                if len(SPACES_PATTERN.sub('', transcript)) == 0:
                    continue
                prompts[f'{path_prefix}{file_index}.wav'] = ' '.join(parts[1:]).lower()
        # NOTE(review): path_prefix already ends with '/', so this glob
        # pattern contains a double slash; the returned paths may not match
        # the prompt keys built without it -- verify on the target corpus.
        for audio_filename in glob.glob(f'{path_prefix}/*.wav'):
            transcript = prompts.get(audio_filename)
            if transcript is None:
                print(f'Voxforge: Missing | empty {audio_filename}')
                continue
            train.append((audio_filename, transcript))
            if compute_duration:
                duration += get_duration(audio_filename)
    return train, duration
def process_coral(coral_path, compute_duration):
    """Collect (audio_path, transcript) tuples from the C-ORAL corpus.

    Transcripts are lower-cased and stripped of punctuation. Returns
    (data, duration); duration stays 0 unless *compute_duration* is True.
    """
    print('Processing C-ORAL')
    speaker_dirs = [os.path.join(coral_path, entry.path) for entry in os.scandir(coral_path) if entry.is_dir()]
    pairs = []
    total_duration = 0
    for speaker_dir in tqdm(speaker_dirs, total=len(speaker_dirs)):
        for transcript_path in glob.glob(f'{speaker_dir}/*.txt'):
            audio_filename = transcript_path.replace('.txt', '.wav')
            with open(transcript_path) as f:
                transcript = clean_text(f.read().lower().strip())
            pairs.append((audio_filename, transcript))
            if compute_duration:
                total_duration += get_duration(audio_filename)
    return pairs, total_duration
def write_output_file(path, files):
    """Write a TSV manifest with columns PATH, DURATION, TRANSCRIPT.

    *files* is an iterable of (audio_path, transcript) tuples; the
    DURATION column is always written as '0'.
    """
    lines = ['PATH\tDURATION\tTRANSCRIPT']
    for item in files:
        lines.append('\t'.join([item[0], '0', item[1]]))
    with open(path, 'w') as f:
        f.write('\n'.join(lines))
def write_lm_file(path, files):
    """Write a language-model corpus: one transcript per line at *path*.

    NOTE(review): unlike the other helpers here, the second element of each
    tuple is treated as a *path to a transcript file* that is opened and
    read, not as transcript text -- confirm callers pass file paths.
    """
    output = []
    for audio, transcript in tqdm(files, total=len(files)):
        with open(transcript) as f:
            output.append(f.read().strip())
    with open(path, 'w') as f:
        f.write('\n'.join(output))
def generate_datasets(alcaim_path, sid_path, voxforge_path, lapsbm_val_path, common_voice_path, random_seed, output_train, output_eval,
                      output_test, compute_duration, max_train, max_eval, coral_path):
    """Build train/eval/test manifest files from the configured corpora.

    Each *_path argument is optional (falsy skips that corpus). The
    resulting (audio_path, transcript) lists are truncated to *max_train*
    / *max_eval* when those are positive, then written as TSV manifests
    to *output_train*, *output_eval* and *output_test*.
    """
    # 'eval_files' avoids shadowing the builtin eval()
    train, eval_files, test = [], [], []
    train_duration = 0
    eval_duration = 0
    test_duration = 0
    if alcaim_path:
        _train, _test, _train_duration, _test_duration = process_alcaim(alcaim_path, random_seed,
                                                                        compute_duration=compute_duration)
        train += _train
        test += _test
        train_duration += _train_duration
        test_duration += _test_duration
    if sid_path:
        _train, _train_duration = process_sid(sid_path, compute_duration=compute_duration)
        train += _train
        train_duration += _train_duration
    if lapsbm_val_path:
        _eval, _eval_duration = process_generic(lapsbm_val_path, compute_duration=compute_duration)
        eval_files += _eval
        # Bug fix: accumulate the corpus duration; the original added
        # eval_duration to itself, which always left it at 0.
        eval_duration += _eval_duration
    if voxforge_path:
        _train, _train_duration = process_voxforge(voxforge_path, compute_duration=compute_duration)
        train += _train
        train_duration += _train_duration
    if common_voice_path:
        train += process_common_voice(common_voice_path, 'train.tsv')
        train += process_common_voice(common_voice_path, 'dev.tsv')
        test += process_common_voice(common_voice_path, 'test.tsv')
    if coral_path:
        _train, _train_duration = process_coral(coral_path, compute_duration)
        train += _train
        # Bug fix: the C-ORAL duration was computed but never accumulated.
        train_duration += _train_duration
    print(f'Total {len(train)} train files, eval {len(eval_files)}, {len(test)} test files')
    if max_train > 0:
        train = train[:max_train]
    if max_eval > 0:
        eval_files = eval_files[:max_eval]
    write_output_file(output_train, train)
    write_output_file(output_eval, eval_files)
    write_output_file(output_test, test)
if __name__ == "__main__":
    # Command-line entry point: every dataset path is optional; only the
    # three output manifest paths are required.
    parser = argparse.ArgumentParser(description="Generate datasets split")
    parser.add_argument('--alcaim_path', type=str, help="CETUC dataset path")
    parser.add_argument('--sid_path', type=str, help="SID dataset path")
    # NOTE(review): help text says "SID dataset path" -- looks like a
    # copy-paste slip; the flag configures the VoxForge corpus.
    parser.add_argument('--voxforge_path', type=str, help="SID dataset path")
    parser.add_argument('--lapsbm_val_path', type=str, help="LapsBM val dataset path")
    parser.add_argument('--common_voice_path', type=str, help="Common Voice dataset path")
    parser.add_argument('--coral_path', type=str, help="C-ORAL dataset path")
    parser.add_argument('--random_seed', type=int, default=42, help="Random seed")
    parser.add_argument('--output_train', type=str, required=True, help='Output path file containing train files paths')
    parser.add_argument('--output_eval', type=str, required=True, help='Output path file containing eval files paths')
    parser.add_argument('--output_test', type=str, required=True, help='Output path file containing test files paths')
    parser.add_argument('--compute_duration', action='store_true')
    parser.add_argument('--max_train', type=int, default=-1, help='Max train files')
    parser.add_argument('--max_eval', type=int, default=-1, help='Max eval files')
    args = parser.parse_args()
    # argparse destinations match generate_datasets' parameter names, so
    # the parsed namespace can be forwarded directly as keyword arguments
    kwargs = vars(args)
    print('-' * 20)
    print('Generating datasets with args: ')
    for arg in vars(args):
        print(f'{arg}: {getattr(args, arg)}')
    print('-' * 20)
    generate_datasets(**kwargs)
| StarcoderdataPython |
288717 | <filename>hoverpy/__init__.py
from .hp import *
from .decorators import * | StarcoderdataPython |
1694146 |
from ensemble.component.vehicle import Vehicle
from ensemble.handler.symuvia.stream import SimulatorRequest
# TODO: Check constructor alternatives
# Smoke-test snippet: build a SimulatorRequest stream handler and attach a
# Vehicle with id 0 to it.
req = SimulatorRequest()
v1 = Vehicle(req, vehid=0)
6616370 | # Generated by Django 3.2.5 on 2021-07-09 03:30
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Person.birth_date and adjust the surname fields.

    NOTE(review): the birth_date default is a hard-coded datetime frozen by
    makemigrations from the one-off value supplied at generation time; it
    only applies to pre-existing rows and is intentionally left unchanged
    (editing an applied migration would break migration state).
    """

    dependencies = [
        ('registration', '0004_auto_20210709_0303'),
    ]
    operations = [
        migrations.AddField(
            model_name='person',
            name='birth_date',
            field=models.DateField(default=datetime.datetime(2021, 7, 9, 3, 30, 21, 783368)),
        ),
        migrations.AlterField(
            model_name='person',
            name='first_surname',
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name='person',
            name='second_surname',
            # null=True allows people without a second surname
            field=models.CharField(max_length=255, null=True),
        ),
    ]
| StarcoderdataPython |
class Solution:
    # accepted
    def reverseVowels(self, s):
        """
        Reverse only the vowels of *s* (case preserved, consonants fixed).
        :type s: str
        :rtype: str
        """
        def _is_vowel(ch):
            return ch.lower() in ('a', 'e', 'i', 'o', 'u')

        chars = list(s)
        left, right = 0, len(chars) - 1
        # two-pointer sweep: advance past consonants, swap vowel pairs
        while left < right:
            if not _is_vowel(chars[left]):
                left += 1
            elif not _is_vowel(chars[right]):
                right -= 1
            else:
                chars[left], chars[right] = chars[right], chars[left]
                left += 1
                right -= 1
        return ''.join(chars)
| StarcoderdataPython |
1843087 | import enum
class Status(enum.Enum):
    """Moderation status of the game on mod.io.

    0 : Not accepted
    1 : Accepted (default)
    2 : Archived (default)
    3 : Deleted
    """
    not_accepted = 0
    accepted = 1
    archived = 2
    deleted = 3
class Presentation(enum.Enum):
    """How a game's mods are presented on mod.io.

    0 : Display mods for that game in a grid on mod.io
    1 : Display mods for that game in a table on mod.io
    """
    grid = 0
    table = 1
class Submission(enum.Enum):
    """Where mod uploads for a game may originate.

    0 : Mod uploads must occur via a tool created by the game developers
    1 : Mod uploads can occur from anywhere, including the website and API
    """
    restricted = 0
    unrestricted = 1
class Curation(enum.Enum):
    """Level of curation applied to a game's mods before listing.

    0 : No curation: Mods are immediately available to play
    1 : Paid curation: Mods are immediately available to play unless
    they choose to receive donations. These mods must be accepted to be listed
    2 : Full curation: All mods must be accepted by someone to be listed
    """
    no_curation = 0
    paid_curation = 1
    full_curation = 2
class Community(enum.IntFlag):
    """Bit flags for a game's community features.

    0 : All of the options below are disabled
    1 : Discussion board enabled
    2 : Guides and news enabled
    ? : Above options can be added together to create custom settings (e.g 3 :
    discussion board, guides and news enabled)
    """
    def __repr__(self):
        # single members print by their name; composite values as 'a|b'
        # NOTE(review): enum._decompose is a private CPython helper that was
        # removed in newer Python versions -- confirm the targeted runtime
        # still provides it before relying on repr() of composite flags.
        cls = self.__class__
        if self._name_ is not None:
            return self._name_
        members, uncovered = enum._decompose(cls, self._value_)
        return '|'.join([str(m._name_ or m._value_) for m in members])
    # composite flags render the same way through str()
    __str__ = __repr__
    disabled = 0
    discussion_boards = 1
    guides_news = 2
class Revenue(enum.IntFlag):
    """Bit flags for a game's mod-revenue permissions.

    0 : All of the options below are disabled
    1 : Allow mods to be sold
    2 : Allow mods to receive donations
    4 : Allow mods to be traded
    8 : Allow mods to control supply and scarcity
    ? : Above options can be added together to create custom settings (e.g 3 :
    allow mods to be sold and receive donations)
    """
    def __repr__(self):
        # single members print by their name; composite values as 'a|b'
        # NOTE(review): enum._decompose is a private CPython helper removed
        # in newer Python versions -- verify against the targeted runtime.
        cls = self.__class__
        if self._name_ is not None:
            return self._name_
        members, uncovered = enum._decompose(cls, self._value_)
        return '|'.join([str(m._name_ or m._value_) for m in members])
    __str__ = __repr__
    disabled = 0
    sold = 1
    donations = 2
    traded = 4
    # Bug fix: was 5, which is merely sold|traded; per the docstring above
    # and the mod.io API, "control supply and scarcity" is bit 8.
    full_control = 8
class APIAccess(enum.IntFlag):
    """Bit flags controlling third-party API access for a game.

    0 : All of the options below are disabled
    1 : Allow 3rd parties to access this games API endpoints
    2 : Allow mods to be downloaded directly (if disabled all download URLs will contain a frequently
    changing verification hash to stop unauthorized use)
    ? : Above options can be added together to create custom settings (e.g 3 :
    allow 3rd parties to access this games API endpoints and allow mods to be
    downloaded directly)
    """
    def __repr__(self):
        # Named (canonical) members render as their bare name.
        if self._name_ is not None:
            return self._name_
        # Composite values: decompose into the canonical single-bit members.
        # This replaces enum._decompose, a private helper removed in
        # Python 3.11, with an equivalent public-API decomposition.
        members = [m for m in self.__class__
                   if m._value_ and self._value_ & m._value_ == m._value_]
        return '|'.join(str(m._name_ or m._value_) for m in members)
    __str__ = __repr__

    disabled = 0
    third_party = 1
    direct_downloads = 2
class MaturityOptions(enum.Enum):
    """Whether mod developers may flag their own mods as mature.

    0 : Don't allow mod developers to decide whether or not to flag their mod as
    containing mature content (if game devs wish to handle it)
    1 : Allow mod developers to decide whether or not to flag their mod as
    containing mature content
    """
    forbidden = 0
    allowed = 1
class Maturity(enum.IntFlag):
    """Bit flags describing the kinds of mature content present.

    0 : None
    1 : Alcohol
    2 : Drugs
    4 : Violence
    8 : Explicit
    ? : Above options can be added together to create custom settings (e.g 3 :
    alcohol and drugs present)
    """
    def __repr__(self):
        # Named (canonical) members render as their bare name.
        if self._name_ is not None:
            return self._name_
        # Composite values: decompose into the canonical single-bit members.
        # This replaces enum._decompose, a private helper removed in
        # Python 3.11, with an equivalent public-API decomposition.
        members = [m for m in self.__class__
                   if m._value_ and self._value_ & m._value_ == m._value_]
        return '|'.join(str(m._name_ or m._value_) for m in members)
    __str__ = __repr__

    none = 0
    alcohol = 1
    drugs = 2
    violence = 4
    explicit = 8
class VirusStatus(enum.Enum):
    """Outcome of the virus scan performed on an uploaded file.

    0 : Not scanned
    1 : Scan complete
    2 : In progress
    3 : Too large to scan
    4 : File not found
    5 : Error Scanning
    """
    not_scanned = 0
    scan_complete = 1
    in_progress = 2
    too_large = 3
    not_found = 4
    error = 5
class Visibility(enum.Enum):
    """Public visibility of a resource.

    0 : Hidden
    1 : Public
    """
    hidden = 0
    public = 1
class Level(enum.Enum):
    """Access level of a team member.

    1 : Moderator
    4 : Creator
    8 : Admin
    """
    moderator = 1
    creator = 4
    admin = 8
class Report(enum.Enum):
    """Kind of report submitted against a resource.

    0 : Generic Report
    1 : DMCA Report
    """
    generic = 0
    dmca = 1
class EventType(enum.Enum):
    """An enum to render all event types easy to compare."""
    # NOTE(review): these values look like internal ordinals used only for
    # comparison, not wire-format codes — confirm before serializing them.
    file_changed = 0
    available = 1
    unavailable = 2
    edited = 3
    deleted = 4
    team_changed = 5
    team_join = 6
    team_leave = 7
    subscribe = 8
    unsubscribe = 9
    other = 10
class RatingType(enum.Enum):
    """The type of rating submitted (good, bad, neutral)."""
    good = 1
    neutral = 0
    # Stray dataset marker removed from the end of this line; it made the
    # module unparsable.
    bad = -1
58284 | <reponame>Harshvartak/TSEC-Codestorm<gh_stars>1-10
from django import forms
from .models import *
from django.forms import ModelForm
from django.contrib.auth import get_user_model
from crispy_forms.layout import Layout, Field, ButtonHolder, Submit
from crispy_forms.helper import FormHelper
from django.contrib.auth import authenticate
class FarmerForm(ModelForm):
    """Registration form for a Farmer account.

    Adds password/confirmation fields on top of the Farmer model fields and
    applies Bootstrap widget attributes for rendering.
    """
    password = forms.CharField(label='Password', widget=forms.PasswordInput)
    confirm_password = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)

    class Meta:
        model = Farmer
        fields = ['username', 'email', 'dob', 'is_farmer', 'aadhar_no', 'pan_no', 'password', 'confirm_password']
        labels = {
            "dob": "Date Of Birth",
            'is_farmer': 'Register as a farmer',
            'aadhar_no': 'Enter Your Aadhar card number',
            'pan_no': 'Enter your PAN number',
        }

    def clean(self):
        """Validate that both password fields match; return the cleaned data."""
        cleaned_data = super(FarmerForm, self).clean()
        password = cleaned_data.get("password")
        confirm_password = cleaned_data.get("confirm_password")
        if password != confirm_password:
            raise forms.ValidationError("Passwords don't match")
        # Django's Form.clean() contract is to return the cleaned data dict.
        return cleaned_data

    def __init__(self, *args, **kwargs):
        super(FarmerForm, self).__init__(*args, **kwargs)
        # Bootstrap styling applied per widget.
        self.fields['username'].widget.attrs['class'] = 'form-control'
        self.fields['username'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['username'].widget.attrs['placeholder'] = 'Username'
        self.fields['email'].widget.attrs['class'] = 'form-control'
        self.fields['email'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['email'].widget.attrs['placeholder'] = 'E-mail'
        self.fields['password'].widget.attrs['class'] = 'form-control'
        # Restored from a redacted '<PASSWORD>' token; matches the id used by
        # the confirm_password widget below.
        self.fields['password'].widget.attrs['id'] = 'exampleInputPassword1'
        self.fields['password'].widget.attrs['placeholder'] = 'Password'
        self.fields['confirm_password'].widget.attrs['class'] = 'form-control'
        self.fields['confirm_password'].widget.attrs['id'] = 'exampleInputPassword1'
        self.fields['confirm_password'].widget.attrs['placeholder'] = 'Confirm Password'
        self.fields['dob'].widget.attrs['class'] = 'form-control'
        self.fields['dob'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['dob'].widget.attrs['placeholder'] = 'YYYY-MM-DD'
        self.fields['aadhar_no'].widget.attrs['class'] = 'form-control'
        self.fields['aadhar_no'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['aadhar_no'].widget.attrs['placeholder'] = 'Aadhar-no'
        self.fields['pan_no'].widget.attrs['class'] = 'form-control'
        self.fields['pan_no'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['pan_no'].widget.attrs['placeholder'] = 'Pan Number'
        self.fields['is_farmer'].widget.attrs['class'] = 'form-check-input'
        self.fields['is_farmer'].widget.attrs['id'] = 'inlineRadio2'
class BuyerForm(ModelForm):
    """Registration form for a Buyer account with password confirmation."""
    password = forms.CharField(label='Password', widget=forms.PasswordInput)
    confirm_password = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)

    class Meta:
        # NOTE(review): the model is Buyer but the field list mirrors
        # FarmerForm (including is_farmer / "Register as a farmer") —
        # confirm the Buyer model really exposes these fields.
        model = Buyer
        fields = ['username', 'email', 'dob', 'is_farmer', 'aadhar_no', 'pan_no', 'password', 'confirm_password']
        labels = {
            "dob": "Date Of Birth",
            'is_farmer': 'Register as a farmer',
            'aadhar_no': 'Enter Your Aadhar card number',
            'pan_no': 'Enter Your PAN card number',
        }

    def clean(self):
        """Validate that both password fields match; return the cleaned data."""
        cleaned_data = super(BuyerForm, self).clean()
        # Restored from redacted '<PASSWORD>_data' tokens: both values are
        # read from cleaned_data, exactly as in FarmerForm.clean().
        password = cleaned_data.get("password")
        confirm_password = cleaned_data.get("confirm_password")
        if password != confirm_password:
            raise forms.ValidationError("Passwords don't match")
        # Django's Form.clean() contract is to return the cleaned data dict.
        return cleaned_data

    def __init__(self, *args, **kwargs):
        super(BuyerForm, self).__init__(*args, **kwargs)
        self.fields['username'].widget.attrs['class'] = 'form-control'
        self.fields['username'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['username'].widget.attrs['placeholder'] = 'Username'
        self.fields['password'].widget.attrs['class'] = 'form-control'
        # Restored from a redacted '<PASSWORD>' token.
        self.fields['password'].widget.attrs['id'] = 'exampleInputPassword1'
        self.fields['password'].widget.attrs['placeholder'] = 'Password'
class AccountAuthenticationForm(forms.ModelForm):
    """Login form that validates the supplied credentials via authenticate()."""
    username = forms.CharField(widget=forms.TextInput(attrs={'autofocus': True, 'class': 'form-control', 'id': 'submit', 'placeholder': "Username"}))
    password = forms.CharField(
        label="Password",
        widget=forms.PasswordInput(attrs={'id': 'exampleInputPassword1', 'class': 'form-control', 'placeholder': 'Password'}),
    )

    class Meta:
        model = Account
        fields = ('username', 'password')

    def clean(self):
        """Reject the form when the supplied credentials do not authenticate."""
        # Debug print() calls removed: they wrote the raw password to stdout
        # (a credential leak) and read cleaned_data before is_valid() ran.
        if self.is_valid():
            username = self.cleaned_data['username']
            password = self.cleaned_data['password']
            if not authenticate(password=password, username=username):
                raise forms.ValidationError("Invalid login")

    def __init__(self, *args, **kwargs):
        super(AccountAuthenticationForm, self).__init__(*args, **kwargs)
        self.fields['username'].widget.attrs['class'] = 'form-control'
        self.fields['username'].widget.attrs['id'] = 'exampleInputEmail1'
        self.fields['username'].widget.attrs['placeholder'] = 'Username'
        self.fields['password'].widget.attrs['class'] = 'form-control'
        # Restored from a redacted '<PASSWORD>' token.
        self.fields['password'].widget.attrs['id'] = 'exampleInputPassword1'
        self.fields['password'].widget.attrs['placeholder'] = 'Password'
# Allowed cart quantities (1 through 20) as (value, label) choice tuples.
PRODUCT_QUANTITY_CHOICES = [(i, str(i)) for i in range(1, 21)]
class CartAddProductForm(forms.Form):
    """Form for adding a product to the cart with a quantity between 1 and 20."""
    quantity = forms.TypedChoiceField(choices=PRODUCT_QUANTITY_CHOICES, coerce=int)
    update = forms.BooleanField(required=False, initial=False, widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        super(CartAddProductForm, self).__init__(*args, **kwargs)
        # Both widgets share the same Bootstrap button styling.
        for field_name in ('quantity', 'update'):
            self.fields[field_name].widget.attrs['class'] = 'btn btn-warning'
class CropForm(ModelForm):
    """Form used by a farmer to list a crop for sale."""

    class Meta:
        model = Crops
        fields = ['name', 'c_type', 'price', 'photo', 'quantity']
        labels = {
            'name': 'Name of Your crop',
            'c_type': 'Variety of your crop',
            'price': 'Price per quintal you want to sell',
            'quantity': 'Quintals of Crop for sale',
            'photo': 'Image of the ready crop (Optional)',
        }

    def __init__(self, *args, **kwargs):
        super(CropForm, self).__init__(*args, **kwargs)
        # Bootstrap attributes per field: (element id, placeholder text).
        widget_attrs = {
            'name': ('exampleInputEmail1', 'Crop name'),
            'c_type': ('exampleInputPassword1', 'Category'),
            'price': ('exampleInputPassword1', 'Price'),
            'quantity': ('exampleInputPassword1', 'Quantity'),
        }
        for field_name, (elem_id, placeholder) in widget_attrs.items():
            attrs = self.fields[field_name].widget.attrs
            attrs['class'] = 'form-control'
            attrs['id'] = elem_id
            attrs['placeholder'] = placeholder
class OrderCreateForm(forms.ModelForm):
    """Checkout form collecting shipping and contact details for an Order."""

    class Meta:
        model = Order
        fields = ['first_name', 'last_name', 'email', 'address', 'postal_code', 'city']

    def __init__(self, *args, **kwargs):
        super(OrderCreateForm, self).__init__(*args, **kwargs)
        self.fields['first_name'].widget.attrs['class'] = 'form-control'
        # Placeholders restored from redacted '<NAME>' tokens.
        self.fields['first_name'].widget.attrs['placeholder'] = 'First Name'
        self.fields['last_name'].widget.attrs['class'] = 'form-control'
        self.fields['last_name'].widget.attrs['placeholder'] = 'Last Name'
        self.fields['email'].widget.attrs['class'] = 'form-control'
        self.fields['email'].widget.attrs['placeholder'] = 'Email'
        self.fields['address'].widget.attrs['class'] = 'form-control'
        self.fields['address'].widget.attrs['placeholder'] = 'Address'
        self.fields['postal_code'].widget.attrs['class'] = 'form-control'
        self.fields['postal_code'].widget.attrs['placeholder'] = 'Postal_Code'
        self.fields['city'].widget.attrs['class'] = 'form-control'
        # Stray dataset marker removed from the end of this line.
        self.fields['city'].widget.attrs['placeholder'] = 'City'
12854535 | <reponame>Zhang-SJ930104/ymir
import os
from mir.scm.cmd import CmdScm
from mir.tools.code import MirCode
from mir.tools.errors import MirRuntimeError
def Scm(root_dir: str, scm_executable: str = None) -> CmdScm:
    """Return an SCM instance that corresponds to a repo at the specified path.

    Args:
        root_dir (str): path to the root directory of the repo; created if it
            does not exist yet.
        scm_executable (str): name of the SCM backend; only "git" is supported.

    Returns:
        mir.scm.cmd.CmdScm: SCM instance.

    Raises:
        MirRuntimeError: if scm_executable is not "git", or root_dir exists
            but is not a directory / cannot be created.
    """
    if scm_executable != "git":
        raise MirRuntimeError(error_code=MirCode.RC_CMD_INVALID_ARGS,
                              error_message=f"args error: expected git, not {scm_executable}")
    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists()/os.makedirs() pair; creation failures are surfaced
    # as MirRuntimeError for a consistent error type.
    try:
        os.makedirs(root_dir, exist_ok=True)
    except OSError as err:
        raise MirRuntimeError(error_code=MirCode.RC_CMD_INVALID_ARGS,
                              error_message=f"can not create dir: {root_dir}") from err
    if not os.path.isdir(root_dir):
        raise MirRuntimeError(error_code=MirCode.RC_CMD_INVALID_ARGS,
                              error_message=f"can not create dir: {root_dir}")
    return CmdScm(root_dir, scm_executable)
| StarcoderdataPython |
import string
import rsa
import base64
from urllib.parse import quote_plus, unquote
SIGN_TYPE = "SHA-256"
def order_data(payload):
    """Return payload as a sorted, '&'-joined "key=value" query string."""
    pairs = ["{}={}".format(key, value) for key, value in payload.items()]
    return "&".join(sorted(pairs))
def remove_order_data(payload):
    """Like order_data(), but drops the 'sign' and 'sign_type' keys first."""
    pairs = ["{}={}".format(key, value)
             for key, value in payload.items()
             if key not in ('sign', 'sign_type')]
    return "&".join(sorted(pairs))
def sign(payload, private_key=None):
    """Sign payload with an RSA private key in PKCS#1 PEM format.

    Args:
        payload: string to sign; encoded as UTF-8 before signing.
        private_key: PKCS#1 PEM key material accepted by rsa.PrivateKey.load_pkcs1.

    Returns:
        The signature, base64-encoded and collapsed to a single line.
    """
    # load_pkcs1 is the public loader; the previous _load_pkcs1_pem call used
    # a private python-rsa helper that is not part of the stable API.
    key = rsa.PrivateKey.load_pkcs1(private_key, format='PEM')
    signature = rsa.sign(payload.encode('utf-8'), key, SIGN_TYPE)
    return base64.encodebytes(signature).decode("utf8").replace("\n", "")
def urlencode_data(payload, sign):
    """URL-encode payload values into a sorted query string.

    When sign is truthy it is URL-encoded and appended last as '&sign=...'
    (kept out of the sorted portion, matching the signing convention).
    """
    encoded = sorted("{}={}".format(key, quote_plus(value, encoding='utf-8'))
                     for key, value in payload.items())
    query = "&".join(encoded)
    if sign:
        query += "&sign=%s" % quote_plus(sign, encoding='utf-8')
    return query
def check_sign(payload, sign, public_key=None):
    """Verify a base64 RSA signature over payload against an OpenSSL PEM key.

    Propagates rsa.VerificationError when the signature does not match.
    """
    raw_signature = base64.b64decode(sign)
    pub = rsa.PublicKey.load_pkcs1_openssl_pem(public_key)
    return rsa.verify(payload.encode('utf-8'), raw_signature, pub)
def check_ali_sign(payload, sign, alipay_public_key=None):
    """Like check_sign(), but returns False instead of raising on failure."""
    raw_signature = base64.b64decode(sign)
    pub = rsa.PublicKey.load_pkcs1_openssl_pem(alipay_public_key)
    try:
        return rsa.verify(payload.encode('utf-8'), raw_signature, pub)
    except Exception:
        # rsa.verify raises VerificationError on mismatch; any verification
        # failure is treated as an invalid signature, as in the original.
        return False
| StarcoderdataPython |
11370141 | <reponame>KhronosGroup/COLLADA-CTS
# Copyright (c) 2012 The Khronos Group Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and /or associated documentation files (the "Materials "), to deal in the Materials without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Materials, and to permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
from Core.Gui.Grid.FTextRenderer import *
from Core.Logic.FJudgement import *
# used with an FExecutionGrid
class FJudgementRenderer(FTextRenderer):
    """Grid cell renderer for FJudgement values.

    Colors the cell background by judgement result and pops up the full
    judgement message when the cell is clicked.
    """
    def __init__(self):
        FTextRenderer.__init__(self)

    def Draw(self, grid, attr, dc, rect, row, col, isSelected):
        # Cell value is an FJudgement instance, or None for an empty cell.
        judgement = grid.GetCellValue(row, col)
        if judgement is None:
            FTextRenderer.Draw(self, grid, attr, dc, rect, row, col, isSelected)
            return
        # Background color is selected by the judgement result; anything
        # unrecognized falls back to amber.
        result = judgement.GetResult()
        if result == FJudgement.PASSED:
            background = wx.Color(64, 255, 64)
        elif result == FJudgement.FAILED:
            background = wx.Color(255, 64, 64)
        elif result == FJudgement.NO_SCRIPT:
            background = wx.Color(218, 218, 218)
        else:
            background = wx.Color(255, 230, 128)
        FTextRenderer.ColorDraw(self, dc, rect, background)
        # Render the judgement log, one grid-text line per message line.
        lines = judgement.GetMessage().split("\n")
        self.RenderText(grid, attr, dc, rect, row, col, isSelected,
                        len(lines), lines, None, None, None,
                        wx.Color(0, 0, 0))

    def AddContext(self, grid, row, col, menu, position):
        judgement = grid.GetCellValue(row, col)
        if judgement is None:
            return
        # No context menu options for now.

    def Clicked(self, grid, row, col, position):
        judgement = grid.GetCellValue(row, col)
        if judgement is None:
            return
        # Show the full message in a standard message box titled with the
        # column label.
        dialog = wx.MessageDialog(grid, judgement.GetMessage(), grid.GetColLabelValue(col))
        dialog.ShowModal()
| StarcoderdataPython |
12850271 | <gh_stars>0
def proceso(num, suma=0):
    """Return (num, sum of each digit of num raised to the digit count).

    For an Armstrong number both elements of the returned tuple are equal
    (e.g. 153 -> 1**3 + 5**3 + 3**3 == 153).

    The ``suma`` parameter is unused and kept only for backward
    compatibility with existing callers.
    """
    # Original version accumulated per-digit powers in a list and contained
    # an unreachable numero.clear() after the return; this computes the same
    # total directly.
    digits = str(num)
    total = sum(int(d) ** len(digits) for d in digits)
    return num, total
# Read a count, then that many numbers; report whether each is an
# Armstrong number (equal to the sum of its digits raised to the digit count).
entrada = input()
datos = [input() for _ in range(int(entrada))]
for dato in datos:
    numero, suma_potencias = proceso(int(dato))
    print("Armstrong" if numero == suma_potencias else "Not Armstrong")
import os
import json
import random
import numpy as np
import pytest
import warnings
from pybullet_planning import INF
from pybullet_planning import load_pybullet, connect, wait_for_user, LockRenderer, has_gui, WorldSaver, HideOutput, \
reset_simulation, disconnect
from pybullet_planning import interpolate_poses, multiply, unit_pose, get_relative_pose
from pybullet_planning import interval_generator, sample_tool_ik
from pybullet_planning import Pose, Point, Euler, unit_pose
from pybullet_planning import joints_from_names, link_from_name, has_link, get_collision_fn, get_disabled_collisions, \
draw_pose, set_pose, set_joint_positions, dump_body, dump_world, get_body_body_disabled_collisions, create_obj, \
get_link_pose, clone_body
from pychoreo.process_model.cartesian_process import CartesianProcess, CartesianSubProcess
from pychoreo.process_model.trajectory import Trajectory, MotionTrajectory
from pychoreo.process_model.gen_fn import CartesianPoseGenFn
from pychoreo.utils.stream_utils import get_random_direction_generator, get_enumeration_pose_generator
from pychoreo.utils.parsing_utils import export_trajectory
from pychoreo.cartesian_planner.ladder_graph_interface import solve_ladder_graph_from_cartesian_process_list
from pychoreo.cartesian_planner.sparse_ladder_graph import SparseLadderGraph
import pychoreo_examples
from pychoreo_examples.extrusion.parsing import load_extrusion, create_elements_bodies, parse_saved_trajectory, \
parse_feasible_ee_maps, save_feasible_ee_maps
from pychoreo_examples.extrusion.visualization import set_extrusion_camera, draw_extrusion_sequence, display_trajectories
from pychoreo_examples.extrusion.stream import get_extrusion_ee_pose_compose_fn, get_ee_pose_enumerate_map_fn, \
build_extrusion_cartesian_process_sequence
from pychoreo_examples.extrusion.utils import max_valence_extrusion_direction_routing
from pychoreo_examples.extrusion.transition_planner import solve_transition_between_extrusion_processes
import ikfast_kuka_kr6_r900
def load_extrusion_end_effector(ee_urdf_path):
    """Load the extrusion end-effector URDF, suppressing pybullet's output."""
    with HideOutput():
        return load_pybullet(ee_urdf_path)
def build_extrusion_cartesian_process(elements, node_points, robot, sample_ik_fn, ik_joint_names, base_link_name, extrusion_end_effector, tool_from_root=None, viz_step=False):
    """Build one CartesianProcess per extrusion element and sanity-check it.

    For every element this samples end-effector poses, steps through them
    visually (pausing for user input when a GUI is attached), checks that the
    collision function is still unset, and iterates the IK solutions.

    Returns:
        dict: element (node-index pair) -> CartesianProcess.

    NOTE(review): intended for interactive debugging — with a GUI attached it
    blocks on wait_for_user() for every sampled pose.
    """
    # load EE body, for debugging purpose
    orig_ee_body = load_extrusion_end_effector(extrusion_end_effector)
    ee_body = clone_body(orig_ee_body, visual=False)
    ik_joints = joints_from_names(robot, ik_joint_names)
    cart_traj_dict = {}
    for element in elements:
        process_name = 'extrusion-E{}'.format(element)
        element = tuple(element)
        n1, n2 = element
        path_pts = [node_points[n1], node_points[n2]]
        # an example for EE pose random generation, yaw (rotation around the direction axis) is set to 0
        random_dir_gen = get_random_direction_generator()
        ee_pose_gen_fn = CartesianPoseGenFn(random_dir_gen,
            get_extrusion_ee_pose_compose_fn(interpolate_poses, approach_distance=0.01, pos_step_size=0.003),
            base_path_pts=path_pts)
        # build three sub-processes: approach, extrusion, retreat
        extrusion_sub_procs = [CartesianSubProcess(sub_process_name='approach-extrude'),
                               CartesianSubProcess(sub_process_name='extrude'),
                               CartesianSubProcess(sub_process_name='extrude-retreat')]
        cart_process = CartesianProcess(process_name=process_name,
            robot=robot, ik_joint_names=ik_joint_names,
            sub_process_list=extrusion_sub_procs,
            ee_pose_gen_fn=ee_pose_gen_fn, sample_ik_fn=sample_ik_fn,
            element_identifier=element)
        ee_poses = cart_process.sample_ee_poses()
        for sp_id, sp in enumerate(ee_poses):
            # print('E #{} - sub process #{}'.format(element, sp_id))
            for ee_p in sp:
                # apply a random yaw about the extrusion direction before visualizing
                yaw = random.uniform(-np.pi, np.pi)
                ee_p = multiply(ee_p, Pose(euler=Euler(yaw=yaw)), tool_from_root)
                set_pose(ee_body, ee_p)
                if has_gui(): wait_for_user()
        # this should raise an not implemented error since we haven't specify the collision function yet
        for sp in cart_process.sub_process_list:
            with pytest.raises(NotImplementedError):
                conf = [0] * 6
                sp.collision_fn(conf)
        ik_sols = cart_process.get_ik_sols(ee_poses, check_collision=False)
        for sp_id, sp_jt_sols in enumerate(ik_sols):
            for jt_sols in sp_jt_sols:
                for jts in jt_sols:
                    set_joint_positions(robot, ik_joints, jts)
                    if has_gui(): wait_for_user()
        cart_traj_dict[element] = cart_process
    return cart_traj_dict
@pytest.mark.extrusion
# @pytest.mark.parametrize('solve_method', [('sparse_ladder_graph')])
# @pytest.mark.parametrize('solve_method', [('ladder_graph')])
@pytest.mark.parametrize('solve_method', [('ladder_graph'), ('sparse_ladder_graph')])
def test_extrusion_ladder_graph(viewer, extrusion_problem_path, extrusion_robot_data, extrusion_end_effector, solve_method):
    """End-to-end extrusion planning test.

    Pipeline: load robot/workspace and the extrusion problem, build the
    Cartesian process sequence (pruning infeasible EE directions), solve it
    with either the vanilla or the sparse ladder graph search, plan RRT
    transitions between extrusions, export the result, and optionally replay
    it in the viewer.
    """
    # Planner/discretization knobs; commented values are the slower,
    # higher-quality settings.
    sample_time = 30
    sparse_time_out = 5 # 900
    roll_disc = 10 # 60
    pitch_disc = 10
    yaw_sample_size = 5 if solve_method == 'ladder_graph' else INF
    linear_step_size = 0.003 # m
    jt_res = 0.1 # 0.01
    radius = 1e-6 # 0.002
    shrink = 0.01 # m
    RRT_RESTARTS = 5
    RRT_ITERATIONS = 40
    # * create robot and pb environment
    (robot_urdf, base_link_name, tool_root_link_name, ee_link_name, ik_joint_names, disabled_self_collision_link_names), \
        (workspace_urdf, workspace_robot_disabled_link_names) = extrusion_robot_data
    connect(use_gui=viewer)
    with HideOutput():
        robot = load_pybullet(robot_urdf, fixed_base=True)
        workspace = load_pybullet(workspace_urdf, fixed_base=True)
    ik_fn = ikfast_kuka_kr6_r900.get_ik
    ik_joints = joints_from_names(robot, ik_joint_names)
    # * printout bodies in the scene information
    # dump_world()
    initial_conf = [0.08, -1.57, 1.74, 0.08, 0.17, -0.08]
    set_joint_positions(robot, ik_joints, initial_conf)
    # * get tool TCP from flange (ee_link) transformation
    # this is for the robot that has end effector specified in its URDF
    root_link = link_from_name(robot, tool_root_link_name)
    tool_link = link_from_name(robot, ee_link_name)
    tool_from_root = get_relative_pose(robot, root_link, tool_link)
    if has_gui() :
        tcp_pose = get_link_pose(robot, tool_link)
        draw_pose(tcp_pose)
        # wait_for_user()
    # * specify ik fn wrapper
    def get_sample_ik_fn(robot, ik_fn, ik_joint_names, base_link_name, tool_from_root=None):
        def sample_ik_fn(world_from_tcp):
            if tool_from_root:
                world_from_tcp = multiply(world_from_tcp, tool_from_root)
            return sample_tool_ik(ik_fn, robot, ik_joint_names, base_link_name, world_from_tcp, get_all=True)
        return sample_ik_fn
    # ik generation function stays the same for all cartesian processes
    sample_ik_fn = get_sample_ik_fn(robot, ik_fn, ik_joint_names, base_link_name, tool_from_root)
    # * get problem & pre-computed json file paths
    file_path, seq_file_path, ee_maps_file_path, _, = extrusion_problem_path
    # * load shape (get nodal positions)
    elements, node_points, ground_nodes = load_extrusion(file_path)
    assert all(isinstance(e, tuple) and len(e) == 2 for e in elements)
    assert all(isinstance(pt, np.ndarray) and len(pt) == 3 for pt in node_points)
    assert all(isinstance(gn, int) for gn in ground_nodes)
    # * create element bodies (collision geometries)
    with LockRenderer():
        draw_pose(unit_pose(), length=1.)
        element_bodies = dict(zip(elements,
            create_elements_bodies(node_points, elements, radius=radius, shrink=shrink)))
        assert all(isinstance(e_body, int) for e_body in element_bodies.values())
        set_extrusion_camera(node_points)
    # * create cartesian processes without a sequence being given, with random pose generators
    # this is just a demonstration to help us do some sanity check with visualization
    # with WorldSaver():
    #     _ = build_extrusion_cartesian_process(elements, node_points, robot, sample_ik_fn, ik_joint_names,
    #           base_link_name, extrusion_end_effector, tool_from_root, viz_step=True)
    # * load precomputed sequence
    parsed_ee_fmaps = None
    ee_pose_map_fn = None
    try:
        parsed_ee_fmaps, ee_pose_map_fn = parse_feasible_ee_maps(ee_maps_file_path)
        if parsed_ee_fmaps is None:
            with open(seq_file_path, 'r') as f:
                seq_data = json.loads(f.read())
            print('Precomputed sequence loaded: ', seq_file_path)
            element_sequence = [tuple(e) for e in seq_data['plan']]
        else:
            print('Precomputed sequence and feasible_ee_maps loaded.')
            element_sequence = [tuple(e) for e in parsed_ee_fmaps.keys()]
    except:
        # fall back to the raw element order when no precomputed data parses
        warnings.warn('Parsing precomputed sequence file failed - using default element sequence.')
        element_sequence = [tuple(e) for e in elements]
    assert all(isinstance(e, tuple) and len(e) == 2 for e in element_sequence)
    # * compute reverse flags based on the precomputed sequence
    reverse_flags = max_valence_extrusion_direction_routing(element_sequence, elements, node_points, ground_nodes)
    # * construct ignored body-body links for collision checking
    # in this case, including self-collision between links of the robot
    disabled_self_collisions = get_disabled_collisions(robot, disabled_self_collision_link_names)
    # and links between the robot and the workspace (e.g. robot_base_link to base_plate)
    extra_disabled_collisions = get_body_body_disabled_collisions(robot, workspace, workspace_robot_disabled_link_names)
    ee_body = load_extrusion_end_effector(extrusion_end_effector)
    # * building collision function based on the given sequence
    with LockRenderer(False):
        cart_process_seq, e_fmaps = build_extrusion_cartesian_process_sequence(
            element_sequence, element_bodies, node_points, ground_nodes,
            robot, ik_joint_names, sample_ik_fn, ee_body,
            ee_fmaps=parsed_ee_fmaps, ee_pose_map_fn=ee_pose_map_fn,
            roll_disc=roll_disc, pitch_disc=pitch_disc, yaw_sample_size=yaw_sample_size, sample_time=sample_time,
            linear_step_size=linear_step_size, tool_from_root=tool_from_root,
            self_collisions=True, disabled_collisions=disabled_self_collisions,
            obstacles=[workspace], extra_disabled_collisions=extra_disabled_collisions,
            reverse_flags=reverse_flags, verbose=True)
    here = os.path.dirname(__file__)
    save_dir = os.path.join(here, 'test_data')
    save_feasible_ee_maps(e_fmaps, roll_disc, pitch_disc, save_dir, overwrite=True, shape_file_path=file_path, indent=None)
    # sanity check
    exist_nonfeasible = False
    for element, fmap in e_fmaps.items():
        if sum(fmap) == 0:
            exist_nonfeasible = True
            print('E#{} feasible map empty, precomputed sequence should have a feasible ee pose range!'.format(element))
    assert not exist_nonfeasible
    assert isinstance(cart_process_seq, list)
    assert all(isinstance(cp, CartesianProcess) for cp in cart_process_seq)
    assert all([cart.element_identifier == e for cart, e in zip(cart_process_seq, element_sequence)])
    # * draw the pruned EE direction set
    if has_gui():
        # just move the element bodies and ee_body away to clear the visualized scene
        set_pose(ee_body, unit_pose())
        if not ee_pose_map_fn:
            ee_pose_map_fn = get_ee_pose_enumerate_map_fn(roll_disc, pitch_disc)
        for e_body in element_bodies.values(): set_pose(e_body, unit_pose())
        draw_extrusion_sequence(node_points, element_bodies, element_sequence, e_fmaps, ee_pose_map_fn=ee_pose_map_fn,
            line_width=5, direction_len=0.005, time_step=INF)
    viz_inspect = False
    with LockRenderer(not viz_inspect):
        if solve_method == 'ladder_graph':
            print('\n'+'#' * 10)
            print('Solving with the vanilla ladder graph search algorithm.')
            cart_process_seq = solve_ladder_graph_from_cartesian_process_list(cart_process_seq,
                verbose=True, warning_pause=False, viz_inspect=viz_inspect, check_collision=True)
        elif solve_method == 'sparse_ladder_graph':
            print('\n'+'#' * 10)
            print('Solving with the sparse ladder graph search algorithm.')
            sparse_graph = SparseLadderGraph(cart_process_seq)
            sparse_graph.find_sparse_path(verbose=True, vert_timeout=sample_time, sparse_sample_timeout=sparse_time_out)
            cart_process_seq = sparse_graph.extract_solution(verbose=True)
        else:
            raise ValueError('Invalid solve method!')
        assert all(isinstance(cp, CartesianProcess) for cp in cart_process_seq)
    # * extract trajectory from CartProcesses and add tags
    print_trajs = [[] for _ in range(len(cart_process_seq))]
    for cp_id, cp in enumerate(cart_process_seq):
        for sp_id, sp in enumerate(cp.sub_process_list):
            assert sp.trajectory, '{}-{} does not have a Cartesian plan found!'.format(cp, sp)
            print_trajs[cp_id].append(sp.trajectory)
            print(sp.trajectory)
    full_trajs = print_trajs
    # * transition motion planning between extrusions
    return2idle = True
    transition_traj = solve_transition_between_extrusion_processes(robot, ik_joints, print_trajs, element_bodies, initial_conf,
        disabled_collisions=disabled_self_collisions,
        obstacles=[workspace], return2idle=return2idle,
        resolutions=[jt_res]*len(ik_joints),
        restarts=RRT_RESTARTS, iterations=RRT_ITERATIONS)
    assert all(isinstance(tt, MotionTrajectory) for tt in transition_traj)
    if return2idle:
        # the extra final transition brings the robot back to initial_conf
        transition_traj[-1].tag = 'return2idle'
        assert len(transition_traj)-1 == len(print_trajs)
    else:
        assert len(transition_traj) == len(print_trajs)
    # * weave the Cartesian and transition processses together
    for cp_id, print_trajs in enumerate(full_trajs):
        print_trajs.insert(0, transition_traj[cp_id])
    if return2idle:
        full_trajs[-1].append(transition_traj[-1])
    here = os.path.dirname(__file__)
    save_dir = os.path.join(here, 'results')
    export_trajectory(save_dir, full_trajs, ee_link_name, indent=None, shape_file_path=file_path, include_robot_data=False, include_link_path=True)
    # * disconnect and close pybullet engine used for planning, visualizing trajectories will start a new one
    reset_simulation()
    disconnect()
    # visualize plan
    if viewer:
        display_trajectories(robot_urdf, ik_joint_names, ee_link_name, node_points, ground_nodes, full_trajs,
            workspace_urdf=workspace_urdf, animate=True, cart_time_step=0.02, tr_time_step=0.05)
@pytest.mark.extrusion_resolve_trans
def test_resolve_trans(viewer, extrusion_problem_path, extrusion_robot_data):
    """Re-plan only the transition motions of a previously saved result.

    Loads a saved trajectory file, strips the old MotionTrajectory segments
    (keeping the Cartesian extrusion segments), re-solves the RRT transitions
    between extrusions, re-exports, and optionally replays the plan.
    """
    jt_res = 0.01 # 0.01
    shrink = 0.00 # m
    RRT_RESTARTS = 5
    RRT_ITERATIONS = 40
    # * create robot and pb environment
    (robot_urdf, base_link_name, tool_root_link_name, ee_link_name, ik_joint_names, disabled_self_collision_link_names), \
        (workspace_urdf, workspace_robot_disabled_link_names) = extrusion_robot_data
    # * get problem & pre-computed json file paths
    file_path, _, _, result_file_name = extrusion_problem_path
    # * load shape (get nodal positions)
    elements, node_points, ground_nodes = load_extrusion(file_path)
    # * parse saved trajectory results
    here = os.path.dirname(__file__)
    save_file_path = os.path.join(here, 'results', result_file_name)
    connect(use_gui=False)
    with HideOutput():
        robot = load_pybullet(robot_urdf, fixed_base=True)
        workspace = load_pybullet(workspace_urdf, fixed_base=True)
    ik_joints = joints_from_names(robot, ik_joint_names)
    initial_conf = [0.08, -1.57, 1.74, 0.08, 0.17, -0.08]
    disabled_self_collisions = get_disabled_collisions(robot, disabled_self_collision_link_names)
    extra_disabled_collisions = get_body_body_disabled_collisions(robot, workspace, workspace_robot_disabled_link_names)
    # * create element bodies (collision geometries)
    with LockRenderer():
        element_bodies = dict(zip(elements,
            create_elements_bodies(node_points, elements, radius=0.002, shrink=shrink)))
        assert all(isinstance(e_body, int) for e_body in element_bodies.values())
        set_extrusion_camera(node_points)
    # * parse saved trajectory
    old_full_trajs = parse_saved_trajectory(save_file_path)
    # keep only the non-transition (Cartesian) segments of each process
    print_trajs = []
    for cp_id, cp_trajs in enumerate(old_full_trajs):
        cp_print_trajs = []
        for trajectory in cp_trajs:
            if not isinstance(trajectory, MotionTrajectory):
                cp_print_trajs.append(trajectory)
        print_trajs.append(cp_print_trajs)
    full_trajs = print_trajs
    # * transition motion planning between extrusions
    return2idle = True
    transition_traj = solve_transition_between_extrusion_processes(robot, ik_joints, print_trajs, element_bodies, initial_conf,
        disabled_collisions=disabled_self_collisions,
        obstacles=[workspace], return2idle=return2idle,
        resolutions=[jt_res]*len(ik_joints),
        restarts=RRT_RESTARTS, iterations=RRT_ITERATIONS)
    assert all(isinstance(tt, MotionTrajectory) for tt in transition_traj)
    if return2idle:
        # the extra final transition brings the robot back to initial_conf
        transition_traj[-1].tag = 'return2idle'
        assert len(transition_traj)-1 == len(print_trajs)
    else:
        assert len(transition_traj) == len(print_trajs)
    # * weave the Cartesian and transition processses together
    for cp_id, ctrajs in enumerate(full_trajs):
        ctrajs.insert(0, transition_traj[cp_id])
    if return2idle:
        full_trajs[-1].append(transition_traj[-1])
    here = os.path.dirname(__file__)
    save_dir = os.path.join(here, 'results')
    export_trajectory(save_dir, full_trajs, ee_link_name,
        indent=None, shape_file_path=file_path, include_robot_data=False, include_link_path=True)
    # * disconnect and close pybullet engine used for planning, visualizing trajectories will start a new one
    reset_simulation()
    disconnect()
    # visualize plan
    if viewer:
        display_trajectories(robot_urdf, ik_joint_names, ee_link_name, node_points, ground_nodes, full_trajs,
            workspace_urdf=workspace_urdf, animate=True, cart_time_step=0.07, tr_time_step=0.01)
@pytest.mark.extrusion_viz
def test_parse_and_visualize_results(viewer, extrusion_problem_path, extrusion_robot_data, extrusion_end_effector):
    """Parse a saved extrusion trajectory file and optionally replay it.

    Exercises ``parse_saved_trajectory`` under three conditions:
    (1) no pybullet connection -> expects a UserWarning,
    (2) connected but no robot body loaded -> expects ValueError,
    (3) connected with the robot loaded -> expects success.
    Finally replays the parsed trajectories if a GUI viewer was requested.
    """
    # * create robot and pb environment
    (robot_urdf, base_link_name, tool_root_link_name, ee_link_name, ik_joint_names, disabled_self_collision_link_names), \
        (workspace_urdf, workspace_robot_disabled_link_names) = extrusion_robot_data
    # * get problem & pre-computed json file paths
    file_path, _, _, result_file_name = extrusion_problem_path
    # * load shape (get nodal positions)
    _, node_points, ground_nodes = load_extrusion(file_path)
    # * parse saved trajectory results (json next to this test file)
    here = os.path.dirname(__file__)
    save_file_path = os.path.join(here, 'results', result_file_name)
    # parse without connect: should warn but still return trajectories
    with pytest.warns(UserWarning, match='Pybullet environment not connected*'):
        full_trajs = parse_saved_trajectory(save_file_path)
    # parse with connect but robot body not added: should raise
    connect(use_gui=False)
    with pytest.raises(ValueError):
        full_trajs = parse_saved_trajectory(save_file_path)
    disconnect()
    # parse with connect and the robot loaded: should succeed
    connect(use_gui=False)
    with HideOutput():
        robot = load_pybullet(robot_urdf, fixed_base=True)
    full_trajs = parse_saved_trajectory(save_file_path)
    disconnect()
    # visualize plan (only when the test session requests a GUI viewer)
    if viewer:
        display_trajectories(robot_urdf, ik_joint_names, ee_link_name, node_points, ground_nodes, full_trajs,
                             workspace_urdf=workspace_urdf, animate=True, cart_time_step=0.07, tr_time_step=0.01)
| StarcoderdataPython |
199153 | amount = 20
num=1  # animation phase; advanced by 0.5 each frame in draw()
def setup():
    # Processing sketch setup: 640x640 canvas, translucent blue stroke.
    size(640, 640)
    stroke(0, 150, 255, 100)
def draw():
    """Draw one frame: a ring of mirrored bezier 'petals' around the center."""
    global num, amount
    # Translucent black rect fades the previous frame (motion-trail effect).
    fill(0, 40)
    rect(-1, -1, width+1, height+1)
    # Radius follows the mouse x position, remapped to 1..250.
    # NOTE: 'map' here is Processing's remap function (5 args), not the builtin.
    maxX = map(mouseX, 0, width, 1, 250)
    translate(width/2, height/2)
    for i in range(0,360,amount):
        # Two points on the circle, phase-shifted in opposite directions.
        x = sin(radians(i+num)) * maxX
        y = cos(radians(i+num)) * maxX
        x2 = sin(radians(i+amount-num)) * maxX
        y2 = cos(radians(i+amount-num)) * maxX
        noFill()
        # Mirrored bezier curves joining the two points.
        bezier(x, y, x-x2, y-y2, x2-x, y2-y, x2, y2)
        bezier(x, y, x+x2, y+y2, x2+x, y2+y, x2, y2)
        # Dots at both endpoints.
        fill(0, 150, 255)
        ellipse(x, y, 5, 5)
        ellipse(x2, y2, 5, 5)
    num += 0.5;  # advance the phase (stray semicolon kept; harmless in Python)
| StarcoderdataPython |
3254191 | <filename>apps/linux/vim/plugins/ultisnips/snippets_snippets.py
from talon import Context
ctx = Context()
# Context is active only when: UltiSnips is tagged on, the snippet mode and
# command mode are both active, and the buffer language is 'snippets'.
ctx.matches = r"""
tag: user.vim_ultisnips
mode: user.snippets
mode: command
and code.language: snippets
"""
# spoken name -> snippet name
ultisnips_snippets = {
    "snippet": "usnip",
    "visual": "vis",
}
# Per-user additions can go here without touching the shared table above.
private_snippets = {}
# Merge both tables into the Talon list consumed by voice commands.
ctx.lists["user.snippets"] = {**ultisnips_snippets, **private_snippets}
| StarcoderdataPython |
391593 | from collections import Counter
import json
import os.path as osp
import time
import torch
import numpy as np
from mmal.data_utils import CustomDataset
from mmal.dist_utils import gather
from mmal.uncertainty_utils import calculate_entropy_np, get_unique_indices
import mmcv
from mmcv.runner import get_dist_info
from mmdet.datasets import build_dataloader
# DEFAULT VARIABLES
# Pixel bounds for the auto-derived training image resolution
# (see get_image_size below).
MAX_IMAGE_HEIGHT = 640
MAX_IMAGE_WIDTH = 640
MIN_IMAGE_HEIGHT = 128
MIN_IMAGE_WIDTH = 128
def read_ann(args):
    """Summarize the COCO-style training annotation file.

    Reads ``<train_dataset_dir>/annotations/train.json`` and returns a dict
    with the most common width/height ratio (rounded to 1 decimal place),
    whether every image shares that single ratio, and the category count.
    """
    with open(osp.join(args.train_dataset_dir, "annotations/train.json"), "r") as fin:
        ann = json.load(fin)
    # Count each image's aspect ratio, rounded to one decimal place.
    ratio_counts = Counter(
        round(img["width"] / img["height"], 1) for img in ann["images"]
    )
    dominant_ratio, _ = ratio_counts.most_common(n=1)[0]
    return {
        "image_ratio": dominant_ratio,
        "is_single_ratio": len(ratio_counts) == 1,
        "num_classes": len(ann["categories"]),
    }
def get_image_size(args, image_ratio, is_single_ratio):
    """Get the most appropriate (height, width) image size for training."""
    max_h = args.max_image_height
    max_w = args.max_image_width
    if not is_single_ratio:
        # Mixed aspect ratios: train on square images bounded by both maxima.
        side = min(max_h, max_w)
        height, width = side, side
    else:
        # Single aspect ratio: keep it while staying inside the max bounds.
        if image_ratio >= round(max_w / max_h, 1):
            width = max_w
            height = round(width / image_ratio)
        else:
            height = max_h
            width = round(height * image_ratio)
    # Snap both dimensions down to a multiple of 32.
    width -= width % 32
    height -= height % 32
    # Fall back to the maximum resolution if the derivation produced a
    # degenerate (too small) size.
    if width < MIN_IMAGE_WIDTH or height < MIN_IMAGE_HEIGHT:
        width, height = max_w, max_h
    return (height, width)
def custom_logic_pretraining(cfg, args, logger, orig_batch_size):
    """Set custom attributes for the config in-place.

    Derives an image size from the training annotations, wires up data
    paths, sets the detector's class count, rescales the learning rate for
    the actual (batch size x GPU) budget, and tweaks checkpoint/NMS
    settings. ``orig_batch_size`` is the samples-per-gpu the base config
    was tuned for (MMLab configs assume 8 GPUs at that batch size).
    """
    # Read the annotation file
    ann_info = read_ann(args)
    # Auto infer an appropriate image sizes
    image_size = get_image_size(
        args, ann_info["image_ratio"], ann_info["is_single_ratio"])
    logger.info(
        f"Chosen image size (h, w): {image_size}, is_single_ratio: "
        f"{ann_info['is_single_ratio']}"
    )
    # Set image sizes on train/val/test pipelines. The asserts pin the
    # expected pipeline layout and fail loudly if the base config changes.
    assert cfg.data.train.pipeline[2].type == "Resize"
    cfg.data.train.pipeline[2].img_scale = image_size
    assert cfg.data.val.pipeline[1].type == "MultiScaleFlipAug"
    cfg.data.val.pipeline[1].img_scale = image_size
    assert cfg.data.test.pipeline[1].type == "MultiScaleFlipAug"
    cfg.data.test.pipeline[1].img_scale = image_size
    # If single ratio, enable torch.cudnn.benchmark
    # Otherwise disable it
    # See https://discuss.pytorch.org/t/what-does-torch-backends-cudnn-benchmark-do/5936
    torch.backends.cudnn.benchmark = ann_info["is_single_ratio"]
    # Set data paths
    img_dir = osp.join(args.train_dataset_dir, "images/default/")
    train_ann_file = osp.join(args.train_dataset_dir, "annotations/train.json")
    val_ann_file = osp.join(args.train_dataset_dir, "annotations/val.json")
    assert osp.exists(img_dir) and osp.exists(train_ann_file) and \
        osp.exists(val_ann_file)
    cfg.data.train.ann_file = train_ann_file
    cfg.data.train.img_prefix = img_dir
    cfg.data.val.ann_file = val_ann_file
    cfg.data.val.img_prefix = img_dir
    # NOTE(review): test split reuses the val annotations — presumably
    # intentional (no held-out test set); confirm.
    cfg.data.test.ann_file = val_ann_file
    cfg.data.test.img_prefix = img_dir
    # Set number of classes
    cfg.model.roi_head.bbox_head.num_classes = ann_info["num_classes"]
    # Checkpoint
    cfg.checkpoint_config.interval = int(2 ** 32)  # don't save after every epoch
    cfg.evaluation.save_best = "bbox_mAP"  # save best model based on this metric
    # Learning rate rescaling; note that in MMLab, all configs are
    # to be used with 8 GPUs
    if not args.no_autoscale_lr:
        batch_size = cfg.data.samples_per_gpu
        num_gpus = len(cfg.gpu_ids)
        cfg.optimizer['lr'] = cfg.optimizer['lr'] * \
            (batch_size * num_gpus) / (orig_batch_size * 8)
        logger.info(
            f"Learning rate has been rescaled to {cfg.optimizer['lr']}"
        )
    # Allow specifying backbone path (only when the base config still points
    # at the default torchvision resnet50 weights).
    if args.backbone_path is not None:
        assert cfg.model.backbone.init_cfg.checkpoint \
            == "torchvision://resnet50"
        cfg.model.backbone.init_cfg.checkpoint = args.backbone_path
    # NMS class agnostic
    assert cfg.model.test_cfg.rcnn.nms.type == "nms"
    cfg.model.test_cfg.rcnn.nms.class_agnostic = True
def custom_logic_posttraining(runner, cfg, logger):
    """Hook invoked after training finishes.

    Intentionally a no-op placeholder; extend it to inspect the ``runner``
    or adjust ``cfg`` once training completes.
    """
    return None
@torch.no_grad()
def _active_learning_inference(model, data, device):
    """Run one image through the detector and score its uncertainty.

    Returns a dict with the per-class predicted boxes, class probabilities,
    per-box normalized entropies, and the max entropy as the image-level
    uncertainty (-1 when the model produced no predictions).
    """
    # pred_bboxes: list[list[ndarray]]; probs: list[list[ndarray]]
    # outer: sample-level; inner: class-level
    pred_bboxes, probs = model.simple_test(
        img=data['img'][0].to(device),
        img_metas=data['img_metas'][0].data[0],
        rescale=True,
        return_probs=True,
    )
    # Unpack: the dataloader feeds exactly one sample per batch
    # (see active_learning_inference, samples_per_gpu=1).
    assert len(pred_bboxes) == len(probs) == 1
    pred_bboxes = pred_bboxes[0]
    probs = probs[0]
    # Calculate entropy per box, rescaled to [0, 1] by the maximum possible
    # entropy log(C + 1); the +1 accounts for the background class.
    entropys = [
        calculate_entropy_np(
            prob, dim=1, normalized=True, assert_normalized=True,
        ) / np.log(len(model.CLASSES) + 1)  # rescale; +1 for background class
        for prob in probs
    ]
    entropys_ = np.concatenate(entropys)
    if len(entropys_) == 0:  # no predictions
        max_entropy = -1  # sentinel: least uncertain / nothing detected
    else:
        max_entropy = entropys_.max()
    return {
        "boxes": pred_bboxes,
        "probs": probs,
        "uncertainties": entropys,
        "final_uncertainty": max_entropy,
    }
@torch.no_grad()
def active_learning_inference(cfg, model, data_dir, patterns, logger):
    """Score every image matching ``patterns`` under ``data_dir``.

    Runs (possibly distributed) single-image inference, gathers results
    across GPUs, removes duplicates introduced by the distributed sampler,
    and returns a dict mapping file name -> prediction dict
    (see ``_active_learning_inference``).
    """
    # Prepare
    model.eval()
    device = next(model.parameters()).device
    dataset = CustomDataset(cfg, data_dir, patterns, logger)
    dataloader = build_dataloader(
        dataset,
        samples_per_gpu=1,  # always 1
        workers_per_gpu=cfg.data.workers_per_gpu,
        num_gpus=len(cfg.gpu_ids),
        dist=cfg.distributed,
        shuffle=False,
        persistent_workers=cfg.data.get('persistent_workers', False),
    )
    # Adapted from mmdet/apis/test.py
    rank, world_size = get_dist_info()
    if rank == 0:
        prog_bar = mmcv.ProgressBar(len(dataset))
    time.sleep(0.5)  # This line can prevent deadlock problem in some cases.
    # Inference
    keys = []
    preds = []
    for data in dataloader:
        assert len(data["img_metas"]) == len(data["img"]) == 1  # single-scale
        path = data["img_metas"][0].data[0][0]["ori_filename"]
        # Key each prediction by the file's base name.
        keys.append(osp.split(path)[1])
        pred = _active_learning_inference(model, data, device)
        preds.append(pred)
        # Rank 0 advances the bar once per rank so it tracks global progress.
        if rank == 0:
            for _ in range(world_size):
                prog_bar.update()
    # Gather from multiple GPUs
    keys, preds = gather(
        [keys, preds],
        device=device,
    )
    # Flatten the per-rank lists into one list each.
    keys = sum(keys, [])
    preds = sum(preds, [])
    # Remove duplicates (the distributed sampler may pad/repeat samples).
    unique_indices = get_unique_indices(keys, device=device)
    keys = [keys[i] for i in unique_indices]
    preds = [preds[i] for i in unique_indices]
    assert len(keys) == len(preds) == len(dataset)
    results = dict(zip(keys, preds))
    return results
| StarcoderdataPython |
8073378 | <filename>weave/setup.py
#!/usr/bin/env python
from __future__ import absolute_import, print_function
from os.path import join
def configuration(parent_package='',top_path=None):
    """numpy.distutils build configuration for the 'weave' subpackage.

    Registers the test suite, the scxx C++ support headers, and the bundled
    blitz headers as data directories shipped with the package.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('weave',parent_package,top_path)
    config.add_data_dir('tests')
    config.add_data_dir('scxx')
    config.add_data_dir(join('blitz','blitz'))
    return config
| StarcoderdataPython |
5120151 | <gh_stars>100-1000
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for transform_fn_io."""
import os
import apache_beam as beam
from apache_beam.testing import util as beam_test_util
import tensorflow as tf
import tensorflow_transform as tft
from tensorflow_transform.beam import tft_unit
from tensorflow_transform.beam.tft_beam_io import beam_metadata_io
from tensorflow_transform.beam.tft_beam_io import transform_fn_io
from tensorflow_transform.beam.tft_beam_io import test_metadata
from tensorflow_transform.tf_metadata import metadata_io
from tensorflow.python.lib.io import file_io # pylint: disable=g-direct-tensorflow-import
mock = tf.compat.v1.test.mock  # alias to TF's bundled mock, used for patching below
# TODO(varshaan): Remove global variable and use a class attribute.
# Flipped to True the first time the mocked copy helper is invoked, so the
# retry test fails exactly once (see testWriteTransformFnIsRetryable).
_COPY_TREE_TO_UNIQUE_TEMP_DIR_CALLED = False
class TransformFnIoTest(tft_unit.TransformTestCase):
  """Tests for the ReadTransformFn / WriteTransformFn beam PTransforms."""

  def testReadTransformFn(self):
    """ReadTransformFn yields the transform_fn dir and the saved metadata."""
    path = self.get_temp_dir()
    # NOTE: we don't need to create or write to the transform_fn directory since
    # ReadTransformFn never inspects this directory.
    transform_fn_dir = os.path.join(
        path, tft.TFTransformOutput.TRANSFORM_FN_DIR)
    transformed_metadata_dir = os.path.join(
        path, tft.TFTransformOutput.TRANSFORMED_METADATA_DIR)
    metadata_io.write_metadata(test_metadata.COMPLETE_METADATA,
                               transformed_metadata_dir)
    with beam.Pipeline() as pipeline:
      saved_model_dir_pcoll, metadata = (
          pipeline | transform_fn_io.ReadTransformFn(path))
      beam_test_util.assert_that(
          saved_model_dir_pcoll,
          beam_test_util.equal_to([transform_fn_dir]),
          label='AssertSavedModelDir')
      # NOTE: metadata is currently read in a non-deferred manner.
      self.assertEqual(metadata, test_metadata.COMPLETE_METADATA)

  def testWriteTransformFn(self):
    """WriteTransformFn materializes both the SavedModel dir and metadata."""
    transform_output_dir = os.path.join(self.get_temp_dir(), 'output')
    with beam.Pipeline() as pipeline:
      # Create an empty directory for the source saved model dir.
      saved_model_dir = os.path.join(self.get_temp_dir(), 'source')
      file_io.recursive_create_dir(saved_model_dir)
      saved_model_dir_pcoll = (
          pipeline | 'CreateSavedModelDir' >> beam.Create([saved_model_dir]))
      # Combine test metadata with a dict of PCollections resolving futures.
      deferred_metadata = pipeline | 'CreateDeferredMetadata' >> beam.Create(
          [test_metadata.COMPLETE_METADATA])
      metadata = beam_metadata_io.BeamDatasetMetadata(
          test_metadata.INCOMPLETE_METADATA, deferred_metadata, {})
      _ = ((saved_model_dir_pcoll, metadata)
           | transform_fn_io.WriteTransformFn(transform_output_dir))
    # Test reading with TFTransformOutput
    tf_transform_output = tft.TFTransformOutput(transform_output_dir)
    metadata = tf_transform_output.transformed_metadata
    self.assertEqual(metadata, test_metadata.COMPLETE_METADATA)
    transform_fn_dir = tf_transform_output.transform_savedmodel_dir
    self.assertTrue(file_io.file_exists(transform_fn_dir))
    self.assertTrue(file_io.is_directory(transform_fn_dir))

  def testWriteTransformFnIsIdempotent(self):
    """A failed write must not leave a partial transform output behind."""
    transform_output_dir = os.path.join(self.get_temp_dir(), 'output')

    def mock_write_metadata_expand(unused_self, unused_metadata):
      # Force the metadata-writing stage to fail.
      raise ArithmeticError('Some error')

    with beam.Pipeline() as pipeline:
      # Create an empty directory for the source saved model dir.
      saved_model_dir = os.path.join(self.get_temp_dir(), 'source')
      saved_model_dir_pcoll = (
          pipeline | 'CreateSavedModelDir' >> beam.Create([saved_model_dir]))
      with mock.patch.object(transform_fn_io.beam_metadata_io.WriteMetadata,
                             'expand', mock_write_metadata_expand):
        with self.assertRaisesRegexp(ArithmeticError, 'Some error'):
          _ = ((saved_model_dir_pcoll, object())
               | transform_fn_io.WriteTransformFn(transform_output_dir))
    # Nothing should have been written on failure.
    self.assertFalse(file_io.file_exists(transform_output_dir))

  def testWriteTransformFnIsRetryable(self):
    """A transient copy failure is retried and eventually succeeds."""
    tft.test_case.skip_if_external_environment(
        'Retries are currently not available on this environment.')
    original_copy_tree_to_unique_temp_dir = (
        transform_fn_io._copy_tree_to_unique_temp_dir)

    def mock_copy_tree_to_unique_temp_dir(source, base_temp_dir_path):
      """Mocks transform_fn_io._copy_tree to fail the first time it is called by this test, thus forcing a retry which should succeed."""
      global _COPY_TREE_TO_UNIQUE_TEMP_DIR_CALLED
      if not _COPY_TREE_TO_UNIQUE_TEMP_DIR_CALLED:
        _COPY_TREE_TO_UNIQUE_TEMP_DIR_CALLED = True
        # Perform the copy first, THEN fail, so the retry also has to clean
        # up the leftover temp tree from the failed attempt.
        original_copy_tree_to_unique_temp_dir(source, base_temp_dir_path)
        raise ArithmeticError('Some error')
      return original_copy_tree_to_unique_temp_dir(source, base_temp_dir_path)

    with self._makeTestPipeline() as pipeline:
      transform_output_dir = os.path.join(self.get_temp_dir(), 'output')
      # Create an empty directory for the source saved model dir.
      saved_model_dir = os.path.join(self.get_temp_dir(), 'source')
      file_io.recursive_create_dir(saved_model_dir)
      saved_model_path = os.path.join(saved_model_dir, 'saved_model')
      with file_io.FileIO(saved_model_path, mode='w') as f:
        f.write('some content')
      saved_model_dir_pcoll = (
          pipeline | 'CreateSavedModelDir' >> beam.Create([saved_model_dir]))
      # Combine test metadata with a dict of PCollections resolving futures.
      deferred_metadata = pipeline | 'CreateDeferredMetadata' >> beam.Create(
          [test_metadata.COMPLETE_METADATA])
      metadata = beam_metadata_io.BeamDatasetMetadata(
          test_metadata.INCOMPLETE_METADATA, deferred_metadata, {})
      with mock.patch.object(transform_fn_io, '_copy_tree_to_unique_temp_dir',
                             mock_copy_tree_to_unique_temp_dir):
        _ = ((saved_model_dir_pcoll, metadata)
             | transform_fn_io.WriteTransformFn(transform_output_dir))
    # Test reading with TFTransformOutput
    tf_transform_output = tft.TFTransformOutput(transform_output_dir)
    metadata = tf_transform_output.transformed_metadata
    self.assertEqual(metadata, test_metadata.COMPLETE_METADATA)
    transform_fn_dir = tf_transform_output.transform_savedmodel_dir
    self.assertTrue(file_io.file_exists(transform_fn_dir))
    self.assertTrue(file_io.is_directory(transform_fn_dir))
    # Check temp directory created by failed run was cleaned up.
    self.assertEqual(2, len(file_io.list_directory(transform_output_dir)))
if __name__ == '__main__':
  tf.test.main()  # run these tests via TensorFlow's test runner
| StarcoderdataPython |
4988995 | <filename>AttentiveChrome/models.py
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from pdb import set_trace as stop
def batch_product(iput, mat2):
    """Multiply every sample of a batch by a shared projection matrix.

    Args:
        iput: tensor of shape (batch, n, d).
        mat2: tensor of shape (d, 1), e.g. a learned context vector.

    Returns:
        Tensor of shape (batch, n): each (n, d) slice of ``iput`` multiplied
        by ``mat2`` with the trailing singleton dimension removed.

    This replaces the original per-sample ``torch.mm`` loop (which rebuilt
    the result with repeated ``torch.cat`` and crashed on an empty batch)
    with a single batched ``matmul``: mat2 broadcasts across the batch
    dimension, (B, n, d) @ (d, 1) -> (B, n, 1).
    """
    return torch.matmul(iput, mat2).squeeze(2)
class rec_attention(nn.Module):
    # attention with bin context vector per HM and HM context vector
    def __init__(self,hm,args):
        """Soft attention with a learned context vector.

        hm: False for bin-level attention (uses the full, possibly
        bidirectional representation size); True for HM-level attention
        (uses the unidirectional size).
        """
        super(rec_attention,self).__init__()
        self.num_directions=2 if args.bidirectional else 1
        if (hm==False):
            self.bin_rep_size=args.bin_rnn_size*self.num_directions
        else:
            self.bin_rep_size=args.bin_rnn_size
        # Learned context vector used to score each sequence position.
        self.bin_context_vector=nn.Parameter(torch.Tensor(self.bin_rep_size,1),requires_grad=True)
        self.softmax=nn.Softmax(dim=1)
        self.bin_context_vector.data.uniform_(-0.1, 0.1)

    def forward(self,iput):
        """Return (attention-pooled representation, attention weights).

        iput: (batch, source_length, bin_rep_size) representations.
        """
        # alpha: (batch, source_length) softmax of <rep_i, context_vector>.
        alpha=self.softmax(batch_product(iput,self.bin_context_vector))
        [batch_size,source_length,bin_rep_size2]=iput.size()
        # Weighted sum over positions -> (batch, 1, bin_rep_size).
        repres=torch.bmm(alpha.unsqueeze(2).view(batch_size,-1,source_length),iput)
        return repres,alpha
class recurrent_encoder(nn.Module):
    # modular LSTM encoder
    def __init__(self,n_bins,ip_bin_size,hm,args):
        """LSTM encoder followed by attention pooling.

        n_bins: sequence length; ip_bin_size: per-step input size;
        hm: True for the HM-level encoder — halves the hidden size so the
        (direction-multiplied) output size matches the bin-level encoder's.
        """
        super(recurrent_encoder,self).__init__()
        self.bin_rnn_size=args.bin_rnn_size
        self.ipsize=ip_bin_size
        self.seq_length=n_bins
        self.num_directions=2 if args.bidirectional else 1
        if (hm==False):
            self.bin_rnn_size=args.bin_rnn_size
        else:
            self.bin_rnn_size=args.bin_rnn_size // 2
        self.bin_rep_size=self.bin_rnn_size*self.num_directions
        self.rnn=nn.LSTM(self.ipsize,self.bin_rnn_size,num_layers=args.num_layers,dropout=args.dropout,bidirectional=args.bidirectional)
        self.bin_attention=rec_attention(hm,args)

    def outputlength(self):
        # Size of the pooled representation produced by forward().
        return self.bin_rep_size

    def forward(self,single_hm,hidden=None):
        """Encode one sequence and pool it with attention.

        single_hm: (seq_len, batch, input_size) — sequence-first, as
        expected by nn.LSTM. Returns (pooled representation, attention
        weights).
        """
        bin_output, hidden = self.rnn(single_hm,hidden)
        # LSTM output is (seq_len, batch, hid); attention expects batch-first.
        bin_output = bin_output.permute(1,0,2)
        hm_rep,bin_alpha = self.bin_attention(bin_output)
        return hm_rep,bin_alpha
class AttrDict(dict):
    """A dict whose keys are also readable/writable as attributes."""

    def __init__(self, *args, **kwargs):
        # Populate the mapping normally, then alias the instance's attribute
        # namespace to the mapping itself so d.key and d['key'] stay in sync.
        super(AttrDict, self).__init__(*args, **kwargs)
        self.__dict__ = self
class att_chrome(nn.Module):
    """AttentiveChrome model: one bin-level attention-LSTM encoder per
    histone modification (HM), an HM-level attention-LSTM encoder over the
    per-HM representations, and a single-logit sigmoid output."""

    def __init__(self,args):
        super(att_chrome,self).__init__()
        self.n_hms=args.n_hms
        self.n_bins=args.n_bins
        self.ip_bin_size=1  # one scalar signal value per bin per HM
        # One bin-level encoder per HM channel.
        self.rnn_hms=nn.ModuleList()
        for i in range(self.n_hms):
            self.rnn_hms.append(recurrent_encoder(self.n_bins,self.ip_bin_size,False,args))
        self.opsize = self.rnn_hms[0].outputlength()
        # Second-level encoder over the stacked per-HM representations.
        self.hm_level_rnn_1=recurrent_encoder(self.n_hms,self.opsize,True,args)
        self.opsize2=self.hm_level_rnn_1.outputlength()
        # NOTE(review): diffopsize is never used in this class — possibly a
        # leftover from a differential variant; confirm before removing.
        self.diffopsize=2*(self.opsize2)
        self.fdiff1_1=nn.Linear(self.opsize2,1)

    def forward(self,iput):
        """Forward pass.

        iput: (batch, n_bins, n_hms) tensor of binned HM signals.
        Returns a (batch, 1) sigmoid probability.
        """
        bin_a=None
        level1_rep=None
        [batch_size,_,_]=iput.size()
        # Encode each HM channel independently with its own encoder.
        for hm,hm_encdr in enumerate(self.rnn_hms):
            hmod=iput[:,:,hm].contiguous()
            # (batch, n_bins) -> (n_bins, batch, 1): sequence-first for LSTM.
            hmod=torch.t(hmod).unsqueeze(2)
            op,a= hm_encdr(hmod)
            if level1_rep is None:
                level1_rep=op
                bin_a=a
            else:
                # Concatenate per-HM reps/attentions along dim 1.
                level1_rep=torch.cat((level1_rep,op),1)
                bin_a=torch.cat((bin_a,a),1)
        # (batch, n_hms, rep) -> (n_hms, batch, rep) for the HM-level encoder.
        level1_rep=level1_rep.permute(1,0,2)
        final_rep_1,hm_level_attention_1=self.hm_level_rnn_1(level1_rep)
        final_rep_1=final_rep_1.squeeze(1)
        prediction_m=((self.fdiff1_1(final_rep_1)))
        return torch.sigmoid(prediction_m)
# Default hyper-parameters / experiment settings for a quick Cell1 run.
args_dict = {'lr': 0.0001, 'model_name': 'attchrome', 'clip': 1, 'epochs': 2, 'batch_size': 10, 'dropout': 0.5, 'cell_1': 'Cell1', 'save_root': 'Results/Cell1', 'data_root': 'data/', 'gpuid': 0, 'gpu': 0, 'n_hms': 5, 'n_bins': 200, 'bin_rnn_size': 32, 'num_layers': 1, 'unidirectional': False, 'save_attention_maps': False, 'attentionfilename': 'beta_attention.txt', 'test_on_saved_model': False, 'bidirectional': True, 'dataset': 'Cell1'}
att_chrome_args = AttrDict(args_dict)  # attribute-style access for the model
# Module-level model instance built from the defaults above (constructed on import).
att_chrome_model = att_chrome(att_chrome_args)
| StarcoderdataPython |
1687434 | import sys
import dask.dataframe as dd
from handling_data.handling_data import HandlingData
from automl.mltrons_automl import MltronsAutoml
ddf = dd.read_csv("titanic.csv")  # lazy dask dataframe over the CSV
target_variable = 'Survived'
problem_type = 'Classification'
# Preprocess / split the data into train and test pools.
h = HandlingData(ddf, target_variable, problem_type)
train_pool, test_pool, order_of_features = h.init_data_handling()
print(order_of_features)
# NOTE(review): sys.exit() terminates the script here — the AutoML fit and
# predict calls below are unreachable. Presumably left in for debugging the
# data-handling step; remove it to run the full pipeline.
sys.exit()
auto_ml = MltronsAutoml(problem_type, target_variable, order_of_features)
auto_ml.fit(train_pool, test_pool)
### list of models
#auto_ml.models
### getting prediction
auto_ml.models[0].predict(test_pool)
###
| StarcoderdataPython |
9775046 | from django.test import TestCase
from app.numbercalc import add_number, subtract_number
class AddTest(TestCase):
    """Unit tests for the calculator helpers in app.numbercalc."""

    def test_add_number(self):
        """add_number returns the sum of its two arguments."""
        self.assertEqual(add_number(5, 5), 10)

    def test_subtract_number(self):
        """subtract_number returns the first argument minus the second."""
        self.assertEqual(subtract_number(8, 5), 3)
| StarcoderdataPython |
9789607 | <filename>tests/unit_tests/data_steward/cdr_cleaner/cleaning_rules/update_family_history_qa_codes_test.py
import unittest
import constants.cdr_cleaner.clean_cdr as cdr_consts
from cdr_cleaner.cleaning_rules import update_family_history_qa_codes as family_history
class UpdateFamilyHistory(unittest.TestCase):
    """Tests for the family-history QA code-update cleaning rule."""

    @classmethod
    def setUpClass(cls):
        # Print a banner so test output is easy to scan per class.
        print('**************************************************************')
        print(cls.__name__)
        print('**************************************************************')

    def setUp(self):
        # Dummy identifiers; the query is only formatted, never executed.
        self.dataset_id = 'dataset_id'
        self.project_id = 'project_id'

    def test_get_update_family_history_qa_queries(self):
        """The generated query must be the template filled with our ids."""
        actual_dict = family_history.get_update_family_history_qa_queries(
            self.project_id, self.dataset_id)
        actual = actual_dict[0][cdr_consts.QUERY]
        expected = family_history.UPDATE_FAMILY_HISTORY_QUERY.format(
            project_id=self.project_id, dataset_id=self.dataset_id)
        self.assertEqual(expected, actual)
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.