hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bf8c599c273af6c193d1ac14fe5c4e51e760ab21
| 3,427
|
py
|
Python
|
operators/arithmetic.py
|
RafaelSilva7/img-processing
|
288e4a41f2c88bdff4f16c76a5aa13f703593e98
|
[
"MIT"
] | null | null | null |
operators/arithmetic.py
|
RafaelSilva7/img-processing
|
288e4a41f2c88bdff4f16c76a5aa13f703593e98
|
[
"MIT"
] | 3
|
2021-03-19T00:35:36.000Z
|
2022-01-13T01:14:17.000Z
|
operators/arithmetic.py
|
RafaelSilva7/img-processing
|
288e4a41f2c88bdff4f16c76a5aa13f703593e98
|
[
"MIT"
] | null | null | null |
from PIL import Image
def add(img1, img2, weight1=1, weight2=1):
    """Weighted pixel-wise sum of two grayscale images.

    The output canvas takes the larger width and the larger height of the
    two inputs; coordinates outside a smaller image contribute 0 (black).
    Each output pixel is ``round(p1*weight1 + p2*weight2)`` clamped to the
    8-bit range [0, 255].

    :param img1: first PIL image (mode 'L' assumed — TODO confirm callers)
    :param img2: second PIL image (mode 'L' assumed)
    :param weight1: multiplier applied to ``img1`` pixels
    :param weight2: multiplier applied to ``img2`` pixels
    :return: a new PIL 'L' image holding the blended result
    """
    # Log the *actual* weights; the old message hard-coded "weight1=1,
    # weight2=1", which was misleading and inconsistent with the sibling
    # multiply()/subtract() trace messages.
    print(f'\tadd(img1, img2, weight1={weight1}, weight2={weight2}):')
    width = max(img1.size[0], img2.size[0])
    height = max(img1.size[1], img2.size[1])
    print(f'\tNew size of image: ({width},{height})\n')
    img_output = Image.new('L', (width, height))
    for j in range(height):
        for i in range(width):
            # Pixels beyond the bounds of the smaller image are padded with 0.
            p1 = img1.getpixel((i, j)) if i < img1.size[0] and j < img1.size[1] else 0
            p2 = img2.getpixel((i, j)) if i < img2.size[0] and j < img2.size[1] else 0
            # Clamp to the valid 8-bit grayscale range.
            new = min(255, max(0, round(p1 * weight1 + p2 * weight2)))
            img_output.putpixel((i, j), new)
    return img_output
def division(img1, img2, weight1=1, weight2=1):
    """Weighted pixel-wise division of two grayscale images.

    The output canvas takes the larger width and the larger height of the
    two inputs; coordinates outside a smaller image contribute 0 (black).
    Each output pixel is ``round((p1*weight1) / (p2*weight2))`` clamped to
    [0, 255].  A zero denominator — which is *guaranteed* to occur in the
    zero-padded region whenever the images differ in size, and previously
    raised ``ZeroDivisionError`` — saturates to 255 for a positive
    numerator and to 0 otherwise.

    :param img1: numerator PIL image (mode 'L' assumed — TODO confirm)
    :param img2: denominator PIL image (mode 'L' assumed)
    :param weight1: multiplier applied to ``img1`` pixels
    :param weight2: multiplier applied to ``img2`` pixels
    :return: a new PIL 'L' image holding the quotient
    """
    # Fixed typo in the trace message ('divison' -> 'division').
    print('\tdivision(img1, img2, weight1=1, weight2=1):')
    width = max(img1.size[0], img2.size[0])
    height = max(img1.size[1], img2.size[1])
    print(f'\tNew size of image: ({width},{height})\n')
    img_output = Image.new('L', (width, height))
    for j in range(height):
        for i in range(width):
            # Pixels beyond the bounds of the smaller image are padded with 0.
            p1 = img1.getpixel((i, j)) if i < img1.size[0] and j < img1.size[1] else 0
            p2 = img2.getpixel((i, j)) if i < img2.size[0] and j < img2.size[1] else 0
            numerator = p1 * weight1
            denominator = p2 * weight2
            if denominator == 0:
                # x/0 -> saturate bright; 0/0 (or negative) -> black.
                new = 255 if numerator > 0 else 0
            else:
                new = min(255, max(0, round(numerator / denominator)))
            img_output.putpixel((i, j), new)
    return img_output
def multiply(img1, img2, weight1=1, weight2=1):
    """Weighted pixel-wise product of two grayscale images.

    The result is as wide and as tall as the larger of the two inputs;
    positions outside a smaller image are treated as 0.  Each output pixel
    is ``round((p1*weight1) * (p2*weight2))`` clamped to [0, 255].

    :param img1: first PIL image (mode 'L' assumed)
    :param img2: second PIL image (mode 'L' assumed)
    :param weight1: multiplier applied to ``img1`` pixels
    :param weight2: multiplier applied to ``img2`` pixels
    :return: a new PIL 'L' image holding the product
    """
    print(f'\tmultiply(img1, img2, weight1={weight1}, weight2={weight2}):')
    out_w, out_h = max(img1.size[0], img2.size[0]), max(img1.size[1], img2.size[1])
    print(f'\tNew size of image: ({out_w},{out_h})\n')
    result = Image.new('L', (out_w, out_h))
    # Build the whole pixel buffer row-major, then blit it in one call.
    buffer = []
    for row in range(out_h):
        for col in range(out_w):
            a = img1.getpixel((col, row)) if col < img1.size[0] and row < img1.size[1] else 0
            b = img2.getpixel((col, row)) if col < img2.size[0] and row < img2.size[1] else 0
            value = round((a * weight1) * (b * weight2))
            buffer.append(min(255, max(0, value)))
    result.putdata(buffer)
    return result
def subtract(img1, img2, weight1=1, weight2=1):
    """Weighted pixel-wise difference of two grayscale images.

    The result is as wide and as tall as the larger of the two inputs;
    positions outside a smaller image are treated as 0.  Each output pixel
    is ``round(p1*weight1 - p2*weight2)`` clamped to [0, 255].

    :param img1: minuend PIL image (mode 'L' assumed)
    :param img2: subtrahend PIL image (mode 'L' assumed)
    :param weight1: multiplier applied to ``img1`` pixels
    :param weight2: multiplier applied to ``img2`` pixels
    :return: a new PIL 'L' image holding the difference
    """
    print(f'\tsubtract(img1, img2, weight1={weight1}, weight2={weight2}):')
    width, height = max(img1.size[0], img2.size[0]), max(img1.size[1], img2.size[1])
    print(f'\tNew size of image: ({width},{height})\n')
    result = Image.new('L', (width, height))

    def sample(img, x, y):
        # Read a pixel, padding with 0 outside the image bounds.
        w, h = img.size
        return img.getpixel((x, y)) if x < w and y < h else 0

    for y in range(height):
        for x in range(width):
            diff = round(sample(img1, x, y) * weight1 - sample(img2, x, y) * weight2)
            result.putpixel((x, y), min(255, max(0, diff)))
    return result
| 34.27
| 85
| 0.531369
| 530
| 3,427
| 3.413208
| 0.088679
| 0.106136
| 0.059701
| 0.05749
| 0.956882
| 0.956882
| 0.956882
| 0.92482
| 0.891653
| 0.891653
| 0
| 0.090414
| 0.302889
| 3,427
| 100
| 86
| 34.27
| 0.666806
| 0
| 0
| 0.811594
| 0
| 0
| 0.11
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.014493
| 0
| 0.130435
| 0.115942
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
784b97e077eacb57c7ec9c415d9a7c1764824942
| 70,554
|
py
|
Python
|
sdk/python/pulumi_proxmoxve/vm/virtual_machine.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_proxmoxve/vm/virtual_machine.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-11-23T07:11:46.000Z
|
2022-02-10T09:18:13.000Z
|
sdk/python/pulumi_proxmoxve/vm/virtual_machine.py
|
muhlba91/pulumi-proxmoxve
|
f17723c42b46c004be43ea0d39ff30ea176dd529
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['VirtualMachineArgs', 'VirtualMachine']
@pulumi.input_type
class VirtualMachineArgs:
    """Typed argument bag used to construct a ``VirtualMachine`` resource.

    NOTE(review): this class is in a tfgen-generated file ("Do not edit by
    hand") — every attribute is an optional ``pulumi.Input`` except the
    required ``node_name``, and a value is only stored when it is not None.
    """
    def __init__(__self__, *,
                 node_name: pulumi.Input[str],
                 acpi: Optional[pulumi.Input[bool]] = None,
                 agent: Optional[pulumi.Input['VirtualMachineAgentArgs']] = None,
                 audio_device: Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']] = None,
                 bios: Optional[pulumi.Input[str]] = None,
                 cdrom: Optional[pulumi.Input['VirtualMachineCdromArgs']] = None,
                 clone: Optional[pulumi.Input['VirtualMachineCloneArgs']] = None,
                 cpu: Optional[pulumi.Input['VirtualMachineCpuArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 disks: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]] = None,
                 initialization: Optional[pulumi.Input['VirtualMachineInitializationArgs']] = None,
                 keyboard_layout: Optional[pulumi.Input[str]] = None,
                 memory: Optional[pulumi.Input['VirtualMachineMemoryArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network_devices: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]] = None,
                 on_boot: Optional[pulumi.Input[bool]] = None,
                 operating_system: Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']] = None,
                 pool_id: Optional[pulumi.Input[str]] = None,
                 reboot: Optional[pulumi.Input[bool]] = None,
                 serial_devices: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]] = None,
                 started: Optional[pulumi.Input[bool]] = None,
                 tablet_device: Optional[pulumi.Input[bool]] = None,
                 template: Optional[pulumi.Input[bool]] = None,
                 timeout_clone: Optional[pulumi.Input[int]] = None,
                 timeout_move_disk: Optional[pulumi.Input[int]] = None,
                 timeout_reboot: Optional[pulumi.Input[int]] = None,
                 timeout_shutdown_vm: Optional[pulumi.Input[int]] = None,
                 timeout_start_vm: Optional[pulumi.Input[int]] = None,
                 timeout_stop_vm: Optional[pulumi.Input[int]] = None,
                 vga: Optional[pulumi.Input['VirtualMachineVgaArgs']] = None,
                 vm_id: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a VirtualMachine resource.
        :param pulumi.Input[str] node_name: The node name
        :param pulumi.Input[bool] acpi: Whether to enable ACPI
        :param pulumi.Input['VirtualMachineAgentArgs'] agent: The QEMU agent configuration
        :param pulumi.Input['VirtualMachineAudioDeviceArgs'] audio_device: The audio devices
        :param pulumi.Input[str] bios: The BIOS implementation
        :param pulumi.Input['VirtualMachineCdromArgs'] cdrom: The CDROM drive
        :param pulumi.Input['VirtualMachineCloneArgs'] clone: The cloning configuration
        :param pulumi.Input['VirtualMachineCpuArgs'] cpu: The CPU allocation
        :param pulumi.Input[str] description: The description
        :param pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]] disks: The disk devices
        :param pulumi.Input['VirtualMachineInitializationArgs'] initialization: The cloud-init configuration
        :param pulumi.Input[str] keyboard_layout: The keyboard layout
        :param pulumi.Input['VirtualMachineMemoryArgs'] memory: The memory allocation
        :param pulumi.Input[str] name: The name
        :param pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]] network_devices: The network devices
        :param pulumi.Input[bool] on_boot: Start VM on Node boot
        :param pulumi.Input['VirtualMachineOperatingSystemArgs'] operating_system: The operating system configuration
        :param pulumi.Input[str] pool_id: The ID of the pool to assign the virtual machine to
        :param pulumi.Input[bool] reboot: Whether to reboot the VM after creation
        :param pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]] serial_devices: The serial devices
        :param pulumi.Input[bool] started: Whether to start the virtual machine
        :param pulumi.Input[bool] tablet_device: Whether to enable the USB tablet device
        :param pulumi.Input[bool] template: Whether to create a template
        :param pulumi.Input[int] timeout_clone: Clone VM timeout
        :param pulumi.Input[int] timeout_move_disk: MoveDisk timeout
        :param pulumi.Input[int] timeout_reboot: Reboot timeout
        :param pulumi.Input[int] timeout_shutdown_vm: Shutdown timeout
        :param pulumi.Input[int] timeout_start_vm: Start VM timeout
        :param pulumi.Input[int] timeout_stop_vm: Stop VM timeout
        :param pulumi.Input['VirtualMachineVgaArgs'] vga: The VGA configuration
        :param pulumi.Input[int] vm_id: The VM identifier
        """
        # node_name is the only required argument; everything below is
        # stored only when explicitly provided.
        pulumi.set(__self__, "node_name", node_name)
        if acpi is not None:
            pulumi.set(__self__, "acpi", acpi)
        if agent is not None:
            pulumi.set(__self__, "agent", agent)
        if audio_device is not None:
            pulumi.set(__self__, "audio_device", audio_device)
        if bios is not None:
            pulumi.set(__self__, "bios", bios)
        if cdrom is not None:
            pulumi.set(__self__, "cdrom", cdrom)
        if clone is not None:
            pulumi.set(__self__, "clone", clone)
        if cpu is not None:
            pulumi.set(__self__, "cpu", cpu)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if disks is not None:
            pulumi.set(__self__, "disks", disks)
        if initialization is not None:
            pulumi.set(__self__, "initialization", initialization)
        if keyboard_layout is not None:
            pulumi.set(__self__, "keyboard_layout", keyboard_layout)
        if memory is not None:
            pulumi.set(__self__, "memory", memory)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network_devices is not None:
            pulumi.set(__self__, "network_devices", network_devices)
        if on_boot is not None:
            pulumi.set(__self__, "on_boot", on_boot)
        if operating_system is not None:
            pulumi.set(__self__, "operating_system", operating_system)
        if pool_id is not None:
            pulumi.set(__self__, "pool_id", pool_id)
        if reboot is not None:
            pulumi.set(__self__, "reboot", reboot)
        if serial_devices is not None:
            pulumi.set(__self__, "serial_devices", serial_devices)
        if started is not None:
            pulumi.set(__self__, "started", started)
        if tablet_device is not None:
            pulumi.set(__self__, "tablet_device", tablet_device)
        if template is not None:
            pulumi.set(__self__, "template", template)
        if timeout_clone is not None:
            pulumi.set(__self__, "timeout_clone", timeout_clone)
        if timeout_move_disk is not None:
            pulumi.set(__self__, "timeout_move_disk", timeout_move_disk)
        if timeout_reboot is not None:
            pulumi.set(__self__, "timeout_reboot", timeout_reboot)
        if timeout_shutdown_vm is not None:
            pulumi.set(__self__, "timeout_shutdown_vm", timeout_shutdown_vm)
        if timeout_start_vm is not None:
            pulumi.set(__self__, "timeout_start_vm", timeout_start_vm)
        if timeout_stop_vm is not None:
            pulumi.set(__self__, "timeout_stop_vm", timeout_stop_vm)
        if vga is not None:
            pulumi.set(__self__, "vga", vga)
        if vm_id is not None:
            pulumi.set(__self__, "vm_id", vm_id)
    @property
    @pulumi.getter(name="nodeName")
    def node_name(self) -> pulumi.Input[str]:
        """
        The node name
        """
        return pulumi.get(self, "node_name")
    @node_name.setter
    def node_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "node_name", value)
    @property
    @pulumi.getter
    def acpi(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable ACPI
        """
        return pulumi.get(self, "acpi")
    @acpi.setter
    def acpi(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "acpi", value)
    @property
    @pulumi.getter
    def agent(self) -> Optional[pulumi.Input['VirtualMachineAgentArgs']]:
        """
        The QEMU agent configuration
        """
        return pulumi.get(self, "agent")
    @agent.setter
    def agent(self, value: Optional[pulumi.Input['VirtualMachineAgentArgs']]):
        pulumi.set(self, "agent", value)
    @property
    @pulumi.getter(name="audioDevice")
    def audio_device(self) -> Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']]:
        """
        The audio devices
        """
        return pulumi.get(self, "audio_device")
    @audio_device.setter
    def audio_device(self, value: Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']]):
        pulumi.set(self, "audio_device", value)
    @property
    @pulumi.getter
    def bios(self) -> Optional[pulumi.Input[str]]:
        """
        The BIOS implementation
        """
        return pulumi.get(self, "bios")
    @bios.setter
    def bios(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bios", value)
    @property
    @pulumi.getter
    def cdrom(self) -> Optional[pulumi.Input['VirtualMachineCdromArgs']]:
        """
        The CDROM drive
        """
        return pulumi.get(self, "cdrom")
    @cdrom.setter
    def cdrom(self, value: Optional[pulumi.Input['VirtualMachineCdromArgs']]):
        pulumi.set(self, "cdrom", value)
    @property
    @pulumi.getter
    def clone(self) -> Optional[pulumi.Input['VirtualMachineCloneArgs']]:
        """
        The cloning configuration
        """
        return pulumi.get(self, "clone")
    @clone.setter
    def clone(self, value: Optional[pulumi.Input['VirtualMachineCloneArgs']]):
        pulumi.set(self, "clone", value)
    @property
    @pulumi.getter
    def cpu(self) -> Optional[pulumi.Input['VirtualMachineCpuArgs']]:
        """
        The CPU allocation
        """
        return pulumi.get(self, "cpu")
    @cpu.setter
    def cpu(self, value: Optional[pulumi.Input['VirtualMachineCpuArgs']]):
        pulumi.set(self, "cpu", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]]:
        """
        The disk devices
        """
        return pulumi.get(self, "disks")
    @disks.setter
    def disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]]):
        pulumi.set(self, "disks", value)
    @property
    @pulumi.getter
    def initialization(self) -> Optional[pulumi.Input['VirtualMachineInitializationArgs']]:
        """
        The cloud-init configuration
        """
        return pulumi.get(self, "initialization")
    @initialization.setter
    def initialization(self, value: Optional[pulumi.Input['VirtualMachineInitializationArgs']]):
        pulumi.set(self, "initialization", value)
    @property
    @pulumi.getter(name="keyboardLayout")
    def keyboard_layout(self) -> Optional[pulumi.Input[str]]:
        """
        The keyboard layout
        """
        return pulumi.get(self, "keyboard_layout")
    @keyboard_layout.setter
    def keyboard_layout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "keyboard_layout", value)
    @property
    @pulumi.getter
    def memory(self) -> Optional[pulumi.Input['VirtualMachineMemoryArgs']]:
        """
        The memory allocation
        """
        return pulumi.get(self, "memory")
    @memory.setter
    def memory(self, value: Optional[pulumi.Input['VirtualMachineMemoryArgs']]):
        pulumi.set(self, "memory", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="networkDevices")
    def network_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]]:
        """
        The network devices
        """
        return pulumi.get(self, "network_devices")
    @network_devices.setter
    def network_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]]):
        pulumi.set(self, "network_devices", value)
    @property
    @pulumi.getter(name="onBoot")
    def on_boot(self) -> Optional[pulumi.Input[bool]]:
        """
        Start VM on Node boot
        """
        return pulumi.get(self, "on_boot")
    @on_boot.setter
    def on_boot(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "on_boot", value)
    @property
    @pulumi.getter(name="operatingSystem")
    def operating_system(self) -> Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']]:
        """
        The operating system configuration
        """
        return pulumi.get(self, "operating_system")
    @operating_system.setter
    def operating_system(self, value: Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']]):
        pulumi.set(self, "operating_system", value)
    @property
    @pulumi.getter(name="poolId")
    def pool_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the pool to assign the virtual machine to
        """
        return pulumi.get(self, "pool_id")
    @pool_id.setter
    def pool_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pool_id", value)
    @property
    @pulumi.getter
    def reboot(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to reboot the VM after creation
        """
        return pulumi.get(self, "reboot")
    @reboot.setter
    def reboot(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "reboot", value)
    @property
    @pulumi.getter(name="serialDevices")
    def serial_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]]:
        """
        The serial devices
        """
        return pulumi.get(self, "serial_devices")
    @serial_devices.setter
    def serial_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]]):
        pulumi.set(self, "serial_devices", value)
    @property
    @pulumi.getter
    def started(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to start the virtual machine
        """
        return pulumi.get(self, "started")
    @started.setter
    def started(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "started", value)
    @property
    @pulumi.getter(name="tabletDevice")
    def tablet_device(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable the USB tablet device
        """
        return pulumi.get(self, "tablet_device")
    @tablet_device.setter
    def tablet_device(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tablet_device", value)
    @property
    @pulumi.getter
    def template(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to create a template
        """
        return pulumi.get(self, "template")
    @template.setter
    def template(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "template", value)
    @property
    @pulumi.getter(name="timeoutClone")
    def timeout_clone(self) -> Optional[pulumi.Input[int]]:
        """
        Clone VM timeout
        """
        return pulumi.get(self, "timeout_clone")
    @timeout_clone.setter
    def timeout_clone(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_clone", value)
    @property
    @pulumi.getter(name="timeoutMoveDisk")
    def timeout_move_disk(self) -> Optional[pulumi.Input[int]]:
        """
        MoveDisk timeout
        """
        return pulumi.get(self, "timeout_move_disk")
    @timeout_move_disk.setter
    def timeout_move_disk(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_move_disk", value)
    @property
    @pulumi.getter(name="timeoutReboot")
    def timeout_reboot(self) -> Optional[pulumi.Input[int]]:
        """
        Reboot timeout
        """
        return pulumi.get(self, "timeout_reboot")
    @timeout_reboot.setter
    def timeout_reboot(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_reboot", value)
    @property
    @pulumi.getter(name="timeoutShutdownVm")
    def timeout_shutdown_vm(self) -> Optional[pulumi.Input[int]]:
        """
        Shutdown timeout
        """
        return pulumi.get(self, "timeout_shutdown_vm")
    @timeout_shutdown_vm.setter
    def timeout_shutdown_vm(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_shutdown_vm", value)
    @property
    @pulumi.getter(name="timeoutStartVm")
    def timeout_start_vm(self) -> Optional[pulumi.Input[int]]:
        """
        Start VM timeout
        """
        return pulumi.get(self, "timeout_start_vm")
    @timeout_start_vm.setter
    def timeout_start_vm(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_start_vm", value)
    @property
    @pulumi.getter(name="timeoutStopVm")
    def timeout_stop_vm(self) -> Optional[pulumi.Input[int]]:
        """
        Stop VM timeout
        """
        return pulumi.get(self, "timeout_stop_vm")
    @timeout_stop_vm.setter
    def timeout_stop_vm(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "timeout_stop_vm", value)
    @property
    @pulumi.getter
    def vga(self) -> Optional[pulumi.Input['VirtualMachineVgaArgs']]:
        """
        The VGA configuration
        """
        return pulumi.get(self, "vga")
    @vga.setter
    def vga(self, value: Optional[pulumi.Input['VirtualMachineVgaArgs']]):
        pulumi.set(self, "vga", value)
    @property
    @pulumi.getter(name="vmId")
    def vm_id(self) -> Optional[pulumi.Input[int]]:
        """
        The VM identifier
        """
        return pulumi.get(self, "vm_id")
    @vm_id.setter
    def vm_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "vm_id", value)
@pulumi.input_type
class _VirtualMachineState:
def __init__(__self__, *,
acpi: Optional[pulumi.Input[bool]] = None,
agent: Optional[pulumi.Input['VirtualMachineAgentArgs']] = None,
audio_device: Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']] = None,
bios: Optional[pulumi.Input[str]] = None,
cdrom: Optional[pulumi.Input['VirtualMachineCdromArgs']] = None,
clone: Optional[pulumi.Input['VirtualMachineCloneArgs']] = None,
cpu: Optional[pulumi.Input['VirtualMachineCpuArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
disks: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]] = None,
initialization: Optional[pulumi.Input['VirtualMachineInitializationArgs']] = None,
ipv4_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
ipv6_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
keyboard_layout: Optional[pulumi.Input[str]] = None,
mac_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
memory: Optional[pulumi.Input['VirtualMachineMemoryArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
network_devices: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]] = None,
network_interface_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
node_name: Optional[pulumi.Input[str]] = None,
on_boot: Optional[pulumi.Input[bool]] = None,
operating_system: Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']] = None,
pool_id: Optional[pulumi.Input[str]] = None,
reboot: Optional[pulumi.Input[bool]] = None,
serial_devices: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]] = None,
started: Optional[pulumi.Input[bool]] = None,
tablet_device: Optional[pulumi.Input[bool]] = None,
template: Optional[pulumi.Input[bool]] = None,
timeout_clone: Optional[pulumi.Input[int]] = None,
timeout_move_disk: Optional[pulumi.Input[int]] = None,
timeout_reboot: Optional[pulumi.Input[int]] = None,
timeout_shutdown_vm: Optional[pulumi.Input[int]] = None,
timeout_start_vm: Optional[pulumi.Input[int]] = None,
timeout_stop_vm: Optional[pulumi.Input[int]] = None,
vga: Optional[pulumi.Input['VirtualMachineVgaArgs']] = None,
vm_id: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering VirtualMachine resources.
:param pulumi.Input[bool] acpi: Whether to enable ACPI
:param pulumi.Input['VirtualMachineAgentArgs'] agent: The QEMU agent configuration
:param pulumi.Input['VirtualMachineAudioDeviceArgs'] audio_device: The audio devices
:param pulumi.Input[str] bios: The BIOS implementation
:param pulumi.Input['VirtualMachineCdromArgs'] cdrom: The CDROM drive
:param pulumi.Input['VirtualMachineCloneArgs'] clone: The cloning configuration
:param pulumi.Input['VirtualMachineCpuArgs'] cpu: The CPU allocation
:param pulumi.Input[str] description: The description
:param pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]] disks: The disk devices
:param pulumi.Input['VirtualMachineInitializationArgs'] initialization: The cloud-init configuration
:param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] ipv4_addresses: The IPv4 addresses published by the QEMU agent
:param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] ipv6_addresses: The IPv6 addresses published by the QEMU agent
:param pulumi.Input[str] keyboard_layout: The keyboard layout
:param pulumi.Input[Sequence[pulumi.Input[str]]] mac_addresses: The MAC addresses for the network interfaces
:param pulumi.Input['VirtualMachineMemoryArgs'] memory: The memory allocation
:param pulumi.Input[str] name: The name
:param pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]] network_devices: The network devices
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_interface_names: The network interface names published by the QEMU agent
:param pulumi.Input[str] node_name: The node name
:param pulumi.Input[bool] on_boot: Start VM on Node boot
:param pulumi.Input['VirtualMachineOperatingSystemArgs'] operating_system: The operating system configuration
:param pulumi.Input[str] pool_id: The ID of the pool to assign the virtual machine to
:param pulumi.Input[bool] reboot: Wether to reboot vm after creation
:param pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]] serial_devices: The serial devices
:param pulumi.Input[bool] started: Whether to start the virtual machine
:param pulumi.Input[bool] tablet_device: Whether to enable the USB tablet device
:param pulumi.Input[bool] template: Whether to create a template
:param pulumi.Input[int] timeout_clone: Clone VM timeout
:param pulumi.Input[int] timeout_move_disk: MoveDisk timeout
:param pulumi.Input[int] timeout_reboot: Reboot timeout
:param pulumi.Input[int] timeout_shutdown_vm: Shutdown timeout
:param pulumi.Input[int] timeout_start_vm: Start VM timeout
:param pulumi.Input[int] timeout_stop_vm: Stop VM timeout
:param pulumi.Input['VirtualMachineVgaArgs'] vga: The VGA configuration
:param pulumi.Input[int] vm_id: The VM identifier
"""
if acpi is not None:
pulumi.set(__self__, "acpi", acpi)
if agent is not None:
pulumi.set(__self__, "agent", agent)
if audio_device is not None:
pulumi.set(__self__, "audio_device", audio_device)
if bios is not None:
pulumi.set(__self__, "bios", bios)
if cdrom is not None:
pulumi.set(__self__, "cdrom", cdrom)
if clone is not None:
pulumi.set(__self__, "clone", clone)
if cpu is not None:
pulumi.set(__self__, "cpu", cpu)
if description is not None:
pulumi.set(__self__, "description", description)
if disks is not None:
pulumi.set(__self__, "disks", disks)
if initialization is not None:
pulumi.set(__self__, "initialization", initialization)
if ipv4_addresses is not None:
pulumi.set(__self__, "ipv4_addresses", ipv4_addresses)
if ipv6_addresses is not None:
pulumi.set(__self__, "ipv6_addresses", ipv6_addresses)
if keyboard_layout is not None:
pulumi.set(__self__, "keyboard_layout", keyboard_layout)
if mac_addresses is not None:
pulumi.set(__self__, "mac_addresses", mac_addresses)
if memory is not None:
pulumi.set(__self__, "memory", memory)
if name is not None:
pulumi.set(__self__, "name", name)
if network_devices is not None:
pulumi.set(__self__, "network_devices", network_devices)
if network_interface_names is not None:
pulumi.set(__self__, "network_interface_names", network_interface_names)
if node_name is not None:
pulumi.set(__self__, "node_name", node_name)
if on_boot is not None:
pulumi.set(__self__, "on_boot", on_boot)
if operating_system is not None:
pulumi.set(__self__, "operating_system", operating_system)
if pool_id is not None:
pulumi.set(__self__, "pool_id", pool_id)
if reboot is not None:
pulumi.set(__self__, "reboot", reboot)
if serial_devices is not None:
pulumi.set(__self__, "serial_devices", serial_devices)
if started is not None:
pulumi.set(__self__, "started", started)
if tablet_device is not None:
pulumi.set(__self__, "tablet_device", tablet_device)
if template is not None:
pulumi.set(__self__, "template", template)
if timeout_clone is not None:
pulumi.set(__self__, "timeout_clone", timeout_clone)
if timeout_move_disk is not None:
pulumi.set(__self__, "timeout_move_disk", timeout_move_disk)
if timeout_reboot is not None:
pulumi.set(__self__, "timeout_reboot", timeout_reboot)
if timeout_shutdown_vm is not None:
pulumi.set(__self__, "timeout_shutdown_vm", timeout_shutdown_vm)
if timeout_start_vm is not None:
pulumi.set(__self__, "timeout_start_vm", timeout_start_vm)
if timeout_stop_vm is not None:
pulumi.set(__self__, "timeout_stop_vm", timeout_stop_vm)
if vga is not None:
pulumi.set(__self__, "vga", vga)
if vm_id is not None:
pulumi.set(__self__, "vm_id", vm_id)
# Accessor properties for the generated state/args object.  Each property is a
# thin proxy: the getter reads the value via pulumi.get and the setter writes
# it via pulumi.set under the property's snake_case key (the camelCase wire
# name, when different, is declared on the @pulumi.getter decorator).

@property
@pulumi.getter
def acpi(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to enable ACPI
    """
    return pulumi.get(self, "acpi")

@acpi.setter
def acpi(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "acpi", value)

@property
@pulumi.getter
def agent(self) -> Optional[pulumi.Input['VirtualMachineAgentArgs']]:
    """
    The QEMU agent configuration
    """
    return pulumi.get(self, "agent")

@agent.setter
def agent(self, value: Optional[pulumi.Input['VirtualMachineAgentArgs']]):
    pulumi.set(self, "agent", value)

@property
@pulumi.getter(name="audioDevice")
def audio_device(self) -> Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']]:
    """
    The audio devices
    """
    return pulumi.get(self, "audio_device")

@audio_device.setter
def audio_device(self, value: Optional[pulumi.Input['VirtualMachineAudioDeviceArgs']]):
    pulumi.set(self, "audio_device", value)

@property
@pulumi.getter
def bios(self) -> Optional[pulumi.Input[str]]:
    """
    The BIOS implementation
    """
    return pulumi.get(self, "bios")

@bios.setter
def bios(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "bios", value)

@property
@pulumi.getter
def cdrom(self) -> Optional[pulumi.Input['VirtualMachineCdromArgs']]:
    """
    The CDROM drive
    """
    return pulumi.get(self, "cdrom")

@cdrom.setter
def cdrom(self, value: Optional[pulumi.Input['VirtualMachineCdromArgs']]):
    pulumi.set(self, "cdrom", value)

@property
@pulumi.getter
def clone(self) -> Optional[pulumi.Input['VirtualMachineCloneArgs']]:
    """
    The cloning configuration
    """
    return pulumi.get(self, "clone")

@clone.setter
def clone(self, value: Optional[pulumi.Input['VirtualMachineCloneArgs']]):
    pulumi.set(self, "clone", value)

@property
@pulumi.getter
def cpu(self) -> Optional[pulumi.Input['VirtualMachineCpuArgs']]:
    """
    The CPU allocation
    """
    return pulumi.get(self, "cpu")

@cpu.setter
def cpu(self, value: Optional[pulumi.Input['VirtualMachineCpuArgs']]):
    pulumi.set(self, "cpu", value)

@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
    """
    The description
    """
    return pulumi.get(self, "description")

@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "description", value)

@property
@pulumi.getter
def disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]]:
    """
    The disk devices
    """
    return pulumi.get(self, "disks")

@disks.setter
def disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineDiskArgs']]]]):
    pulumi.set(self, "disks", value)

@property
@pulumi.getter
def initialization(self) -> Optional[pulumi.Input['VirtualMachineInitializationArgs']]:
    """
    The cloud-init configuration
    """
    return pulumi.get(self, "initialization")

@initialization.setter
def initialization(self, value: Optional[pulumi.Input['VirtualMachineInitializationArgs']]):
    pulumi.set(self, "initialization", value)

@property
@pulumi.getter(name="ipv4Addresses")
def ipv4_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]:
    """
    The IPv4 addresses published by the QEMU agent
    """
    return pulumi.get(self, "ipv4_addresses")

@ipv4_addresses.setter
def ipv4_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]):
    pulumi.set(self, "ipv4_addresses", value)

@property
@pulumi.getter(name="ipv6Addresses")
def ipv6_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]:
    """
    The IPv6 addresses published by the QEMU agent
    """
    return pulumi.get(self, "ipv6_addresses")

@ipv6_addresses.setter
def ipv6_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]]):
    pulumi.set(self, "ipv6_addresses", value)

@property
@pulumi.getter(name="keyboardLayout")
def keyboard_layout(self) -> Optional[pulumi.Input[str]]:
    """
    The keyboard layout
    """
    return pulumi.get(self, "keyboard_layout")

@keyboard_layout.setter
def keyboard_layout(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "keyboard_layout", value)

@property
@pulumi.getter(name="macAddresses")
def mac_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    The MAC addresses for the network interfaces
    """
    return pulumi.get(self, "mac_addresses")

@mac_addresses.setter
def mac_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "mac_addresses", value)

@property
@pulumi.getter
def memory(self) -> Optional[pulumi.Input['VirtualMachineMemoryArgs']]:
    """
    The memory allocation
    """
    return pulumi.get(self, "memory")

@memory.setter
def memory(self, value: Optional[pulumi.Input['VirtualMachineMemoryArgs']]):
    pulumi.set(self, "memory", value)

@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    The name
    """
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "name", value)

@property
@pulumi.getter(name="networkDevices")
def network_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]]:
    """
    The network devices
    """
    return pulumi.get(self, "network_devices")

@network_devices.setter
def network_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineNetworkDeviceArgs']]]]):
    pulumi.set(self, "network_devices", value)

@property
@pulumi.getter(name="networkInterfaceNames")
def network_interface_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    The network interface names published by the QEMU agent
    """
    return pulumi.get(self, "network_interface_names")

@network_interface_names.setter
def network_interface_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "network_interface_names", value)

@property
@pulumi.getter(name="nodeName")
def node_name(self) -> Optional[pulumi.Input[str]]:
    """
    The node name
    """
    return pulumi.get(self, "node_name")

@node_name.setter
def node_name(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "node_name", value)

@property
@pulumi.getter(name="onBoot")
def on_boot(self) -> Optional[pulumi.Input[bool]]:
    """
    Start VM on Node boot
    """
    return pulumi.get(self, "on_boot")

@on_boot.setter
def on_boot(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "on_boot", value)

@property
@pulumi.getter(name="operatingSystem")
def operating_system(self) -> Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']]:
    """
    The operating system configuration
    """
    return pulumi.get(self, "operating_system")

@operating_system.setter
def operating_system(self, value: Optional[pulumi.Input['VirtualMachineOperatingSystemArgs']]):
    pulumi.set(self, "operating_system", value)

@property
@pulumi.getter(name="poolId")
def pool_id(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the pool to assign the virtual machine to
    """
    return pulumi.get(self, "pool_id")

@pool_id.setter
def pool_id(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "pool_id", value)

@property
@pulumi.getter
def reboot(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to reboot the VM after creation
    """
    return pulumi.get(self, "reboot")

@reboot.setter
def reboot(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "reboot", value)

@property
@pulumi.getter(name="serialDevices")
def serial_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]]:
    """
    The serial devices
    """
    return pulumi.get(self, "serial_devices")

@serial_devices.setter
def serial_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualMachineSerialDeviceArgs']]]]):
    pulumi.set(self, "serial_devices", value)

@property
@pulumi.getter
def started(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to start the virtual machine
    """
    return pulumi.get(self, "started")

@started.setter
def started(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "started", value)

@property
@pulumi.getter(name="tabletDevice")
def tablet_device(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to enable the USB tablet device
    """
    return pulumi.get(self, "tablet_device")

@tablet_device.setter
def tablet_device(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "tablet_device", value)

@property
@pulumi.getter
def template(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether to create a template
    """
    return pulumi.get(self, "template")

@template.setter
def template(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "template", value)

@property
@pulumi.getter(name="timeoutClone")
def timeout_clone(self) -> Optional[pulumi.Input[int]]:
    """
    Clone VM timeout
    """
    return pulumi.get(self, "timeout_clone")

@timeout_clone.setter
def timeout_clone(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_clone", value)

@property
@pulumi.getter(name="timeoutMoveDisk")
def timeout_move_disk(self) -> Optional[pulumi.Input[int]]:
    """
    MoveDisk timeout
    """
    return pulumi.get(self, "timeout_move_disk")

@timeout_move_disk.setter
def timeout_move_disk(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_move_disk", value)

@property
@pulumi.getter(name="timeoutReboot")
def timeout_reboot(self) -> Optional[pulumi.Input[int]]:
    """
    Reboot timeout
    """
    return pulumi.get(self, "timeout_reboot")

@timeout_reboot.setter
def timeout_reboot(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_reboot", value)

@property
@pulumi.getter(name="timeoutShutdownVm")
def timeout_shutdown_vm(self) -> Optional[pulumi.Input[int]]:
    """
    Shutdown timeout
    """
    return pulumi.get(self, "timeout_shutdown_vm")

@timeout_shutdown_vm.setter
def timeout_shutdown_vm(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_shutdown_vm", value)

@property
@pulumi.getter(name="timeoutStartVm")
def timeout_start_vm(self) -> Optional[pulumi.Input[int]]:
    """
    Start VM timeout
    """
    return pulumi.get(self, "timeout_start_vm")

@timeout_start_vm.setter
def timeout_start_vm(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_start_vm", value)

@property
@pulumi.getter(name="timeoutStopVm")
def timeout_stop_vm(self) -> Optional[pulumi.Input[int]]:
    """
    Stop VM timeout
    """
    return pulumi.get(self, "timeout_stop_vm")

@timeout_stop_vm.setter
def timeout_stop_vm(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "timeout_stop_vm", value)

@property
@pulumi.getter
def vga(self) -> Optional[pulumi.Input['VirtualMachineVgaArgs']]:
    """
    The VGA configuration
    """
    return pulumi.get(self, "vga")

@vga.setter
def vga(self, value: Optional[pulumi.Input['VirtualMachineVgaArgs']]):
    pulumi.set(self, "vga", value)

@property
@pulumi.getter(name="vmId")
def vm_id(self) -> Optional[pulumi.Input[int]]:
    """
    The VM identifier
    """
    return pulumi.get(self, "vm_id")

@vm_id.setter
def vm_id(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "vm_id", value)
class VirtualMachine(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             acpi: Optional[pulumi.Input[bool]] = None,
             agent: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAgentArgs']]] = None,
             audio_device: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAudioDeviceArgs']]] = None,
             bios: Optional[pulumi.Input[str]] = None,
             cdrom: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCdromArgs']]] = None,
             clone: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCloneArgs']]] = None,
             cpu: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCpuArgs']]] = None,
             description: Optional[pulumi.Input[str]] = None,
             disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineDiskArgs']]]]] = None,
             initialization: Optional[pulumi.Input[pulumi.InputType['VirtualMachineInitializationArgs']]] = None,
             keyboard_layout: Optional[pulumi.Input[str]] = None,
             memory: Optional[pulumi.Input[pulumi.InputType['VirtualMachineMemoryArgs']]] = None,
             name: Optional[pulumi.Input[str]] = None,
             network_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineNetworkDeviceArgs']]]]] = None,
             node_name: Optional[pulumi.Input[str]] = None,
             on_boot: Optional[pulumi.Input[bool]] = None,
             operating_system: Optional[pulumi.Input[pulumi.InputType['VirtualMachineOperatingSystemArgs']]] = None,
             pool_id: Optional[pulumi.Input[str]] = None,
             reboot: Optional[pulumi.Input[bool]] = None,
             serial_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineSerialDeviceArgs']]]]] = None,
             started: Optional[pulumi.Input[bool]] = None,
             tablet_device: Optional[pulumi.Input[bool]] = None,
             template: Optional[pulumi.Input[bool]] = None,
             timeout_clone: Optional[pulumi.Input[int]] = None,
             timeout_move_disk: Optional[pulumi.Input[int]] = None,
             timeout_reboot: Optional[pulumi.Input[int]] = None,
             timeout_shutdown_vm: Optional[pulumi.Input[int]] = None,
             timeout_start_vm: Optional[pulumi.Input[int]] = None,
             timeout_stop_vm: Optional[pulumi.Input[int]] = None,
             vga: Optional[pulumi.Input[pulumi.InputType['VirtualMachineVgaArgs']]] = None,
             vm_id: Optional[pulumi.Input[int]] = None,
             __props__=None):
    """
    Create a VirtualMachine resource with the given unique name, props, and options.

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[bool] acpi: Whether to enable ACPI
    :param pulumi.Input[pulumi.InputType['VirtualMachineAgentArgs']] agent: The QEMU agent configuration
    :param pulumi.Input[pulumi.InputType['VirtualMachineAudioDeviceArgs']] audio_device: The audio devices
    :param pulumi.Input[str] bios: The BIOS implementation
    :param pulumi.Input[pulumi.InputType['VirtualMachineCdromArgs']] cdrom: The CDROM drive
    :param pulumi.Input[pulumi.InputType['VirtualMachineCloneArgs']] clone: The cloning configuration
    :param pulumi.Input[pulumi.InputType['VirtualMachineCpuArgs']] cpu: The CPU allocation
    :param pulumi.Input[str] description: The description
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineDiskArgs']]]] disks: The disk devices
    :param pulumi.Input[pulumi.InputType['VirtualMachineInitializationArgs']] initialization: The cloud-init configuration
    :param pulumi.Input[str] keyboard_layout: The keyboard layout
    :param pulumi.Input[pulumi.InputType['VirtualMachineMemoryArgs']] memory: The memory allocation
    :param pulumi.Input[str] name: The name
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineNetworkDeviceArgs']]]] network_devices: The network devices
    :param pulumi.Input[str] node_name: The node name
    :param pulumi.Input[bool] on_boot: Start VM on Node boot
    :param pulumi.Input[pulumi.InputType['VirtualMachineOperatingSystemArgs']] operating_system: The operating system configuration
    :param pulumi.Input[str] pool_id: The ID of the pool to assign the virtual machine to
    :param pulumi.Input[bool] reboot: Whether to reboot the VM after creation
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineSerialDeviceArgs']]]] serial_devices: The serial devices
    :param pulumi.Input[bool] started: Whether to start the virtual machine
    :param pulumi.Input[bool] tablet_device: Whether to enable the USB tablet device
    :param pulumi.Input[bool] template: Whether to create a template
    :param pulumi.Input[int] timeout_clone: Clone VM timeout
    :param pulumi.Input[int] timeout_move_disk: MoveDisk timeout
    :param pulumi.Input[int] timeout_reboot: Reboot timeout
    :param pulumi.Input[int] timeout_shutdown_vm: Shutdown timeout
    :param pulumi.Input[int] timeout_start_vm: Start VM timeout
    :param pulumi.Input[int] timeout_stop_vm: Stop VM timeout
    :param pulumi.Input[pulumi.InputType['VirtualMachineVgaArgs']] vga: The VGA configuration
    :param pulumi.Input[int] vm_id: The VM identifier
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: VirtualMachineArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Create a VirtualMachine resource with the given unique name, props, and options.

    This overload accepts the inputs bundled into a single ``VirtualMachineArgs``
    object instead of individual keyword arguments.

    :param str resource_name: The name of the resource.
    :param VirtualMachineArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """
    Dispatch between the two declared overloads and delegate to ``_internal_init``.

    ``get_resource_args_opts`` inspects the positional/keyword arguments and
    yields a ``VirtualMachineArgs`` instance when the args-object overload was
    used, or ``None`` when individual keyword arguments were given.
    """
    resource_args, opts = _utilities.get_resource_args_opts(
        VirtualMachineArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Keyword-style call: forward the original arguments untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object call: expand its fields into keyword arguments.
        __self__._internal_init(resource_name, opts, **vars(resource_args))
def _internal_init(__self__,
         resource_name: str,
         opts: Optional[pulumi.ResourceOptions] = None,
         acpi: Optional[pulumi.Input[bool]] = None,
         agent: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAgentArgs']]] = None,
         audio_device: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAudioDeviceArgs']]] = None,
         bios: Optional[pulumi.Input[str]] = None,
         cdrom: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCdromArgs']]] = None,
         clone: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCloneArgs']]] = None,
         cpu: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCpuArgs']]] = None,
         description: Optional[pulumi.Input[str]] = None,
         disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineDiskArgs']]]]] = None,
         initialization: Optional[pulumi.Input[pulumi.InputType['VirtualMachineInitializationArgs']]] = None,
         keyboard_layout: Optional[pulumi.Input[str]] = None,
         memory: Optional[pulumi.Input[pulumi.InputType['VirtualMachineMemoryArgs']]] = None,
         name: Optional[pulumi.Input[str]] = None,
         network_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineNetworkDeviceArgs']]]]] = None,
         node_name: Optional[pulumi.Input[str]] = None,
         on_boot: Optional[pulumi.Input[bool]] = None,
         operating_system: Optional[pulumi.Input[pulumi.InputType['VirtualMachineOperatingSystemArgs']]] = None,
         pool_id: Optional[pulumi.Input[str]] = None,
         reboot: Optional[pulumi.Input[bool]] = None,
         serial_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineSerialDeviceArgs']]]]] = None,
         started: Optional[pulumi.Input[bool]] = None,
         tablet_device: Optional[pulumi.Input[bool]] = None,
         template: Optional[pulumi.Input[bool]] = None,
         timeout_clone: Optional[pulumi.Input[int]] = None,
         timeout_move_disk: Optional[pulumi.Input[int]] = None,
         timeout_reboot: Optional[pulumi.Input[int]] = None,
         timeout_shutdown_vm: Optional[pulumi.Input[int]] = None,
         timeout_start_vm: Optional[pulumi.Input[int]] = None,
         timeout_stop_vm: Optional[pulumi.Input[int]] = None,
         vga: Optional[pulumi.Input[pulumi.InputType['VirtualMachineVgaArgs']]] = None,
         vm_id: Optional[pulumi.Input[int]] = None,
         __props__=None):
    """
    Build the input property bag and register the resource with the engine.

    Shared implementation behind both ``__init__`` overloads.  When ``opts.id``
    is set the engine treats this as a lookup of an existing resource and the
    caller-supplied ``__props__`` is used instead of the inputs.
    """
    # Normalize opts so the attribute checks below are always safe.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    # Fill in the provider version and plugin download URL unless the caller
    # pinned them explicitly.
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.plugin_download_url is None:
        opts.plugin_download_url = _utilities.get_plugin_download_url()
    if opts.id is None:
        # Creating a new resource: __props__ must not be supplied by the caller.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = VirtualMachineArgs.__new__(VirtualMachineArgs)

        __props__.__dict__["acpi"] = acpi
        __props__.__dict__["agent"] = agent
        __props__.__dict__["audio_device"] = audio_device
        __props__.__dict__["bios"] = bios
        __props__.__dict__["cdrom"] = cdrom
        __props__.__dict__["clone"] = clone
        __props__.__dict__["cpu"] = cpu
        __props__.__dict__["description"] = description
        __props__.__dict__["disks"] = disks
        __props__.__dict__["initialization"] = initialization
        __props__.__dict__["keyboard_layout"] = keyboard_layout
        __props__.__dict__["memory"] = memory
        __props__.__dict__["name"] = name
        __props__.__dict__["network_devices"] = network_devices
        # node_name is the only required input (unless rehydrating via urn).
        if node_name is None and not opts.urn:
            raise TypeError("Missing required property 'node_name'")
        __props__.__dict__["node_name"] = node_name
        __props__.__dict__["on_boot"] = on_boot
        __props__.__dict__["operating_system"] = operating_system
        __props__.__dict__["pool_id"] = pool_id
        __props__.__dict__["reboot"] = reboot
        __props__.__dict__["serial_devices"] = serial_devices
        __props__.__dict__["started"] = started
        __props__.__dict__["tablet_device"] = tablet_device
        __props__.__dict__["template"] = template
        __props__.__dict__["timeout_clone"] = timeout_clone
        __props__.__dict__["timeout_move_disk"] = timeout_move_disk
        __props__.__dict__["timeout_reboot"] = timeout_reboot
        __props__.__dict__["timeout_shutdown_vm"] = timeout_shutdown_vm
        __props__.__dict__["timeout_start_vm"] = timeout_start_vm
        __props__.__dict__["timeout_stop_vm"] = timeout_stop_vm
        __props__.__dict__["vga"] = vga
        __props__.__dict__["vm_id"] = vm_id
        # Output-only attributes: populated by the provider, never by inputs.
        __props__.__dict__["ipv4_addresses"] = None
        __props__.__dict__["ipv6_addresses"] = None
        __props__.__dict__["mac_addresses"] = None
        __props__.__dict__["network_interface_names"] = None
    super(VirtualMachine, __self__).__init__(
        'proxmoxve:VM/virtualMachine:VirtualMachine',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        acpi: Optional[pulumi.Input[bool]] = None,
        agent: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAgentArgs']]] = None,
        audio_device: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAudioDeviceArgs']]] = None,
        bios: Optional[pulumi.Input[str]] = None,
        cdrom: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCdromArgs']]] = None,
        clone: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCloneArgs']]] = None,
        cpu: Optional[pulumi.Input[pulumi.InputType['VirtualMachineCpuArgs']]] = None,
        description: Optional[pulumi.Input[str]] = None,
        disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineDiskArgs']]]]] = None,
        initialization: Optional[pulumi.Input[pulumi.InputType['VirtualMachineInitializationArgs']]] = None,
        ipv4_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
        ipv6_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
        keyboard_layout: Optional[pulumi.Input[str]] = None,
        mac_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        memory: Optional[pulumi.Input[pulumi.InputType['VirtualMachineMemoryArgs']]] = None,
        name: Optional[pulumi.Input[str]] = None,
        network_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineNetworkDeviceArgs']]]]] = None,
        network_interface_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        node_name: Optional[pulumi.Input[str]] = None,
        on_boot: Optional[pulumi.Input[bool]] = None,
        operating_system: Optional[pulumi.Input[pulumi.InputType['VirtualMachineOperatingSystemArgs']]] = None,
        pool_id: Optional[pulumi.Input[str]] = None,
        reboot: Optional[pulumi.Input[bool]] = None,
        serial_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineSerialDeviceArgs']]]]] = None,
        started: Optional[pulumi.Input[bool]] = None,
        tablet_device: Optional[pulumi.Input[bool]] = None,
        template: Optional[pulumi.Input[bool]] = None,
        timeout_clone: Optional[pulumi.Input[int]] = None,
        timeout_move_disk: Optional[pulumi.Input[int]] = None,
        timeout_reboot: Optional[pulumi.Input[int]] = None,
        timeout_shutdown_vm: Optional[pulumi.Input[int]] = None,
        timeout_start_vm: Optional[pulumi.Input[int]] = None,
        timeout_stop_vm: Optional[pulumi.Input[int]] = None,
        vga: Optional[pulumi.Input[pulumi.InputType['VirtualMachineVgaArgs']]] = None,
        vm_id: Optional[pulumi.Input[int]] = None) -> 'VirtualMachine':
    """
    Get an existing VirtualMachine resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[bool] acpi: Whether to enable ACPI
    :param pulumi.Input[pulumi.InputType['VirtualMachineAgentArgs']] agent: The QEMU agent configuration
    :param pulumi.Input[pulumi.InputType['VirtualMachineAudioDeviceArgs']] audio_device: The audio devices
    :param pulumi.Input[str] bios: The BIOS implementation
    :param pulumi.Input[pulumi.InputType['VirtualMachineCdromArgs']] cdrom: The CDROM drive
    :param pulumi.Input[pulumi.InputType['VirtualMachineCloneArgs']] clone: The cloning configuration
    :param pulumi.Input[pulumi.InputType['VirtualMachineCpuArgs']] cpu: The CPU allocation
    :param pulumi.Input[str] description: The description
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineDiskArgs']]]] disks: The disk devices
    :param pulumi.Input[pulumi.InputType['VirtualMachineInitializationArgs']] initialization: The cloud-init configuration
    :param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] ipv4_addresses: The IPv4 addresses published by the QEMU agent
    :param pulumi.Input[Sequence[pulumi.Input[Sequence[pulumi.Input[str]]]]] ipv6_addresses: The IPv6 addresses published by the QEMU agent
    :param pulumi.Input[str] keyboard_layout: The keyboard layout
    :param pulumi.Input[Sequence[pulumi.Input[str]]] mac_addresses: The MAC addresses for the network interfaces
    :param pulumi.Input[pulumi.InputType['VirtualMachineMemoryArgs']] memory: The memory allocation
    :param pulumi.Input[str] name: The name
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineNetworkDeviceArgs']]]] network_devices: The network devices
    :param pulumi.Input[Sequence[pulumi.Input[str]]] network_interface_names: The network interface names published by the QEMU agent
    :param pulumi.Input[str] node_name: The node name
    :param pulumi.Input[bool] on_boot: Start VM on Node boot
    :param pulumi.Input[pulumi.InputType['VirtualMachineOperatingSystemArgs']] operating_system: The operating system configuration
    :param pulumi.Input[str] pool_id: The ID of the pool to assign the virtual machine to
    :param pulumi.Input[bool] reboot: Whether to reboot the VM after creation
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualMachineSerialDeviceArgs']]]] serial_devices: The serial devices
    :param pulumi.Input[bool] started: Whether to start the virtual machine
    :param pulumi.Input[bool] tablet_device: Whether to enable the USB tablet device
    :param pulumi.Input[bool] template: Whether to create a template
    :param pulumi.Input[int] timeout_clone: Clone VM timeout
    :param pulumi.Input[int] timeout_move_disk: MoveDisk timeout
    :param pulumi.Input[int] timeout_reboot: Reboot timeout
    :param pulumi.Input[int] timeout_shutdown_vm: Shutdown timeout
    :param pulumi.Input[int] timeout_start_vm: Start VM timeout
    :param pulumi.Input[int] timeout_stop_vm: Stop VM timeout
    :param pulumi.Input[pulumi.InputType['VirtualMachineVgaArgs']] vga: The VGA configuration
    :param pulumi.Input[int] vm_id: The VM identifier
    """
    # Attach the provider id so the engine performs a lookup instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # Rehydrate a state object carrying any caller-supplied property overrides.
    __props__ = _VirtualMachineState.__new__(_VirtualMachineState)

    __props__.__dict__["acpi"] = acpi
    __props__.__dict__["agent"] = agent
    __props__.__dict__["audio_device"] = audio_device
    __props__.__dict__["bios"] = bios
    __props__.__dict__["cdrom"] = cdrom
    __props__.__dict__["clone"] = clone
    __props__.__dict__["cpu"] = cpu
    __props__.__dict__["description"] = description
    __props__.__dict__["disks"] = disks
    __props__.__dict__["initialization"] = initialization
    __props__.__dict__["ipv4_addresses"] = ipv4_addresses
    __props__.__dict__["ipv6_addresses"] = ipv6_addresses
    __props__.__dict__["keyboard_layout"] = keyboard_layout
    __props__.__dict__["mac_addresses"] = mac_addresses
    __props__.__dict__["memory"] = memory
    __props__.__dict__["name"] = name
    __props__.__dict__["network_devices"] = network_devices
    __props__.__dict__["network_interface_names"] = network_interface_names
    __props__.__dict__["node_name"] = node_name
    __props__.__dict__["on_boot"] = on_boot
    __props__.__dict__["operating_system"] = operating_system
    __props__.__dict__["pool_id"] = pool_id
    __props__.__dict__["reboot"] = reboot
    __props__.__dict__["serial_devices"] = serial_devices
    __props__.__dict__["started"] = started
    __props__.__dict__["tablet_device"] = tablet_device
    __props__.__dict__["template"] = template
    __props__.__dict__["timeout_clone"] = timeout_clone
    __props__.__dict__["timeout_move_disk"] = timeout_move_disk
    __props__.__dict__["timeout_reboot"] = timeout_reboot
    __props__.__dict__["timeout_shutdown_vm"] = timeout_shutdown_vm
    __props__.__dict__["timeout_start_vm"] = timeout_start_vm
    __props__.__dict__["timeout_stop_vm"] = timeout_stop_vm
    __props__.__dict__["vga"] = vga
    __props__.__dict__["vm_id"] = vm_id
    return VirtualMachine(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def acpi(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to enable ACPI
"""
return pulumi.get(self, "acpi")
@property
@pulumi.getter
def agent(self) -> pulumi.Output[Optional['outputs.VirtualMachineAgent']]:
"""
The QEMU agent configuration
"""
return pulumi.get(self, "agent")
@property
@pulumi.getter(name="audioDevice")
def audio_device(self) -> pulumi.Output[Optional['outputs.VirtualMachineAudioDevice']]:
"""
The audio devices
"""
return pulumi.get(self, "audio_device")
@property
@pulumi.getter
def bios(self) -> pulumi.Output[Optional[str]]:
"""
The BIOS implementation
"""
return pulumi.get(self, "bios")
@property
@pulumi.getter
def cdrom(self) -> pulumi.Output[Optional['outputs.VirtualMachineCdrom']]:
"""
The CDROM drive
"""
return pulumi.get(self, "cdrom")
@property
@pulumi.getter
def clone(self) -> pulumi.Output[Optional['outputs.VirtualMachineClone']]:
"""
The cloning configuration
"""
return pulumi.get(self, "clone")
@property
@pulumi.getter
def cpu(self) -> pulumi.Output[Optional['outputs.VirtualMachineCpu']]:
"""
The CPU allocation
"""
return pulumi.get(self, "cpu")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def disks(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualMachineDisk']]]:
"""
The disk devices
"""
return pulumi.get(self, "disks")
@property
@pulumi.getter
def initialization(self) -> pulumi.Output[Optional['outputs.VirtualMachineInitialization']]:
"""
The cloud-init configuration
"""
return pulumi.get(self, "initialization")
@property
@pulumi.getter(name="ipv4Addresses")
def ipv4_addresses(self) -> pulumi.Output[Sequence[Sequence[str]]]:
"""
The IPv4 addresses published by the QEMU agent
"""
return pulumi.get(self, "ipv4_addresses")
@property
@pulumi.getter(name="ipv6Addresses")
def ipv6_addresses(self) -> pulumi.Output[Sequence[Sequence[str]]]:
"""
The IPv6 addresses published by the QEMU agent
"""
return pulumi.get(self, "ipv6_addresses")
@property
@pulumi.getter(name="keyboardLayout")
def keyboard_layout(self) -> pulumi.Output[Optional[str]]:
"""
The keyboard layout
"""
return pulumi.get(self, "keyboard_layout")
@property
@pulumi.getter(name="macAddresses")
def mac_addresses(self) -> pulumi.Output[Sequence[str]]:
"""
The MAC addresses for the network interfaces
"""
return pulumi.get(self, "mac_addresses")
@property
@pulumi.getter
def memory(self) -> pulumi.Output[Optional['outputs.VirtualMachineMemory']]:
"""
The memory allocation
"""
return pulumi.get(self, "memory")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkDevices")
def network_devices(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualMachineNetworkDevice']]]:
"""
The network devices
"""
return pulumi.get(self, "network_devices")
@property
@pulumi.getter(name="networkInterfaceNames")
def network_interface_names(self) -> pulumi.Output[Sequence[str]]:
"""
The network interface names published by the QEMU agent
"""
return pulumi.get(self, "network_interface_names")
@property
@pulumi.getter(name="nodeName")
def node_name(self) -> pulumi.Output[str]:
"""
The node name
"""
return pulumi.get(self, "node_name")
@property
@pulumi.getter(name="onBoot")
def on_boot(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether to start the VM on node boot; ``None`` if unset.
    """
    return pulumi.get(self, "on_boot")
@property
@pulumi.getter(name="operatingSystem")
def operating_system(self) -> pulumi.Output[Optional['outputs.VirtualMachineOperatingSystem']]:
    """
    The operating system configuration; ``None`` if unset.
    """
    return pulumi.get(self, "operating_system")
@property
@pulumi.getter(name="poolId")
def pool_id(self) -> pulumi.Output[Optional[str]]:
    """
    The ID of the pool to assign the virtual machine to; ``None`` if unset.
    """
    return pulumi.get(self, "pool_id")
@property
@pulumi.getter
def reboot(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether to reboot the VM after creation; ``None`` if unset.
    """
    return pulumi.get(self, "reboot")
@property
@pulumi.getter(name="serialDevices")
def serial_devices(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualMachineSerialDevice']]]:
    """
    The serial devices; ``None`` if none were configured.
    """
    return pulumi.get(self, "serial_devices")
@property
@pulumi.getter
def started(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether to start the virtual machine; ``None`` if unset.
    """
    return pulumi.get(self, "started")
@property
@pulumi.getter(name="tabletDevice")
def tablet_device(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether to enable the USB tablet device; ``None`` if unset.
    """
    return pulumi.get(self, "tablet_device")
@property
@pulumi.getter
def template(self) -> pulumi.Output[Optional[bool]]:
    """
    Whether to create a template; ``None`` if unset.
    """
    return pulumi.get(self, "template")
@property
@pulumi.getter(name="timeoutClone")
def timeout_clone(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for cloning the VM; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_clone")
@property
@pulumi.getter(name="timeoutMoveDisk")
def timeout_move_disk(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for the MoveDisk operation; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_move_disk")
@property
@pulumi.getter(name="timeoutReboot")
def timeout_reboot(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for rebooting the VM; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_reboot")
@property
@pulumi.getter(name="timeoutShutdownVm")
def timeout_shutdown_vm(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for shutting the VM down; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_shutdown_vm")
@property
@pulumi.getter(name="timeoutStartVm")
def timeout_start_vm(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for starting the VM; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_start_vm")
@property
@pulumi.getter(name="timeoutStopVm")
def timeout_stop_vm(self) -> pulumi.Output[Optional[int]]:
    """
    Timeout for stopping the VM; ``None`` if unset.
    """
    return pulumi.get(self, "timeout_stop_vm")
@property
@pulumi.getter
def vga(self) -> pulumi.Output[Optional['outputs.VirtualMachineVga']]:
    """
    The VGA configuration; ``None`` if unset.
    """
    return pulumi.get(self, "vga")
@property
@pulumi.getter(name="vmId")
def vm_id(self) -> pulumi.Output[Optional[int]]:
    """
    The VM identifier; ``None`` if unset.
    """
    return pulumi.get(self, "vm_id")
| 42.172146
| 143
| 0.646016
| 7,632
| 70,554
| 5.751441
| 0.031053
| 0.126803
| 0.126393
| 0.043718
| 0.946668
| 0.930402
| 0.912131
| 0.90311
| 0.889258
| 0.85905
| 0
| 0.000874
| 0.237719
| 70,554
| 1,672
| 144
| 42.197368
| 0.815291
| 0.198529
| 0
| 0.884354
| 1
| 0
| 0.135644
| 0.06399
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169096
| false
| 0.000972
| 0.006803
| 0
| 0.27794
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
15a9f378314b36187a0fb11148d37e24055417f8
| 140
|
py
|
Python
|
src/sympais/methods/__init__.py
|
ethanluoyc/sympais
|
68bc696434c86edb8457a3c74473c810b2c5c8f2
|
[
"MIT"
] | 5
|
2021-06-04T23:24:41.000Z
|
2021-12-13T21:39:57.000Z
|
src/sympais/methods/__init__.py
|
ethanluoyc/sympais
|
68bc696434c86edb8457a3c74473c810b2c5c8f2
|
[
"MIT"
] | 24
|
2021-07-12T02:08:34.000Z
|
2021-12-20T02:14:54.000Z
|
src/sympais/methods/__init__.py
|
ethanluoyc/sympais
|
68bc696434c86edb8457a3c74473c810b2c5c8f2
|
[
"MIT"
] | 1
|
2021-07-31T10:34:19.000Z
|
2021-07-31T10:34:19.000Z
|
from .dmc import run_dmc
from .importance import run_sympais
from .importance import run_sympais_hmc
from .stratified import run_stratified
| 28
| 39
| 0.857143
| 21
| 140
| 5.47619
| 0.380952
| 0.313043
| 0.347826
| 0.4
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 140
| 4
| 40
| 35
| 0.927419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ec878942a5ea0d410ffc47a86dfa09815126897c
| 12,861
|
py
|
Python
|
metrics/csc_metric.py
|
hanztup/csc_cbert
|
4e1846cb4ca5ddf159191b5097347a4271add9ff
|
[
"MIT"
] | 3
|
2021-09-09T03:00:22.000Z
|
2021-12-28T08:39:43.000Z
|
metrics/csc_metric.py
|
hanztup/csc_cbert
|
4e1846cb4ca5ddf159191b5097347a4271add9ff
|
[
"MIT"
] | 1
|
2021-09-15T08:57:35.000Z
|
2021-09-16T05:55:04.000Z
|
metrics/csc_metric.py
|
hanztup/csc_cbert
|
4e1846cb4ca5ddf159191b5097347a4271add9ff
|
[
"MIT"
] | 1
|
2021-09-13T14:17:21.000Z
|
2021-09-13T14:17:21.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@file : metrics/ner.py
@author: xiaoya li
@contact : xiaoya_li@shannonai.com
@date : 2021/01/14 16:13
@version: 1.0
@desc :
"""
import torch
from typing import Any, List
from pytorch_lightning.metrics.metric import TensorMetric
class MetricForCSC(TensorMetric):
    """
    Confusion-matrix metric for the CSC (spelling-check) task.

    NOTE(review): the original docstring said "span-level F1 scores for named
    entity recognition", but ``forward`` actually tallies per-token
    true/false positives via ``count_confusion_matrix`` — confirm wording.
    """
    def __init__(self, entity_labels: List[str] = None, reduce_group: Any = None, reduce_op: Any = None, save_prediction = False):
        # Register with pytorch-lightning's TensorMetric machinery.
        super(MetricForCSC, self).__init__(name="metric_for_csc", reduce_group=reduce_group, reduce_op=reduce_op)
        self.num_labels = len(entity_labels)
        self.entity_labels = entity_labels
        # Map label index -> label string, in the order the labels were given.
        self.tags2label = {label_idx: label_item for label_idx, label_item in enumerate(entity_labels)}
        self.save_prediction = save_prediction
        if save_prediction:
            # Per-example prediction/gold accumulators, returned in "test" runs
            # by compute_f1_using_confusion_matrix.
            self.pred_entity_lst = []
            self.gold_entity_lst = []

    def forward(self, pred_sequence_labels, gold_sequence_labels, sequence_mask=None):
        """
        Accumulate token-level confusion counts over one batch.

        Args:
            pred_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            gold_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            sequence_mask: Optional[torch.LongTensor], shape of [batch_size, sequence_len].
                1 for non-[PAD] tokens; 0 for [PAD] tokens

        Returns:
            torch.LongTensor of [true_positive, false_positive, false_negative]
            summed over the batch.
        """
        # NOTE: true_negative is tallied by count_confusion_matrix but never
        # used or returned here.
        true_positive, false_positive, true_negative, false_negative = 0, 0, 0, 0
        # Move tensors to host and convert to plain Python lists for iteration.
        pred_sequence_labels = pred_sequence_labels.to("cpu").numpy().tolist()
        gold_sequence_labels = gold_sequence_labels.to("cpu").numpy().tolist()
        if sequence_mask is not None:
            sequence_mask = sequence_mask.to("cpu").numpy().tolist()
            # e.g. [1, 1, 1, 0, 0, 0]
        for item_idx, (pred_label_item, gold_label_item) in enumerate(zip(pred_sequence_labels, gold_sequence_labels)):
            if sequence_mask is not None:
                sequence_mask_item = sequence_mask[item_idx]
                try:
                    token_end_pos = sequence_mask_item.index(0) - 1  # before [PAD] always has an [SEP] token.
                except:
                    # NOTE(review): bare except — list.index raises ValueError
                    # when no 0 is present (no padding); narrowing recommended.
                    token_end_pos = len(sequence_mask_item)
            else:
                token_end_pos = len(gold_label_item)
            # Skip position 0 (presumably the [CLS] token — confirm) and the
            # tail at/after token_end_pos, then map indices to label strings.
            pred_label_item = [self.tags2label[tmp] for tmp in pred_label_item[1:token_end_pos]]
            gold_label_item = [self.tags2label[tmp] for tmp in gold_label_item[1:token_end_pos]]
            pred_entities = collect_labels(pred_label_item)
            gold_entities = collect_labels(gold_label_item)
            if self.save_prediction:
                self.pred_entity_lst.append(pred_entities)
                self.gold_entity_lst.append(gold_entities)
            tp, fp, fn = count_confusion_matrix(pred_label_item, gold_label_item)
            true_positive += tp
            false_positive += fp
            false_negative += fn
        batch_confusion_matrix = torch.LongTensor([true_positive, false_positive, false_negative])
        return batch_confusion_matrix

    def compute_f1_using_confusion_matrix(self, true_positive, false_positive, false_negative, prefix="dev"):
        """
        compute f1 scores.
        Description:
            f1: 2 * precision * recall / (precision + recall)
            - precision = true_positive / true_positive + false_positive
            - recall = true_positive / true_positive + false_negative
        Returns:
            precision, recall, f1 — plus (gold, pred) lists when
            save_prediction is on and prefix == "test".
        """
        # 1e-13 guards against division by zero when all counts are 0.
        precision = true_positive / (true_positive + false_positive + 1e-13)
        recall = true_positive / (true_positive + false_negative + 1e-13)
        f1 = 2 * precision * recall / (precision + recall + 1e-13)
        if self.save_prediction and prefix == "test":
            entity_tuple = (self.gold_entity_lst, self.pred_entity_lst)
            return precision, recall, f1, entity_tuple
        return precision, recall, f1
class MetricForCSC_Corrector(TensorMetric):
    """
    Correction metric for the CSC task.

    NOTE(review): unlike MetricForCSC, ``entity_labels`` here must be a dict
    (label -> index): the constructor builds ``tags2label`` from
    ``entity_labels.items()``.
    """
    def __init__(self, entity_labels, reduce_group=None, reduce_op=None, save_prediction=False):
        super(MetricForCSC_Corrector, self).__init__(name="metric_for_csc_corrector", reduce_group=reduce_group, reduce_op=reduce_op)
        self.num_labels = len(entity_labels)
        self.entity_labels = entity_labels
        # Invert the label -> index mapping into index -> label.
        self.tags2label = {label_idx: label_item for label_item, label_idx in entity_labels.items()}
        self.save_prediction = save_prediction
        if save_prediction:
            # Per-example decoded strings, kept for "test" runs.
            self.pred_entity_lst = []
            self.gold_entity_lst = []

    def forward(self, pred_sequence_labels, gold_sequence_labels, sequence_mask=None):
        """
        Accumulate per-example precision/recall/F1 sums over one batch.

        Args:
            pred_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            gold_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            sequence_mask: Optional[torch.LongTensor], shape of [batch_size, sequence_len].
                1 for non-[PAD] tokens; 0 for [PAD] tokens

        Returns:
            torch.Tensor of [sum of precisions, sum of recalls, sum of F1s]
            over the batch. NOTE(review): these are NOT tp/fp/fn counts, yet
            compute_f1_using_confusion_matrix names its parameters that way —
            confirm how callers combine the two.
        """
        pp, rr, ff = 0., 0., 0.
        # Move tensors to host and convert to plain Python lists.
        pred_sequence_labels = pred_sequence_labels.to("cpu").numpy().tolist()
        gold_sequence_labels = gold_sequence_labels.to("cpu").numpy().tolist()
        if sequence_mask is not None:
            sequence_mask = sequence_mask.to("cpu").numpy().tolist()
        for item_idx, (pred_label_item, gold_label_item) in enumerate(zip(pred_sequence_labels, gold_sequence_labels)):
            if sequence_mask is not None:
                sequence_mask_item = sequence_mask[item_idx]
                try:
                    token_end_pos = sequence_mask_item.index(0) - 1  # before [PAD] always has an [SEP] token.
                except:
                    # NOTE(review): bare except — list.index raises ValueError
                    # when no 0 is present; narrowing recommended.
                    token_end_pos = len(sequence_mask_item)
            else:
                token_end_pos = len(gold_label_item)
            # Skip position 0 and the padded tail.
            pred_label_item = pred_label_item[1:token_end_pos]
            gold_label_item = gold_label_item[1:token_end_pos]
            # Decode indices back to characters and join into strings.
            pred_entities = ''.join([self.tags2label[tmp] for tmp in pred_label_item])
            gold_entities = ''.join([self.tags2label[tmp] for tmp in gold_label_item])
            if self.save_prediction:
                self.pred_entity_lst.append(pred_entities)
                self.gold_entity_lst.append(gold_entities)
            pi, ri, fi = count_confusion_matrix_for_corrector(pred_label_item, gold_label_item)
            pp += pi
            rr += ri
            ff += fi
        batch_confusion_matrix = torch.tensor([pp, rr, ff])
        return batch_confusion_matrix

    def compute_f1_using_confusion_matrix(self, true_positive, false_positive, false_negative, prefix="dev"):
        """
        compute f1 scores.
        Description:
            f1: 2 * precision * recall / (precision + recall)
            - precision = true_positive / true_positive + false_positive
            - recall = true_positive / true_positive + false_negative
        Returns:
            precision, recall, f1 — plus (gold, pred) lists when
            save_prediction is on and prefix == "test".
        """
        # 1e-13 guards against division by zero when all counts are 0.
        precision = true_positive / (true_positive + false_positive + 1e-13)
        recall = true_positive / (true_positive + false_negative + 1e-13)
        f1 = 2 * precision * recall / (precision + recall + 1e-13)
        if self.save_prediction and prefix == "test":
            entity_tuple = (self.gold_entity_lst, self.pred_entity_lst)
            return precision, recall, f1, entity_tuple
        return precision, recall, f1
class MetricForCSC_Detector(TensorMetric):
    """
    Detection metric for the CSC/CGED task.

    ``forward`` accumulates per-example precision/recall/F1 (from
    ``count_confusion_matrix_for_detector``) over a batch and returns their
    sums as a float tensor.
    """
    def __init__(self, entity_labels: List[str] = None, reduce_group: Any = None, reduce_op: Any = None, save_prediction = False):
        super(MetricForCSC_Detector, self).__init__(name="metric_for_csc_detector", reduce_group=reduce_group, reduce_op=reduce_op)
        self.num_labels = len(entity_labels)
        self.entity_labels = entity_labels
        # Map label index -> label string, in the order the labels were given.
        self.tags2label = {label_idx: label_item for label_idx, label_item in enumerate(entity_labels)}
        self.save_prediction = save_prediction
        if save_prediction:
            # Per-example decoded label strings, kept for "test" runs.
            self.pred_entity_lst = []
            self.gold_entity_lst = []

    def forward(self, pred_sequence_labels, gold_sequence_labels, sequence_mask=None):
        """
        Accumulate per-example precision/recall/F1 sums over one batch.

        Args:
            pred_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            gold_sequence_labels: torch.LongTensor, shape of [batch_size, sequence_len]
            sequence_mask: Optional[torch.LongTensor], shape of [batch_size, sequence_len].
                1 for non-[PAD] tokens; 0 for [PAD] tokens

        Returns:
            torch.Tensor of [sum of precisions, sum of recalls, sum of F1s].
        """
        pp, rr, ff = 0., 0., 0.
        # Move tensors to host and convert to plain Python lists.
        pred_sequence_labels = pred_sequence_labels.to("cpu").numpy().tolist()
        gold_sequence_labels = gold_sequence_labels.to("cpu").numpy().tolist()
        if sequence_mask is not None:
            sequence_mask = sequence_mask.to("cpu").numpy().tolist()
            # e.g. [1, 1, 1, 0, 0, 0]
        for item_idx, (pred_label_item, gold_label_item) in enumerate(zip(pred_sequence_labels, gold_sequence_labels)):
            if sequence_mask is not None:
                sequence_mask_item = sequence_mask[item_idx]
                try:
                    token_end_pos = sequence_mask_item.index(0) - 1  # before [PAD] always has an [SEP] token.
                except ValueError:
                    # No 0 in the mask means the sequence has no padding;
                    # list.index raises ValueError in exactly that case.
                    token_end_pos = len(sequence_mask_item)
            else:
                token_end_pos = len(gold_label_item)
            # Skip position 0 and the padded tail.
            pred_label_item = pred_label_item[1:token_end_pos]
            gold_label_item = gold_label_item[1:token_end_pos]
            pred_entities = ' '.join([self.tags2label[tmp] for tmp in pred_label_item])
            gold_entities = ' '.join([self.tags2label[tmp] for tmp in gold_label_item])
            if self.save_prediction:
                self.pred_entity_lst.append(pred_entities)
                self.gold_entity_lst.append(gold_entities)
            pi, ri, fi = count_confusion_matrix_for_detector(pred_label_item, gold_label_item)
            pp += pi
            rr += ri
            ff += fi
        # BUG FIX: pp/rr/ff are fractional sums; torch.LongTensor truncated
        # them to integers (usually 0). The sibling MetricForCSC_Corrector
        # already uses torch.tensor — keep float precision here too.
        batch_confusion_matrix = torch.tensor([pp, rr, ff])
        return batch_confusion_matrix

    def compute_f1_using_confusion_matrix(self, true_positive, false_positive, false_negative, prefix="dev"):
        """
        compute f1 scores.
        Description:
            f1: 2 * precision * recall / (precision + recall)
            - precision = true_positive / true_positive + false_positive
            - recall = true_positive / true_positive + false_negative
        Returns:
            precision, recall, f1 — plus (gold, pred) lists when
            save_prediction is on and prefix == "test".
        """
        # 1e-13 guards against division by zero when all counts are 0.
        precision = true_positive / (true_positive + false_positive + 1e-13)
        recall = true_positive / (true_positive + false_negative + 1e-13)
        f1 = 2 * precision * recall / (precision + recall + 1e-13)
        if self.save_prediction and prefix == "test":
            entity_tuple = (self.gold_entity_lst, self.pred_entity_lst)
            return precision, recall, f1, entity_tuple
        return precision, recall, f1
def count_confusion_matrix_for_detector(pred_labels, gold_labels):
    """
    Token accuracy of one sequence expressed as precision/recall/F1.

    Counts positions where pred and gold agree (pairing stops at the shorter
    list), then divides by each list's length; 1e-13 guards against empty
    inputs.

    Returns:
        (precision, recall, f1) as floats.
    """
    matches = sum(1 for pred, gold in zip(pred_labels, gold_labels) if pred == gold)
    pi = matches / (len(pred_labels) + 1e-13)
    ri = matches / (len(gold_labels) + 1e-13)
    fi = 2 * pi * ri / (pi + ri + 1e-13)
    return pi, ri, fi
def count_confusion_matrix_for_corrector(pred_labels, gold_labels):
    """
    Token accuracy of one sequence expressed as precision/recall/F1.

    NOTE: byte-for-byte the same computation as
    count_confusion_matrix_for_detector; kept as a separate entry point so
    detector and corrector call sites stay independent.

    Returns:
        (precision, recall, f1) as floats.
    """
    matches = sum(1 for pred, gold in zip(pred_labels, gold_labels) if pred == gold)
    pi = matches / (len(pred_labels) + 1e-13)
    ri = matches / (len(gold_labels) + 1e-13)
    fi = 2 * pi * ri / (pi + ri + 1e-13)
    return pi, ri, fi
def count_confusion_matrix(pred_labels, gold_labels):
    """
    Per-token binary confusion counts for one sequence.

    Gold labels must be the strings "1" (positive) or "0" (negative); any
    other gold value raises ValueError. Pairing stops at the shorter list.
    (The original also tallied true negatives but never returned them; that
    dead counter is dropped.)

    Returns:
        (true_positive, false_positive, false_negative)

    Raises:
        ValueError: if a gold label is neither "1" nor "0".
    """
    true_positive, false_positive, false_negative = 0, 0, 0
    for pred, gold in zip(pred_labels, gold_labels):
        if gold == "1":
            if pred == gold:
                true_positive += 1
            else:
                false_negative += 1
        elif gold == "0":
            if pred != gold:
                false_positive += 1
            # matching "0"s are true negatives — not part of the return value
        else:
            raise ValueError("Wrong label value.")
    return true_positive, false_positive, false_negative
def collect_labels(label_sequence):
    """
    Return the indices of positive labels in *label_sequence*.

    A position is positive exactly when its label equals the string '1'
    (any other value, including the int 1, is skipped).
    """
    return [idx for idx, label in enumerate(label_sequence) if label == '1']
| 42.167213
| 133
| 0.634865
| 1,601
| 12,861
| 4.778264
| 0.096814
| 0.045882
| 0.042222
| 0.042484
| 0.900131
| 0.890588
| 0.85268
| 0.85268
| 0.85268
| 0.841307
| 0
| 0.017212
| 0.272685
| 12,861
| 304
| 134
| 42.305921
| 0.80062
| 0.178524
| 0
| 0.758242
| 0
| 0
| 0.013034
| 0.004641
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.016484
| 0
| 0.175824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
17821dcbf89800f4fa441a96cdf3400a747e9541
| 6,859
|
py
|
Python
|
carbondesign/tests/test_tabs_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_tabs_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
carbondesign/tests/test_tabs_html.py
|
dozymoe/django-carbondesign
|
34aed0cfdccfa90fcb5bf2bbd347229815f1417b
|
[
"MIT"
] | null | null | null |
# pylint:disable=missing-module-docstring,missing-class-docstring,missing-function-docstring
from .base import compare_template, SimpleTestCase
class TabsHtmlTest(SimpleTestCase):
    """
    Golden-HTML tests for the carbondesign ``Tabs`` component: each test
    renders a Django template using the component tags and compares the
    output against the expected Carbon Design System markup via
    ``compare_template``.
    """
    # Show full diffs on assertion failure.
    maxDiff = None

    def test_default(self):
        """Default Tabs: four TabItems (one active, one disabled) and four TabContents."""
        template = """
{% load carbondesign %}
{% Tabs %}
{% Slot 'header' %}
{% TabItem target="tab-panel-1" active=True %}
Tab label 1
{% endTabItem %}
{% TabItem target="tab-panel-2" %}
Tab label 2
{% endTabItem %}
{% TabItem target="tab-panel-3" %}
Tab label 3
{% endTabItem %}
{% TabItem target="tab-panel-4" disabled=True %}
Tab label 4
{% endTabItem %}
{% endSlot %}
{% TabContent id="tab-panel-1" active=True %}
Content for first tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-2" %}
Content for second tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-3" %}
Content for third tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-4" %}
Content for fourth tab goes here.
{% endTabContent %}
{% endTabs %}
"""
        expected = """
<div data-tabs class="bx--tabs">
<div class="bx--tabs-trigger" tabindex="0">
<a href="javascript:void(0)" class="bx--tabs-trigger-text" tabindex="-1"></a>
<svg focusable="false" preserveAspectRatio="xMidYMid meet"
xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16"
height="16" viewBox="0 0 16 16" aria-hidden="true">
<path d="M8 11L3 6 3.7 5.3 8 9.6 12.3 5.3 13 6z"></path>
</svg>
</div>
<ul class="bx--tabs__nav bx--tabs__nav--hidden" role="tablist">
<li class="bx--tabs__nav-item bx--tabs__nav-item--selected" data-target="#tab-panel-1"
role="tab" aria-selected="true">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-1" id="tab-link-tab-panel-1">
Tab label 1
</a>
</li>
<li class="bx--tabs__nav-item" data-target="#tab-panel-2"
role="tab">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-2" id="tab-link-tab-panel-2">
Tab label 2
</a>
</li>
<li class="bx--tabs__nav-item" data-target="#tab-panel-3"
role="tab">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-3" id="tab-link-tab-panel-3">
Tab label 3
</a>
</li>
<li class="bx--tabs__nav-item bx--tabs__nav-item--disabled" data-target="#tab-panel-4"
role="tab" aria-disabled="true">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-4" id="tab-link-tab-panel-4">
Tab label 4
</a>
</li>
</ul>
</div>
<div class="bx--tab-content">
<div id="tab-panel-1" role="tabpanel" aria-labelledby="tab-link-tab-panel-1" aria-hidden="false">
<div>
Content for first tab goes here.
</div>
</div>
<div id="tab-panel-2" role="tabpanel" aria-labelledby="tab-link-tab-panel-2" aria-hidden="true" hidden="">
<div>
Content for second tab goes here.
</div>
</div>
<div id="tab-panel-3" role="tabpanel" aria-labelledby="tab-link-tab-panel-3" aria-hidden="true" hidden="">
<div>
Content for third tab goes here.
</div>
</div>
<div id="tab-panel-4" role="tabpanel" aria-labelledby="tab-link-tab-panel-4" aria-hidden="true" hidden="">
<div>
Content for fourth tab goes here.
</div>
</div>
</div>
"""
        # compare_template presumably normalizes/renders both sides and
        # returns a comparable pair — TODO confirm against base module.
        rendered = compare_template(template, expected)
        self.assertEqual(*rendered)

    def test_container(self):
        """Same fixture as test_default but with container=True, which only
        adds the bx--tabs--container class on the root element."""
        template = """
{% load carbondesign %}
{% Tabs container=True %}
{% Slot 'header' %}
{% TabItem target="tab-panel-1" active=True %}
Tab label 1
{% endTabItem %}
{% TabItem target="tab-panel-2" %}
Tab label 2
{% endTabItem %}
{% TabItem target="tab-panel-3" %}
Tab label 3
{% endTabItem %}
{% TabItem target="tab-panel-4" disabled=True %}
Tab label 4
{% endTabItem %}
{% endSlot %}
{% TabContent id="tab-panel-1" active=True %}
Content for first tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-2" %}
Content for second tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-3" %}
Content for third tab goes here.
{% endTabContent %}
{% TabContent id="tab-panel-4" %}
Content for fourth tab goes here.
{% endTabContent %}
{% endTabs %}
"""
        expected = """
<div data-tabs class="bx--tabs bx--tabs--container">
<div class="bx--tabs-trigger" tabindex="0">
<a href="javascript:void(0)" class="bx--tabs-trigger-text" tabindex="-1"></a>
<svg focusable="false" preserveAspectRatio="xMidYMid meet"
xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16"
height="16" viewBox="0 0 16 16" aria-hidden="true">
<path d="M8 11L3 6 3.7 5.3 8 9.6 12.3 5.3 13 6z"></path>
</svg>
</div>
<ul class="bx--tabs__nav bx--tabs__nav--hidden" role="tablist">
<li class="bx--tabs__nav-item bx--tabs__nav-item--selected" data-target="#tab-panel-1"
role="tab" aria-selected="true">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-1" id="tab-link-tab-panel-1">
Tab label 1
</a>
</li>
<li class="bx--tabs__nav-item" data-target="#tab-panel-2"
role="tab">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-2" id="tab-link-tab-panel-2">
Tab label 2
</a>
</li>
<li class="bx--tabs__nav-item" data-target="#tab-panel-3"
role="tab">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-3" id="tab-link-tab-panel-3">
Tab label 3
</a>
</li>
<li class="bx--tabs__nav-item bx--tabs__nav-item--disabled" data-target="#tab-panel-4"
role="tab" aria-disabled="true">
<a tabindex="0" class="bx--tabs__nav-link" href="javascript:void(0)"
role="tab" aria-controls="tab-panel-4" id="tab-link-tab-panel-4">
Tab label 4
</a>
</li>
</ul>
</div>
<div class="bx--tab-content">
<div id="tab-panel-1" role="tabpanel" aria-labelledby="tab-link-tab-panel-1" aria-hidden="false">
<div>
Content for first tab goes here.
</div>
</div>
<div id="tab-panel-2" role="tabpanel" aria-labelledby="tab-link-tab-panel-2" aria-hidden="true" hidden="">
<div>
Content for second tab goes here.
</div>
</div>
<div id="tab-panel-3" role="tabpanel" aria-labelledby="tab-link-tab-panel-3" aria-hidden="true" hidden="">
<div>
Content for third tab goes here.
</div>
</div>
<div id="tab-panel-4" role="tabpanel" aria-labelledby="tab-link-tab-panel-4" aria-hidden="true" hidden="">
<div>
Content for fourth tab goes here.
</div>
</div>
</div>
"""
        rendered = compare_template(template, expected)
        self.assertEqual(*rendered)
| 33.296117
| 106
| 0.630704
| 1,025
| 6,859
| 4.16878
| 0.106341
| 0.104844
| 0.061783
| 0.058975
| 0.947812
| 0.932834
| 0.932834
| 0.932834
| 0.932834
| 0.932834
| 0
| 0.028914
| 0.173057
| 6,859
| 205
| 107
| 33.458537
| 0.724436
| 0.013121
| 0
| 0.954774
| 0
| 0.170854
| 0.932614
| 0.306635
| 0
| 0
| 0
| 0
| 0.01005
| 1
| 0.01005
| false
| 0
| 0.005025
| 0
| 0.025126
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bd86b266bd4a7af79472127aeb35da6d6f1854e9
| 143
|
py
|
Python
|
privx_api/__init__.py
|
hokenssh/privx-sdk-for-python
|
24627d25c0343f350c9b2396677344b771f8aec6
|
[
"Apache-2.0"
] | 4
|
2020-06-15T17:14:18.000Z
|
2021-12-20T12:12:56.000Z
|
privx_api/__init__.py
|
hokenssh/privx-sdk-for-python
|
24627d25c0343f350c9b2396677344b771f8aec6
|
[
"Apache-2.0"
] | 5
|
2019-11-25T07:04:07.000Z
|
2021-05-19T08:09:53.000Z
|
privx_api/__init__.py
|
hokenssh/privx-sdk-for-python
|
24627d25c0343f350c9b2396677344b771f8aec6
|
[
"Apache-2.0"
] | 23
|
2019-11-22T08:17:58.000Z
|
2022-02-21T15:50:36.000Z
|
from privx_api.exceptions import InternalAPIException
from privx_api.privx_api import PrivXAPI
from privx_api.response import PrivXAPIResponse
| 35.75
| 53
| 0.895105
| 19
| 143
| 6.526316
| 0.473684
| 0.258065
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083916
| 143
| 3
| 54
| 47.666667
| 0.946565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd8de301c3cec4bcb65927d703f7d3678cd9a328
| 175
|
py
|
Python
|
app/main/views.py
|
JokerQyou/mai
|
3f6d04c6d2f023c194136dff6e0024f4dc3332bc
|
[
"Unlicense"
] | null | null | null |
app/main/views.py
|
JokerQyou/mai
|
3f6d04c6d2f023c194136dff6e0024f4dc3332bc
|
[
"Unlicense"
] | null | null | null |
app/main/views.py
|
JokerQyou/mai
|
3f6d04c6d2f023c194136dff6e0024f4dc3332bc
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
from flask import render_template
from . import main
@main.route('/', methods=('GET', ))
def view_index():
    """Render the site index page (GET only)."""
    return render_template('index.html')
| 17.5
| 40
| 0.662857
| 23
| 175
| 4.913043
| 0.73913
| 0.247788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006757
| 0.154286
| 175
| 9
| 41
| 19.444444
| 0.756757
| 0.12
| 0
| 0
| 0
| 0
| 0.092105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
bd922ee6d693e63da78bbf7fcc14d3f844684903
| 284
|
py
|
Python
|
src/config/__init__.py
|
AjJordy/squeezeDet-master
|
2977f9ec82be7f9d56e36a3faf98443e36e09926
|
[
"BSD-2-Clause"
] | null | null | null |
src/config/__init__.py
|
AjJordy/squeezeDet-master
|
2977f9ec82be7f9d56e36a3faf98443e36e09926
|
[
"BSD-2-Clause"
] | null | null | null |
src/config/__init__.py
|
AjJordy/squeezeDet-master
|
2977f9ec82be7f9d56e36a3faf98443e36e09926
|
[
"BSD-2-Clause"
] | null | null | null |
from config.kitti_model_config import kitti_model_config
from config.kitti_squeezeDet_config import kitti_squeezeDet_config
from config.kitti_squeezeDetPlus_config import kitti_squeezeDetPlus_config
from config.coco_config import coco_config
from config.ball_config import ball_config
| 56.8
| 74
| 0.915493
| 41
| 284
| 5.95122
| 0.219512
| 0.204918
| 0.262295
| 0.172131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066901
| 284
| 5
| 75
| 56.8
| 0.920755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd9b6a5a57d1a46b793f8c545d38c2039447fe51
| 6,887
|
py
|
Python
|
threshold_test.py
|
koshini/polya-social-contagion
|
ad3915a59611589160e5c7f5e6a1d82489e6e1b2
|
[
"MIT"
] | null | null | null |
threshold_test.py
|
koshini/polya-social-contagion
|
ad3915a59611589160e5c7f5e6a1d82489e6e1b2
|
[
"MIT"
] | 1
|
2019-04-03T20:45:05.000Z
|
2019-04-07T18:06:13.000Z
|
threshold_test.py
|
koshini/polya-social-contagion
|
ad3915a59611589160e5c7f5e6a1d82489e6e1b2
|
[
"MIT"
] | null | null | null |
import networkx as nx
from graph_generator import get_graph
from simulation import simulate
import time
import os
def main():
    """Run bot-vs-centrality_threshold contagion simulations on three
    network topologies, sweeping the black player's seeding portion.

    For each topology this creates an output directory, loads the graph,
    logs the per-colour budgets, then runs ``simulate`` once per portion
    and logs the wall-clock time of each run.
    """
    # NOTE(review): in the original script the facebook portions 0.05 and
    # 0.1 were commented out (presumably already run), so only 0.5 remains.
    _run_topology('facebook', iterations=500, portions=[0.5])
    _run_topology('twitter', iterations=100, portions=[0.05, 0.1, 0.5])
    _run_topology('meetup', iterations=500, portions=[0.02, 0.1, 0.5])


def _run_topology(topology, iterations, portions, threshold=0.4, runs=50):
    """Run one simulation sweep for ``topology``.

    Creates ``<topology>/`` if needed, loads (or generates) the graph for
    the 1_1 multiplier setting, and for each portion in ``portions`` runs
    ``simulate`` with a 'bot' red strategy against a 'centrality_threshold'
    black strategy, appending budgets, strategies and timings to
    ``<topology>/log.txt``.
    """
    print('----------------' + topology)
    # 'out_dir' instead of 'dir' to avoid shadowing the builtin.
    out_dir = topology + '/'
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
        print("Directory ", out_dir, " Created ")
    else:
        print("Directory ", out_dir, " already exists")

    # Reinforcement multipliers are fixed at 1; they only name the graph file.
    red_mult = 1
    black_mult = 1
    graph_file = out_dir + str(red_mult) + '_' + str(black_mult) + '.json'
    G = get_graph(graph_file, topology)

    # Budgets scale with network size: 10 units per node for each colour.
    node_count = nx.number_of_nodes(G)
    red_budget = node_count * 10
    black_budget = node_count * 10

    log_file = out_dir + 'log.txt'
    with open(log_file, 'a') as f:
        f.write('red budget: ' + str(red_budget) + '\n')
        f.write('black budget: ' + str(black_budget) + '\n')
        f.write('\n')

    # One strategy dict per portion; the threshold is shared by the sweep.
    strat_dict_list = [
        {
            'red_budget': red_budget,
            'black_budget': black_budget,
            'red_strat': 'bot',
            'black_strat': 'centrality_threshold',
            'threshold': threshold,
            'portion': portion,
        }
        for portion in portions
    ]

    for strat in strat_dict_list:
        print(str(strat))
        start = time.time()
        prefix = str(strat['threshold']) + '_' + str(strat['portion'])
        simulate(graph_file, prefix, topology, strat, iterations, runs)
        elapsed_time = time.time() - start
        print(elapsed_time)
        print()
        with open(log_file, 'a') as f:
            f.write(str(strat) + '\n')
            f.write(str(elapsed_time) + '\n')
            f.write('\n')


if __name__ == "__main__":
    main()
| 26.902344
| 96
| 0.532452
| 777
| 6,887
| 4.486486
| 0.097812
| 0.069707
| 0.08778
| 0.046472
| 0.944636
| 0.944636
| 0.944636
| 0.944636
| 0.944636
| 0.944636
| 0
| 0.014607
| 0.284304
| 6,887
| 256
| 97
| 26.902344
| 0.692635
| 0.069697
| 0
| 0.908602
| 0
| 0
| 0.1691
| 0.011317
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005376
| false
| 0
| 0.026882
| 0
| 0.032258
| 0.096774
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da0c36ecd078e542bb279ef86dc169eb56639a53
| 22
|
py
|
Python
|
test_commit.py
|
Spy142/python_lesson_5
|
ef89e0842f92fa3f8ebf2f9fb02587fc3cec97e9
|
[
"MIT"
] | null | null | null |
test_commit.py
|
Spy142/python_lesson_5
|
ef89e0842f92fa3f8ebf2f9fb02587fc3cec97e9
|
[
"MIT"
] | null | null | null |
test_commit.py
|
Spy142/python_lesson_5
|
ef89e0842f92fa3f8ebf2f9fb02587fc3cec97e9
|
[
"MIT"
] | null | null | null |
# Floor-divide 37 by its own square root (37 // 6.082... == 6.0).
root = 37 ** 0.5
print(37 // root)
| 22
| 22
| 0.5
| 5
| 22
| 2.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.3
| 0.090909
| 22
| 1
| 22
| 22
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
da8436acf06c898ec56c680d34aa7f8719e925f7
| 207
|
py
|
Python
|
habitat_audio/__init__.py
|
SAGNIKMJR/move2hear-active-AV-separation
|
3c6887aeb94b2a07983469bfd517ca277bd4124a
|
[
"MIT"
] | 8
|
2021-10-05T08:03:32.000Z
|
2022-02-22T07:08:19.000Z
|
habitat_audio/__init__.py
|
SAGNIKMJR/move2hear-active-AV-separation
|
3c6887aeb94b2a07983469bfd517ca277bd4124a
|
[
"MIT"
] | 1
|
2021-12-02T00:21:48.000Z
|
2021-12-28T19:07:14.000Z
|
habitat_audio/__init__.py
|
SAGNIKMJR/move2hear-active-AV-separation
|
3c6887aeb94b2a07983469bfd517ca277bd4124a
|
[
"MIT"
] | null | null | null |
from habitat_audio.action_space_separation import *
from habitat_audio.simulator_train import *
from habitat_audio.simulator_eval import *
from habitat_audio.dataset import *
from habitat_audio.task import *
| 41.4
| 51
| 0.859903
| 29
| 207
| 5.827586
| 0.413793
| 0.325444
| 0.473373
| 0.52071
| 0.366864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091787
| 207
| 5
| 52
| 41.4
| 0.898936
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e50ef842761aaee0aa268347e8f1a8ba042e766e
| 32,278
|
py
|
Python
|
test_polytopes.py
|
colefranks/tensorscaling
|
d93d6e35ee1b3dd782670ac792fe91546d335688
|
[
"MIT"
] | 3
|
2019-11-19T18:18:41.000Z
|
2021-02-23T21:27:53.000Z
|
test_polytopes.py
|
catch22/tensorscaling
|
5317a9732000b6417bed27ef0ac7b7f20e7b7659
|
[
"MIT"
] | 5
|
2021-04-22T08:36:14.000Z
|
2022-03-29T10:47:22.000Z
|
test_polytopes.py
|
catch22/tensorscaling
|
5317a9732000b6417bed27ef0ac7b7f20e7b7659
|
[
"MIT"
] | 2
|
2020-06-12T21:17:49.000Z
|
2021-02-23T15:35:26.000Z
|
from tensorscaling import *
import numpy as np
import scipy.linalg
import pytest
from functools import wraps
def oracle(shape):
    """Decorator factory: tag a membership-oracle function with the tensor
    shape it applies to, stored as its ``shape`` attribute."""
    def attach(func):
        func.shape = shape
        return func
    return attach
# 2x2x2 Kronecker polytope
@oracle([2, 2, 2])
def oracle_222(target):
    """Return True iff the largest marginal eigenvalues (p, q, r) satisfy
    the three polygon-type inequalities of the 2x2x2 Kronecker polytope:
    no eigenvalue pair may exceed the third by more than 1."""
    p = target[0][0]
    q = target[1][0]
    r = target[2][0]
    return p + q <= 1 + r and p + r <= 1 + q and q + r <= 1 + p
# 2x2x2 entanglement polytope of W state |100> + |010> + |001>
@oracle([2, 2, 2])
def oracle_222_w(target):
    """Oracle for the W-state entanglement polytope: the 2x2x2 Kronecker
    polytope cut by the extra facet p + q + r >= 2 on the largest marginal
    eigenvalues."""
    top_sum = target[0][0] + target[1][0] + target[2][0]
    return oracle_222(target) and top_sum >= 2
# Normalized W state |100> + |010> + |001> as a unit-norm 2x2x2 tensor.
w_tensor = np.zeros([2, 2, 2])
w_tensor[1, 0, 0] = w_tensor[0, 1, 0] = w_tensor[0, 0, 1] = 1 / np.sqrt(3)
@oracle([2, 2, 3])
def oracle_223(target):
    """Membership oracle for the 2x2x3 Kronecker polytope.

    ``la`` and ``mu`` are the centered largest eigenvalues of the two
    2-dimensional marginals; ``nu`` is the spectrum of the 3-dimensional
    marginal.  All seven facet inequalities must hold.
    """
    la = target[0][0] - 1 / 2
    mu = target[1][0] - 1 / 2
    nu = target[2]
    facets = (
        (2 * la, nu[0] + nu[1] - nu[2]),
        (2 * mu, nu[0] + nu[1] - nu[2]),
        (la + mu, nu[0]),
        (la - mu, nu[1]),
        (mu - la, nu[1]),
        (la - mu, nu[0] - nu[2]),
        (mu - la, nu[0] - nu[2]),
    )
    return all(lhs <= rhs for lhs, rhs in facets)
@oracle([2, 2, 3])
def oracle_223_interesting(target):
    """Oracle for the entanglement polytope of ``tensor_223_interesting``:
    the 2x2x3 Kronecker polytope cut by three additional facets."""
    if not oracle_223(target):
        return False
    a = target[0][0]
    b = target[1][0]
    nu = target[2]
    if a + b + nu[0] + nu[1] < 2:
        return False
    if a + nu[0] < 1:
        return False
    return b + nu[0] >= 1
# Sparse unit-norm 2x2x3 test tensor with three equal-weight entries.
tensor_223_interesting = np.zeros([2, 2, 3])
for _idx in [(0, 0, 0), (0, 1, 1), (1, 1, 2)]:
    tensor_223_interesting[_idx] = 1 / np.sqrt(3)
@oracle([2, 2, 4])
def oracle_224(target):
    """Membership oracle for the 2x2x4 Kronecker polytope.

    Same structure as ``oracle_223`` but against a 4-dimensional third
    marginal spectrum ``nu``.
    """
    la = target[0][0] - 1 / 2
    mu = target[1][0] - 1 / 2
    nu = target[2]
    facets = (
        (2 * la, nu[0] + nu[1] - nu[2] - nu[3]),
        (2 * mu, nu[0] + nu[1] - nu[2] - nu[3]),
        (la + mu, nu[0] - nu[3]),
        (la - mu, nu[1] - nu[3]),
        (mu - la, nu[1] - nu[3]),
        (la - mu, nu[0] - nu[2]),
        (mu - la, nu[0] - nu[2]),
    )
    return all(lhs <= rhs for lhs, rhs in facets)
@oracle([3, 3, 3])
def oracle_333(target):
    """Membership oracle for the 3x3x3 Kronecker polytope.

    ``target`` holds three marginal spectra of length 3; they are flattened
    into one 9-vector and tested against a list of linear facet
    inequalities of the form ``<normal, target> >= bound``.  The inequality
    list appears machine-generated — presumably the facet description of
    the polytope (NOTE(review): not re-derived here; confirm against the
    generating source if edited).
    """
    # Flatten the three spectra into a single 9-component vector.
    target = np.array(target).reshape(-1)
    # One check per facet; reject as soon as any inequality is violated.
    if np.dot((-2, 1, 1, -2, 1, 1, 2, -1, -1), target) < -2:
        return False
    if np.dot((-2, 1, 1, -1, -1, 2, 1, 1, -2), target) < -2:
        return False
    if np.dot((-2, 1, 1, -1, 2, -1, 1, -2, 1), target) < -2:
        return False
    if np.dot((-2, 1, 1, 1, -2, 1, -1, 2, -1), target) < -2:
        return False
    if np.dot((-2, 1, 1, 1, 1, -2, -1, -1, 2), target) < -2:
        return False
    if np.dot((-2, 1, 1, 2, -1, -1, -2, 1, 1), target) < -2:
        return False
    if np.dot((-1, -1, 2, -2, 1, 1, 1, 1, -2), target) < -2:
        return False
    if np.dot((-1, -1, 2, 0, 0, 0, 0, 0, 0), target) < -1:
        return False
    if np.dot((-1, -1, 2, 1, 1, -2, -2, 1, 1), target) < -2:
        return False
    if np.dot((-1, 0, 1, -1, 1, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, -1, 1, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 1, 0, -1, -1, 1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 1, 0, -1, 0, -1, 1), target) < -1:
        return False
    if np.dot((-1, 1, 0, -1, 0, 1, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 1, 0, -1, 1, 0, 1, -1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, -1, 1, 0, 1, -1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 1, -1, 0, -1, 1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 1, -1, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 1, 0, -1, -1, 0, 1), target) < -1:
        return False
    if np.dot((-1, 2, -1, -2, 1, 1, 1, -2, 1), target) < -2:
        return False
    if np.dot((-1, 2, -1, 1, -2, 1, -2, 1, 1), target) < -2:
        return False
    if np.dot((0, -1, 1, -1, 0, 1, 1, 0, -1), target) < -1:
        return False
    if np.dot((0, -1, 1, -1, 1, 0, 0, 1, -1), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, 1, -1, -1, 1, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 1, 0, -1, -1, 0, 1), target) < -1:
        return False
    if np.dot((0, 0, 0, -1, -1, 2, 0, 0, 0), target) < -1:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, -1, -1, 2), target) < -1:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 0, 1, -1), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 1, -1, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 1, -1, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 1, -1, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 1, -1, -1, 1, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((0, 1, -1, 0, -1, 1, -1, 1, 0), target) < -1:
        return False
    if np.dot((0, 1, -1, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((1, -2, 1, -2, 1, 1, -1, 2, -1), target) < -2:
        return False
    if np.dot((1, -2, 1, -1, 2, -1, -2, 1, 1), target) < -2:
        return False
    if np.dot((1, -1, 0, -1, 1, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((1, -1, 0, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((1, 0, -1, -1, 0, 1, -1, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, -1, -1, 0, 1, 0, -1, 1), target) < -1:
        return False
    if np.dot((1, 0, -1, -1, 1, 0, -1, 0, 1), target) < -1:
        return False
    if np.dot((1, 0, -1, 0, -1, 1, -1, 0, 1), target) < -1:
        return False
    if np.dot((1, 1, -2, -2, 1, 1, -1, -1, 2), target) < -2:
        return False
    if np.dot((1, 1, -2, -1, -1, 2, -2, 1, 1), target) < -2:
        return False
    if np.dot((2, -1, -1, -2, 1, 1, -2, 1, 1), target) < -2:
        return False
    # All facet inequalities satisfied: point is inside the polytope.
    return True
@oracle([4, 4, 4])
def oracle_444(target):
    """Membership oracle for the 4x4x4 Kronecker polytope.

    ``target`` holds three marginal spectra of length 4; they are flattened
    into one 12-vector and tested against a long, machine-generated list of
    linear facet inequalities ``<normal, target> >= bound``
    (NOTE(review): the coefficient list is not re-derived here; confirm
    against the generating source if edited).
    """
    # Flatten the three spectra into a single 12-component vector.
    target = np.array(target).reshape(-1)
    # One check per facet; reject as soon as any inequality is violated.
    if np.dot((-5, -1, 3, 3, -5, 3, 3, -1, 5, 1, -3, -3), target) < -5:
        return False
    if np.dot((-5, -1, 3, 3, 1, -3, -3, 5, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((-5, -1, 3, 3, 3, 3, -1, -5, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((-5, -1, 3, 3, 5, 1, -3, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, -5, 3, -1, 3, 5, 1, -3, -3), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, -5, 3, 3, -1, 5, -3, 1, -3), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, -3, 1, -3, 5, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, -3, 5, 1, -3, 3, -5, 3, -1), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 1, -3, -3, 5, 3, -1, 3, -5), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 1, -3, 5, -3, 3, -1, -5, 3), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 3, -5, 3, -1, -3, 5, 1, -3), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 3, -1, -5, 3, 1, -3, 5, -3), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 3, -1, 3, -5, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 3, 3, -1, -5, -3, 1, -3, 5), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 5, -3, 1, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-5, 3, -1, 3, 5, 1, -3, -3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -5, -1, 3, 3, 5, 1, -3, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -5, 3, -1, 3, 5, -3, 1, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -5, 3, 3, -1, 5, -3, -3, 1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, -3, 1, 5, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, -3, 5, 1, 3, 3, -5, -1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, 1, -3, 5, 3, -1, 3, -5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, 1, 5, -3, 3, -1, -5, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, 5, -3, 1, 3, -5, 3, -1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -3, 5, 1, -3, 3, -5, -1, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -1, -5, 3, 3, 1, 5, -3, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -1, 3, -5, 3, 1, -3, 5, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, -1, 3, 3, -5, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 1, -3, -3, 5, -1, 3, 3, -5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 1, -3, 5, -3, -1, 3, -5, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 1, 5, -3, -3, -1, -5, 3, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, -5, -1, 3, -3, 5, 1, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, -5, 3, -1, -3, 5, -3, 1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, -1, -5, 3, -3, 1, 5, -3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, -1, 3, -5, -3, 1, -3, 5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, 3, -5, -1, -3, -3, 5, 1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 3, 3, -1, -5, -3, -3, 1, 5), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 5, -3, -3, 1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 5, -3, 1, -3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((-5, 3, 3, -1, 5, 1, -3, -3, -5, -1, 3, 3), target) < -5:
        return False
    if np.dot((-3, -3, 1, 5, -5, 3, 3, -1, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((-3, -3, 1, 5, 3, 3, -1, -5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, -3, 5, 1, -5, 3, 3, -1, 3, 3, -5, -1), target) < -5:
        return False
    if np.dot((-3, -3, 5, 1, 3, 3, -5, -1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, -1, 3, 1, -3, 3, 1, -1, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((-3, -1, 3, 1, 1, -1, -3, 3, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((-3, -1, 3, 1, 3, 1, -1, -3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-3, -1, 3, 1, 3, 1, -1, -3, 1, -1, -3, 3), target) < -3:
        return False
    if np.dot((-3, 1, -3, 5, -5, 3, -1, 3, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((-3, 1, -3, 5, -5, 3, 3, -1, 3, -1, 3, -5), target) < -5:
        return False
    if np.dot((-3, 1, -3, 5, 3, -1, 3, -5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, 1, -3, 5, 3, 3, -1, -5, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((-3, 1, 1, 1, -3, 1, 1, 1, 3, -1, -1, -1), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -2, -2, 2, 2, 2, 2, -2, -2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -2, 2, -2, 2, 2, -2, 2, -2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -2, 2, 2, -2, 2, -2, -2, 2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -1, -1, -1, 3, 1, 1, 1, -3), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -1, -1, 3, -1, 1, 1, -3, 1), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, -1, 3, -1, -1, 1, -3, 1, 1), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 1, -3, 1, 1, -1, 3, -1, -1), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 1, 1, -3, 1, -1, -1, 3, -1), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 1, 1, 1, -3, -1, -1, -1, 3), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 2, -2, -2, 2, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 2, -2, 2, -2, -2, 2, -2, 2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 2, 2, -2, -2, -2, -2, 2, 2), target) < -3:
        return False
    if np.dot((-3, 1, 1, 1, 3, -1, -1, -1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-3, 1, 5, -3, -5, 3, 3, -1, 3, -1, -5, 3), target) < -5:
        return False
    if np.dot((-3, 1, 5, -3, 3, -1, -5, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, 3, 1, -1, -3, -1, 3, 1, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, -3, 3, 1, -1, 3, -1, -3, 1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, -1, -3, 1, 3, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, -1, -3, 3, 1, 1, 3, -1, -3), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, -1, -3, 3, 1, 3, 1, -3, -1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, -1, 3, 1, -3, 1, -1, -3, 3), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 1, -1, -3, 3, -1, 3, 1, -3), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 1, 3, -1, -3, -1, -3, 3, 1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 3, -1, -3, 1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 3, 1, -3, -1, -1, -3, 3, 1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 3, 1, -1, -3, -3, -1, 3, 1), target) < -3:
        return False
    if np.dot((-3, 3, 1, -1, 3, 1, -1, -3, -1, -3, 1, 3), target) < -3:
        return False
    if np.dot((-3, 5, -3, 1, -5, 3, 3, -1, 3, -5, 3, -1), target) < -5:
        return False
    if np.dot((-3, 5, -3, 1, 3, -5, 3, -1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, 5, 1, -3, -5, 3, -1, 3, 3, -5, 3, -1), target) < -5:
        return False
    if np.dot((-3, 5, 1, -3, -5, 3, 3, -1, 3, -5, -1, 3), target) < -5:
        return False
    if np.dot((-3, 5, 1, -3, 3, -5, -1, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-3, 5, 1, -3, 3, -5, 3, -1, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((-2, -2, 2, 2, -3, 1, 1, 1, 2, 2, -2, -2), target) < -3:
        return False
    if np.dot((-2, -2, 2, 2, -2, 2, 2, -2, 1, 1, -3, 1), target) < -3:
        return False
    if np.dot((-2, -2, 2, 2, 1, 1, -3, 1, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((-2, -2, 2, 2, 2, 2, -2, -2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-2, 2, -2, 2, -3, 1, 1, 1, 2, -2, 2, -2), target) < -3:
        return False
    if np.dot((-2, 2, -2, 2, -2, 2, 2, -2, 1, -3, 1, 1), target) < -3:
        return False
    if np.dot((-2, 2, -2, 2, 1, -3, 1, 1, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((-2, 2, -2, 2, 2, -2, 2, -2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, -3, 1, 1, 1, 2, -2, -2, 2), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, -2, -2, 2, 2, 1, 1, -3, 1), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, -2, 2, -2, 2, 1, -3, 1, 1), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, 1, -3, 1, 1, -2, 2, -2, 2), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, 1, 1, -3, 1, -2, -2, 2, 2), target) < -3:
        return False
    if np.dot((-2, 2, 2, -2, 2, -2, -2, 2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-1, -5, 3, 3, -5, 3, 3, -1, 1, 5, -3, -3), target) < -5:
        return False
    if np.dot((-1, -5, 3, 3, 1, 5, -3, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-1, -3, 1, 3, -3, 3, 1, -1, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((-1, -3, 1, 3, 3, 1, -1, -3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-1, -3, 3, 1, -3, 3, 1, -1, 1, 3, -1, -3), target) < -3:
        return False
    if np.dot((-1, -3, 3, 1, -3, 3, 1, -1, 3, 1, -3, -1), target) < -3:
        return False
    if np.dot((-1, -3, 3, 1, 1, 3, -1, -3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-1, -3, 3, 1, 3, 1, -3, -1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-1, -1, -1, 3, -3, 1, 1, 1, 1, 1, 1, -3), target) < -3:
        return False
    if np.dot((-1, -1, -1, 3, 0, 0, 0, 0, 0, 0, 0, 0), target) < -1:
        return False
    if np.dot((-1, -1, -1, 3, 1, 1, 1, -3, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-1, -1, 3, -1, -3, 1, 1, 1, 1, 1, -3, 1), target) < -3:
        return False
    if np.dot((-1, -1, 3, -1, 1, 1, -3, 1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-1, 0, 0, 1, -1, 1, 0, 0, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 0, 1, 0, 0, -1, 1, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 0, 1, 1, 0, 0, -1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((-1, 0, 0, 1, 1, 0, 0, -1, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, -1, 0, 1, 0, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, -1, 1, 0, 0, 1, 0, -1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, -1, 0, 1, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, -1, 1, 0, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, -1, 1, 0, 1, 0, -1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, 0, -1, 1, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, 1, 0, -1, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 0, 1, 0, -1, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 1, 0, -1, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 1, 0, -1, 0, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 1, 0, 0, -1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((-1, 0, 1, 0, 1, 0, 0, -1, 0, -1, 0, 1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, -1, 0, 0, 1, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, -1, 0, 1, 0, 1, 0, -1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, -1, 1, 0, 0, 1, -1, 0, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, -1, 0, 1, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, -1, 1, 0, 0, 1, -1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, 0, -1, 1, 0, 0, 1, -1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, 0, 1, -1, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, 1, -1, 0, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 0, 1, 0, -1, 0, -1, 0, 1), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 1, -1, 0, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 1, 0, -1, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((-1, 1, 0, 0, 1, 0, 0, -1, -1, 0, 0, 1), target) < -1:
        return False
    if np.dot((-1, 3, -5, 3, -5, 3, 3, -1, 1, -3, 5, -3), target) < -5:
        return False
    if np.dot((-1, 3, -5, 3, 1, -3, 5, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((-1, 3, -1, -1, -3, 1, 1, 1, 1, -3, 1, 1), target) < -3:
        return False
    if np.dot((-1, 3, -1, -1, 1, -3, 1, 1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((-1, 3, 1, -3, -3, 3, 1, -1, 1, -1, -3, 3), target) < -3:
        return False
    if np.dot((-1, 3, 1, -3, 1, -1, -3, 3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((-1, 3, 3, -5, -5, 3, 3, -1, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((-1, 3, 3, -5, 1, -3, -3, 5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((0, -1, 0, 1, -1, 0, 1, 0, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((0, -1, 0, 1, -1, 1, 0, 0, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((0, -1, 0, 1, 0, 1, 0, -1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, -1, 0, 1, 1, 0, 0, -1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, -1, 0, 1, 0, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, -1, 0, 1, 0, 1, 0, -1, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, -1, 1, 0, 0, 0, 1, -1, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, 0, 1, -1, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, 0, 1, 0, -1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((0, -1, 1, 0, 1, 0, -1, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, -1, 0, 0, 1, 1, 0, 0, -1), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, -1, 0, 1, 0, 0, 1, 0, -1), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, -1, 1, 0, 0, 0, 0, 1, -1), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, 0, 0, 1, -1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, 0, 1, 0, -1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((0, 0, -1, 1, 1, 0, 0, -1, -1, 0, 0, 1), target) < -1:
        return False
    if np.dot((0, 0, 0, 0, -1, -1, -1, 3, 0, 0, 0, 0), target) < -1:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1, 3), target) < -1:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 0, 0, 1, -1, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 0, 1, -1, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 0, 0, 0, 1, -1, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 0, 1, -1, -1, 1, 0, 0, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((0, 0, 1, -1, 0, 0, -1, 1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, 0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 1, -1, 0, -1, 1, 0, 0, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((0, 1, -1, 0, 0, -1, 1, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((0, 1, 0, -1, -1, 0, 1, 0, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((0, 1, 0, -1, -1, 0, 1, 0, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((0, 1, 0, -1, -1, 1, 0, 0, 0, -1, 0, 1), target) < -1:
        return False
    if np.dot((0, 1, 0, -1, 0, -1, 0, 1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((0, 1, 0, -1, 0, -1, 1, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((0, 1, 0, -1, 0, 0, -1, 1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((1, -3, -3, 5, -5, -1, 3, 3, 3, 3, -1, -5), target) < -5:
        return False
    if np.dot((1, -3, -3, 5, -5, 3, -1, 3, 3, -1, 3, -5), target) < -5:
        return False
    if np.dot((1, -3, -3, 5, -5, 3, 3, -1, -1, 3, 3, -5), target) < -5:
        return False
    if np.dot((1, -3, -3, 5, -1, 3, 3, -5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((1, -3, -3, 5, 3, -1, 3, -5, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((1, -3, -3, 5, 3, 3, -1, -5, -5, -1, 3, 3), target) < -5:
        return False
    if np.dot((1, -3, 1, 1, -3, 1, 1, 1, -1, 3, -1, -1), target) < -3:
        return False
    if np.dot((1, -3, 1, 1, -2, 2, -2, 2, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((1, -3, 1, 1, -2, 2, 2, -2, -2, 2, -2, 2), target) < -3:
        return False
    if np.dot((1, -3, 1, 1, -1, 3, -1, -1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((1, -3, 5, -3, -5, 3, -1, 3, 3, -1, -5, 3), target) < -5:
        return False
    if np.dot((1, -3, 5, -3, -5, 3, 3, -1, -1, 3, -5, 3), target) < -5:
        return False
    if np.dot((1, -3, 5, -3, -1, 3, -5, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((1, -3, 5, -3, 3, -1, -5, 3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((1, -1, -3, 3, -3, -1, 3, 1, 3, 1, -1, -3), target) < -3:
        return False
    if np.dot((1, -1, -3, 3, -3, 3, 1, -1, -1, 3, 1, -3), target) < -3:
        return False
    if np.dot((1, -1, -3, 3, -1, 3, 1, -3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((1, -1, -3, 3, 3, 1, -1, -3, -3, -1, 3, 1), target) < -3:
        return False
    if np.dot((1, -1, 0, 0, -1, 1, 0, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), target) < 0:
        return False
    if np.dot((1, 0, -1, 0, -1, 0, 1, 0, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((1, 0, -1, 0, -1, 0, 1, 0, 0, -1, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, -1, 0, -1, 1, 0, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, -1, 0, 0, -1, 1, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, -1, 0, 0, 1, -1, 1, 0, 0), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, -1, 0, 0, 1, 0, 0, -1, 1), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, -1, 0, 1, 0, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, -1, 0, 1, 0, 0, -1, 0, 1), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, -1, 1, 0, 0, -1, 0, 0, 1), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, 0, -1, 0, 1, -1, 0, 1, 0), target) < -1:
        return False
    if np.dot((1, 0, 0, -1, 0, 0, -1, 1, -1, 0, 0, 1), target) < -1:
        return False
    if np.dot((1, 1, -3, 1, -3, 1, 1, 1, -1, -1, 3, -1), target) < -3:
        return False
    if np.dot((1, 1, -3, 1, -2, -2, 2, 2, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((1, 1, -3, 1, -2, 2, 2, -2, -2, -2, 2, 2), target) < -3:
        return False
    if np.dot((1, 1, -3, 1, -1, -1, 3, -1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((1, 1, 1, -3, -3, 1, 1, 1, -1, -1, -1, 3), target) < -3:
        return False
    if np.dot((1, 1, 1, -3, -1, -1, -1, 3, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((1, 3, -1, -3, -3, 3, 1, -1, -1, -3, 3, 1), target) < -3:
        return False
    if np.dot((1, 3, -1, -3, -1, -3, 3, 1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((1, 5, -3, -3, -5, 3, 3, -1, -1, -5, 3, 3), target) < -5:
        return False
    if np.dot((1, 5, -3, -3, -1, -5, 3, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((2, -2, -2, 2, -3, 1, 1, 1, -2, 2, 2, -2), target) < -3:
        return False
    if np.dot((2, -2, -2, 2, -2, 2, 2, -2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((2, -2, 2, -2, -3, 1, 1, 1, -2, 2, -2, 2), target) < -3:
        return False
    if np.dot((2, -2, 2, -2, -2, 2, -2, 2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((2, 2, -2, -2, -3, 1, 1, 1, -2, -2, 2, 2), target) < -3:
        return False
    if np.dot((2, 2, -2, -2, -2, -2, 2, 2, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((3, -5, -1, 3, -5, 3, 3, -1, -3, 5, 1, -3), target) < -5:
        return False
    if np.dot((3, -5, -1, 3, -3, 5, 1, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, -5, 3, -1, -5, 3, -1, 3, -3, 5, 1, -3), target) < -5:
        return False
    if np.dot((3, -5, 3, -1, -5, 3, 3, -1, -3, 5, -3, 1), target) < -5:
        return False
    if np.dot((3, -5, 3, -1, -3, 5, -3, 1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, -5, 3, -1, -3, 5, 1, -3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((3, -1, -5, 3, -5, 3, -1, 3, 1, -3, 5, -3), target) < -5:
        return False
    if np.dot((3, -1, -5, 3, -5, 3, 3, -1, -3, 1, 5, -3), target) < -5:
        return False
    if np.dot((3, -1, -5, 3, -3, 1, 5, -3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, -1, -5, 3, 1, -3, 5, -3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((3, -1, -3, 1, -3, 3, 1, -1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((3, -1, -1, -1, -3, 1, 1, 1, -3, 1, 1, 1), target) < -3:
        return False
    if np.dot((3, -1, 3, -5, -5, 3, -1, 3, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((3, -1, 3, -5, -5, 3, 3, -1, -3, 1, -3, 5), target) < -5:
        return False
    if np.dot((3, -1, 3, -5, -3, 1, -3, 5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, -1, 3, -5, 1, -3, -3, 5, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((3, 1, -3, -1, -3, 3, 1, -1, -1, -3, 3, 1), target) < -3:
        return False
    if np.dot((3, 1, -3, -1, -1, -3, 3, 1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, -3, -1, 3, 1, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, -3, -1, 3, 1, 1, -1, -3, 3), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, -3, 3, 1, -1, -3, -1, 3, 1), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, -3, 3, 1, -1, -1, -3, 1, 3), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, -1, -3, 1, 3, -3, 3, 1, -1), target) < -3:
        return False
    if np.dot((3, 1, -1, -3, 1, -1, -3, 3, -3, -1, 3, 1), target) < -3:
        return False
    if np.dot((3, 3, -5, -1, -5, 3, 3, -1, -3, -3, 5, 1), target) < -5:
        return False
    if np.dot((3, 3, -5, -1, -3, -3, 5, 1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, -5, -1, 3, 3, 1, -3, -3, 5), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, -5, 3, -1, 3, -3, 1, -3, 5), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, -5, 3, 3, -1, -3, -3, 1, 5), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, -3, -3, 1, 5, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, -3, 1, -3, 5, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((3, 3, -1, -5, 1, -3, -3, 5, -5, -1, 3, 3), target) < -5:
        return False
    if np.dot((5, -3, -3, 1, -5, 3, 3, -1, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((5, -3, 1, -3, -5, 3, -1, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((5, -3, 1, -3, -5, 3, 3, -1, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((5, 1, -3, -3, -5, -1, 3, 3, -5, 3, 3, -1), target) < -5:
        return False
    if np.dot((5, 1, -3, -3, -5, 3, -1, 3, -5, 3, -1, 3), target) < -5:
        return False
    if np.dot((5, 1, -3, -3, -5, 3, 3, -1, -5, -1, 3, 3), target) < -5:
        return False
    # All facet inequalities satisfied: point is inside the polytope.
    return True
@pytest.mark.parametrize(
    "oracle,psi",
    [
        (oracle_222, unit_tensor(2, 3)),
        (oracle_222, None),
        (oracle_222_w, w_tensor),
        (oracle_223, None),
        (oracle_223_interesting, tensor_223_interesting),
        (oracle_224, None),
        (oracle_333, None),
        (oracle_444, None),
    ],
)
def test_polytope(oracle, psi, eps=1e-2, trials=100):
    """Cross-check the membership oracle against tensor scaling.

    For random targets: if the oracle says the target is in the entanglement
    polytope, scaling with unlimited iterations must succeed; whenever scaling
    succeeds, the resulting marginal spectra must again satisfy the oracle.
    """
    shape = oracle.shape
    # No fixed starting tensor supplied for this case — draw a random one.
    if psi is None:
        psi = random_tensor(shape)
    for _ in range(trials):
        # Random target spectra; ask the oracle whether they are attainable.
        target_spectra = random_targets(shape)
        inside = oracle(target_spectra)
        # Inside the polytope we allow unbounded iterations, so scaling must
        # converge; outside, cap iterations since it may run forever.
        res = scale(psi, target_spectra, eps, max_iterations=None if inside else 100)
        if inside:
            assert res
        # Approximate scaling can also succeed for outside targets; in every
        # successful case the achieved spectra must stay inside the polytope.
        if res:
            rhos = (marginal(res.psi, k) for k in range(len(shape)))
            specs = [sorted(np.linalg.eigvalsh(rho), reverse=True) for rho in rhos]
            assert oracle(specs), "scalings should stay in polytope"
| 41.276215
| 91
| 0.424128
| 6,447
| 32,278
| 2.116954
| 0.017838
| 0.076641
| 0.312427
| 0.344006
| 0.903063
| 0.900572
| 0.896468
| 0.881448
| 0.876539
| 0.869871
| 0
| 0.192047
| 0.32375
| 32,278
| 781
| 92
| 41.329065
| 0.433205
| 0.011525
| 0
| 0.478552
| 0
| 0
| 0.001317
| 0
| 0
| 0
| 0
| 0
| 0.002681
| 1
| 0.013405
| false
| 0
| 0.006702
| 0
| 0.481233
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e525085e0d71d9d681aa0931ac25e2261b36c5f5
| 4,714
|
py
|
Python
|
django/researchdata/tests.py
|
bear-rsg/clic-calendar
|
d1b624bedcccfdd3b77e744bc0bdeedcfd084edc
|
[
"MIT"
] | null | null | null |
django/researchdata/tests.py
|
bear-rsg/clic-calendar
|
d1b624bedcccfdd3b77e744bc0bdeedcfd084edc
|
[
"MIT"
] | 6
|
2020-11-25T16:10:40.000Z
|
2021-06-22T09:54:01.000Z
|
django/researchdata/tests.py
|
bear-rsg/clic-calendar
|
d1b624bedcccfdd3b77e744bc0bdeedcfd084edc
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
class TestQuestionListView(TestCase):
    """Tests for the question list view."""

    fixtures = ['testdata.json', ]

    def test_question_list_empty_get(self):
        """A plain GET lists published questions and hides unpublished ones."""
        response = self.client.get(reverse('question-list'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h2>Calendar</h2>')
        self.assertContains(response, 'published question')
        self.assertNotContains(response, 'unpublished question')

    def test_question_list_nonsense_get(self):
        """Unknown query parameters are ignored; listing behaves as a plain GET."""
        response = self.client.get(reverse('question-list'), {'nonsense': 'aaa'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h2>Calendar</h2>')
        self.assertContains(response, 'published question')
        self.assertNotContains(response, 'unpublished question')

    def test_question_list_nonsense_post(self):
        """POST is not an allowed method on the list view: expect 405."""
        response = self.client.post(reverse('question-list'), {'nonsense': 'aaa'})
        self.assertEqual(response.status_code, 405)
class TestQuestionDetailView(TestCase):
    """Tests for the question detail view."""

    fixtures = ['testdata.json', ]

    def test_question_detail_empty_get(self):
        """A plain GET shows the question, the answer form, and approved answers only."""
        response = self.client.get(reverse('question-detail', kwargs={'pk': 1}))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'published question')
        self.assertContains(response, '<h2>Share Your Answer</h2>')
        self.assertContains(response, '<h2>All Answers</h2>')
        self.assertContains(response, 'approved answer')
        self.assertNotContains(response, 'unapproved answer')

    def test_question_detail_nonsense_get(self):
        """Unknown query parameters are ignored; the page renders as a plain GET."""
        response = self.client.get(
            reverse('question-detail', kwargs={'pk': 1}), {'nonsense': 'aaa'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'published question')
        self.assertContains(response, '<h2>Share Your Answer</h2>')
        self.assertContains(response, '<h2>All Answers</h2>')
        self.assertContains(response, 'approved answer')
        self.assertNotContains(response, 'unapproved answer')

    def test_question_detail_unpublished_question(self):
        """Detail pages for unpublished questions (fixture pk=2) return 404."""
        response = self.client.get(reverse('question-detail', kwargs={'pk': 2}))
        self.assertEqual(response.status_code, 404)

    def test_question_detail_nonsense_post(self):
        """POST is not an allowed method on the detail view: expect 405."""
        response = self.client.post(
            reverse('question-detail', kwargs={'pk': 1}), {'nonsense': 'aaa'})
        self.assertEqual(response.status_code, 405)
class TestAnswerCreateSuccessView(TestCase):
    """Tests for the answer-create success page."""

    def test_answer_create_empty_get(self):
        """A plain GET renders the success confirmation message."""
        response = self.client.get(reverse('answer-create-success'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Your answer has been sent to us successfully.')

    def test_answer_create_nonsense_get(self):
        """Unknown query parameters are ignored; the confirmation still renders."""
        response = self.client.get(reverse('answer-create-success'), {'nonsense': 'aaa'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Your answer has been sent to us successfully.')
| 37.712
| 86
| 0.651464
| 525
| 4,714
| 5.76381
| 0.150476
| 0.083278
| 0.120291
| 0.086252
| 0.900859
| 0.891276
| 0.880701
| 0.839392
| 0.821216
| 0.784865
| 0
| 0.014603
| 0.244591
| 4,714
| 124
| 87
| 38.016129
| 0.835159
| 0.218922
| 0
| 0.737705
| 0
| 0
| 0.186228
| 0.012575
| 0
| 0
| 0
| 0
| 0.442623
| 1
| 0.147541
| false
| 0
| 0.032787
| 0
| 0.262295
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e582abf10156f813dbb05a522e36d9670d3a5e94
| 155
|
py
|
Python
|
python_crash_course/ch2/name_quote.py
|
tangentspire/Python_Practice
|
e7f22303230a2ffa4e3f5ae57854bac9c4c3bc34
|
[
"Apache-2.0"
] | null | null | null |
python_crash_course/ch2/name_quote.py
|
tangentspire/Python_Practice
|
e7f22303230a2ffa4e3f5ae57854bac9c4c3bc34
|
[
"Apache-2.0"
] | 3
|
2020-02-11T22:58:27.000Z
|
2021-06-10T20:30:42.000Z
|
python_crash_course/ch2/name_quote.py
|
tangentspire/Python_Practice
|
e7f22303230a2ffa4e3f5ae57854bac9c4c3bc34
|
[
"Apache-2.0"
] | null | null | null |
# Print a quote attributed to Albert Einstein, spread over multiple lines
# with a tab-indented attribution on the last line.
EINSTEIN_QUOTE = ("I don't know with what weapons world war three will be fought with,"
                  "\nbut, I know world war four will be fought with stones."
                  "\n\t- Albert Einstein")
print(EINSTEIN_QUOTE)
| 77.5
| 154
| 0.754839
| 30
| 155
| 3.9
| 0.666667
| 0.136752
| 0.205128
| 0.273504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167742
| 155
| 1
| 155
| 155
| 0.906977
| 0
| 0
| 0
| 0
| 1
| 0.929032
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
e5d3eb88f083f487b6df7165bff0d50aa762f83f
| 1,023
|
py
|
Python
|
oo/direcao.py
|
jacksonezidio/pythonbirds
|
1c569253b0d13dd0a1ddbc2631d05853df12a518
|
[
"MIT"
] | null | null | null |
oo/direcao.py
|
jacksonezidio/pythonbirds
|
1c569253b0d13dd0a1ddbc2631d05853df12a518
|
[
"MIT"
] | null | null | null |
oo/direcao.py
|
jacksonezidio/pythonbirds
|
1c569253b0d13dd0a1ddbc2631d05853df12a518
|
[
"MIT"
] | null | null | null |
class Direcao:
    """Compass heading with 90-degree rotations.

    Starts facing "Norte". ``girar_a_direita`` rotates clockwise
    (Norte -> Leste -> Sul -> Oeste -> Norte) and ``girar_a_esquerda``
    rotates counter-clockwise. The current heading is exposed through the
    ``direcao_atual`` attribute, exactly as before.
    """

    # Clockwise successor of each heading; table-driven instead of the
    # original repetitive if/elif chains (same transitions).
    _DIREITA = {"Norte": "Leste", "Leste": "Sul", "Sul": "Oeste", "Oeste": "Norte"}
    # Counter-clockwise successor: the inverse mapping of _DIREITA.
    _ESQUERDA = {"Leste": "Norte", "Sul": "Leste", "Oeste": "Sul", "Norte": "Oeste"}

    def __init__(self):
        # Initial heading.
        self.direcao_atual = "Norte"

    def girar_a_direita(self):
        """Rotate 90 degrees clockwise."""
        # .get(..., current) preserves the original behavior of leaving an
        # unrecognized heading unchanged (the old elif chain fell through).
        self.direcao_atual = self._DIREITA.get(self.direcao_atual, self.direcao_atual)

    def girar_a_esquerda(self):
        """Rotate 90 degrees counter-clockwise."""
        self.direcao_atual = self._ESQUERDA.get(self.direcao_atual, self.direcao_atual)
if __name__ == "__main__":
    # Demo: start facing Norte, then turn right twice, printing each heading
    # (output: Norte, Leste, Sul — identical to the original sequence).
    direcao = Direcao()
    print(direcao.direcao_atual)
    for _ in range(2):
        direcao.girar_a_direita()
        print(direcao.direcao_atual)
| 29.228571
| 43
| 0.603128
| 116
| 1,023
| 4.974138
| 0.163793
| 0.415945
| 0.471404
| 0.207972
| 0.883882
| 0.883882
| 0.883882
| 0.807626
| 0.807626
| 0.714038
| 0
| 0
| 0.282502
| 1,023
| 35
| 44
| 29.228571
| 0.786104
| 0
| 0
| 0.785714
| 0
| 0
| 0.083008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0
| 0
| 0.142857
| 0.107143
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
00a990c066e283412c864be0c5e3bb6f8b4d146d
| 4,740
|
py
|
Python
|
rpxdock/tests/search/test_onecomp.py
|
quecloud/rpxdock
|
41f7f98f5dacf24fc95897910263a0bec2209e59
|
[
"Apache-2.0"
] | null | null | null |
rpxdock/tests/search/test_onecomp.py
|
quecloud/rpxdock
|
41f7f98f5dacf24fc95897910263a0bec2209e59
|
[
"Apache-2.0"
] | null | null | null |
rpxdock/tests/search/test_onecomp.py
|
quecloud/rpxdock
|
41f7f98f5dacf24fc95897910263a0bec2209e59
|
[
"Apache-2.0"
] | 1
|
2020-04-13T20:07:52.000Z
|
2020-04-13T20:07:52.000Z
|
import rpxdock as rp, concurrent
def test_cage_hier_onecomp_notrim(hscore, bodyC3):
    """Hierarchical one-component T3 cage dock, no trimming allowed;
    results must match the stored reference data."""
    args = rp.app.defaults()
    args.wts = rp.Bunch(ncontact=0.01, rpx=1.0)
    args.beam_size = 2e4
    args.max_bb_redundancy = 2.0
    args.max_delta_h = 9999
    args.nout_debug = 0
    args.nout_top = 0
    args.nout_each = 0
    args.score_only_ss = 'H'
    args.max_trim = 0
    # Run the search on a small thread pool.
    args.executor = concurrent.futures.ThreadPoolExecutor(min(4, args.ncpu / 2))
    dock_spec = rp.search.DockSpec1CompCage('T3')
    axis_sampler = rp.sampling.hier_axis_sampler(
        dock_spec.nfold, lb=0, ub=100, axis=dock_spec.axis, flipax=dock_spec.flip_axis)
    search_result = rp.search.make_onecomp(
        bodyC3, dock_spec, hscore, rp.hier_search, axis_sampler, **args)
    # Compare against the pickled reference produced by a known-good run.
    expected = rp.data.get_test_data('test_cage_hier_onecomp_notrim')
    rp.search.assert_results_close(search_result, expected)
def test_cage_hier_D3_onecomp_notrim(hscore, bodyC3):
    """Hierarchical one-component D3_3 cage dock, no trimming allowed;
    results must match the stored reference data."""
    args = rp.app.defaults()
    args.wts = rp.Bunch(ncontact=0.01, rpx=1.0)
    args.beam_size = 2e4
    args.max_bb_redundancy = 2.0
    args.max_delta_h = 9999
    args.nout_debug = 0
    args.nout_top = 0
    args.nout_each = 0
    args.score_only_ss = 'H'
    args.max_trim = 0
    # Run the search on a small thread pool.
    args.executor = concurrent.futures.ThreadPoolExecutor(min(4, args.ncpu / 2))
    dock_spec = rp.search.DockSpec1CompCage('D3_3')
    axis_sampler = rp.sampling.hier_axis_sampler(
        dock_spec.nfold, lb=0, ub=100, axis=dock_spec.axis, flipax=dock_spec.flip_axis)
    search_result = rp.search.make_onecomp(
        bodyC3, dock_spec, hscore, rp.hier_search, axis_sampler, **args)
    # Compare against the pickled reference produced by a known-good run.
    expected = rp.data.get_test_data('test_cage_hier_D3_onecomp_notrim')
    rp.search.assert_results_close(search_result, expected)
def test_cage_hier_D3_2_onecomp_notrim(hscore, bodyC2):
    """Hierarchical one-component D3_2 cage dock (C2 body), no trimming;
    results must match the stored reference data."""
    args = rp.app.defaults()
    args.wts = rp.Bunch(ncontact=0.01, rpx=1.0)
    args.beam_size = 2e4
    args.max_bb_redundancy = 2.0
    args.max_delta_h = 9999
    args.nout_debug = 0
    args.nout_top = 0
    args.nout_each = 0
    args.score_only_ss = 'H'
    args.max_trim = 0
    # Run the search on a small thread pool.
    args.executor = concurrent.futures.ThreadPoolExecutor(min(4, args.ncpu / 2))
    dock_spec = rp.search.DockSpec1CompCage('D3_2')
    axis_sampler = rp.sampling.hier_axis_sampler(
        dock_spec.nfold, lb=0, ub=100, axis=dock_spec.axis, flipax=dock_spec.flip_axis)
    search_result = rp.search.make_onecomp(
        bodyC2, dock_spec, hscore, rp.hier_search, axis_sampler, **args)
    # Compare against the pickled reference produced by a known-good run.
    expected = rp.data.get_test_data('test_cage_hier_D3_2_onecomp_notrim')
    rp.search.assert_results_close(search_result, expected)
def _test_cage_hier_onecomp_trim(hscore, bodyC3):
    """Disabled (leading underscore) variant: T3 cage dock WITH C-terminal
    trimming enabled. Prints and dumps results instead of asserting against
    a stored reference."""
    args = rp.app.defaults()
    args.wts = rp.Bunch(ncontact=0.01, rpx=1.0)
    args.beam_size = 2e4
    args.max_bb_redundancy = 2.0
    args.max_delta_h = 9999
    args.nout_debug = 0
    args.nout_top = 0
    args.nout_each = 0
    args.score_only_ss = 'H'
    # Allow up to 200 residues to be trimmed from the C terminus.
    args.max_trim = 200
    args.trim_direction = 'C'
    dock_spec = rp.search.DockSpec1CompCage('T3')
    axis_sampler = rp.sampling.hier_axis_sampler(
        dock_spec.nfold, lb=0, ub=200, resl=5, angresl=5,
        axis=dock_spec.axis, flipax=dock_spec.flip_axis)
    search_result = rp.search.make_onecomp(
        bodyC3, dock_spec, hscore, rp.hier_search, axis_sampler, **args)
    # Inspect results manually; no reference data exists for this variant yet.
    print(search_result)
    search_result.dump_pdbs_top_score(10)
def main():
    """Ad-hoc driver for running one of the dock tests outside pytest.

    Uncomment the relevant body/test lines below to run other variants.
    """
    # Small score function for quick local runs.
    hscore = rp.data.small_hscore()
    # hscore = rp.RpxHier('ilv_h/1000', hscore_data_dir='/home/sheffler/data/rpx/hscore')
    # C2 = rp.data.get_body('C2_REFS10_1')
    C3 = rp.data.get_body('C3_1na0-1_1')
    # test_cage_hier_onecomp_notrim(hscore, C3)
    test_cage_hier_D3_onecomp_notrim(hscore, C3)
    # test_cage_hier_D3_2_onecomp_notrim(hscore, C2)
    # _test_cage_hier_onecomp_trim(hscore, C3)
if __name__ == '__main__':
    main()
| 37.92
| 95
| 0.698101
| 741
| 4,740
| 4.167341
| 0.137652
| 0.029793
| 0.073834
| 0.045337
| 0.924547
| 0.920984
| 0.899611
| 0.887953
| 0.851684
| 0.851684
| 0
| 0.041485
| 0.17616
| 4,740
| 125
| 96
| 37.92
| 0.749296
| 0.274051
| 0
| 0.708861
| 0
| 0
| 0.038383
| 0.027835
| 0
| 0
| 0
| 0
| 0.037975
| 1
| 0.063291
| false
| 0
| 0.012658
| 0
| 0.075949
| 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00d5bcaba51235e90279f0f06705eaa310ad808c
| 6,784
|
py
|
Python
|
TrV2.py
|
charon-iv6/Binance-project
|
2bd7163717bd5fc0d0a0be22621bd9a8dcc0ebf4
|
[
"MIT"
] | null | null | null |
TrV2.py
|
charon-iv6/Binance-project
|
2bd7163717bd5fc0d0a0be22621bd9a8dcc0ebf4
|
[
"MIT"
] | null | null | null |
TrV2.py
|
charon-iv6/Binance-project
|
2bd7163717bd5fc0d0a0be22621bd9a8dcc0ebf4
|
[
"MIT"
] | null | null | null |
from flask import Flask, request, jsonify
from mudle_TrV2 import *
import json
# Load server settings once at import time.
# NOTE(review): the file handle from open() is never closed — prefer a
# `with` block or Path.read_text(); confirm before changing.
config = json.loads(open("config.json", "r").read())
app = Flask(__name__)
# NOTE(review): FORMAT appears unused in this module — confirm before removing.
FORMAT = "utf-8"
# FLASK CONFIG #
SERVER:str = config["HOST"]
PORT:int = config["PORT"]
@app.route('/open', methods=['POST'])
def term():
    """Open a position from a JSON request body.

    Expects at least: token, pair, side, amount, leverage, margin, type.
    Returns {"Success": "True"} on success or {"Error": <code>} where the
    negative codes identify which step failed (parse, validate, leverage,
    margin, order, submit).

    NOTE(review): the `elif` branch below repeats the same condition
    ('MARKET') as the `if` branch, so it is unreachable dead code. It
    additionally parses a `price` field that is never passed to ORDERS —
    it looks like it was meant to handle 'LIMIT' orders; confirm intent
    before fixing.
    NOTE(review): bare `except:` clauses here swallow all errors, including
    programming errors — consider narrowing.
    """
    Req = None
    try:
        # Parse the JSON body; any failure maps to a generic parse error.
        Req = request.get_json()
        print(Req)
    except:
        return jsonify({"Error": -2314})
    if Req['type'].upper() == 'MARKET':
        try:
            # Extract and normalize the required order fields.
            Token: str = Req['token']
            Pair: str = Req['pair'].upper()
            Side: str = Req['side'].upper()
            Amount: float = float(Req['amount'])
            Leverage: int = int(Req['leverage'])
            Margin: str = Req['margin'].upper()
        except:
            return jsonify({"Error": -2415})
        try:
            # Build the order object; each project exception maps to a code.
            MAIN = ORDERS(TOKEN=Token, PAIR=Pair,
                          TYPE="MARKET", SIDE=Side,
                          AMOUNT=Amount, MARGIN=Margin,
                          LEVERAGE=Leverage)
        except AK_SK_Database:
            return jsonify({"Error": -2543})
        except AK_SK_Invalid:
            return jsonify({"Error": -2543})
        except Leverage_Wrong:
            return jsonify({"Error": -4214})
        try:
            MAIN.Validate_leverage()
        except Leverage_Rediction_NotSupported:
            return jsonify({"Error": -4531})
        except Leverage_Unknown_Error:
            return jsonify({"Error": -4218})
        try:
            MAIN.change_Margin()
        except Margin_Wrong:
            return jsonify({"Error": -5421})
        except Margin_Couldnt_Change:
            return jsonify({"Error": -7424})
        try:
            MAIN.ORDER()
        except Market_Error:
            return jsonify({"Error": -6431})
        except MARKET_Margin_Insufficient:
            return jsonify({"Error": -2643})
        except Market_Not_OneWayMode:
            return jsonify({"Error": -2676})
        except Market_Amount_InSufficient:
            return jsonify({"Error": -6753})
        try:
            MAIN.submit_Order()
            return jsonify({"Success": "True"})
        except:
            return jsonify({"Error": -7532})
    # NOTE(review): unreachable — same condition as the `if` above.
    elif Req['type'].upper() == 'MARKET':
        try:
            Token: str = Req['token']
            Pair: str = Req['pair'].upper()
            Side: str = Req['side'].upper()
            Amount: float = float(Req['amount'])
            Leverage: int = int(Req['leverage'])
            Margin: str = Req['margin'].upper()
            # NOTE(review): Price is parsed but never used below.
            Price: float = float(Req['price'])
        except:
            return jsonify({"Error": -2415})
        try:
            MAIN = ORDERS(TOKEN=Token, PAIR=Pair,
                          TYPE="MARKET", SIDE=Side,
                          AMOUNT=Amount, MARGIN=Margin,
                          LEVERAGE=Leverage)
        except AK_SK_Database:
            return jsonify({"Error": -2543})
        except AK_SK_Invalid:
            return jsonify({"Error": -2543})
        except Leverage_Wrong:
            return jsonify({"Error": -4214})
        try:
            MAIN.Validate_leverage()
        except Leverage_Rediction_NotSupported:
            return jsonify({"Error": -4531})
        except Leverage_Unknown_Error:
            return jsonify({"Error": -4218})
        try:
            MAIN.change_Margin()
        except Margin_Wrong:
            return jsonify({"Error": -5421})
        except Margin_Couldnt_Change:
            return jsonify({"Error": -7424})
        try:
            MAIN.ORDER()
        except Market_Error:
            return jsonify({"Error": -6431})
        except MARKET_Margin_Insufficient:
            return jsonify({"Error": -2643})
        except Market_Not_OneWayMode:
            return jsonify({"Error": -2676})
        except Market_Amount_InSufficient:
            return jsonify({"Error": -6753})
        try:
            MAIN.submit_Order()
            return jsonify({"Success": "True"})
        except:
            return jsonify({"Error": -7532})
    else:
        # Unknown order type.
        return jsonify({"Error": -8213})
@app.route('/balance', methods=['POST'])
def Balance():
    """Return the account balance for the token in the JSON request body."""
    # Parse the JSON body; any failure maps to a generic parse error.
    try:
        payload = request.get_json()
        print(payload)
    except:
        return jsonify({"Error": -2340})
    # The token field is required.
    try:
        token: str = payload['token']
    except:
        return jsonify({"Error": -1341})
    # Look up the account; project exceptions map to dedicated error codes.
    try:
        account = Account(TOKEN=token)
    except Token_Wrong:
        return jsonify({"Error": -1342})
    try:
        balance = account.get_Balance()
        return jsonify({"balance": balance})
    except AK_SK_Invalid:
        return jsonify({"Error": -1343})
@app.route('/positions', methods=['POST'])
def Positions():
    """Return the open positions for the token in the JSON request body."""
    # Parse the JSON body; any failure maps to a generic parse error.
    try:
        payload = request.get_json()
    except:
        return jsonify({"Error": -2341})
    # The token field is required.
    try:
        token: str = payload['token']
    except:
        return jsonify({"Error": -9458})
    # Look up the account and serialize its open positions.
    try:
        account = Account(TOKEN=token)
        return jsonify(account.open_Positions())
    except AK_SK_Database:
        return jsonify({"Error": -4342})
    except AK_SK_Invalid:
        return jsonify({"Error": -5213})
@app.route('/close', methods=['POST'])
def Close():
    """Close a position described by the JSON request body (token, pair,
    amount, side) with a market order."""
    # Parse the JSON body; any failure maps to a generic parse error.
    try:
        payload = request.get_json()
    except:
        return jsonify({"Error": -9314})
    # Extract and normalize the required fields.
    try:
        token = payload['token']
        pair = payload['pair'].upper()
        amount = payload['amount']
        side = payload['side'].upper()
    except:
        return jsonify({"Error": -9331})
    # Build the closing market order.
    try:
        order = ORDERS(TOKEN=token, PAIR=pair,
                       TYPE="MARKET", SIDE=side,
                       AMOUNT=amount)
    except AK_SK_Database:
        return jsonify({"Error": -4342})
    except AK_SK_Invalid:
        return jsonify({"Error": -5213})
    try:
        order.close_Order()
    except Close_Error:
        return jsonify({"Error": -9315})
    try:
        order.submit_Order()
    except:
        return jsonify({"Error": -9315})
    return jsonify({"Success": "True"})
@app.route('/check', methods=['POST'])
def Check():
    """Validate the token in the JSON request body by constructing an
    Account; success means the token and its keys are usable."""
    # Parse the JSON body; any failure maps to a generic parse error.
    try:
        payload = request.get_json()
    except:
        return jsonify({"Error": -8864})
    # The token field is required.
    try:
        token: str = payload['token']
    except:
        return jsonify({"Error": -5432})
    # Constructing the Account performs the validation; the instance itself
    # is not needed.
    try:
        Account(TOKEN=token)
    except Token_Wrong:
        return jsonify({"Error": -4262})
    except AK_SK_Database:
        return jsonify({"Error": -4342})
    except AK_SK_Invalid:
        return jsonify({"Error": -5213})
    return jsonify({"Success": "True"})
# Start the development server using the host/port loaded from config.json.
if __name__ == '__main__':
    app.run(host=SERVER, debug=False, port=PORT)
| 29.624454
| 56
| 0.515035
| 669
| 6,784
| 5.098655
| 0.15994
| 0.201994
| 0.248021
| 0.098505
| 0.773967
| 0.756376
| 0.756376
| 0.746116
| 0.746116
| 0.639695
| 0
| 0.042996
| 0.348614
| 6,784
| 228
| 57
| 29.754386
| 0.728898
| 0.001769
| 0
| 0.758621
| 0
| 0
| 0.079052
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024631
| false
| 0
| 0.014778
| 0
| 0.300493
| 0.009852
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
daaf92a36315379042bc4ef61e1b335267122d2f
| 3,368
|
py
|
Python
|
tests/test_processors.py
|
YoavCohen/logbook
|
3e0badb395ed8d0038d02996d38aa0505441327e
|
[
"BSD-3-Clause"
] | 771
|
2015-11-07T12:55:07.000Z
|
2022-03-16T09:37:44.000Z
|
tests/test_processors.py
|
YoavCohen/logbook
|
3e0badb395ed8d0038d02996d38aa0505441327e
|
[
"BSD-3-Clause"
] | 169
|
2015-11-08T06:56:55.000Z
|
2022-02-28T06:58:50.000Z
|
tests/test_processors.py
|
YoavCohen/logbook
|
3e0badb395ed8d0038d02996d38aa0505441327e
|
[
"BSD-3-Clause"
] | 105
|
2015-11-07T14:11:07.000Z
|
2021-12-31T17:46:36.000Z
|
from textwrap import dedent
import logbook
from .utils import make_fake_mail_handler
def test_handler_filter_after_processor(activation_strategy, logger):
    # The handler's `filter` requires 'ip' in record.extra, which only the
    # processor injects — so this test passes only if filters are evaluated
    # AFTER processors have run.
    handler = make_fake_mail_handler(
        format_string=dedent('''
        Subject: Application Error for {record.extra[path]} [{record.extra[method]}]
        Message type: {record.level_name}
        Location: {record.filename}:{record.lineno}
        Module: {record.module}
        Function: {record.func_name}
        Time: {record.time:%Y-%m-%d %H:%M:%S}
        Remote IP: {record.extra[ip]}
        Request: {record.extra[path]} [{record.extra[method]}]
        Message:
        {record.message}
        ''').lstrip(),
        filter=lambda r, h: 'ip' in r.extra,
        bubble=False)
    # Minimal stand-in for a web request object.
    class Request(object):
        remote_addr = '127.0.0.1'
        method = 'GET'
        path = '/index.html'
    def handle_request(request):
        # Processor callback: copy request metadata into every record's extra.
        def inject_extra(record):
            record.extra['ip'] = request.remote_addr
            record.extra['method'] = request.method
            record.extra['path'] = request.path
        processor = logbook.Processor(inject_extra)
        with activation_strategy(processor):
            handler.push_thread()
            try:
                try:
                    # Deliberate ZeroDivisionError so an exception is logged.
                    1 / 0
                except Exception:
                    logger.exception('Exception happened during request')
            finally:
                handler.pop_thread()
    handle_request(Request())
    # Exactly one mail was captured; its body carries the injected extras
    # and the traceback of the deliberate error.
    assert len(handler.mails) == 1
    mail = handler.mails[0][2]
    assert 'Subject: Application Error for /index.html [GET]' in mail
    assert '1 / 0' in mail
def test_handler_processors(activation_strategy, logger):
    # Same scenario as test_handler_filter_after_processor but without a
    # handler filter: processors alone must supply the extras the format
    # string references.
    handler = make_fake_mail_handler(
        format_string=dedent('''
        Subject: Application Error for {record.extra[path]} [{record.extra[method]}]
        Message type: {record.level_name}
        Location: {record.filename}:{record.lineno}
        Module: {record.module}
        Function: {record.func_name}
        Time: {record.time:%Y-%m-%d %H:%M:%S}
        Remote IP: {record.extra[ip]}
        Request: {record.extra[path]} [{record.extra[method]}]
        Message:
        {record.message}
        ''').lstrip())
    # Minimal stand-in for a web request object.
    class Request(object):
        remote_addr = '127.0.0.1'
        method = 'GET'
        path = '/index.html'
    def handle_request(request):
        # Processor callback: copy request metadata into every record's extra.
        def inject_extra(record):
            record.extra['ip'] = request.remote_addr
            record.extra['method'] = request.method
            record.extra['path'] = request.path
        processor = logbook.Processor(inject_extra)
        with activation_strategy(processor):
            handler.push_thread()
            try:
                try:
                    # Deliberate ZeroDivisionError so an exception is logged.
                    1 / 0
                except Exception:
                    logger.exception('Exception happened during request')
            finally:
                handler.pop_thread()
    handle_request(Request())
    # Exactly one mail was captured; its body carries the injected extras
    # and the traceback of the deliberate error.
    assert len(handler.mails) == 1
    mail = handler.mails[0][2]
    assert 'Subject: Application Error for /index.html [GET]' in mail
    assert '1 / 0' in mail
| 33.019608
| 88
| 0.545428
| 347
| 3,368
| 5.175793
| 0.227666
| 0.097996
| 0.050111
| 0.057906
| 0.908686
| 0.908686
| 0.908686
| 0.908686
| 0.908686
| 0.908686
| 0
| 0.011759
| 0.343527
| 3,368
| 101
| 89
| 33.346535
| 0.800543
| 0
| 0
| 0.888889
| 0
| 0
| 0.407957
| 0.06057
| 0
| 0
| 0
| 0
| 0.074074
| 1
| 0.074074
| false
| 0
| 0.037037
| 0
| 0.209877
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
daba7335e4b23c7500e10cf5abcd80d94eca5132
| 3,321
|
py
|
Python
|
tests/test_index.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
tests/test_index.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
tests/test_index.py
|
CCI-Tools/zarr-cache
|
23c873966e06440b65406fd633b0c8fe63ca70a1
|
[
"MIT"
] | null | null | null |
import unittest
from zarr_cache import MemoryStoreIndex
class MemoryStoreIndexTest(unittest.TestCase):
    """Exercises MemoryStoreIndex size accounting across push_key, pop_key,
    mark_key, delete_key and delete_store in both is_lifo modes.

    NOTE(review): with is_lifo=False, pop_key() returns the most recently
    pushed entry first ('s1', 'k5'), and with is_lifo=True it returns the
    oldest first ('s1', 'k1') — the opposite of what the method names
    suggest. Confirm against MemoryStoreIndex's documented semantics.
    """
    def test_fifo(self):
        # Unbounded index, empty at start.
        index = MemoryStoreIndex(is_lifo=False)
        self.assertEqual(None, index.max_size)
        self.assertEqual(0, index.current_size)
        index.push_key('s1', 'k1', 100)
        self.assertEqual(100, index.current_size)
        index.push_key('s1', 'k2', 130)
        index.push_key('s1', 'k3', 120)
        index.push_key('s1', 'k4', 180)
        index.push_key('s1', 'k5', 160)
        # current_size is the sum of all pushed entry sizes.
        self.assertEqual(100 + 130 + 120 + 180 + 160, index.current_size)
        r1 = index.pop_key()
        r2 = index.pop_key()
        r3 = index.pop_key()
        # Most recently pushed entries come out first (see NOTE above).
        self.assertEqual(('s1', 'k5', 160), r1)
        self.assertEqual(('s1', 'k4', 180), r2)
        self.assertEqual(('s1', 'k3', 120), r3)
        self.assertEqual(100 + 130, index.current_size)
        index.push_key('s2', 'k1', 260)
        index.push_key('s2', 'k2', 220)
        index.push_key('s2', 'k3', 210)
        self.assertEqual(100 + 130 + 260 + 220 + 210, index.current_size)
        # Marking ('s2', 'k2') makes the next pops skip it — presumably
        # mark_key re-prioritizes/protects an entry; confirm semantics.
        index.mark_key('s2', 'k2')
        r1 = index.pop_key()
        r2 = index.pop_key()
        self.assertEqual(('s2', 'k3', 210), r1)
        self.assertEqual(('s2', 'k1', 260), r2)
        self.assertEqual(100 + 130 + 220, index.current_size)
        # delete_key returns the size it removed.
        s = index.delete_key('s2', 'k2')
        self.assertEqual(220, s)
        self.assertEqual(100 + 130, index.current_size)
        # delete_store returns the total size removed for that store.
        s = index.delete_store('s1')
        self.assertEqual(100 + 130, s)
        self.assertEqual(0, index.current_size)
        s = index.delete_store('s2')
        self.assertEqual(0, s)
        self.assertEqual(0, index.current_size)
    def test_lifo(self):
        # Same sequence of operations with is_lifo=True.
        index = MemoryStoreIndex(is_lifo=True)
        self.assertEqual(None, index.max_size)
        self.assertEqual(0, index.current_size)
        index.push_key('s1', 'k1', 100)
        self.assertEqual(100, index.current_size)
        index.push_key('s1', 'k2', 130)
        index.push_key('s1', 'k3', 120)
        index.push_key('s1', 'k4', 180)
        index.push_key('s1', 'k5', 160)
        self.assertEqual(100 + 130 + 120 + 180 + 160, index.current_size)
        r1 = index.pop_key()
        r2 = index.pop_key()
        r3 = index.pop_key()
        # Oldest entries come out first in this mode (see NOTE above).
        self.assertEqual(('s1', 'k1', 100), r1)
        self.assertEqual(('s1', 'k2', 130), r2)
        self.assertEqual(('s1', 'k3', 120), r3)
        self.assertEqual(180 + 160, index.current_size)
        index.push_key('s2', 'k1', 260)
        index.push_key('s2', 'k2', 220)
        index.push_key('s2', 'k3', 210)
        self.assertEqual(180 + 160 + 260 + 220 + 210, index.current_size)
        index.mark_key('s2', 'k2')
        r1 = index.pop_key()
        r2 = index.pop_key()
        self.assertEqual(('s1', 'k4', 180), r1)
        self.assertEqual(('s1', 'k5', 160), r2)
        self.assertEqual(260 + 220 + 210, index.current_size)
        s = index.delete_key('s2', 'k2')
        self.assertEqual(220, s)
        self.assertEqual(260 + 210, index.current_size)
        s = index.delete_store('s1')
        self.assertEqual(0, s)
        self.assertEqual(260 + 210, index.current_size)
        s = index.delete_store('s2')
        self.assertEqual(260 + 210, s)
        self.assertEqual(0, index.current_size)
| 32.881188
| 73
| 0.577236
| 438
| 3,321
| 4.242009
| 0.111872
| 0.290635
| 0.155005
| 0.07535
| 0.86437
| 0.798708
| 0.793326
| 0.740581
| 0.740581
| 0.696448
| 0
| 0.122958
| 0.262873
| 3,321
| 100
| 74
| 33.21
| 0.636029
| 0
| 0
| 0.714286
| 0
| 0
| 0.038543
| 0
| 0
| 0
| 0
| 0
| 0.467532
| 1
| 0.025974
| false
| 0
| 0.025974
| 0
| 0.064935
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9712943022818368984c6981b6fd0c0783f3df18
| 21,982
|
py
|
Python
|
sdk/python/pulumi_keycloak/generic_client_role_mapper.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2020-04-28T15:20:56.000Z
|
2022-03-24T18:00:17.000Z
|
sdk/python/pulumi_keycloak/generic_client_role_mapper.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 49
|
2020-02-06T17:53:35.000Z
|
2022-03-25T19:36:08.000Z
|
sdk/python/pulumi_keycloak/generic_client_role_mapper.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-06-09T01:08:56.000Z
|
2021-12-07T15:30:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['GenericClientRoleMapperArgs', 'GenericClientRoleMapper']
# NOTE(review): this class is generated by the Pulumi Terraform Bridge
# (tfgen) — see the file header. Behavioral changes belong in the upstream
# generator, not in hand edits here.
@pulumi.input_type
class GenericClientRoleMapperArgs:
    def __init__(__self__, *,
                 realm_id: pulumi.Input[str],
                 role_id: pulumi.Input[str],
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_scope_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a GenericClientRoleMapper resource.
        :param pulumi.Input[str] realm_id: The realm this role mapper exists within.
        :param pulumi.Input[str] role_id: The ID of the role to be added to this role mapper.
        :param pulumi.Input[str] client_id: The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        :param pulumi.Input[str] client_scope_id: The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        """
        # Required arguments are always stored; optional ones only when given,
        # so unset inputs stay absent from the resource's input map.
        pulumi.set(__self__, "realm_id", realm_id)
        pulumi.set(__self__, "role_id", role_id)
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_scope_id is not None:
            pulumi.set(__self__, "client_scope_id", client_scope_id)
    # Property accessors delegate to pulumi.get/pulumi.set so the values
    # participate in Pulumi's input tracking.
    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Input[str]:
        """
        The realm this role mapper exists within.
        """
        return pulumi.get(self, "realm_id")
    @realm_id.setter
    def realm_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "realm_id", value)
    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> pulumi.Input[str]:
        """
        The ID of the role to be added to this role mapper.
        """
        return pulumi.get(self, "role_id")
    @role_id.setter
    def role_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "role_id", value)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        """
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientScopeId")
    def client_scope_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        """
        return pulumi.get(self, "client_scope_id")
    @client_scope_id.setter
    def client_scope_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_scope_id", value)
@pulumi.input_type
class _GenericClientRoleMapperState:
    def __init__(__self__, *,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_scope_id: Optional[pulumi.Input[str]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 role_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering GenericClientRoleMapper resources.

        :param pulumi.Input[str] client_id: The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        :param pulumi.Input[str] client_scope_id: The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        :param pulumi.Input[str] realm_id: The realm this role mapper exists within.
        :param pulumi.Input[str] role_id: The ID of the role to be added to this role mapper.
        """
        # Every state field is optional; only record the ones actually supplied.
        for field_name, field_value in (
                ("client_id", client_id),
                ("client_scope_id", client_scope_id),
                ("realm_id", realm_id),
                ("role_id", role_id)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        """
        return pulumi.get(self, "client_id")

    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)

    @property
    @pulumi.getter(name="clientScopeId")
    def client_scope_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        """
        return pulumi.get(self, "client_scope_id")

    @client_scope_id.setter
    def client_scope_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_scope_id", value)

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> Optional[pulumi.Input[str]]:
        """
        The realm this role mapper exists within.
        """
        return pulumi.get(self, "realm_id")

    @realm_id.setter
    def realm_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "realm_id", value)

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the role to be added to this role mapper.
        """
        return pulumi.get(self, "role_id")

    @role_id.setter
    def role_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_id", value)
class GenericClientRoleMapper(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_scope_id: Optional[pulumi.Input[str]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 role_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Allow for creating and managing a client's scope mappings within Keycloak.

        By default, all of a user's role mappings are added as claims within the
        token (OIDC) or assertion (SAML). When `full_scope_allowed` is set to
        `false` for a client, role scope mapping lets you limit the roles that get
        declared inside an access token for that client. The role may be mapped
        either to a client (`client_id`) or to a client scope (`client_scope_id`);
        exactly one of the two should be set.

        ## Import

        Generic client role mappers can be imported using one of the following two formats:

        - role mapped to a client:
          `{{realmId}}/client/{{clientId}}/scope-mappings/{{roleClientId}}/{{roleId}}`
        - role mapped to a client scope:
          `{{realmId}}/client-scope/{{clientScopeId}}/scope-mappings/{{roleClientId}}/{{roleId}}`

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] client_id: The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        :param pulumi.Input[str] client_scope_id: The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        :param pulumi.Input[str] realm_id: The realm this role mapper exists within.
        :param pulumi.Input[str] role_id: The ID of the role to be added to this role mapper.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: GenericClientRoleMapperArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Allow for creating and managing a client's scope mappings within Keycloak.

        See the keyword-argument overload above for details on the individual
        resource arguments and import formats.

        :param str resource_name: The name of the resource.
        :param GenericClientRoleMapperArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single args object,
        # or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(GenericClientRoleMapperArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is None:
            __self__._internal_init(resource_name, *args, **kwargs)
        else:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       client_id: Optional[pulumi.Input[str]] = None,
                       client_scope_id: Optional[pulumi.Input[str]] = None,
                       realm_id: Optional[pulumi.Input[str]] = None,
                       role_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize and validate the resource options before registering.
        opts = pulumi.ResourceOptions() if opts is None else opts
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating (not looking up) a resource: build the property bag here.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = GenericClientRoleMapperArgs.__new__(GenericClientRoleMapperArgs)
            props = __props__.__dict__
            props["client_id"] = client_id
            props["client_scope_id"] = client_scope_id
            # realm_id and role_id are required unless resolving by URN.
            for required_name, required_value in (("realm_id", realm_id), ("role_id", role_id)):
                if required_value is None and not opts.urn:
                    raise TypeError("Missing required property '%s'" % required_name)
                props[required_name] = required_value
        super(GenericClientRoleMapper, __self__).__init__(
            'keycloak:index/genericClientRoleMapper:GenericClientRoleMapper',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            client_id: Optional[pulumi.Input[str]] = None,
            client_scope_id: Optional[pulumi.Input[str]] = None,
            realm_id: Optional[pulumi.Input[str]] = None,
            role_id: Optional[pulumi.Input[str]] = None) -> 'GenericClientRoleMapper':
        """
        Get an existing GenericClientRoleMapper resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] client_id: The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        :param pulumi.Input[str] client_scope_id: The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        :param pulumi.Input[str] realm_id: The realm this role mapper exists within.
        :param pulumi.Input[str] role_id: The ID of the role to be added to this role mapper.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _GenericClientRoleMapperState.__new__(_GenericClientRoleMapperState)
        state = __props__.__dict__
        state["client_id"] = client_id
        state["client_scope_id"] = client_scope_id
        state["realm_id"] = realm_id
        state["role_id"] = role_id
        return GenericClientRoleMapper(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the client this role mapper should be added to. Conflicts with `client_scope_id`. This argument is required if `client_scope_id` is not set.
        """
        return pulumi.get(self, "client_id")

    @property
    @pulumi.getter(name="clientScopeId")
    def client_scope_id(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the client scope this role mapper should be added to. Conflicts with `client_id`. This argument is required if `client_id` is not set.
        """
        return pulumi.get(self, "client_scope_id")

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Output[str]:
        """
        The realm this role mapper exists within.
        """
        return pulumi.get(self, "realm_id")

    @property
    @pulumi.getter(name="roleId")
    def role_id(self) -> pulumi.Output[str]:
        """
        The ID of the role to be added to this role mapper.
        """
        return pulumi.get(self, "role_id")
| 42.849903
| 372
| 0.635611
| 2,709
| 21,982
| 4.938354
| 0.07715
| 0.048139
| 0.056511
| 0.037674
| 0.864031
| 0.850501
| 0.849604
| 0.829496
| 0.822395
| 0.812304
| 0
| 0.007883
| 0.267082
| 21,982
| 512
| 373
| 42.933594
| 0.822482
| 0.526658
| 0
| 0.645714
| 1
| 0
| 0.099005
| 0.01638
| 0
| 0
| 0
| 0
| 0
| 1
| 0.154286
| false
| 0.005714
| 0.028571
| 0
| 0.274286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
971ae9df4fdbd2a44436b51b488191a782d0fbf2
| 28,106
|
py
|
Python
|
Q/questionnaire/views/services/views_services_test.py
|
ES-DOC/esdoc-questionnaire
|
9301eda375c4046323265b37ba96d94c94bf8b11
|
[
"MIT"
] | null | null | null |
Q/questionnaire/views/services/views_services_test.py
|
ES-DOC/esdoc-questionnaire
|
9301eda375c4046323265b37ba96d94c94bf8b11
|
[
"MIT"
] | 477
|
2015-01-07T18:22:27.000Z
|
2017-07-17T15:05:48.000Z
|
Q/questionnaire/views/services/views_services_test.py
|
ES-DOC/esdoc-questionnaire
|
9301eda375c4046323265b37ba96d94c94bf8b11
|
[
"MIT"
] | null | null | null |
####################
# ES-DOC CIM Questionnaire
# Copyright (c) 2017 ES-DOC. All rights reserved.
#
# University of Colorado, Boulder
# http://cires.colorado.edu/
#
# This project is distributed according to the terms of the MIT license [http://www.opensource.org/licenses/MIT].
####################
from django.http import HttpResponseForbidden, JsonResponse
from Q.questionnaire.views.services.views_services_base import validate_request
# TEST_DATA = {
# "id": 2,
# "name": "model",
# "description": "",
# "version": "1.2.0",
# "owner": 1,
# "shared_owners": [
#
# ],
# "properties": [
# {
# "id": 7,
# "name": "name",
# "proxy": 1,
# "order": 1,
# "field_type": "ATOMIC",
# "cardinality": "1|1",
# "atomic_value": "my test",
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "63a63777-0267-4e02-8dce-3b4e2765643c",
# "is_multiple": False,
# "is_required": True,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 8,
# "name": "enumeration",
# "proxy": 2,
# "order": 2,
# "field_type": "ENUMERATION",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "one",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "e5e7bee7-ee7d-45c9-b172-2aa76cdc13ea",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 9,
# "name": "thing",
# "proxy": 3,
# "order": 3,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
# {
# "id": 4,
# "name": "recursive_thing",
# "description": "",
# "version": "1.2.0",
# "owner": None,
# "shared_owners": [
#
# ],
# "properties": [
# {
# "id": 14,
# "name": "name",
# "proxy": 4,
# "order": 1,
# "field_type": "ATOMIC",
# "cardinality": "1|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "cc724be8-0b26-49db-b23c-4b71169e4eaf",
# "is_multiple": False,
# "is_required": True,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 15,
# "name": "child",
# "proxy": 5,
# "order": 2,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|*",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
# {
# "id": 5,
# "name": "recursive_thing",
# "description": "",
# "version": "1.2.0",
# "owner": None,
# "shared_owners": [
#
# ],
# "properties": [
# {
# "id": 17,
# "name": "name",
# "proxy": 4,
# "order": 1,
# "field_type": "ATOMIC",
# "cardinality": "1|1",
# "atomic_value": "one",
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "e7c036fb-dc41-4820-9244-7a3d7775215e",
# "is_multiple": False,
# "is_required": True,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 18,
# "name": "child",
# "proxy": 5,
# "order": 2,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|*",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "3cce1fe4-96c3-4bab-b651-82855adc347d",
# "is_multiple": True,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 2,
# "name": "recursive_thing"
# }
# ],
# "display_detail": False
# },
# {
# "id": 19,
# "name": "multiple_targets",
# "proxy": 6,
# "order": 3,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "871ffc9c-25fb-463d-8dc0-be917e122ad6",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 3,
# "name": "other_thing_one"
# },
# {
# "pk": 4,
# "name": "other_thing_two"
# }
# ],
# "display_detail": False
# }
# ],
# "project": 1,
# "ontology": 1,
# "proxy": 2,
# "is_document": False,
# "is_root": False,
# "is_published": False,
# "is_active": True,
# "is_complete": False,
# "key": "69247bbf-c4db-40cb-9847-03bffedfb5a5",
# "display_detail": False,
# "display_properties": False
# },
# {
# "id": 6,
# "name": "recursive_thing",
# "description": "",
# "version": "1.2.0",
# "owner": None,
# "shared_owners": [
#
# ],
# "properties": [
# {
# "id": 20,
# "name": "name",
# "proxy": 4,
# "order": 1,
# "field_type": "ATOMIC",
# "cardinality": "1|1",
# "atomic_value": "two",
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "c1354d64-d846-46e7-bc33-7ec23b060b76",
# "is_multiple": False,
# "is_required": True,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 21,
# "name": "child",
# "proxy": 5,
# "order": 2,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|*",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "61915f58-3607-4c8e-9191-39bf3f9b0069",
# "is_multiple": True,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 2,
# "name": "recursive_thing"
# }
# ],
# "display_detail": False
# },
# {
# "id": 22,
# "name": "multiple_targets",
# "proxy": 6,
# "order": 3,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "b01eaed5-9b82-47a2-9cc7-31024298cc3b",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 3,
# "name": "other_thing_one"
# },
# {
# "pk": 4,
# "name": "other_thing_two"
# }
# ],
# "display_detail": False
# }
# ],
# "project": 1,
# "ontology": 1,
# "proxy": 2,
# "is_document": False,
# "is_root": False,
# "is_published": False,
# "is_active": True,
# "is_complete": False,
# "key": "300ade30-6c7f-4adc-b217-1ba3d8f8dbe8",
# "display_detail": False,
# "display_properties": False
# },
# {
# "id": 7,
# "name": "recursive_thing",
# "description": "",
# "version": "1.2.0",
# "owner": None,
# "shared_owners": [
#
# ],
# "properties": [
# {
# "id": 23,
# "name": "name",
# "proxy": 4,
# "order": 1,
# "field_type": "ATOMIC",
# "cardinality": "1|1",
# "atomic_value": "three",
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "e3947772-1522-4369-8f14-9e7fa59d8ac7",
# "is_multiple": False,
# "is_required": True,
# "possible_relationship_targets": [
#
# ],
# "display_detail": False
# },
# {
# "id": 24,
# "name": "child",
# "proxy": 5,
# "order": 2,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|*",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "004636e9-c029-4c4f-a060-49298f454150",
# "is_multiple": True,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 2,
# "name": "recursive_thing"
# }
# ],
# "display_detail": False
# },
# {
# "id": 25,
# "name": "multiple_targets",
# "proxy": 6,
# "order": 3,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "e7ce05a8-0958-4b76-b1f9-c60b7aff3ea7",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 3,
# "name": "other_thing_one"
# },
# {
# "pk": 4,
# "name": "other_thing_two"
# }
# ],
# "display_detail": False
# }
# ],
# "project": 1,
# "ontology": 1,
# "proxy": 2,
# "is_document": False,
# "is_root": False,
# "is_published": False,
# "is_active": True,
# "is_complete": False,
# "key": "b9f3c966-33de-4d00-b213-fedafae2ff71",
# "display_detail": False,
# "display_properties": False
# }
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "8226358a-e8f3-4675-948f-a8cc64a43d28",
# "is_multiple": True,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 2,
# "name": "recursive_thing"
# }
# ],
# "display_detail": False
# },
# {
# "id": 16,
# "name": "multiple_targets",
# "proxy": 6,
# "order": 3,
# "field_type": "RELATIONSHIP",
# "cardinality": "0|1",
# "atomic_value": None,
# "enumeration_value": "",
# "enumeration_other_value": None,
# "relationship_values": [
#
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": False,
# "key": "a414e8f4-a87f-4c9c-a4c2-bed0e58a2032",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 3,
# "name": "other_thing_one"
# },
# {
# "pk": 4,
# "name": "other_thing_two"
# }
# ],
# "display_detail": False
# }
# ],
# "project": 1,
# "ontology": 1,
# "proxy": 2,
# "is_document": False,
# "is_root": False,
# "is_published": False,
# "is_active": True,
# "is_complete": False,
# "key": "65122ec7-a65c-4909-85a1-3e5b1c4f737d",
# "display_detail": False,
# "display_properties": False
# }
# ],
# "is_nil": False,
# "nil_reason": "UNKNOWN",
# "is_complete": True,
# "key": "c8e937a1-19d4-41eb-b8b2-90f2651455fc",
# "is_multiple": False,
# "is_required": False,
# "possible_relationship_targets": [
# {
# "pk": 2,
# "name": "recursive_thing"
# }
# ],
# "display_detail": False
# }
# ],
# "project": 1,
# "ontology": 1,
# "proxy": 1,
# "is_document": True,
# "is_root": True,
# "is_published": True,
# "is_active": True,
# "is_complete": True,
# "key": "652bb84c-493b-415e-8b5e-11e7530b9894",
# "display_detail": False,
# "display_properties": False
# }
# Canned payload served by the q_services_test view below: a fake top-level
# "model" node with a single nested "atmosphere" realm node, mirroring the
# recursive node/properties shape the current services API returns.
# NOTE(review): the nested node reuses the parent's "key" UUID — presumably
# deliberate for this fixture; confirm if key uniqueness matters to callers.
TEST_DATA = {
    "id": None,
    "version": "1.2.0",
    "name": "top-level model",
    "documentation": "some documentation",
    "label": None,
    "proxy": 1,
    "proxy_type": "model",
    "project": 1,
    "owner": 1,
    "shared_owners": [
    ],
    "is_meta": True,
    "is_root": True,
    "is_active": True,
    "is_published": False,
    "is_complete": True,
    "key": "652bb84c-493b-415e-8b5e-11e7530b9894",
    "display_detail": False,
    "display_properties": False,
    "properties": [
    ],
    "nodes": [
        {
            "id": None,
            "version": "1.2.0",
            "name": "atmosphere",
            "documentation": "some documentation",
            "label": None,
            "proxy": 1,
            "proxy_type": "realm",
            "project": 1,
            "owner": 1,
            "shared_owners": [
            ],
            "is_meta": True,
            "is_root": False,
            "is_active": True,
            "is_published": False,
            "is_complete": True,
            "key": "652bb84c-493b-415e-8b5e-11e7530b9894",
            "display_detail": False,
            "display_properties": False,
            "properties": [
            ],
            "nodes": [
            ]
        }
    ]
}
def q_services_test(request):
    """Serve the canned TEST_DATA payload as JSON; only GET requests are allowed."""
    is_valid, error_msg = validate_request(request, valid_methods=["GET"])
    if is_valid:
        # safe=False kept for parity with other service views, although the
        # payload here is a dict and would be accepted either way.
        return JsonResponse(TEST_DATA, safe=False)
    return HttpResponseForbidden(error_msg)
| 47.236975
| 115
| 0.250338
| 1,288
| 28,106
| 5.222826
| 0.179348
| 0.029136
| 0.058867
| 0.055746
| 0.804816
| 0.798722
| 0.794857
| 0.77345
| 0.763639
| 0.739111
| 0
| 0.060649
| 0.648011
| 28,106
| 594
| 116
| 47.316498
| 0.619327
| 0.908489
| 0
| 0.666667
| 0
| 0
| 0.244267
| 0.035892
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0
| 0.035088
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
972c70220d26bba532f75bd7002e32ac81fff9a9
| 30,350
|
py
|
Python
|
fastreport_cloud_sdk/api/subscription_users_api.py
|
FastReports/FastReport-Cloud-Python
|
4442e19ef4c980222ede6d9e0597f564d6d85b26
|
[
"MIT"
] | null | null | null |
fastreport_cloud_sdk/api/subscription_users_api.py
|
FastReports/FastReport-Cloud-Python
|
4442e19ef4c980222ede6d9e0597f564d6d85b26
|
[
"MIT"
] | null | null | null |
fastreport_cloud_sdk/api/subscription_users_api.py
|
FastReports/FastReport-Cloud-Python
|
4442e19ef4c980222ede6d9e0597f564d6d85b26
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
FastReport Cloud
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from fastreport_cloud_sdk.api_client import ApiClient
from fastreport_cloud_sdk.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class SubscriptionUsersApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when the caller does not supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def subscription_users_add_user(self, subscription_id, user_id, **kwargs):  # noqa: E501
        """Add a user to the subscription, the added users will be displayed in the list of users of the subscription, and these users will also have an active subscription.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_add_user(subscription_id, user_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param user_id: Identifier of user (required)
        :type user_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # Delegate to the *_with_http_info variant; only the deserialized body
        # (here None) is returned, not the status code / headers tuple.
        kwargs['_return_http_data_only'] = True
        return self.subscription_users_add_user_with_http_info(subscription_id, user_id, **kwargs)  # noqa: E501

    def subscription_users_add_user_with_http_info(self, subscription_id, user_id, **kwargs):  # noqa: E501
        """Add a user to the subscription, the added users will be displayed in the list of users of the subscription, and these users will also have an active subscription.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_add_user_with_http_info(subscription_id, user_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param user_id: Identifier of user (required)
        :type user_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # locals() captures the named parameters plus the 'kwargs' dict; the
        # loop below flattens kwargs into it while rejecting unknown names.
        local_var_params = locals()

        all_params = [
            'subscription_id',
            'user_id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscription_users_add_user" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if self.api_client.client_side_validation and ('subscription_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['subscription_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `subscription_users_add_user`")  # noqa: E501
        # verify the required parameter 'user_id' is set
        if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['user_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `user_id` when calling `subscription_users_add_user`")  # noqa: E501

        # subscription ids are 24-char hex strings (MongoDB ObjectId format)
        if self.api_client.client_side_validation and 'subscription_id' in local_var_params and not re.search(r'^[A-Fa-f0-9]{24}$', local_var_params['subscription_id']):  # noqa: E501
            raise ApiValueError("Invalid value for parameter `subscription_id` when calling `subscription_users_add_user`, must conform to the pattern `/^[A-Fa-f0-9]{24}$/`")  # noqa: E501
        collection_formats = {}

        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501
        if 'user_id' in local_var_params:
            path_params['userId'] = local_var_params['user_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKey', 'JWT']  # noqa: E501

        # empty map: no response body is deserialized for this endpoint
        response_types_map = {}

        return self.api_client.call_api(
            '/api/manage/v1/Subscriptions/{subscriptionId}/users/{userId}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))

    def subscription_users_get_users(self, subscription_id, **kwargs):  # noqa: E501
        """Returns all users of subscription  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_get_users(subscription_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param skip: How many entities skip
        :type skip: int
        :param take: How many entities take
        :type take: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: SubscriptionUsersVM
        """
        # Delegate to the *_with_http_info variant, returning only the body.
        kwargs['_return_http_data_only'] = True
        return self.subscription_users_get_users_with_http_info(subscription_id, **kwargs)  # noqa: E501

    def subscription_users_get_users_with_http_info(self, subscription_id, **kwargs):  # noqa: E501
        """Returns all users of subscription  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_get_users_with_http_info(subscription_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param skip: How many entities skip
        :type skip: int
        :param take: How many entities take
        :type take: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(SubscriptionUsersVM, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() captures the named parameters plus the 'kwargs' dict; the
        # loop below flattens kwargs into it while rejecting unknown names.
        local_var_params = locals()

        all_params = [
            'subscription_id',
            'skip',
            'take'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscription_users_get_users" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if self.api_client.client_side_validation and ('subscription_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['subscription_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `subscription_users_get_users`")  # noqa: E501

        # subscription ids are 24-char hex strings (MongoDB ObjectId format)
        if self.api_client.client_side_validation and 'subscription_id' in local_var_params and not re.search(r'^[A-Fa-f0-9]{24}$', local_var_params['subscription_id']):  # noqa: E501
            raise ApiValueError("Invalid value for parameter `subscription_id` when calling `subscription_users_get_users`, must conform to the pattern `/^[A-Fa-f0-9]{24}$/`")  # noqa: E501
        # paging bounds: 0 <= skip <= INT32_MAX, 1 <= take <= 120
        if self.api_client.client_side_validation and 'skip' in local_var_params and local_var_params['skip'] > 2147483647:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `skip` when calling `subscription_users_get_users`, must be a value less than or equal to `2147483647`")  # noqa: E501
        if self.api_client.client_side_validation and 'skip' in local_var_params and local_var_params['skip'] < 0:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `skip` when calling `subscription_users_get_users`, must be a value greater than or equal to `0`")  # noqa: E501
        if self.api_client.client_side_validation and 'take' in local_var_params and local_var_params['take'] > 120:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `take` when calling `subscription_users_get_users`, must be a value less than or equal to `120`")  # noqa: E501
        if self.api_client.client_side_validation and 'take' in local_var_params and local_var_params['take'] < 1:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `take` when calling `subscription_users_get_users`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}

        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501

        query_params = []
        if 'skip' in local_var_params and local_var_params['skip'] is not None:  # noqa: E501
            query_params.append(('skip', local_var_params['skip']))  # noqa: E501
        if 'take' in local_var_params and local_var_params['take'] is not None:  # noqa: E501
            query_params.append(('take', local_var_params['take']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKey', 'JWT']  # noqa: E501

        # status code -> model class used to deserialize the response body
        response_types_map = {
            200: "SubscriptionUsersVM",
            400: "ProblemDetails",
            403: "ProblemDetails",
            404: "ProblemDetails",
            500: None,
        }

        return self.api_client.call_api(
            '/api/manage/v1/Subscriptions/{subscriptionId}/users', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))

    def subscription_users_leave_subscripiton(self, subscription_id, **kwargs):  # noqa: E501
        """Allows user to leave subscription,.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_leave_subscripiton(subscription_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # NOTE(review): method name typo ("subscripiton") comes from the
        # OpenAPI operationId; renaming it would break existing callers.
        kwargs['_return_http_data_only'] = True
        return self.subscription_users_leave_subscripiton_with_http_info(subscription_id, **kwargs)  # noqa: E501

    def subscription_users_leave_subscripiton_with_http_info(self, subscription_id, **kwargs):  # noqa: E501
        """Allows user to leave subscription,.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_leave_subscripiton_with_http_info(subscription_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # locals() captures the named parameters plus the 'kwargs' dict; the
        # loop below flattens kwargs into it while rejecting unknown names.
        local_var_params = locals()

        all_params = [
            'subscription_id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscription_users_leave_subscripiton" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if self.api_client.client_side_validation and ('subscription_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['subscription_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `subscription_users_leave_subscripiton`")  # noqa: E501

        # subscription ids are 24-char hex strings (MongoDB ObjectId format)
        if self.api_client.client_side_validation and 'subscription_id' in local_var_params and not re.search(r'^[A-Fa-f0-9]{24}$', local_var_params['subscription_id']):  # noqa: E501
            raise ApiValueError("Invalid value for parameter `subscription_id` when calling `subscription_users_leave_subscripiton`, must conform to the pattern `/^[A-Fa-f0-9]{24}$/`")  # noqa: E501
        collection_formats = {}

        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKey', 'JWT']  # noqa: E501

        # empty map: no response body is deserialized for this endpoint
        response_types_map = {}

        return self.api_client.call_api(
            '/api/manage/v1/Subscriptions/{subscriptionId}/leave', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))

    def subscription_users_remove_user(self, subscription_id, user_id, **kwargs):  # noqa: E501
        """Delete a user from the subscription, the added users will be displayed in the list of users of the subscription, and these users will also have an active subscription.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_remove_user(subscription_id, user_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param user_id: Identifier of user (required)
        :type user_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # Delegate to the *_with_http_info variant, returning only the body.
        kwargs['_return_http_data_only'] = True
        return self.subscription_users_remove_user_with_http_info(subscription_id, user_id, **kwargs)  # noqa: E501

    def subscription_users_remove_user_with_http_info(self, subscription_id, user_id, **kwargs):  # noqa: E501
        """Delete a user from the subscription, the added users will be displayed in the list of users of the subscription, and these users will also have an active subscription.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.subscription_users_remove_user_with_http_info(subscription_id, user_id, async_req=True)
        >>> result = thread.get()

        :param subscription_id: Identifier of subscription (required)
        :type subscription_id: str
        :param user_id: Identifier of user (required)
        :type user_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: None
        """
        # locals() captures the named parameters plus the 'kwargs' dict; the
        # loop below flattens kwargs into it while rejecting unknown names.
        local_var_params = locals()

        all_params = [
            'subscription_id',
            'user_id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscription_users_remove_user" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if self.api_client.client_side_validation and ('subscription_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['subscription_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `subscription_id` when calling `subscription_users_remove_user`")  # noqa: E501
        # verify the required parameter 'user_id' is set
        if self.api_client.client_side_validation and ('user_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['user_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `user_id` when calling `subscription_users_remove_user`")  # noqa: E501

        # subscription ids are 24-char hex strings (MongoDB ObjectId format)
        if self.api_client.client_side_validation and 'subscription_id' in local_var_params and not re.search(r'^[A-Fa-f0-9]{24}$', local_var_params['subscription_id']):  # noqa: E501
            raise ApiValueError("Invalid value for parameter `subscription_id` when calling `subscription_users_remove_user`, must conform to the pattern `/^[A-Fa-f0-9]{24}$/`")  # noqa: E501
        collection_formats = {}

        path_params = {}
        if 'subscription_id' in local_var_params:
            path_params['subscriptionId'] = local_var_params['subscription_id']  # noqa: E501
        if 'user_id' in local_var_params:
            path_params['userId'] = local_var_params['user_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKey', 'JWT']  # noqa: E501

        # empty map: no response body is deserialized for this endpoint
        response_types_map = {}

        return self.api_client.call_api(
            '/api/manage/v1/Subscriptions/{subscriptionId}/users/{userId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
| 48.951613
| 198
| 0.62
| 3,487
| 30,350
| 5.133639
| 0.067967
| 0.040221
| 0.064131
| 0.024133
| 0.948886
| 0.945869
| 0.945869
| 0.94397
| 0.937769
| 0.930283
| 0
| 0.015713
| 0.308007
| 30,350
| 619
| 199
| 49.030695
| 0.836635
| 0.432751
| 0
| 0.697509
| 1
| 0.02847
| 0.248094
| 0.074356
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032028
| false
| 0
| 0.017794
| 0
| 0.081851
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
974d2b89921f3c5380c5784fc26c261c256543f3
| 2,480
|
py
|
Python
|
main/classifiers.py
|
dtdannen/pocket-sai
|
21a956811f12a71c6e739a40ab392cbcc16e12a9
|
[
"MIT"
] | null | null | null |
main/classifiers.py
|
dtdannen/pocket-sai
|
21a956811f12a71c6e739a40ab392cbcc16e12a9
|
[
"MIT"
] | null | null | null |
main/classifiers.py
|
dtdannen/pocket-sai
|
21a956811f12a71c6e739a40ab392cbcc16e12a9
|
[
"MIT"
] | null | null | null |
'''
Created on Jan 2, 2016
@author: Dustin
'''
def save_all_interesections_as_white_stones(img):
    '''
    After the intersections have been properly identified, this will crop the image into
    361 individual images, one for each intersection.

    Writes one `w x h` crop centered on each intersection to
    POS_WHITE_TRAINING_IMAGES_DIR, then clears the SAVE_WHITE_STONE_IMAGES
    flag so the export runs only once.

    :param img: source board image passed through to `crop` / cv2.imwrite.
    '''
    w, h = 18, 18
    count = 0
    # BUG FIX: the original declared `global SAVE_NEG_IMAGES` but read AND
    # assigned SAVE_WHITE_STONE_IMAGES.  Because of the assignment at the end,
    # Python treated SAVE_WHITE_STONE_IMAGES as a local variable for the whole
    # function, so the `if SAVE_WHITE_STONE_IMAGES:` test raised
    # UnboundLocalError.  Declaring the correct name fixes that.
    global SAVE_WHITE_STONE_IMAGES, INTERSECTIONS
    if SAVE_WHITE_STONE_IMAGES:
        # Only export once every board intersection has been located.
        if len(INTERSECTIONS) == pow(BOARD_SIZE, 2):
            for inter in INTERSECTIONS:
                # Integer half-extents keep the crop bounds integral under
                # Python 3 as well (where 18 / 2 would be the float 9.0).
                x1 = inter[0] - (w // 2)
                y1 = inter[1] - (h // 2)
                x2 = inter[0] + (w // 2)
                y2 = inter[1] + (h // 2)
                cv2.imwrite(POS_WHITE_TRAINING_IMAGES_DIR + "white_" + str(count) + ".jpg", crop(img, x1, y1, x2, y2))
                count += 1
        print("Just produced " + str(count) + " white stone images")
        # One-shot: prevent re-exporting on subsequent frames.
        SAVE_WHITE_STONE_IMAGES = False
def save_all_intersections_as_neg_images(img):
    '''
    Export every board intersection as a negative training example.

    When the SAVE_NEG_IMAGES flag is set and all intersections have been
    located, an 18x18 crop centered on each intersection is written to
    NEG_TRAINING_IMAGES_DIR, after which the flag is cleared so the export
    happens only once.
    '''
    global SAVE_NEG_IMAGES, INTERSECTIONS
    patch_w, patch_h = 18, 18
    saved = 0
    if SAVE_NEG_IMAGES:
        if len(INTERSECTIONS) == pow(BOARD_SIZE, 2):
            for inter in INTERSECTIONS:
                # Crop window centered on the intersection point.
                left = inter[0] - (patch_w / 2)
                top = inter[1] - (patch_h / 2)
                right = inter[0] + (patch_w / 2)
                bottom = inter[1] + (patch_h / 2)
                out_path = NEG_TRAINING_IMAGES_DIR + "neg_" + str(saved) + ".jpg"
                cv2.imwrite(out_path, crop(img, left, top, right, bottom))
                saved += 1
        print("Just produced " + str(saved) + " negative images")
        # Clear the one-shot flag so later frames do not re-export.
        SAVE_NEG_IMAGES = False
def save_all_intersections_as_white_stones(img):
    '''
    After the intersections have been properly identified, this will crop the image into
    361 individual images, one for each intersection.
    '''
    # NOTE(review): near-duplicate of save_all_interesections_as_white_stones
    # (note the misspelled sibling) but with a 14x14 window instead of 18x18
    # and a correct `global` declaration — consider consolidating the two.
    # Crop window size in pixels; 14x14 patch centered on each intersection.
    w, h = 14,14
    count = 0
    global SAVE_WHITE_STONE_IMAGES, INTERSECTIONS
    if SAVE_WHITE_STONE_IMAGES:
        # Only export once every intersection of the BOARD_SIZE^2 grid is known.
        if len(INTERSECTIONS) == pow(BOARD_SIZE,2):
            for inter in INTERSECTIONS:
                # assumes inter is an (x, y) pixel coordinate pair — TODO confirm
                x1 = inter[0] - (w / 2)
                y1 = inter[1] - (h / 2)
                x2 = inter[0] + (w / 2)
                y2 = inter[1] + (h / 2)
                cv2.imwrite(POS_WHITE_TRAINING_IMAGES_DIR+"white_"+str(count)+".jpg", crop(img,x1,y1,x2,y2))
                count+=1
        print("Just produced "+ str(count)+" white stone images")
        # One-shot flag: cleared so the export does not repeat.
        SAVE_WHITE_STONE_IMAGES = False
| 36.470588
| 108
| 0.570161
| 325
| 2,480
| 4.181538
| 0.206154
| 0.051508
| 0.082414
| 0.03532
| 0.897719
| 0.897719
| 0.897719
| 0.854305
| 0.854305
| 0.854305
| 0
| 0.050978
| 0.319758
| 2,480
| 67
| 109
| 37.014925
| 0.754594
| 0.179032
| 0
| 0.8
| 0
| 0
| 0.062912
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0
| 0
| 0.066667
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9753c825a31e54f2419c7b4b0ffd5fb33e58b7dc
| 25,097
|
py
|
Python
|
sdk/python/pulumi_keycloak/realm_events.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2020-04-28T15:20:56.000Z
|
2022-03-24T18:00:17.000Z
|
sdk/python/pulumi_keycloak/realm_events.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 49
|
2020-02-06T17:53:35.000Z
|
2022-03-25T19:36:08.000Z
|
sdk/python/pulumi_keycloak/realm_events.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-06-09T01:08:56.000Z
|
2021-12-07T15:30:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['RealmEventsArgs', 'RealmEvents']
@pulumi.input_type
class RealmEventsArgs:
    # Generated by the Pulumi Terraform Bridge (see file header) — the
    # @pulumi.input_type decorator wires these properties into Pulumi's
    # input-resolution machinery; only non-None optionals are forwarded.
    def __init__(__self__, *,
                 realm_id: pulumi.Input[str],
                 admin_events_details_enabled: Optional[pulumi.Input[bool]] = None,
                 admin_events_enabled: Optional[pulumi.Input[bool]] = None,
                 enabled_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 events_enabled: Optional[pulumi.Input[bool]] = None,
                 events_expiration: Optional[pulumi.Input[int]] = None,
                 events_listeners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a RealmEvents resource.
        :param pulumi.Input[str] realm_id: The name of the realm the event settings apply to.
        :param pulumi.Input[bool] admin_events_details_enabled: When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        :param pulumi.Input[bool] admin_events_enabled: When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_event_types: The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        :param pulumi.Input[bool] events_enabled: When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[int] events_expiration: The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_listeners: The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        """
        pulumi.set(__self__, "realm_id", realm_id)
        # Optional inputs are only stored when explicitly provided, so the
        # provider can distinguish "unset" from an explicit false/empty value.
        if admin_events_details_enabled is not None:
            pulumi.set(__self__, "admin_events_details_enabled", admin_events_details_enabled)
        if admin_events_enabled is not None:
            pulumi.set(__self__, "admin_events_enabled", admin_events_enabled)
        if enabled_event_types is not None:
            pulumi.set(__self__, "enabled_event_types", enabled_event_types)
        if events_enabled is not None:
            pulumi.set(__self__, "events_enabled", events_enabled)
        if events_expiration is not None:
            pulumi.set(__self__, "events_expiration", events_expiration)
        if events_listeners is not None:
            pulumi.set(__self__, "events_listeners", events_listeners)

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Input[str]:
        """
        The name of the realm the event settings apply to.
        """
        return pulumi.get(self, "realm_id")

    @realm_id.setter
    def realm_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "realm_id", value)

    @property
    @pulumi.getter(name="adminEventsDetailsEnabled")
    def admin_events_details_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_details_enabled")

    @admin_events_details_enabled.setter
    def admin_events_details_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "admin_events_details_enabled", value)

    @property
    @pulumi.getter(name="adminEventsEnabled")
    def admin_events_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_enabled")

    @admin_events_enabled.setter
    def admin_events_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "admin_events_enabled", value)

    @property
    @pulumi.getter(name="enabledEventTypes")
    def enabled_event_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        """
        return pulumi.get(self, "enabled_event_types")

    @enabled_event_types.setter
    def enabled_event_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "enabled_event_types", value)

    @property
    @pulumi.getter(name="eventsEnabled")
    def events_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "events_enabled")

    @events_enabled.setter
    def events_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "events_enabled", value)

    @property
    @pulumi.getter(name="eventsExpiration")
    def events_expiration(self) -> Optional[pulumi.Input[int]]:
        """
        The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        """
        return pulumi.get(self, "events_expiration")

    @events_expiration.setter
    def events_expiration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "events_expiration", value)

    @property
    @pulumi.getter(name="eventsListeners")
    def events_listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        """
        return pulumi.get(self, "events_listeners")

    @events_listeners.setter
    def events_listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "events_listeners", value)
@pulumi.input_type
class _RealmEventsState:
    """Internal state container used when looking up / refreshing a RealmEvents resource.

    All fields are optional: only values that were explicitly provided are
    stored (see the `is not None` guards in `__init__`), so absent fields are
    distinguishable from fields set to a falsy value.
    """

    def __init__(__self__, *,
                 admin_events_details_enabled: Optional[pulumi.Input[bool]] = None,
                 admin_events_enabled: Optional[pulumi.Input[bool]] = None,
                 enabled_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 events_enabled: Optional[pulumi.Input[bool]] = None,
                 events_expiration: Optional[pulumi.Input[int]] = None,
                 events_listeners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering RealmEvents resources.
        :param pulumi.Input[bool] admin_events_details_enabled: When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        :param pulumi.Input[bool] admin_events_enabled: When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_event_types: The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        :param pulumi.Input[bool] events_enabled: When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[int] events_expiration: The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_listeners: The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        :param pulumi.Input[str] realm_id: The name of the realm the event settings apply to.
        """
        # Only record values the caller actually supplied, so unset and
        # explicitly-falsy values remain distinguishable downstream.
        if admin_events_details_enabled is not None:
            pulumi.set(__self__, "admin_events_details_enabled", admin_events_details_enabled)
        if admin_events_enabled is not None:
            pulumi.set(__self__, "admin_events_enabled", admin_events_enabled)
        if enabled_event_types is not None:
            pulumi.set(__self__, "enabled_event_types", enabled_event_types)
        if events_enabled is not None:
            pulumi.set(__self__, "events_enabled", events_enabled)
        if events_expiration is not None:
            pulumi.set(__self__, "events_expiration", events_expiration)
        if events_listeners is not None:
            pulumi.set(__self__, "events_listeners", events_listeners)
        if realm_id is not None:
            pulumi.set(__self__, "realm_id", realm_id)

    @property
    @pulumi.getter(name="adminEventsDetailsEnabled")
    def admin_events_details_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_details_enabled")

    @admin_events_details_enabled.setter
    def admin_events_details_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "admin_events_details_enabled", value)

    @property
    @pulumi.getter(name="adminEventsEnabled")
    def admin_events_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_enabled")

    @admin_events_enabled.setter
    def admin_events_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "admin_events_enabled", value)

    @property
    @pulumi.getter(name="enabledEventTypes")
    def enabled_event_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        """
        return pulumi.get(self, "enabled_event_types")

    @enabled_event_types.setter
    def enabled_event_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "enabled_event_types", value)

    @property
    @pulumi.getter(name="eventsEnabled")
    def events_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "events_enabled")

    @events_enabled.setter
    def events_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "events_enabled", value)

    @property
    @pulumi.getter(name="eventsExpiration")
    def events_expiration(self) -> Optional[pulumi.Input[int]]:
        """
        The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        """
        return pulumi.get(self, "events_expiration")

    @events_expiration.setter
    def events_expiration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "events_expiration", value)

    @property
    @pulumi.getter(name="eventsListeners")
    def events_listeners(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        """
        return pulumi.get(self, "events_listeners")

    @events_listeners.setter
    def events_listeners(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "events_listeners", value)

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the realm the event settings apply to.
        """
        return pulumi.get(self, "realm_id")

    @realm_id.setter
    def realm_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "realm_id", value)
class RealmEvents(pulumi.CustomResource):
    """Pulumi resource managing realm-level event settings within Keycloak."""

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 admin_events_details_enabled: Optional[pulumi.Input[bool]] = None,
                 admin_events_enabled: Optional[pulumi.Input[bool]] = None,
                 enabled_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 events_enabled: Optional[pulumi.Input[bool]] = None,
                 events_expiration: Optional[pulumi.Input[int]] = None,
                 events_listeners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 realm_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Allows for managing Realm Events settings within Keycloak.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_keycloak as keycloak

        realm = keycloak.Realm("realm",
            realm="my-realm",
            enabled=True)
        realm_events = keycloak.RealmEvents("realmEvents",
            realm_id=realm.id,
            events_enabled=True,
            events_expiration=3600,
            admin_events_enabled=True,
            admin_events_details_enabled=True,
            enabled_event_types=[
                "LOGIN",
                "LOGOUT",
            ],
            events_listeners=["jboss-logging"])
        ```

        ## Import

        This resource currently does not support importing.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_events_details_enabled: When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        :param pulumi.Input[bool] admin_events_enabled: When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_event_types: The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        :param pulumi.Input[bool] events_enabled: When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[int] events_expiration: The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_listeners: The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        :param pulumi.Input[str] realm_id: The name of the realm the event settings apply to.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RealmEventsArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Allows for managing Realm Events settings within Keycloak.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_keycloak as keycloak

        realm = keycloak.Realm("realm",
            realm="my-realm",
            enabled=True)
        realm_events = keycloak.RealmEvents("realmEvents",
            realm_id=realm.id,
            events_enabled=True,
            events_expiration=3600,
            admin_events_enabled=True,
            admin_events_details_enabled=True,
            enabled_event_types=[
                "LOGIN",
                "LOGOUT",
            ],
            events_listeners=["jboss-logging"])
        ```

        ## Import

        This resource currently does not support importing.

        :param str resource_name: The name of the resource.
        :param RealmEventsArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher for the two overloads above: detect whether the caller
        # passed a RealmEventsArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(RealmEventsArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       admin_events_details_enabled: Optional[pulumi.Input[bool]] = None,
                       admin_events_enabled: Optional[pulumi.Input[bool]] = None,
                       enabled_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       events_enabled: Optional[pulumi.Input[bool]] = None,
                       events_expiration: Optional[pulumi.Input[int]] = None,
                       events_listeners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       realm_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation backing both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id means we are creating a new resource (as opposed to
            # adopting an existing one), so build the props from the arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RealmEventsArgs.__new__(RealmEventsArgs)

            __props__.__dict__["admin_events_details_enabled"] = admin_events_details_enabled
            __props__.__dict__["admin_events_enabled"] = admin_events_enabled
            __props__.__dict__["enabled_event_types"] = enabled_event_types
            __props__.__dict__["events_enabled"] = events_enabled
            __props__.__dict__["events_expiration"] = events_expiration
            __props__.__dict__["events_listeners"] = events_listeners
            # realm_id is required unless the resource is identified by URN.
            if realm_id is None and not opts.urn:
                raise TypeError("Missing required property 'realm_id'")
            __props__.__dict__["realm_id"] = realm_id
        super(RealmEvents, __self__).__init__(
            'keycloak:index/realmEvents:RealmEvents',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            admin_events_details_enabled: Optional[pulumi.Input[bool]] = None,
            admin_events_enabled: Optional[pulumi.Input[bool]] = None,
            enabled_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            events_enabled: Optional[pulumi.Input[bool]] = None,
            events_expiration: Optional[pulumi.Input[int]] = None,
            events_listeners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            realm_id: Optional[pulumi.Input[str]] = None) -> 'RealmEvents':
        """
        Get an existing RealmEvents resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] admin_events_details_enabled: When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        :param pulumi.Input[bool] admin_events_enabled: When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_event_types: The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        :param pulumi.Input[bool] events_enabled: When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        :param pulumi.Input[int] events_expiration: The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] events_listeners: The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        :param pulumi.Input[str] realm_id: The name of the realm the event settings apply to.
        """
        # Force the provider ID into the options so the engine performs a
        # lookup of the existing resource rather than creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _RealmEventsState.__new__(_RealmEventsState)

        __props__.__dict__["admin_events_details_enabled"] = admin_events_details_enabled
        __props__.__dict__["admin_events_enabled"] = admin_events_enabled
        __props__.__dict__["enabled_event_types"] = enabled_event_types
        __props__.__dict__["events_enabled"] = events_enabled
        __props__.__dict__["events_expiration"] = events_expiration
        __props__.__dict__["events_listeners"] = events_listeners
        __props__.__dict__["realm_id"] = realm_id
        return RealmEvents(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="adminEventsDetailsEnabled")
    def admin_events_details_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        When `true`, saved admin events will included detailed information for create/update requests. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_details_enabled")

    @property
    @pulumi.getter(name="adminEventsEnabled")
    def admin_events_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        When `true`, admin events are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "admin_events_enabled")

    @property
    @pulumi.getter(name="enabledEventTypes")
    def enabled_event_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        The event types that will be saved to the database. Omitting this field enables all event types. Defaults to `[]` or all event types.
        """
        return pulumi.get(self, "enabled_event_types")

    @property
    @pulumi.getter(name="eventsEnabled")
    def events_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        When `true`, events from `enabled_event_types` are saved to the database, making them available through the admin console. Defaults to `false`.
        """
        return pulumi.get(self, "events_enabled")

    @property
    @pulumi.getter(name="eventsExpiration")
    def events_expiration(self) -> pulumi.Output[Optional[int]]:
        """
        The amount of time in seconds events will be saved in the database. Defaults to `0` or never.
        """
        return pulumi.get(self, "events_expiration")

    @property
    @pulumi.getter(name="eventsListeners")
    def events_listeners(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        The event listeners that events should be sent to. Defaults to `[]` or none. Note that new realms enable the `jboss-logging` listener by default, and this resource will remove that unless it is specified.
        """
        return pulumi.get(self, "events_listeners")

    @property
    @pulumi.getter(name="realmId")
    def realm_id(self) -> pulumi.Output[str]:
        """
        The name of the realm the event settings apply to.
        """
        return pulumi.get(self, "realm_id")
| 51.960663
| 279
| 0.678288
| 3,063
| 25,097
| 5.330722
| 0.064643
| 0.081516
| 0.069819
| 0.050527
| 0.897415
| 0.887678
| 0.884432
| 0.876286
| 0.867161
| 0.846093
| 0
| 0.000824
| 0.225923
| 25,097
| 482
| 280
| 52.068465
| 0.839656
| 0.384747
| 0
| 0.781609
| 1
| 0
| 0.119589
| 0.025692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16092
| false
| 0.003831
| 0.019157
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97c1903496b8ed1b7c131a4942173d4c3198f44a
| 138
|
py
|
Python
|
numba/experimental/jitclass/__init__.py
|
auderson/numba
|
3d67c9850ab56457f418cf40af6245fd9c337705
|
[
"BSD-2-Clause"
] | 6,620
|
2015-01-04T08:51:04.000Z
|
2022-03-31T12:52:18.000Z
|
numba/experimental/jitclass/__init__.py
|
auderson/numba
|
3d67c9850ab56457f418cf40af6245fd9c337705
|
[
"BSD-2-Clause"
] | 6,457
|
2015-01-04T03:18:41.000Z
|
2022-03-31T17:38:42.000Z
|
numba/experimental/jitclass/__init__.py
|
auderson/numba
|
3d67c9850ab56457f418cf40af6245fd9c337705
|
[
"BSD-2-Clause"
] | 930
|
2015-01-25T02:33:03.000Z
|
2022-03-30T14:10:32.000Z
|
from numba.experimental.jitclass.decorators import jitclass
from numba.experimental.jitclass import boxing # Has import-time side effect
| 46
| 77
| 0.847826
| 18
| 138
| 6.5
| 0.611111
| 0.153846
| 0.358974
| 0.495727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101449
| 138
| 2
| 78
| 69
| 0.943548
| 0.195652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
97c364f8798a2f6a303a076d4a6d8de9863be61a
| 151
|
py
|
Python
|
malemba/__init__.py
|
loven-doo/MaLeMBa
|
739318e4b54057940f53a3fcb1c85f72bb0e4f3b
|
[
"BSD-3-Clause"
] | null | null | null |
malemba/__init__.py
|
loven-doo/MaLeMBa
|
739318e4b54057940f53a3fcb1c85f72bb0e4f3b
|
[
"BSD-3-Clause"
] | null | null | null |
malemba/__init__.py
|
loven-doo/MaLeMBa
|
739318e4b54057940f53a3fcb1c85f72bb0e4f3b
|
[
"BSD-3-Clause"
] | null | null | null |
from malemba.model_base import ModelBase, ArrayModelBase
from malemba.regressor_base import RegressorBase
from malemba.model_scheme import ModelScheme
| 37.75
| 56
| 0.887417
| 19
| 151
| 6.894737
| 0.578947
| 0.251908
| 0.244275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086093
| 151
| 3
| 57
| 50.333333
| 0.949275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c1035c1515476d1ad795d5952ddf4ff66f581806
| 10,584
|
py
|
Python
|
hashicorp_vault_client/test/test_identity_api.py
|
drewmullen/HAC
|
fb185804fd244366f8f8d01df22835b3d96e7512
|
[
"Apache-2.0"
] | null | null | null |
hashicorp_vault_client/test/test_identity_api.py
|
drewmullen/HAC
|
fb185804fd244366f8f8d01df22835b3d96e7512
|
[
"Apache-2.0"
] | 2
|
2019-09-30T20:56:41.000Z
|
2019-10-02T00:22:07.000Z
|
hashicorp_vault_client/test/test_identity_api.py
|
drewmullen/HAC
|
fb185804fd244366f8f8d01df22835b3d96e7512
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
HashiCorp Vault API
HTTP API that gives you full access to Vault. All API routes are prefixed with `/v1/`. # noqa: E501
OpenAPI spec version: 1.2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import hashicorp_vault_client
from api.identity_api import IdentityApi # noqa: E501
from hashicorp_vault_client.rest import ApiException
class TestIdentityApi(unittest.TestCase):
"""IdentityApi unit test stubs"""
def setUp(self):
self.api = api.identity_api.IdentityApi() # noqa: E501
def tearDown(self):
pass
def test_delete_identity_alias_id_id(self):
"""Test case for delete_identity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_delete_identity_entity_alias_id_id(self):
"""Test case for delete_identity_entity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_delete_identity_entity_id_id(self):
"""Test case for delete_identity_entity_id_id
Update, read or delete an entity using entity ID # noqa: E501
"""
pass
def test_delete_identity_entity_name_name(self):
"""Test case for delete_identity_entity_name_name
Update, read or delete an entity using entity name # noqa: E501
"""
pass
def test_delete_identity_group_alias_id_id(self):
"""Test case for delete_identity_group_alias_id_id
"""
pass
def test_delete_identity_group_id_id(self):
"""Test case for delete_identity_group_id_id
Update or delete an existing group using its ID. # noqa: E501
"""
pass
def test_delete_identity_group_name_name(self):
"""Test case for delete_identity_group_name_name
"""
pass
def test_delete_identity_oidc_key_name(self):
"""Test case for delete_identity_oidc_key_name
CRUD operations for OIDC keys. # noqa: E501
"""
pass
def test_delete_identity_oidc_role_name(self):
"""Test case for delete_identity_oidc_role_name
CRUD operations on OIDC Roles # noqa: E501
"""
pass
def test_delete_identity_persona_id_id(self):
"""Test case for delete_identity_persona_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_get_identity_alias_id(self):
"""Test case for get_identity_alias_id
List all the alias IDs. # noqa: E501
"""
pass
def test_get_identity_alias_id_id(self):
"""Test case for get_identity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_get_identity_entity_alias_id(self):
"""Test case for get_identity_entity_alias_id
List all the alias IDs. # noqa: E501
"""
pass
def test_get_identity_entity_alias_id_id(self):
"""Test case for get_identity_entity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_get_identity_entity_id(self):
"""Test case for get_identity_entity_id
List all the entity IDs # noqa: E501
"""
pass
def test_get_identity_entity_id_id(self):
"""Test case for get_identity_entity_id_id
Update, read or delete an entity using entity ID # noqa: E501
"""
pass
def test_get_identity_entity_name(self):
"""Test case for get_identity_entity_name
List all the entity names # noqa: E501
"""
pass
def test_get_identity_entity_name_name(self):
"""Test case for get_identity_entity_name_name
Update, read or delete an entity using entity name # noqa: E501
"""
pass
def test_get_identity_group_alias_id(self):
"""Test case for get_identity_group_alias_id
List all the group alias IDs. # noqa: E501
"""
pass
def test_get_identity_group_alias_id_id(self):
"""Test case for get_identity_group_alias_id_id
"""
pass
def test_get_identity_group_id(self):
"""Test case for get_identity_group_id
List all the group IDs. # noqa: E501
"""
pass
def test_get_identity_group_id_id(self):
"""Test case for get_identity_group_id_id
Update or delete an existing group using its ID. # noqa: E501
"""
pass
def test_get_identity_group_name(self):
"""Test case for get_identity_group_name
"""
pass
def test_get_identity_group_name_name(self):
"""Test case for get_identity_group_name_name
"""
pass
def test_get_identity_oidc_config(self):
"""Test case for get_identity_oidc_config
OIDC configuration # noqa: E501
"""
pass
def test_get_identity_oidc_key(self):
"""Test case for get_identity_oidc_key
List OIDC keys # noqa: E501
"""
pass
def test_get_identity_oidc_key_name(self):
"""Test case for get_identity_oidc_key_name
CRUD operations for OIDC keys. # noqa: E501
"""
pass
def test_get_identity_oidc_role(self):
"""Test case for get_identity_oidc_role
List configured OIDC roles # noqa: E501
"""
pass
def test_get_identity_oidc_role_name(self):
"""Test case for get_identity_oidc_role_name
CRUD operations on OIDC Roles # noqa: E501
"""
pass
def test_get_identity_oidc_token_name(self):
"""Test case for get_identity_oidc_token_name
Generate an OIDC token # noqa: E501
"""
pass
def test_get_identity_oidc_well_known_keys(self):
"""Test case for get_identity_oidc_well_known_keys
Retrieve public keys # noqa: E501
"""
pass
def test_get_identity_oidc_well_known_openid_configuration(self):
"""Test case for get_identity_oidc_well_known_openid_configuration
Query OIDC configurations # noqa: E501
"""
pass
def test_get_identity_persona_id(self):
"""Test case for get_identity_persona_id
List all the alias IDs. # noqa: E501
"""
pass
def test_get_identity_persona_id_id(self):
"""Test case for get_identity_persona_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_post_identity_alias(self):
"""Test case for post_identity_alias
Create a new alias. # noqa: E501
"""
pass
def test_post_identity_alias_id_id(self):
"""Test case for post_identity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_post_identity_entity(self):
"""Test case for post_identity_entity
Create a new entity # noqa: E501
"""
pass
def test_post_identity_entity_alias(self):
"""Test case for post_identity_entity_alias
Create a new alias. # noqa: E501
"""
pass
def test_post_identity_entity_alias_id_id(self):
"""Test case for post_identity_entity_alias_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
def test_post_identity_entity_id_id(self):
"""Test case for post_identity_entity_id_id
Update, read or delete an entity using entity ID # noqa: E501
"""
pass
def test_post_identity_entity_merge(self):
"""Test case for post_identity_entity_merge
Merge two or more entities together # noqa: E501
"""
pass
def test_post_identity_entity_name_name(self):
"""Test case for post_identity_entity_name_name
Update, read or delete an entity using entity name # noqa: E501
"""
pass
def test_post_identity_group(self):
"""Test case for post_identity_group
Create a new group. # noqa: E501
"""
pass
def test_post_identity_group_alias(self):
"""Test case for post_identity_group_alias
Creates a new group alias, or updates an existing one. # noqa: E501
"""
pass
def test_post_identity_group_alias_id_id(self):
"""Test case for post_identity_group_alias_id_id
"""
pass
def test_post_identity_group_id_id(self):
"""Test case for post_identity_group_id_id
Update or delete an existing group using its ID. # noqa: E501
"""
pass
def test_post_identity_group_name_name(self):
"""Test case for post_identity_group_name_name
"""
pass
def test_post_identity_lookup_entity(self):
"""Test case for post_identity_lookup_entity
Query entities based on various properties. # noqa: E501
"""
pass
def test_post_identity_lookup_group(self):
"""Test case for post_identity_lookup_group
Query groups based on various properties. # noqa: E501
"""
pass
def test_post_identity_oidc_config(self):
"""Test case for post_identity_oidc_config
OIDC configuration # noqa: E501
"""
pass
def test_post_identity_oidc_introspect(self):
"""Test case for post_identity_oidc_introspect
Verify the authenticity of an OIDC token # noqa: E501
"""
pass
def test_post_identity_oidc_key_name(self):
"""Test case for post_identity_oidc_key_name
CRUD operations for OIDC keys. # noqa: E501
"""
pass
def test_post_identity_oidc_key_name_rotate(self):
"""Test case for post_identity_oidc_key_name_rotate
Rotate a named OIDC key. # noqa: E501
"""
pass
def test_post_identity_oidc_role_name(self):
"""Test case for post_identity_oidc_role_name
CRUD operations on OIDC Roles # noqa: E501
"""
pass
def test_post_identity_persona(self):
"""Test case for post_identity_persona
Create a new alias. # noqa: E501
"""
pass
def test_post_identity_persona_id_id(self):
"""Test case for post_identity_persona_id_id
Update, read or delete an alias ID. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 25.260143
| 104
| 0.63511
| 1,407
| 10,584
| 4.425729
| 0.086709
| 0.062952
| 0.098924
| 0.134896
| 0.872812
| 0.863016
| 0.852899
| 0.77726
| 0.642685
| 0.450297
| 0
| 0.021657
| 0.297619
| 10,584
| 418
| 105
| 25.320574
| 0.815981
| 0.478269
| 0
| 0.459677
| 0
| 0
| 0.001829
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.467742
| false
| 0.459677
| 0.040323
| 0
| 0.516129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
c1115eb980a0aaa6456ae51299d49cc561c02c09
| 74
|
py
|
Python
|
examples/underscored/trivial_function.py
|
doboy/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 7
|
2016-09-23T00:44:05.000Z
|
2021-10-04T21:19:12.000Z
|
examples/underscored/trivial_function.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 1
|
2016-09-23T00:45:05.000Z
|
2019-02-16T19:05:37.000Z
|
examples/underscored/trivial_function.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 3
|
2016-09-23T01:13:15.000Z
|
2018-07-20T21:22:17.000Z
|
# def bar():
# pass
#
# bar()
def _():
pass
_()
(bar,) = (_,)
| 7.4
| 13
| 0.351351
| 7
| 74
| 3.285714
| 0.428571
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.364865
| 74
| 9
| 14
| 8.222222
| 0.489362
| 0.364865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c1593c1fb4351548a1237aebdb10d87828564149
| 1,089
|
py
|
Python
|
__init__.py
|
smurfix/util-py
|
35dc9fc858c27bc68b51124f1513506db98fde6f
|
[
"MIT"
] | null | null | null |
__init__.py
|
smurfix/util-py
|
35dc9fc858c27bc68b51124f1513506db98fde6f
|
[
"MIT"
] | null | null | null |
__init__.py
|
smurfix/util-py
|
35dc9fc858c27bc68b51124f1513506db98fde6f
|
[
"MIT"
] | null | null | null |
# TODO split this up
from ._impl import * # noqa: F401,F403 # isort:skip
from ._dict import * # noqa: F401,F403 # isort:skip
try:
from ._event import * # noqa: F401,F403
except ImportError:
pass
try:
from ._ctx import * # noqa: F401,F403
except ImportError:
pass
try:
from ._queue import * # noqa: F401,F403
except ImportError:
pass
try:
from ._msgpack import * # noqa: F401,F403
except ImportError:
pass
try:
from ._module import * # noqa: F401,F403
except ImportError:
pass
try:
from ._msg import * # noqa: F401,F403
except ImportError:
pass
try:
from ._path import * # noqa: F401,F403
except ImportError:
pass
try:
from ._server import * # noqa: F401,F403
except ImportError:
pass
try:
from ._spawn import * # noqa: F401,F403
except ImportError:
pass
try:
from ._systemd import * # noqa: F401,F403
except ImportError:
pass
try:
from ._yaml import * # noqa: F401,F403
except ImportError:
pass
try:
from ._main import * # noqa: F401,F403
except ImportError:
pass
| 16.753846
| 53
| 0.651974
| 140
| 1,089
| 4.971429
| 0.207143
| 0.201149
| 0.281609
| 0.362069
| 0.860632
| 0.860632
| 0.783046
| 0.727011
| 0.727011
| 0
| 0
| 0.103832
| 0.257117
| 1,089
| 64
| 54
| 17.015625
| 0.756489
| 0.246097
| 0
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0
| 1
| 0
| true
| 0.24
| 0.52
| 0
| 0.52
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 13
|
c17b4f466f6941193aed3e547747ba1a73ccd635
| 933
|
py
|
Python
|
PerceptualMetric/psrc/perceptree/lib/res2net/__init__.py
|
PolasekT/ICTree
|
d13ad603101805bcc288411504ecffd6f2e1f365
|
[
"MIT"
] | 3
|
2021-12-09T22:37:03.000Z
|
2022-02-16T13:40:44.000Z
|
PerceptualMetric/psrc/perceptree/lib/res2net/__init__.py
|
PolasekT/ICTree
|
d13ad603101805bcc288411504ecffd6f2e1f365
|
[
"MIT"
] | null | null | null |
PerceptualMetric/psrc/perceptree/lib/res2net/__init__.py
|
PolasekT/ICTree
|
d13ad603101805bcc288411504ecffd6f2e1f365
|
[
"MIT"
] | 3
|
2021-12-09T22:37:08.000Z
|
2022-02-03T14:38:39.000Z
|
# -*- coding: utf-8 -*-
from perceptree.lib.res2net.res2net import res2net50
from perceptree.lib.res2net.res2net import res2net50_48w_2s
from perceptree.lib.res2net.res2net import res2net50_26w_4s
from perceptree.lib.res2net.res2net import res2net50_26w_6s
from perceptree.lib.res2net.res2net import res2net50_14w_8s
from perceptree.lib.res2net.res2net import res2net50_26w_8s
from perceptree.lib.res2net.res2next import res2next50
from perceptree.lib.res2net.dla import res2net_dla60
from perceptree.lib.res2net.dla import res2next_dla60
from perceptree.lib.res2net.res2net_v1b import res2net18_v1b
from perceptree.lib.res2net.res2net_v1b import res2net50_v1b
from perceptree.lib.res2net.res2net_v1b import res2net101_v1b
from perceptree.lib.res2net.res2net_v1b import res2net50_v1b_26w_4s
from perceptree.lib.res2net.res2net_v1b import res2net101_v1b_26w_4s
from perceptree.lib.res2net.res2net_v1b import res2net152_v1b_26w_4s
| 44.428571
| 68
| 0.868167
| 138
| 933
| 5.652174
| 0.173913
| 0.269231
| 0.326923
| 0.461538
| 0.889744
| 0.841026
| 0.75641
| 0.525641
| 0.337179
| 0.260256
| 0
| 0.130787
| 0.073955
| 933
| 20
| 69
| 46.65
| 0.771991
| 0.022508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
a9ea9280d8c505c0688044d161bf02a4f1f89f3f
| 9,777
|
py
|
Python
|
utils/dataset.py
|
ivanwhaf/noise-validation
|
7f4adfd7577a5cef40a57f5fd4e0716de0d3bb09
|
[
"MIT"
] | 1
|
2021-11-15T11:05:39.000Z
|
2021-11-15T11:05:39.000Z
|
utils/dataset.py
|
ivanwhaf/noise-validation
|
7f4adfd7577a5cef40a57f5fd4e0716de0d3bb09
|
[
"MIT"
] | null | null | null |
utils/dataset.py
|
ivanwhaf/noise-validation
|
7f4adfd7577a5cef40a57f5fd4e0716de0d3bb09
|
[
"MIT"
] | null | null | null |
"""
2021/5/8
Noisy dataset: MNIST, CIFAR10
"""
import matplotlib.pyplot as plt
import numpy as np
from torch.utils.data import DataLoader, Dataset
from torchvision import datasets, transforms, utils
# np.random.seed(0)
class MNISTNoisy(Dataset):
    """MNIST dataset wrapper that corrupts labels with synthetic noise.

    A noise transition matrix (NTM) is built from ``noise_type`` and
    ``noise_rate``, and every label is re-sampled from its NTM row, so
    an expected fraction of about ``noise_rate`` of the labels change.
    Indices of changed / unchanged samples are recorded in
    ``noisy_sample_idx`` / ``clean_sample_idx``.
    """

    def __init__(self, root, train, transform, download, noise_type='symmetric', noise_rate=0.2):
        self.base_dataset = datasets.MNIST(root, train, transform, download=download)
        self.noise_type = noise_type
        self.noise_rate = noise_rate
        self.clean_sample_idx = []
        self.noisy_sample_idx = []
        # add label noise
        if self.noise_type == 'symmetric':
            self.uniform(noise_rate, 10)
        elif self.noise_type == 'asymmetric':
            self.flip(noise_rate, 10)

    def _corrupt_labels(self, ntm, num_classes: int, noise_name: str, noise_rate: float):
        """Re-sample every label from its row of *ntm*, record which samples
        changed, and print a summary.  Shared by :meth:`uniform` and
        :meth:`flip` (they previously duplicated this loop verbatim).
        """
        sample_indices = np.arange(len(self.base_dataset))
        # generate noisy label by noise transition matrix
        for i in sample_indices:
            # NOTE(review): assumes base_dataset.targets[i] is usable as a
            # row index into ntm (an int or 0-d int tensor) — true for MNIST.
            label = np.random.choice(num_classes, p=ntm[self.base_dataset.targets[i]])  # new label
            if label != self.base_dataset.targets[i]:
                self.noisy_sample_idx.append(i)
                self.base_dataset.targets[i] = label
        self.clean_sample_idx = np.setdiff1d(sample_indices, self.noisy_sample_idx)
        print('Noise type:', noise_name)
        print('Noise rate:', noise_rate)
        print('Noise transition matrix:\n', ntm)
        print('Clean samples:', len(self.clean_sample_idx), 'Noisy samples:', len(self.noisy_sample_idx))

    def uniform(self, noise_rate: float, num_classes: int):
        """Add symmetric noise: each label keeps probability ``1 - noise_rate``
        and spreads ``noise_rate`` uniformly over the other classes."""
        # noise transition matrix
        ntm = noise_rate * np.full((num_classes, num_classes), 1 / (num_classes - 1))
        np.fill_diagonal(ntm, 1 - noise_rate)
        self._corrupt_labels(ntm, num_classes, 'Symmetric', noise_rate)

    def flip(self, noise_rate: float, num_classes: int):
        """Add asymmetric noise: flip a fixed set of visually-confusable
        digit pairs with probability ``noise_rate``."""
        # noise transition matrix
        ntm = np.eye(num_classes) * (1 - noise_rate)
        d = {7: 1, 2: 7, 5: 6, 6: 5, 3: 8}  # 7->1, 2->7, 5->6, 6->5, 3->8
        for raw_class, new_class in d.items():
            ntm[raw_class][new_class] = noise_rate
        # classes not in the flip map keep their label with probability 1
        for i in [0, 1, 4, 8, 9]:
            ntm[i][i] = 1
        self._corrupt_labels(ntm, num_classes, 'Asymmetric', noise_rate)

    def __len__(self):
        # delegate to the wrapped MNIST dataset
        return self.base_dataset.__len__()

    def __getitem__(self, index):
        # delegate to the wrapped MNIST dataset (labels already corrupted)
        return self.base_dataset.__getitem__(index)
class CIFAR10Noisy(Dataset):
    """CIFAR-10 dataset wrapper that corrupts labels with synthetic noise.

    A noise transition matrix (NTM) is built from ``noise_type`` and
    ``noise_rate``, and every label is re-sampled from its NTM row.
    Indices of changed / unchanged samples are recorded in
    ``noisy_sample_idx`` (as a numpy array) / ``clean_sample_idx``.
    When ``need_idx`` is True, ``__getitem__`` also returns the sample
    index alongside the sample.
    """

    def __init__(self, root, train, transform, download, noise_type='symmetric', noise_rate=0.2, need_idx=True):
        self.base_dataset = datasets.CIFAR10(root, train, transform, download=download)
        self.noise_type = noise_type
        self.noise_rate = noise_rate
        self.noisy_sample_idx = []
        self.clean_sample_idx = []
        self.need_idx = need_idx
        self.base_transform = transform
        # add label noise
        if self.noise_type == 'symmetric':
            self.uniform(noise_rate, 10)
        elif self.noise_type == 'asymmetric':
            self.flip(noise_rate, 10)

    def _corrupt_labels(self, ntm, num_classes: int, noise_name: str, noise_rate: float):
        """Re-sample every label from its row of *ntm*, record which samples
        changed, and print a summary.  Shared by :meth:`uniform` and
        :meth:`flip` (they previously duplicated this loop verbatim).
        """
        sample_indices = np.arange(len(self.base_dataset))
        # generate noisy label by noise transition matrix
        for i in sample_indices:
            label = np.random.choice(num_classes, p=ntm[self.base_dataset.targets[i]])
            if label != self.base_dataset.targets[i]:
                self.noisy_sample_idx.append(i)
                self.base_dataset.targets[i] = label
        self.noisy_sample_idx = np.array(self.noisy_sample_idx)
        self.clean_sample_idx = np.setdiff1d(sample_indices, self.noisy_sample_idx)
        print('Noise type:', noise_name)
        print('Noise rate:', noise_rate)
        print('Noise transition matrix:\n', ntm)
        print('Clean samples:', len(self.clean_sample_idx), 'Noisy samples:', len(self.noisy_sample_idx))

    def uniform(self, noise_rate: float, num_classes: int):
        """Add symmetric noise: each label keeps probability ``1 - noise_rate``
        and spreads ``noise_rate`` uniformly over the other classes."""
        # noise transition matrix
        ntm = noise_rate * np.full((num_classes, num_classes), 1 / (num_classes - 1))
        np.fill_diagonal(ntm, 1 - noise_rate)
        self._corrupt_labels(ntm, num_classes, 'Symmetric', noise_rate)

    def flip(self, noise_rate: float, num_classes: int):
        """Add asymmetric noise: flip a fixed set of visually-confusable
        class pairs with probability ``noise_rate``."""
        # noise transition matrix
        ntm = np.eye(num_classes) * (1 - noise_rate)
        d = {9: 1, 2: 0, 3: 5, 5: 3, 4: 7}  # truck->automobile, bird->airplane, cat->dog, dog->cat, deer->horse
        for raw_class, new_class in d.items():
            ntm[raw_class][new_class] = noise_rate
        # classes not in the flip map keep their label with probability 1
        for i in [0, 1, 6, 7, 8]:
            ntm[i][i] = 1
        self._corrupt_labels(ntm, num_classes, 'Asymmetric', noise_rate)

    def __len__(self):
        # delegate to the wrapped CIFAR-10 dataset
        return self.base_dataset.__len__()

    def __getitem__(self, index):
        # optionally pair the sample with its index (useful for tracking
        # which samples were noisified during training)
        if self.need_idx:
            return self.base_dataset.__getitem__(index), index
        else:
            return self.base_dataset.__getitem__(index)
class CIFAR100Noisy(Dataset):
    """CIFAR-100 dataset wrapper that corrupts labels with synthetic noise.

    A noise transition matrix (NTM) is built from ``noise_type`` and
    ``noise_rate``, and every label is re-sampled from its NTM row.
    Indices of changed / unchanged samples are recorded in
    ``noisy_sample_idx`` / ``clean_sample_idx``.
    """

    def __init__(self, root, train, transform, download, noise_type='symmetric', noise_rate=0.2):
        self.base_dataset = datasets.CIFAR100(root, train, transform, download=download)
        self.noise_type = noise_type
        self.noise_rate = noise_rate
        self.noisy_sample_idx = []
        self.clean_sample_idx = []
        self.transform = transform
        # add label noise
        if self.noise_type == 'symmetric':
            self.uniform(noise_rate, 100)
        elif self.noise_type == 'asymmetric':
            self.flip(noise_rate, 100)

    def _corrupt_labels(self, ntm, num_classes: int, noise_name: str, noise_rate: float):
        """Re-sample every label from its row of *ntm*, record which samples
        changed, and print a summary.  Shared by :meth:`uniform` and
        :meth:`flip` (they previously duplicated this loop verbatim).
        """
        sample_indices = np.arange(len(self.base_dataset))
        # generate noisy label by noise transition matrix
        for i in sample_indices:
            label = np.random.choice(num_classes, p=ntm[self.base_dataset.targets[i]])
            if label != self.base_dataset.targets[i]:
                self.noisy_sample_idx.append(i)
                self.base_dataset.targets[i] = label
        self.clean_sample_idx = np.setdiff1d(sample_indices, self.noisy_sample_idx)
        print('Noise type:', noise_name)
        print('Noise rate:', noise_rate)
        print('Noise transition matrix:\n', ntm)
        print('Clean samples:', len(self.clean_sample_idx), 'Noisy samples:', len(self.noisy_sample_idx))

    def uniform(self, noise_rate: float, num_classes: int):
        """Add symmetric noise: each label keeps probability ``1 - noise_rate``
        and spreads ``noise_rate`` uniformly over the other classes."""
        # noise transition matrix
        ntm = noise_rate * np.full((num_classes, num_classes), 1 / (num_classes - 1))
        np.fill_diagonal(ntm, 1 - noise_rate)
        self._corrupt_labels(ntm, num_classes, 'Symmetric', noise_rate)

    def flip(self, noise_rate: float, num_classes: int):
        """Add asymmetric noise: each class flips to the next class
        (circularly, ``i -> (i + 1) % num_classes``) with probability
        ``noise_rate``."""
        ntm = np.eye(num_classes) * (1 - noise_rate)
        for i in range(num_classes):
            ntm[i][i + 1 if i + 1 < num_classes else 0] = noise_rate
        self._corrupt_labels(ntm, num_classes, 'Asymmetric', noise_rate)

    def __len__(self):
        # delegate to the wrapped CIFAR-100 dataset
        return self.base_dataset.__len__()

    def __getitem__(self, index):
        # delegate to the wrapped CIFAR-100 dataset (labels already corrupted)
        return self.base_dataset.__getitem__(index)
if __name__ == '__main__':
    # Manual sanity check: build a noisy MNIST dataset, grab one batch,
    # print its (possibly corrupted) labels, and save the images to disk.
    preprocess = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=(0.1307,), std=(0.3081,)),
    ])
    noisy_dataset = MNISTNoisy(transform=preprocess, root='../dataset', train=True, download=True, noise_rate=0.5)
    batch_loader = DataLoader(noisy_dataset, batch_size=64, shuffle=False)
    # save noisy figure
    batch_images, batch_labels = next(iter(batch_loader))
    figure = plt.figure()
    batch_images = batch_images.detach().cpu()  # make sure tensors live on the CPU
    image_grid = utils.make_grid(batch_images)
    print('Noisy labels:', batch_labels)
    plt.imshow(image_grid.numpy().transpose((1, 2, 0)))
    plt.savefig('./mnist_noisy.png')
    plt.close(figure)
| 38.042802
| 112
| 0.638539
| 1,303
| 9,777
| 4.550269
| 0.108979
| 0.071344
| 0.086018
| 0.075898
| 0.842975
| 0.838421
| 0.832855
| 0.825603
| 0.812447
| 0.805026
| 0
| 0.016081
| 0.243122
| 9,777
| 256
| 113
| 38.191406
| 0.785135
| 0.096246
| 0
| 0.757576
| 0
| 0
| 0.074188
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.024242
| 0.030303
| 0.175758
| 0.151515
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e77c2fddef3f63d9023fc2d703cbc3f890882be0
| 178
|
py
|
Python
|
SBaaS_quantification/stage01_quantification_analysis_execute.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_analysis_execute.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_analysis_execute.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
from .stage01_quantification_analysis_io import stage01_quantification_analysis_io
class stage01_quantification_analysis_execute(stage01_quantification_analysis_io):
    """Execution stage for stage-01 quantification analysis.

    Currently an empty subclass: all behavior is inherited unchanged from
    ``stage01_quantification_analysis_io``.  The class exists so callers
    have a dedicated "execute" entry point that can later gain
    execution-specific methods without changing the io base class.
    """
    pass
| 35.6
| 82
| 0.898876
| 20
| 178
| 7.4
| 0.45
| 0.567568
| 0.783784
| 0.628378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.078652
| 178
| 5
| 83
| 35.6
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
e7909d523e89c6b1132fbb3a5bc30f72377fd69b
| 5,023
|
py
|
Python
|
google/ads/googleads/interceptors/response_wrappers.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | 285
|
2018-10-05T16:47:58.000Z
|
2022-03-31T00:58:39.000Z
|
google/ads/googleads/interceptors/response_wrappers.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | 425
|
2018-09-10T13:32:41.000Z
|
2022-03-31T14:50:05.000Z
|
google/ads/googleads/interceptors/response_wrappers.py
|
JakobSteixner/google-ads-python
|
df2b802cc7e78295a4ece21cc7ef3787cd35dab0
|
[
"Apache-2.0"
] | 369
|
2018-11-28T07:01:00.000Z
|
2022-03-28T09:53:22.000Z
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper classes used to modify the behavior of response objects."""
import grpc
from google.ads.googleads import util
class _UnaryStreamWrapper(grpc.Call, grpc.Future):
    """Wraps a unary-stream gRPC call object.

    Almost every method delegates straight to the underlying call.  The
    wrapper adds two behaviors: streamed messages are optionally converted
    from proto-plus to raw protobuf (``use_proto_plus=False``), and stream
    failures are routed through ``failure_handler``, with the exception it
    raises cached on the wrapper for later ``exception()`` calls.
    """

    def __init__(self, underlay_call, failure_handler, use_proto_plus=False):
        super().__init__()
        self._underlay_call = underlay_call
        self._failure_handler = failure_handler
        self._exception = None
        self._use_proto_plus = use_proto_plus

    def initial_metadata(self):
        return self._underlay_call.initial_metadata()

    def trailing_metadata(self):
        # Bug fix: previously delegated to initial_metadata(), returning
        # the wrong metadata set.
        return self._underlay_call.trailing_metadata()

    def code(self):
        return self._underlay_call.code()

    def details(self):
        return self._underlay_call.details()

    def debug_error_string(self):
        return self._underlay_call.debug_error_string()

    def cancelled(self):
        return self._underlay_call.cancelled()

    def running(self):
        return self._underlay_call.running()

    def done(self):
        return self._underlay_call.done()

    def result(self, timeout=None):
        return self._underlay_call.result(timeout=timeout)

    def exception(self, timeout=None):
        # Prefer an exception already captured by the failure handler in
        # __next__; otherwise defer to the underlying call.
        if self._exception:
            return self._exception
        else:
            return self._underlay_call.exception(timeout=timeout)

    def traceback(self, timeout=None):
        return self._underlay_call.traceback(timeout=timeout)

    def add_done_callback(self, fn):
        return self._underlay_call.add_done_callback(fn)

    def add_callback(self, callback):
        return self._underlay_call.add_callback(callback)

    def is_active(self):
        return self._underlay_call.is_active()

    def time_remaining(self):
        return self._underlay_call.time_remaining()

    def cancel(self):
        return self._underlay_call.cancel()

    def __iter__(self):
        return self

    def __next__(self):
        try:
            message = next(self._underlay_call)
            if self._use_proto_plus:
                # By default this message is wrapped by proto-plus
                return message
            else:
                return util.convert_proto_plus_to_protobuf(message)
        except StopIteration:
            # Normal end of the stream; let iteration stop cleanly.
            raise
        except Exception:
            # Give the failure handler a chance to translate the error;
            # cache whatever it raises so exception() can report it.
            try:
                self._failure_handler(self._underlay_call)
            except Exception as e:
                self._exception = e
                raise e
class _UnaryUnaryWrapper(grpc.Call, grpc.Future):
    """Wraps a unary-unary gRPC call object.

    Almost every method delegates straight to the underlying call; the
    wrapper's one added behavior is optionally converting the response
    message from proto-plus to raw protobuf (``use_proto_plus=False``).
    """

    def __init__(self, underlay_call, use_proto_plus=False):
        super().__init__()
        self._underlay_call = underlay_call
        self._use_proto_plus = use_proto_plus
        # Bug fix: exception() reads self._exception, but this class never
        # initialized it, so exception() raised AttributeError.
        self._exception = None

    def initial_metadata(self):
        return self._underlay_call.initial_metadata()

    def trailing_metadata(self):
        # Bug fix: previously delegated to initial_metadata(), returning
        # the wrong metadata set.
        return self._underlay_call.trailing_metadata()

    def code(self):
        return self._underlay_call.code()

    def details(self):
        return self._underlay_call.details()

    def debug_error_string(self):
        return self._underlay_call.debug_error_string()

    def cancelled(self):
        return self._underlay_call.cancelled()

    def running(self):
        return self._underlay_call.running()

    def done(self):
        return self._underlay_call.done()

    def result(self, timeout=None):
        # Bug fix: the timeout argument was previously dropped instead of
        # being forwarded to the underlying call.
        message = self._underlay_call.result(timeout=timeout)
        if self._use_proto_plus:
            return message
        else:
            return util.convert_proto_plus_to_protobuf(message)

    def exception(self, timeout=None):
        if self._exception:
            return self._exception
        else:
            return self._underlay_call.exception(timeout=timeout)

    def traceback(self, timeout=None):
        return self._underlay_call.traceback(timeout=timeout)

    def add_done_callback(self, fn):
        return self._underlay_call.add_done_callback(fn)

    def add_callback(self, callback):
        return self._underlay_call.add_callback(callback)

    def is_active(self):
        return self._underlay_call.is_active()

    def time_remaining(self):
        return self._underlay_call.time_remaining()

    def cancel(self):
        return self._underlay_call.cancel()

    def __iter__(self):
        # NOTE(review): converting ``self`` (the wrapper, not a response
        # message) looks suspicious, but the behavior is preserved from the
        # original; unary-unary calls are not normally iterated — confirm
        # against callers before changing.
        if self._use_proto_plus:
            return self
        else:
            return util.convert_proto_plus_to_protobuf(self)

    def __next__(self):
        return next(self._underlay_call)
| 29.547059
| 77
| 0.674298
| 618
| 5,023
| 5.165049
| 0.208738
| 0.154135
| 0.195489
| 0.213659
| 0.70614
| 0.70614
| 0.699248
| 0.676692
| 0.66416
| 0.638471
| 0
| 0.002111
| 0.245471
| 5,023
| 169
| 78
| 29.721893
| 0.840106
| 0.131993
| 0
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.339286
| false
| 0
| 0.017857
| 0.276786
| 0.741071
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
99c94ed02ca71ab62d32eec6450636834d4fdb7b
| 46,230
|
py
|
Python
|
argocd_python_client/api/project_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | 1
|
2021-11-20T13:37:43.000Z
|
2021-11-20T13:37:43.000Z
|
argocd_python_client/api/project_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
argocd_python_client/api/project_service_api.py
|
RyanSiu1995/argocd-python-client
|
2e8f097fe09f247a46ac70692241a93d1acd076a
|
[
"MIT"
] | null | null | null |
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argocd_python_client.api_client import ApiClient, Endpoint as _Endpoint
from argocd_python_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from argocd_python_client.model.project_global_projects_response import ProjectGlobalProjectsResponse
from argocd_python_client.model.project_project_create_request import ProjectProjectCreateRequest
from argocd_python_client.model.project_project_token_create_request import ProjectProjectTokenCreateRequest
from argocd_python_client.model.project_project_token_response import ProjectProjectTokenResponse
from argocd_python_client.model.project_project_update_request import ProjectProjectUpdateRequest
from argocd_python_client.model.project_sync_windows_response import ProjectSyncWindowsResponse
from argocd_python_client.model.runtime_error import RuntimeError
from argocd_python_client.model.v1_event_list import V1EventList
from argocd_python_client.model.v1alpha1_app_project import V1alpha1AppProject
from argocd_python_client.model.v1alpha1_app_project_list import V1alpha1AppProjectList
class ProjectServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __project_service_create(
self,
body,
**kwargs
):
"""Create a new project # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_create(body, async_req=True)
>>> result = thread.get()
Args:
body (ProjectProjectCreateRequest):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1AppProject
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.project_service_create = _Endpoint(
settings={
'response_type': (V1alpha1AppProject,),
'auth': [],
'endpoint_path': '/api/v1/projects',
'operation_id': 'project_service_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'body',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
(ProjectProjectCreateRequest,),
},
'attribute_map': {
},
'location_map': {
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__project_service_create
)
def __project_service_create_token(
self,
project,
role,
body,
**kwargs
):
"""Create a new project token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_create_token(project, role, body, async_req=True)
>>> result = thread.get()
Args:
project (str):
role (str):
body (ProjectProjectTokenCreateRequest):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ProjectProjectTokenResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project'] = \
project
kwargs['role'] = \
role
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.project_service_create_token = _Endpoint(
settings={
'response_type': (ProjectProjectTokenResponse,),
'auth': [],
'endpoint_path': '/api/v1/projects/{project}/roles/{role}/token',
'operation_id': 'project_service_create_token',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project',
'role',
'body',
],
'required': [
'project',
'role',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project':
(str,),
'role':
(str,),
'body':
(ProjectProjectTokenCreateRequest,),
},
'attribute_map': {
'project': 'project',
'role': 'role',
},
'location_map': {
'project': 'path',
'role': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__project_service_create_token
)
def __project_service_delete(
self,
name,
**kwargs
):
"""Delete deletes a project # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_delete(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.project_service_delete = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/projects/{name}',
'operation_id': 'project_service_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_delete
)
def __project_service_delete_token(
self,
project,
role,
iat,
**kwargs
):
"""Delete a new project token # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_delete_token(project, role, iat, async_req=True)
>>> result = thread.get()
Args:
project (str):
role (str):
iat (str):
Keyword Args:
id (str): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project'] = \
project
kwargs['role'] = \
role
kwargs['iat'] = \
iat
return self.call_with_http_info(**kwargs)
self.project_service_delete_token = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/projects/{project}/roles/{role}/token/{iat}',
'operation_id': 'project_service_delete_token',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project',
'role',
'iat',
'id',
],
'required': [
'project',
'role',
'iat',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project':
(str,),
'role':
(str,),
'iat':
(str,),
'id':
(str,),
},
'attribute_map': {
'project': 'project',
'role': 'role',
'iat': 'iat',
'id': 'id',
},
'location_map': {
'project': 'path',
'role': 'path',
'iat': 'path',
'id': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_delete_token
)
def __project_service_get(
self,
name,
**kwargs
):
"""Get returns a project by name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_get(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1AppProject
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.project_service_get = _Endpoint(
settings={
'response_type': (V1alpha1AppProject,),
'auth': [],
'endpoint_path': '/api/v1/projects/{name}',
'operation_id': 'project_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_get
)
def __project_service_get_global_projects(
self,
name,
**kwargs
):
"""Get returns a virtual project by name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_get_global_projects(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ProjectGlobalProjectsResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.project_service_get_global_projects = _Endpoint(
settings={
'response_type': (ProjectGlobalProjectsResponse,),
'auth': [],
'endpoint_path': '/api/v1/projects/{name}/globalprojects',
'operation_id': 'project_service_get_global_projects',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_get_global_projects
)
def __project_service_get_sync_windows_state(
self,
name,
**kwargs
):
"""GetSchedulesState returns true if there are any active sync syncWindows # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_get_sync_windows_state(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ProjectSyncWindowsResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.project_service_get_sync_windows_state = _Endpoint(
settings={
'response_type': (ProjectSyncWindowsResponse,),
'auth': [],
'endpoint_path': '/api/v1/projects/{name}/syncwindows',
'operation_id': 'project_service_get_sync_windows_state',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_get_sync_windows_state
)
def __project_service_list(
self,
**kwargs
):
"""List returns list of projects # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_list(async_req=True)
>>> result = thread.get()
Keyword Args:
name (str): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1AppProjectList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.project_service_list = _Endpoint(
settings={
'response_type': (V1alpha1AppProjectList,),
'auth': [],
'endpoint_path': '/api/v1/projects',
'operation_id': 'project_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_list
)
def __project_service_list_events(
self,
name,
**kwargs
):
"""ListEvents returns a list of project events # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_list_events(name, async_req=True)
>>> result = thread.get()
Args:
name (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1EventList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['name'] = \
name
return self.call_with_http_info(**kwargs)
self.project_service_list_events = _Endpoint(
settings={
'response_type': (V1EventList,),
'auth': [],
'endpoint_path': '/api/v1/projects/{name}/events',
'operation_id': 'project_service_list_events',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'name',
],
'required': [
'name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'name':
(str,),
},
'attribute_map': {
'name': 'name',
},
'location_map': {
'name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__project_service_list_events
)
def __project_service_update(
self,
project_metadata_name,
body,
**kwargs
):
"""Update updates a project # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_service_update(project_metadata_name, body, async_req=True)
>>> result = thread.get()
Args:
project_metadata_name (str): Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names +optional
body (ProjectProjectUpdateRequest):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1AppProject
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_metadata_name'] = \
project_metadata_name
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.project_service_update = _Endpoint(
settings={
'response_type': (V1alpha1AppProject,),
'auth': [],
'endpoint_path': '/api/v1/projects/{project.metadata.name}',
'operation_id': 'project_service_update',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'project_metadata_name',
'body',
],
'required': [
'project_metadata_name',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_metadata_name':
(str,),
'body':
(ProjectProjectUpdateRequest,),
},
'attribute_map': {
'project_metadata_name': 'project.metadata.name',
},
'location_map': {
'project_metadata_name': 'path',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__project_service_update
)
| 36.690476
| 405
| 0.46472
| 3,911
| 46,230
| 5.236257
| 0.062644
| 0.031642
| 0.025392
| 0.026368
| 0.871478
| 0.852727
| 0.823331
| 0.813077
| 0.796181
| 0.793935
| 0
| 0.003564
| 0.453796
| 46,230
| 1,259
| 406
| 36.719619
| 0.807453
| 0.325806
| 0
| 0.665089
| 1
| 0
| 0.204222
| 0.047794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013018
| false
| 0
| 0.016568
| 0
| 0.042604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41394beffa4a9f21d4da3fb72937c6391adc3dcd
| 124
|
py
|
Python
|
mlprogram/datasets/deepfix/__init__.py
|
HiroakiMikami/mlprogram
|
573e94c567064705fa65267dd83946bf183197de
|
[
"MIT"
] | 9
|
2020-05-24T11:25:01.000Z
|
2022-03-28T15:32:10.000Z
|
mlprogram/datasets/deepfix/__init__.py
|
HiroakiMikami/mlprogram
|
573e94c567064705fa65267dd83946bf183197de
|
[
"MIT"
] | 87
|
2020-05-09T08:56:55.000Z
|
2022-03-31T14:46:45.000Z
|
mlprogram/datasets/deepfix/__init__.py
|
HiroakiMikami/NL2Prog
|
573e94c567064705fa65267dd83946bf183197de
|
[
"MIT"
] | 3
|
2021-02-22T20:38:29.000Z
|
2021-11-11T18:48:44.000Z
|
from mlprogram.datasets.deepfix.download import download # noqa
from mlprogram.datasets.deepfix.lexer import Lexer # noqa
| 41.333333
| 64
| 0.822581
| 16
| 124
| 6.375
| 0.5
| 0.254902
| 0.411765
| 0.54902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 124
| 2
| 65
| 62
| 0.927273
| 0.072581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
419700485da2a4e92294ca1cf2eed322b7875ae2
| 6,466
|
py
|
Python
|
coramin/relaxations/tests/test_relaxations_base.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 11
|
2019-04-03T21:33:29.000Z
|
2022-02-28T06:07:03.000Z
|
coramin/relaxations/tests/test_relaxations_base.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 50
|
2019-04-01T18:48:14.000Z
|
2022-03-04T21:51:27.000Z
|
coramin/relaxations/tests/test_relaxations_base.py
|
dilr/Coramin
|
22187e5f9e1631867c29f981ff6dc035341bd23d
|
[
"BSD-3-Clause"
] | 9
|
2019-03-31T21:29:35.000Z
|
2021-09-02T02:33:40.000Z
|
import unittest
import pyomo.environ as pe
from pyomo.opt import assert_optimal_termination
import coramin
from pyomo.core.base.var import SimpleVar
class TestBaseRelaxation(unittest.TestCase):
def test_push_and_pop_oa_points(self):
m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-2, 1))
m.y = pe.Var()
m.rel = coramin.relaxations.PWXSquaredRelaxation()
m.rel.build(x=m.x, aux_var=m.y)
m.obj = pe.Objective(expr=m.y)
opt = pe.SolverFactory('gurobi_persistent')
opt.set_instance(m)
m.rel.add_persistent_solver(opt)
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, -0.5)
self.assertAlmostEqual(m.y.value, -2)
m.x.value = -1
m.rel.add_cut(keep_cut=True)
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, 0)
self.assertAlmostEqual(m.y.value, -1)
m.rel.push_oa_points()
m.rel.rebuild()
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, 0)
self.assertAlmostEqual(m.y.value, -1)
m.rel.clear_oa_points()
m.rel.rebuild()
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, -0.5)
self.assertAlmostEqual(m.y.value, -2)
m.x.value = -0.5
m.rel.add_cut(keep_cut=True)
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, 0.25)
self.assertAlmostEqual(m.y.value, -0.5)
m.rel.pop_oa_points()
m.rel.rebuild()
res = opt.solve(save_results=False)
assert_optimal_termination(res)
self.assertAlmostEqual(m.x.value, 0)
self.assertAlmostEqual(m.y.value, -1)
def test_push_oa_points_with_key(self):
m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-1, 1))
m.y = pe.Var()
m.c = coramin.relaxations.PWXSquaredRelaxation()
m.c.build(x=m.x, aux_var=m.y)
m.c.add_oa_point(pe.ComponentMap([(m.x, 0)]))
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)])])
m.c.push_oa_points(key='first key')
m.c.add_oa_point(pe.ComponentMap([(m.x, 0.5)]))
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)]),
pe.ComponentMap([(m.x, 0.5)])])
m.c.push_oa_points()
m.c.add_oa_point(pe.ComponentMap([(m.x, -0.5)]))
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)]),
pe.ComponentMap([(m.x, 0.5)]),
pe.ComponentMap([(m.x, -0.5)])])
m.c.push_oa_points(key='second key')
m.c.pop_oa_points(key='first key')
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)])])
m.c.pop_oa_points()
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)]),
pe.ComponentMap([(m.x, 0.5)])])
m.c.pop_oa_points(key='second key')
self.assertEqual(m.c._oa_points, [pe.ComponentMap([(m.x, -1)]),
pe.ComponentMap([(m.x, 1)]),
pe.ComponentMap([(m.x, 0)]),
pe.ComponentMap([(m.x, 0.5)]),
pe.ComponentMap([(m.x, -0.5)])])
def test_push_and_pop_partitions(self):
m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-2, 1))
m.y = pe.Var()
m.rel = coramin.relaxations.PWXSquaredRelaxation()
m.rel.build(x=m.x, aux_var=m.y)
m.obj = pe.Objective(expr=m.y)
self.assertEqual(m.rel._partitions[m.x], [-2, 1])
m.rel.add_partition_point(-1)
m.rel.rebuild()
self.assertEqual(m.rel._partitions[m.x], [-2, -1, 1])
m.rel.push_partitions()
m.rel.rebuild()
self.assertEqual(m.rel._partitions[m.x], [-2, -1, 1])
m.rel.clear_partitions()
m.rel.rebuild()
self.assertEqual(m.rel._partitions[m.x], [-2, 1])
m.rel.add_partition_point(-0.5)
m.rel.rebuild()
self.assertEqual(m.rel._partitions[m.x], [-2, -0.5, 1])
m.rel.pop_partitions()
m.rel.rebuild()
self.assertEqual(m.rel._partitions[m.x], [-2, -1, 1])
def test_push_and_pop_partitions_2(self):
m = pe.ConcreteModel()
m.x = pe.Var(bounds=(-1, 1))
m.y = pe.Var()
m.c = coramin.relaxations.PWXSquaredRelaxation()
m.c.build(x=m.x, aux_var=m.y)
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))
m.x.setlb(0)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [0, 1])]))
m.x.setlb(-1)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))
m.x.value = 0.5
m.c.add_partition_point()
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0.5, 1])]))
m.x.setlb(0)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [0, 0.5, 1])]))
m.x.setlb(-1)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0.5, 1])]))
m.x.setub(0)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 0])]))
m.x.setub(1)
m.c.rebuild()
self.assertEqual(m.c._partitions, pe.ComponentMap([(m.x, [-1, 1])]))
| 40.666667
| 81
| 0.520414
| 870
| 6,466
| 3.741379
| 0.087356
| 0.039324
| 0.16129
| 0.172043
| 0.894931
| 0.867281
| 0.832258
| 0.832258
| 0.832258
| 0.832258
| 0
| 0.025676
| 0.313331
| 6,466
| 158
| 82
| 40.924051
| 0.707432
| 0
| 0
| 0.702128
| 0
| 0
| 0.008506
| 0
| 0
| 0
| 0
| 0
| 0.276596
| 1
| 0.028369
| false
| 0
| 0.035461
| 0
| 0.070922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b5e096de3629b68d62849f091f5397efc3504d2
| 14,870
|
py
|
Python
|
psono/administration/tests/yubikey.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 48
|
2018-04-19T15:50:58.000Z
|
2022-01-23T15:58:11.000Z
|
psono/administration/tests/yubikey.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 9
|
2018-09-13T14:56:18.000Z
|
2020-01-17T16:44:33.000Z
|
psono/administration/tests/yubikey.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 11
|
2019-09-20T11:53:47.000Z
|
2021-07-18T22:41:31.000Z
|
from django.urls import reverse
from django.conf import settings
from django.contrib.auth.hashers import make_password
from rest_framework import status
import random
import string
import binascii
import os
from restapi import models
from restapi.tests.base import APITestCaseExtended
class ReadYubikeyTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
self.yubikey_otp = models.Yubikey_OTP.objects.create(
user=self.test_user_obj,
title= 'My TItle',
yubikey_id = '1234'
)
def test_read_yubikey(self):
"""
Tests GET method on yubikey
"""
url = reverse('admin_yubikey_otp')
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class UpdateYubikeyTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
self.yubikey_otp = models.Yubikey_OTP.objects.create(
user=self.test_user_obj,
title= 'My TItle',
yubikey_id = '1234'
)
def test_update_yubikey(self):
"""
Tests PUT method on yubikey
"""
url = reverse('admin_yubikey_otp')
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.put(url, data)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class CreateYubikeyTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
self.yubikey_otp = models.Yubikey_OTP.objects.create(
user=self.test_user_obj,
title= 'My TItle',
yubikey_id = '1234'
)
def test_create_yubikey(self):
"""
Tests POST method on yubikey
"""
url = reverse('admin_yubikey_otp')
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class DeleteYubikeyOTPTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
self.yubikey_otp = models.Yubikey_OTP.objects.create(
user=self.test_user_obj,
title= 'My TItle',
yubikey_id = '1234'
)
def test_delete_yubikey_otp_success(self):
"""
Tests DELETE method on yubikey_otp
"""
url = reverse('admin_yubikey_otp')
data = {
'yubikey_otp_id': self.yubikey_otp.id
}
self.client.force_authenticate(user=self.admin)
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(models.Yubikey_OTP.objects.all().count(), 0)
def test_delete_yubikey_otp_failure_no_admin(self):
"""
Tests DELETE method on yubikey_otp without being an admin
"""
url = reverse('admin_yubikey_otp')
data = {
'yubikey_otp_id': self.yubikey_otp.id
}
self.client.force_authenticate(user=self.test_user_obj)
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_delete_yubikey_otp_failure_no_yubikey_otp_id(self):
"""
Tests DELETE method on yubikey_otp without a yubikey_otp id
"""
url = reverse('admin_yubikey_otp')
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_delete_yubikey_otp_failure_yubikey_otp_id_not_exist(self):
"""
Tests DELETE method on yubikey_otp with a yubikey_otp id that does not exist
"""
url = reverse('admin_yubikey_otp')
data = {
'yubikey_otp_id': '499d3c84-e8ae-4a6b-a4c2-43c79beb069a'
}
self.client.force_authenticate(user=self.admin)
response = self.client.delete(url, data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| 43.607038
| 114
| 0.682582
| 1,841
| 14,870
| 5.175448
| 0.070071
| 0.125105
| 0.057095
| 0.080605
| 0.936608
| 0.933774
| 0.930626
| 0.916982
| 0.908585
| 0.904282
| 0
| 0.025577
| 0.221722
| 14,870
| 340
| 115
| 43.735294
| 0.797719
| 0.021116
| 0
| 0.814672
| 0
| 0
| 0.052595
| 0.020288
| 0
| 0
| 0
| 0
| 0.030888
| 1
| 0.042471
| false
| 0.034749
| 0.03861
| 0
| 0.096525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b927a05c9e2afd397f9202ec4fff6d165f82583
| 121
|
py
|
Python
|
SEM6/elective.py
|
maruf212000/Python_Assignment_3
|
dfedb06ea5f73475c51467577622cb63f8f3888e
|
[
"MIT"
] | null | null | null |
SEM6/elective.py
|
maruf212000/Python_Assignment_3
|
dfedb06ea5f73475c51467577622cb63f8f3888e
|
[
"MIT"
] | null | null | null |
SEM6/elective.py
|
maruf212000/Python_Assignment_3
|
dfedb06ea5f73475c51467577622cb63f8f3888e
|
[
"MIT"
] | null | null | null |
def ror():
    """Print the Ruby on Rails self-introduction line."""
    message = "I am Ruby on Rails"
    print(message)
def joomla():
    """Print the Joomla self-introduction line."""
    message = "I am Joomla"
    print(message)
def drupal():
    """Print the Drupal self-introduction line."""
    message = "I am Drupal"
    print(message)
| 17.285714
| 31
| 0.586777
| 20
| 121
| 3.55
| 0.5
| 0.253521
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.239669
| 121
| 6
| 32
| 20.166667
| 0.771739
| 0
| 0
| 0
| 0
| 0
| 0.330579
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
6be5aa35ca493b5ded0e7f442001a63dd7d97f40
| 923
|
py
|
Python
|
descriptors/descriptors/number.py
|
flaviomilan/python-advanced-topics
|
e692dc2b0e4001df24216f1dd2e926b8c54e2ffa
|
[
"MIT"
] | null | null | null |
descriptors/descriptors/number.py
|
flaviomilan/python-advanced-topics
|
e692dc2b0e4001df24216f1dd2e926b8c54e2ffa
|
[
"MIT"
] | null | null | null |
descriptors/descriptors/number.py
|
flaviomilan/python-advanced-topics
|
e692dc2b0e4001df24216f1dd2e926b8c54e2ffa
|
[
"MIT"
] | null | null | null |
from weakref import WeakKeyDictionary
class Positive:
    """Data descriptor that rejects negative values on assignment.

    Values are stored per owning instance in a ``WeakKeyDictionary`` so
    entries vanish automatically when the owner is garbage collected
    (owner instances must therefore be weak-referenceable).

    NOTE: despite the name, zero is accepted — only ``value < 0`` raises.
    """

    def __init__(self) -> None:
        self._instance_data = WeakKeyDictionary()

    def __get__(self, instance, owner):
        # Fix: class-level access (e.g. Owner.attr) previously keyed the
        # WeakKeyDictionary with None and crashed; return the descriptor
        # itself, per the standard descriptor convention.
        if instance is None:
            return self
        # Raises KeyError if the attribute was never set on this instance.
        return self._instance_data[instance]

    def __set__(self, instance, value):
        if value < 0:
            raise ValueError(f"Value {value} is not positive")
        self._instance_data[instance] = value

    def __delete__(self, instance):
        # Raises KeyError if the attribute is not set, like the original.
        self._instance_data.pop(instance)
class Negative:
    """Data descriptor that only accepts strictly negative values.

    Values are stored per owning instance in a ``WeakKeyDictionary`` so
    entries vanish automatically when the owner is garbage collected
    (owner instances must therefore be weak-referenceable).

    Assigning any ``value >= 0`` (zero included) raises ``ValueError``.
    """

    def __init__(self) -> None:
        self._instance_data = WeakKeyDictionary()

    def __get__(self, instance, owner):
        # Fix: class-level access (e.g. Owner.attr) previously keyed the
        # WeakKeyDictionary with None and crashed; return the descriptor
        # itself, per the standard descriptor convention.
        if instance is None:
            return self
        # Raises KeyError if the attribute was never set on this instance.
        return self._instance_data[instance]

    def __set__(self, instance, value):
        if value >= 0:
            raise ValueError(f"Value {value} is not negative")
        self._instance_data[instance] = value

    def __delete__(self, instance):
        # Raises KeyError if the attribute is not set, like the original.
        self._instance_data.pop(instance)
| 25.638889
| 62
| 0.654388
| 104
| 923
| 5.346154
| 0.259615
| 0.302158
| 0.230216
| 0.172662
| 0.863309
| 0.863309
| 0.863309
| 0.863309
| 0.863309
| 0.863309
| 0
| 0.002894
| 0.251354
| 923
| 35
| 63
| 26.371429
| 0.801737
| 0
| 0
| 0.695652
| 0
| 0
| 0.062839
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.347826
| false
| 0
| 0.043478
| 0.086957
| 0.565217
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
d462010b33847ebbf3dda32d67fb204f33f431a0
| 66
|
py
|
Python
|
__init__.py
|
russelljjarvis/readabilityinscience
|
353d79f11f2380fd4872242397a255a4b1da675c
|
[
"MIT"
] | null | null | null |
__init__.py
|
russelljjarvis/readabilityinscience
|
353d79f11f2380fd4872242397a255a4b1da675c
|
[
"MIT"
] | null | null | null |
__init__.py
|
russelljjarvis/readabilityinscience
|
353d79f11f2380fd4872242397a255a4b1da675c
|
[
"MIT"
] | null | null | null |
import readabilityinscience
import readabilityinscience.functions
| 22
| 37
| 0.924242
| 5
| 66
| 12.2
| 0.6
| 0.852459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 66
| 2
| 38
| 33
| 0.983871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d4812f0e37e0394b142f94d66fc9e32f4185454e
| 48,390
|
py
|
Python
|
optimizer.py
|
ManUtdMoon/Safe_Reachability_RL
|
e44a5666b8b2ec45e09a70686becb72933c33ef5
|
[
"MIT"
] | null | null | null |
optimizer.py
|
ManUtdMoon/Safe_Reachability_RL
|
e44a5666b8b2ec45e09a70686becb72933c33ef5
|
[
"MIT"
] | null | null | null |
optimizer.py
|
ManUtdMoon/Safe_Reachability_RL
|
e44a5666b8b2ec45e09a70686becb72933c33ef5
|
[
"MIT"
] | 1
|
2022-03-06T09:46:40.000Z
|
2022-03-06T09:46:40.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# =====================================
# @Time : 2020/9/1
# @Author : Yang Guan (Tsinghua Univ.)
# @FileName: optimizer.py
# =====================================
import logging
import os
import queue
import random
import threading
import ray
import tensorflow as tf
import numpy as np
from utils.misc import judge_is_nan, TimerStat
from utils.misc import random_choice_with_index
from utils.task_pool import TaskPool
from queue import Empty
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
WORKER_DEPTH = 2
BUFFER_DEPTH = 4
LEARNER_QUEUE_MAX_SIZE = 192
class UpdateThread(threading.Thread):
    """Background thread that updates the local model from gradient list.

    Consumes (gradient, learner_stats) pairs from ``self.inqueue`` (fed by
    the optimizer's learning loop), applies them to the local worker, and
    periodically logs TensorBoard summaries, triggers remote evaluation,
    and saves weights.
    """

    def __init__(self, workers, evaluator, args, optimizer_stats):
        """Set up queues, timers and the TensorBoard writer.

        Args:
            workers: dict with at least a 'local_worker' entry.
            evaluator: remote (ray) evaluator actor.
            args: parsed run configuration (queue sizes, intervals, dirs).
            optimizer_stats: shared dict this thread updates in place.
        """
        threading.Thread.__init__(self)
        self.args = args
        self.workers = workers
        self.local_worker = workers['local_worker']
        self.evaluator = evaluator
        self.optimizer_stats = optimizer_stats
        # Bounded queue of (grads, learner_stats) produced by the optimizer.
        self.inqueue = queue.Queue(maxsize=self.args.grads_queue_size)
        self.stopped = False
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.iteration = 0
        self.update_timer = TimerStat()
        self.grad_queue_get_timer = TimerStat()
        self.grad_apply_timer = TimerStat()
        # How many consecutive steps reused the same cached gradient.
        self.grad_reuse = 0
        self.grad = None
        self.learner_stats = None
        self.writer = tf.summary.create_file_writer(self.log_dir + '/optimizer')
        # Becomes True once iteration passes args.penalty_start; read by
        # OffPolicyAsyncOptimizerWithCost to toggle dual-ascent gradients.
        self.ascent = False

    def run(self):
        """Thread main loop: step until ``stopped`` is set."""
        while not self.stopped:
            with self.update_timer:
                self.step()
                self.update_timer.push_units_processed(1)

    def step(self):
        """One update cycle: fetch grads, apply them, log/evaluate/save."""
        self.optimizer_stats.update(dict(update_queue_size=self.inqueue.qsize(),
                                         update_time=self.update_timer.mean,
                                         update_throughput=self.update_timer.mean_throughput,
                                         grad_queue_get_time=self.grad_queue_get_timer.mean,
                                         grad_apply_timer=self.grad_apply_timer.mean,
                                         grad_reuse=self.grad_reuse
                                         ))
        # fetch grad: block only on the very first fetch; afterwards reuse a
        # stale gradient up to args.grads_max_reuse times before blocking
        # again (with a 30s timeout) on an empty queue.
        with self.grad_queue_get_timer:
            try:
                block = True if self.grad is None else False
                self.grad, self.learner_stats = self.inqueue.get(block=block)
                self.grad_reuse = 0
            except Empty:
                if self.grad_reuse < self.args.grads_max_reuse:
                    self.grad_reuse += 1
                else:
                    self.grad, self.learner_stats = self.inqueue.get(timeout=30)
                    self.grad_reuse = 0
        # apply grad
        with self.grad_apply_timer:
            # try:
            #     judge_is_nan(self.grad)
            # except ValueError:
            #     self.grad = [tf.zeros_like(grad) for grad in self.grad]
            #     logger.info('Grad is nan!, zero it')
            self.local_worker.apply_gradients(self.iteration, self.grad)
        # Switch on dual ascent once past the penalty warm-up period.
        if self.iteration > self.args.penalty_start:
            self.ascent = True
        # log
        if self.iteration % self.args.log_interval == 0:
            logger.info('updating {} in total'.format(self.iteration))
            logger.info('sampling {} in total'.format(self.optimizer_stats['num_sampled_steps']))
            with self.writer.as_default():
                for key, val in self.learner_stats.items():
                    if not isinstance(val, list):
                        if not isinstance(val, np.ndarray):
                            tf.summary.scalar('optimizer/learner_stats/scalar/{}'.format(key), val, step=self.iteration)
                        else:
                            tf.summary.histogram('optimizer/learner_stats/distribution/{}'.format(key), val, step=self.iteration)
                    else:
                        assert isinstance(val, list)
                        for i, v in enumerate(val):
                            # NOTE(review): this checks isinstance(val, ...)
                            # (the list) rather than the element v, so the
                            # histogram branch is unreachable here — looks
                            # like a v/val typo; confirm intended behavior.
                            if not isinstance(val, np.ndarray):
                                tf.summary.scalar('optimizer/learner_stats/list/{}/{}'.format(key, i), v, step=self.iteration)
                            else:
                                tf.summary.histogram('optimizer/learner_stats/list/{}/{}'.format(key, i), v, step=self.iteration)
                for key, val in self.optimizer_stats.items():
                    tf.summary.scalar('optimizer/{}'.format(key), val, step=self.iteration)
                self.writer.flush()
        # evaluate
        if self.iteration % self.args.eval_interval == 0:
            self.evaluator.set_weights.remote(self.local_worker.get_weights())
            if self.args.obs_ptype == 'normalize' or self.args.rew_ptype == 'normalize':
                self.evaluator.set_ppc_params.remote(self.local_worker.get_ppc_params())
            self.evaluator.run_evaluation.remote(self.iteration)
        # save
        if self.iteration % self.args.save_interval == 0:
            self.local_worker.save_weights(self.model_dir, self.iteration)
            self.workers['remote_workers'][0].save_ppc_params.remote(self.model_dir)
        self.iteration += 1
class UpdateThread4RewShaping(threading.Thread):
    """Background thread that updates the local model from gradient list.

    Reward-shaping variant: identical to ``UpdateThread`` except that it
    has no ``ascent`` flag and never checks ``args.penalty_start``.
    NOTE(review): near-duplicate of UpdateThread — consider refactoring
    into a shared base class.
    """

    def __init__(self, workers, evaluator, args, optimizer_stats):
        """Set up queues, timers and the TensorBoard writer (see UpdateThread)."""
        threading.Thread.__init__(self)
        self.args = args
        self.workers = workers
        self.local_worker = workers['local_worker']
        self.evaluator = evaluator
        self.optimizer_stats = optimizer_stats
        # Bounded queue of (grads, learner_stats) produced by the optimizer.
        self.inqueue = queue.Queue(maxsize=self.args.grads_queue_size)
        self.stopped = False
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.iteration = 0
        self.update_timer = TimerStat()
        self.grad_queue_get_timer = TimerStat()
        self.grad_apply_timer = TimerStat()
        # How many consecutive steps reused the same cached gradient.
        self.grad_reuse = 0
        self.grad = None
        self.learner_stats = None
        self.writer = tf.summary.create_file_writer(self.log_dir + '/optimizer')

    def run(self):
        """Thread main loop: step until ``stopped`` is set."""
        while not self.stopped:
            with self.update_timer:
                self.step()
                self.update_timer.push_units_processed(1)

    def step(self):
        """One update cycle: fetch grads, apply them, log/evaluate/save."""
        self.optimizer_stats.update(dict(update_queue_size=self.inqueue.qsize(),
                                         update_time=self.update_timer.mean,
                                         update_throughput=self.update_timer.mean_throughput,
                                         grad_queue_get_time=self.grad_queue_get_timer.mean,
                                         grad_apply_timer=self.grad_apply_timer.mean,
                                         grad_reuse=self.grad_reuse
                                         ))
        # fetch grad: block only on the very first fetch; afterwards reuse a
        # stale gradient up to args.grads_max_reuse times before blocking
        # again (with a 30s timeout) on an empty queue.
        with self.grad_queue_get_timer:
            try:
                block = True if self.grad is None else False
                self.grad, self.learner_stats = self.inqueue.get(block=block)
                self.grad_reuse = 0
            except Empty:
                if self.grad_reuse < self.args.grads_max_reuse:
                    self.grad_reuse += 1
                else:
                    self.grad, self.learner_stats = self.inqueue.get(timeout=30)
                    self.grad_reuse = 0
        # apply grad
        with self.grad_apply_timer:
            # try:
            #     judge_is_nan(self.grad)
            # except ValueError:
            #     self.grad = [tf.zeros_like(grad) for grad in self.grad]
            #     logger.info('Grad is nan!, zero it')
            self.local_worker.apply_gradients(self.iteration, self.grad)
        # log
        if self.iteration % self.args.log_interval == 0:
            logger.info('updating {} in total'.format(self.iteration))
            logger.info('sampling {} in total'.format(self.optimizer_stats['num_sampled_steps']))
            with self.writer.as_default():
                for key, val in self.learner_stats.items():
                    if not isinstance(val, list):
                        if not isinstance(val, np.ndarray):
                            tf.summary.scalar('optimizer/learner_stats/scalar/{}'.format(key), val, step=self.iteration)
                        else:
                            tf.summary.histogram('optimizer/learner_stats/distribution/{}'.format(key), val, step=self.iteration)
                    else:
                        assert isinstance(val, list)
                        for i, v in enumerate(val):
                            # NOTE(review): checks isinstance(val, ...) (the
                            # list) rather than the element v — the histogram
                            # branch is unreachable; likely a v/val typo.
                            if not isinstance(val, np.ndarray):
                                tf.summary.scalar('optimizer/learner_stats/list/{}/{}'.format(key, i), v, step=self.iteration)
                            else:
                                tf.summary.histogram('optimizer/learner_stats/list/{}/{}'.format(key, i), v, step=self.iteration)
                for key, val in self.optimizer_stats.items():
                    tf.summary.scalar('optimizer/{}'.format(key), val, step=self.iteration)
                self.writer.flush()
        # evaluate
        if self.iteration % self.args.eval_interval == 0:
            self.evaluator.set_weights.remote(self.local_worker.get_weights())
            if self.args.obs_ptype == 'normalize' or self.args.rew_ptype == 'normalize':
                self.evaluator.set_ppc_params.remote(self.local_worker.get_ppc_params())
            self.evaluator.run_evaluation.remote(self.iteration)
        # save
        if self.iteration % self.args.save_interval == 0:
            self.local_worker.save_weights(self.model_dir, self.iteration)
            self.workers['remote_workers'][0].save_ppc_params.remote(self.model_dir)
        self.iteration += 1
class OffPolicyAsyncOptimizer(object):
    # Orchestrates three asynchronous pipelines over ray actors:
    # workers sample -> replay buffers store/replay -> learners compute
    # gradients -> UpdateThread applies them to the local worker.

    def __init__(self, workers, learners, replay_buffers, evaluator, args):
        """Initialize an off-policy async optimizers.

        Arguments:
            workers (dict): {local worker, remote workers (list)>=0}
            learners (list): list of remote learners, len >= 1
            replay_buffers (list): list of replay buffers, len >= 1
        """
        self.args = args
        self.workers = workers
        self.local_worker = self.workers['local_worker']
        self.learners = learners
        # Bounded queue of (replay_buffer, samples) pairs awaiting a learner.
        self.learner_queue = queue.Queue(LEARNER_QUEUE_MAX_SIZE)
        self.replay_buffers = replay_buffers
        self.evaluator = evaluator
        self.num_sampled_steps = 0
        # NOTE(review): never incremented in this class (the increment in
        # step() is commented out), so cost_rate is always 0 here.
        self.num_sampled_costs = 0
        self.iteration = 0
        self.num_samples_dropped = 0
        self.num_grads_dropped = 0
        self.optimizer_steps = 0
        self.timers = {k: TimerStat() for k in ["sampling_timer", "replay_timer",
                                                "learning_timer"]}
        self.stats = {}
        # Background thread that applies gradients; shares self.stats.
        self.update_thread = UpdateThread(self.workers, self.evaluator, self.args,
                                          self.stats)
        self.update_thread.start()
        self.max_weight_sync_delay = self.args.max_weight_sync_delay
        self.steps_since_update = {}
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.sample_tasks = TaskPool()
        self._set_workers()
        # fill buffer to replay starts: block until every replay buffer has
        # at least args.replay_starts transitions.
        logger.info('start filling the replay')
        while not all([l >= self.args.replay_starts for l in
                       ray.get([rb.__len__.remote() for rb in self.replay_buffers])]):
            for worker, objID in list(self.sample_tasks.completed()):
                sample_batch, count = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.sample_tasks.add(worker, worker.sample_with_count.remote())
        logger.info('end filling the replay')
        self.replay_tasks = TaskPool()
        self._set_buffers()
        self.learn_tasks = TaskPool()
        self._set_learners()
        logger.info('Optimizer initialized')

    def get_stats(self):
        """Refresh and return the shared stats dict (counters + timer means)."""
        # Safe: num_sampled_steps > 0 after the replay-fill loop in __init__.
        cost_rate = self.num_sampled_costs/self.num_sampled_steps
        self.stats.update(dict(num_sampled_steps=self.num_sampled_steps,
                               num_sampled_costs=self.num_sampled_costs,
                               cost_rate=cost_rate,
                               iteration=self.iteration,
                               optimizer_steps=self.optimizer_steps,
                               num_samples_dropped=self.num_samples_dropped,
                               num_grads_dropped=self.num_grads_dropped,
                               learner_queue_size=self.learner_queue.qsize(),
                               sampling_time=self.timers['sampling_timer'].mean,
                               replay_time=self.timers["replay_timer"].mean,
                               learning_time=self.timers['learning_timer'].mean
                               )
                          )
        return self.stats

    def _set_workers(self):
        """Push initial weights to remote workers and queue WORKER_DEPTH sample tasks each."""
        weights = self.local_worker.get_weights()
        for worker in self.workers['remote_workers']:
            worker.set_weights.remote(weights)
            self.steps_since_update[worker] = 0
            for _ in range(WORKER_DEPTH):
                self.sample_tasks.add(worker, worker.sample_with_count.remote())

    def _set_buffers(self):
        """Queue BUFFER_DEPTH replay tasks per buffer."""
        for rb in self.replay_buffers:
            for _ in range(BUFFER_DEPTH):
                self.replay_tasks.add(rb, rb.replay.remote())

    def _set_learners(self):
        """Push weights (and optionally ppc params) to learners and start one gradient task each."""
        weights = self.local_worker.get_weights()
        ppc_params = self.workers['remote_workers'][0].get_ppc_params.remote()
        for learner in self.learners:
            learner.set_weights.remote(weights)
            if self.args.obs_ptype == 'normalize' or \
                    self.args.rew_ptype == 'normalize':
                learner.set_ppc_params.remote(ppc_params)
            rb, _ = random_choice_with_index(self.replay_buffers)
            samples = ray.get(rb.replay.remote())
            # Last element of samples is passed separately — presumably
            # buffer indexes/weights for priority replay; verify against
            # the learner's compute_gradient signature.
            self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                          self.local_worker.iteration))

    def step(self):
        """One optimizer tick: harvest finished sample/replay/learn tasks and resubmit them."""
        assert self.update_thread.is_alive()
        assert len(self.workers['remote_workers']) > 0
        weights = None
        ppc_params = None
        # sampling: store finished batches and periodically re-sync worker weights.
        with self.timers['sampling_timer']:
            for worker, objID in self.sample_tasks.completed():
                sample_batch, count = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                # self.num_sampled_costs += count_costs
                self.steps_since_update[worker] += count
                ppc_params = worker.get_ppc_params.remote()
                if self.steps_since_update[worker] >= self.max_weight_sync_delay:
                    # judge_is_nan(self.local_worker.policy_with_value.policy.trainable_weights)
                    if weights is None:
                        # ray.put once; reused for every stale worker this tick.
                        weights = ray.put(self.local_worker.get_weights())
                    worker.set_weights.remote(weights)
                    self.steps_since_update[worker] = 0
                self.sample_tasks.add(worker, worker.sample_with_count.remote())
        # replay: move finished replay batches into the learner queue.
        with self.timers["replay_timer"]:
            for rb, replay in self.replay_tasks.completed():
                self.replay_tasks.add(rb, rb.replay.remote())
                if self.learner_queue.full():
                    self.num_samples_dropped += 1
                else:
                    samples = ray.get(replay)
                    self.learner_queue.put((rb, samples))
        # learning: collect gradients, resubmit learners, forward grads to UpdateThread.
        with self.timers['learning_timer']:
            for learner, objID in self.learn_tasks.completed():
                grads = ray.get(objID)
                learner_stats = ray.get(learner.get_stats.remote())
                if self.args.buffer_type == 'priority':
                    info_for_buffer = ray.get(learner.get_info_for_buffer.remote())
                    info_for_buffer['rb'].update_priorities.remote(info_for_buffer['indexes'],
                                                                   info_for_buffer['td_error'])
                rb, samples = self.learner_queue.get(block=False)
                if ppc_params and \
                        (self.args.obs_ptype == 'normalize' or self.args.rew_ptype == 'normalize'):
                    learner.set_ppc_params.remote(ppc_params)
                    self.local_worker.set_ppc_params(ppc_params)
                if weights is None:
                    weights = ray.put(self.local_worker.get_weights())
                learner.set_weights.remote(weights)
                self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                              self.local_worker.iteration))
                # NOTE(review): the counter is bumped when the queue is full,
                # but the following put() still blocks until space frees up —
                # the grads are not actually dropped; confirm intent.
                if self.update_thread.inqueue.full():
                    self.num_grads_dropped += 1
                self.update_thread.inqueue.put([grads, learner_stats])
        self.iteration = self.update_thread.iteration
        self.optimizer_steps += 1
        self.get_stats()

    def stop(self):
        """Signal the background update thread to exit after its current step."""
        self.update_thread.stopped = True
class OffPolicyAsyncOptimizerWithCost(object):
    # Cost-aware variant of OffPolicyAsyncOptimizer: additionally tracks
    # sampled constraint costs, supports 'priority_cost' buffers, seeds
    # RNGs, and toggles dual-ascent gradient computation once the
    # UpdateThread raises its ascent flag.

    def __init__(self, workers, learners, replay_buffers, evaluator, args):
        """Initialize an off-policy async optimizers.

        Arguments:
            workers (dict): {local worker, remote workers (list)>=0}
            learners (list): list of remote learners, len >= 1
            replay_buffers (list): list of replay buffers, len >= 1
        """
        self.args = args
        if isinstance(self.args.random_seed, int):
            self.set_seed(self.args.random_seed)
        self.workers = workers
        self.local_worker = self.workers['local_worker']
        self.learners = learners
        # Bounded queue of (replay_buffer, samples) pairs awaiting a learner.
        self.learner_queue = queue.Queue(LEARNER_QUEUE_MAX_SIZE)
        self.replay_buffers = replay_buffers
        self.evaluator = evaluator
        self.num_sampled_steps = 0
        self.num_sampled_costs = 0
        self.iteration = 0
        self.num_samples_dropped = 0
        self.num_grads_dropped = 0
        self.optimizer_steps = 0
        self.timers = {k: TimerStat() for k in ["sampling_timer", "replay_timer",
                                                "learning_timer"]}
        self.stats = {}
        # Background thread that applies gradients; its `ascent` flag is
        # read in step() to enable dual-ascent gradient computation.
        self.update_thread = UpdateThread(self.workers, self.evaluator, self.args,
                                          self.stats)
        self.update_thread.start()
        self.max_weight_sync_delay = self.args.max_weight_sync_delay
        self.steps_since_update = {}
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.sample_tasks = TaskPool()
        self._set_workers()
        # fill buffer to replay starts: block until every replay buffer has
        # at least args.replay_starts transitions.
        logger.info('start filling the replay')
        while not all([l >= self.args.replay_starts for l in
                       ray.get([rb.__len__.remote() for rb in self.replay_buffers])]):
            for worker, objID in list(self.sample_tasks.completed()):
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())
        logger.info('end filling the replay')
        self.replay_tasks = TaskPool()
        self._set_buffers()
        self.learn_tasks = TaskPool()
        self._set_learners()
        logger.info('Optimizer initialized')

    def set_seed(self, seed):
        """Seed TensorFlow, python random and NumPy for reproducibility."""
        tf.random.set_seed(seed)
        random.seed(seed)
        np.random.seed(seed)

    def get_stats(self):
        """Refresh and return the shared stats dict (counters + timer means)."""
        # Safe: num_sampled_steps > 0 after the replay-fill loop in __init__.
        cost_rate = self.num_sampled_costs/self.num_sampled_steps
        self.stats.update(dict(num_sampled_steps=self.num_sampled_steps,
                               num_sampled_costs=self.num_sampled_costs,
                               cost_rate=cost_rate,
                               iteration=self.iteration,
                               optimizer_steps=self.optimizer_steps,
                               num_samples_dropped=self.num_samples_dropped,
                               num_grads_dropped=self.num_grads_dropped,
                               learner_queue_size=self.learner_queue.qsize(),
                               sampling_time=self.timers['sampling_timer'].mean,
                               replay_time=self.timers["replay_timer"].mean,
                               learning_time=self.timers['learning_timer'].mean
                               )
                          )
        return self.stats

    def _set_workers(self):
        """Push initial weights to remote workers and queue WORKER_DEPTH sample tasks each."""
        weights = self.local_worker.get_weights()
        for worker in self.workers['remote_workers']:
            worker.set_weights.remote(weights)
            self.steps_since_update[worker] = 0
            for _ in range(WORKER_DEPTH):
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())

    def _set_buffers(self):
        """Queue BUFFER_DEPTH replay tasks per buffer."""
        for rb in self.replay_buffers:
            for _ in range(BUFFER_DEPTH):
                self.replay_tasks.add(rb, rb.replay.remote())

    def _set_learners(self):
        """Push weights (and optionally ppc params) to learners and start one gradient task each."""
        weights = self.local_worker.get_weights()
        ppc_params = self.workers['remote_workers'][0].get_ppc_params.remote()
        for learner in self.learners:
            learner.set_weights.remote(weights)
            if self.args.obs_ptype == 'normalize' or \
                    self.args.rew_ptype == 'normalize':
                learner.set_ppc_params.remote(ppc_params)
            rb, _ = random_choice_with_index(self.replay_buffers)
            samples = ray.get(rb.replay.remote())
            self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                          self.local_worker.iteration))

    def step(self):
        """One optimizer tick: harvest finished sample/replay/learn tasks and resubmit them."""
        assert self.update_thread.is_alive()
        assert len(self.workers['remote_workers']) > 0
        weights = None
        ppc_params = None
        # sampling: store finished batches, accumulate step/cost counts,
        # periodically re-sync worker weights.
        with self.timers['sampling_timer']:
            for worker, objID in self.sample_tasks.completed():
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.steps_since_update[worker] += count
                ppc_params = worker.get_ppc_params.remote()
                if self.steps_since_update[worker] >= self.max_weight_sync_delay:
                    # judge_is_nan(self.local_worker.policy_with_value.policy.trainable_weights)
                    if weights is None:
                        weights = ray.put(self.local_worker.get_weights())
                    worker.set_weights.remote(weights)
                    self.steps_since_update[worker] = 0
                # NOTE(review): __init__/_set_workers submit
                # random_sample_with_count but this resubmits
                # sample_with_count, whose result is unpacked above as a
                # 3-tuple — verify both remotes return the same shape.
                self.sample_tasks.add(worker, worker.sample_with_count.remote())
        # replay: move finished replay batches into the learner queue.
        with self.timers["replay_timer"]:
            for rb, replay in self.replay_tasks.completed():
                self.replay_tasks.add(rb, rb.replay.remote())
                if self.learner_queue.full():
                    self.num_samples_dropped += 1
                else:
                    samples = ray.get(replay)
                    self.learner_queue.put((rb, samples))
        # learning: collect gradients, update buffer priorities, resubmit
        # learners (with or without dual ascent), forward grads.
        with self.timers['learning_timer']:
            for learner, objID in self.learn_tasks.completed():
                grads = ray.get(objID)
                learner_stats = ray.get(learner.get_stats.remote())
                if self.args.buffer_type == 'priority':
                    info_for_buffer = ray.get(learner.get_info_for_buffer.remote())
                    info_for_buffer['rb'].update_priorities.remote(info_for_buffer['indexes'],
                                                                   info_for_buffer['td_error'])
                if self.args.buffer_type == 'priority_cost':
                    info_for_buffer = ray.get(learner.get_info_for_buffer.remote())
                    info_for_buffer['rb'].update_priorities.remote(info_for_buffer['indexes'],
                                                                   info_for_buffer['cost_td_error'])
                rb, samples = self.learner_queue.get(block=False)
                if ppc_params and \
                        (self.args.obs_ptype == 'normalize' or self.args.rew_ptype == 'normalize'):
                    learner.set_ppc_params.remote(ppc_params)
                    self.local_worker.set_ppc_params(ppc_params)
                if weights is None:
                    weights = ray.put(self.local_worker.get_weights())
                learner.set_weights.remote(weights)
                # Ascent flag is flipped by UpdateThread once past
                # args.penalty_start iterations.
                if self.update_thread.ascent:
                    # logger.info('Start dual ascent')
                    self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                                  self.local_worker.iteration,
                                                                                  ascent=True))
                else:
                    self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                                  self.local_worker.iteration,
                                                                                  ascent=False))
                # todo: remove ascent compute
                # NOTE(review): counter bumped when full, but the following
                # put() still blocks — grads are not actually dropped.
                if self.update_thread.inqueue.full():
                    self.num_grads_dropped += 1
                self.update_thread.inqueue.put([grads, learner_stats])
        self.iteration = self.update_thread.iteration
        self.optimizer_steps += 1
        self.get_stats()

    def stop(self):
        """Signal the background update thread to exit after its current step."""
        self.update_thread.stopped = True
class OffPolicyAsyncOptimizerWithRewardShaping(object):
    # Reward-shaping variant: same pipeline as OffPolicyAsyncOptimizer but
    # seeds RNGs, uses random_sample_with_count for initial sampling, and
    # drives an UpdateThread4RewShaping (no dual-ascent logic).

    def __init__(self, workers, learners, replay_buffers, evaluator, args):
        """Initialize an off-policy async optimizers.

        Arguments:
            workers (dict): {local worker, remote workers (list)>=0}
            learners (list): list of remote learners, len >= 1
            replay_buffers (list): list of replay buffers, len >= 1
        """
        self.args = args
        if isinstance(self.args.random_seed, int):
            self.set_seed(self.args.random_seed)
        self.workers = workers
        self.local_worker = self.workers['local_worker']
        self.learners = learners
        # Bounded queue of (replay_buffer, samples) pairs awaiting a learner.
        self.learner_queue = queue.Queue(LEARNER_QUEUE_MAX_SIZE)
        self.replay_buffers = replay_buffers
        self.evaluator = evaluator
        self.num_sampled_steps = 0
        # NOTE(review): incremented only during the replay-fill loop below;
        # the increment in step() is commented out.
        self.num_sampled_costs = 0
        self.iteration = 0
        self.num_samples_dropped = 0
        self.num_grads_dropped = 0
        self.optimizer_steps = 0
        self.timers = {k: TimerStat() for k in ["sampling_timer", "replay_timer",
                                                "learning_timer"]}
        self.stats = {}
        self.update_thread = UpdateThread4RewShaping(self.workers, self.evaluator, self.args,
                                                     self.stats)
        self.update_thread.start()
        self.max_weight_sync_delay = self.args.max_weight_sync_delay
        self.steps_since_update = {}
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.sample_tasks = TaskPool()
        self._set_workers()
        # fill buffer to replay starts: block until every replay buffer has
        # at least args.replay_starts transitions.
        logger.info('start filling the replay')
        while not all([l >= self.args.replay_starts for l in
                       ray.get([rb.__len__.remote() for rb in self.replay_buffers])]):
            for worker, objID in list(self.sample_tasks.completed()):
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())
        logger.info('end filling the replay')
        self.replay_tasks = TaskPool()
        self._set_buffers()
        self.learn_tasks = TaskPool()
        self._set_learners()
        logger.info('Optimizer initialized')

    def set_seed(self, seed):
        """Seed TensorFlow, python random and NumPy for reproducibility."""
        tf.random.set_seed(seed)
        random.seed(seed)
        np.random.seed(seed)

    def get_stats(self):
        """Refresh and return the shared stats dict (counters + timer means)."""
        # Safe: num_sampled_steps > 0 after the replay-fill loop in __init__.
        cost_rate = self.num_sampled_costs/self.num_sampled_steps
        self.stats.update(dict(num_sampled_steps=self.num_sampled_steps,
                               num_sampled_costs=self.num_sampled_costs,
                               cost_rate=cost_rate,
                               iteration=self.iteration,
                               optimizer_steps=self.optimizer_steps,
                               num_samples_dropped=self.num_samples_dropped,
                               num_grads_dropped=self.num_grads_dropped,
                               learner_queue_size=self.learner_queue.qsize(),
                               sampling_time=self.timers['sampling_timer'].mean,
                               replay_time=self.timers["replay_timer"].mean,
                               learning_time=self.timers['learning_timer'].mean
                               )
                          )
        return self.stats

    def _set_workers(self):
        """Push initial weights to remote workers and queue WORKER_DEPTH sample tasks each."""
        weights = self.local_worker.get_weights()
        for worker in self.workers['remote_workers']:
            worker.set_weights.remote(weights)
            self.steps_since_update[worker] = 0
            for _ in range(WORKER_DEPTH):
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())

    def _set_buffers(self):
        """Queue BUFFER_DEPTH replay tasks per buffer."""
        for rb in self.replay_buffers:
            for _ in range(BUFFER_DEPTH):
                self.replay_tasks.add(rb, rb.replay.remote())

    def _set_learners(self):
        """Push weights (and optionally ppc params) to learners and start one gradient task each."""
        weights = self.local_worker.get_weights()
        ppc_params = self.workers['remote_workers'][0].get_ppc_params.remote()
        for learner in self.learners:
            learner.set_weights.remote(weights)
            if self.args.obs_ptype == 'normalize' or \
                    self.args.rew_ptype == 'normalize':
                learner.set_ppc_params.remote(ppc_params)
            rb, _ = random_choice_with_index(self.replay_buffers)
            samples = ray.get(rb.replay.remote())
            self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                          self.local_worker.iteration))

    def step(self):
        """One optimizer tick: harvest finished sample/replay/learn tasks and resubmit them."""
        assert self.update_thread.is_alive()
        assert len(self.workers['remote_workers']) > 0
        weights = None
        ppc_params = None
        # sampling: store finished batches and periodically re-sync worker weights.
        with self.timers['sampling_timer']:
            for worker, objID in self.sample_tasks.completed():
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                # self.num_sampled_costs += count_costs
                self.steps_since_update[worker] += count
                ppc_params = worker.get_ppc_params.remote()
                if self.steps_since_update[worker] >= self.max_weight_sync_delay:
                    # judge_is_nan(self.local_worker.policy_with_value.policy.trainable_weights)
                    if weights is None:
                        weights = ray.put(self.local_worker.get_weights())
                    worker.set_weights.remote(weights)
                    self.steps_since_update[worker] = 0
                # NOTE(review): init submits random_sample_with_count but
                # this resubmits sample_with_count; results are unpacked
                # above as 3-tuples — verify both return the same shape.
                self.sample_tasks.add(worker, worker.sample_with_count.remote())
        # replay: move finished replay batches into the learner queue.
        with self.timers["replay_timer"]:
            for rb, replay in self.replay_tasks.completed():
                self.replay_tasks.add(rb, rb.replay.remote())
                if self.learner_queue.full():
                    self.num_samples_dropped += 1
                else:
                    samples = ray.get(replay)
                    self.learner_queue.put((rb, samples))
        # learning: collect gradients, resubmit learners, forward grads to UpdateThread.
        with self.timers['learning_timer']:
            for learner, objID in self.learn_tasks.completed():
                grads = ray.get(objID)
                learner_stats = ray.get(learner.get_stats.remote())
                if self.args.buffer_type == 'priority':
                    info_for_buffer = ray.get(learner.get_info_for_buffer.remote())
                    info_for_buffer['rb'].update_priorities.remote(info_for_buffer['indexes'],
                                                                   info_for_buffer['td_error'])
                rb, samples = self.learner_queue.get(block=False)
                if ppc_params and \
                        (self.args.obs_ptype == 'normalize' or self.args.rew_ptype == 'normalize'):
                    learner.set_ppc_params.remote(ppc_params)
                    self.local_worker.set_ppc_params(ppc_params)
                if weights is None:
                    weights = ray.put(self.local_worker.get_weights())
                learner.set_weights.remote(weights)
                self.learn_tasks.add(learner, learner.compute_gradient.remote(samples[:-1], rb, samples[-1],
                                                                              self.local_worker.iteration))
                # NOTE(review): counter bumped when full, but the following
                # put() still blocks — grads are not actually dropped.
                if self.update_thread.inqueue.full():
                    self.num_grads_dropped += 1
                self.update_thread.inqueue.put([grads, learner_stats])
        self.iteration = self.update_thread.iteration
        self.optimizer_steps += 1
        self.get_stats()

    def stop(self):
        """Signal the background update thread to exit after its current step."""
        self.update_thread.stopped = True
class AllReduceOptimizer(object):
    """Driver-side all-reduce optimizer.

    Each call to :meth:`step`:
      1. drains finished remote sample tasks into randomly-chosen replay buffers,
      2. has every remote learner compute gradients on one replay batch,
      3. averages the gradients on the driver (the "all-reduce"),
      4. applies them on the local worker and broadcasts fresh weights
         back to all learners.

    Fixes relative to the previous revision:
      * ``weights`` is now initialized before the sampling loop (it was read
        before assignment, raising UnboundLocalError on the first weight sync);
      * ``ray.put(self.local_worker.get_weights())`` — the method is now
        *called* (previously the bound method object itself was put);
      * ``learner.set_weights.remote(weights)`` — learners are ray actors,
        so the call must go through ``.remote`` (as in ``_set_learners``);
      * logging reads ``self.num_sampled_steps`` directly instead of
        ``self.stats['num_sampled_steps']``, which raised KeyError on
        iteration 0 because ``get_stats()`` had not run yet;
      * removed a leftover debug ``print`` and a dead
        ``worker.get_ppc_params.remote()`` call whose result was discarded.
    """

    def __init__(self, workers, learners, replay_buffers, evaluator, args):
        self.args = args
        self.evaluator = evaluator
        self.workers = workers
        self.learners = learners
        self.learner_queue = queue.Queue(LEARNER_QUEUE_MAX_SIZE)
        self.timers = {k: TimerStat() for k in ["sampling_timer", "replay_timer",
                                                "learning_timer", "grad_apply_timer"]}
        self.replay_buffers = replay_buffers
        self.local_worker = self.workers['local_worker']
        self.max_weight_sync_delay = self.args.max_weight_sync_delay
        self.num_sampled_steps = 0
        self.num_sampled_costs = 0
        self.steps_since_update = {}
        self.num_updates = 0
        self.iteration = 0
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        self.writer = tf.summary.create_file_writer(self.log_dir + '/optimizer')
        self.stats = {}
        self.step_timer = TimerStat()
        logger.info('Optimizer initialized')
        self.sample_tasks = TaskPool()
        self._set_workers()
        # Pre-fill every replay buffer up to replay_starts before learning.
        logger.info('start filling the replay')
        while not all(l >= self.args.replay_starts for l in
                      ray.get([rb.__len__.remote() for rb in self.replay_buffers])):
            for worker, objID in list(self.sample_tasks.completed()):
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())
        logger.info('end filling the replay')
        self._set_learners()

    def _set_workers(self):
        """Push current weights to all remote workers and start WORKER_DEPTH
        concurrent sample tasks per worker."""
        weights = self.local_worker.get_weights()
        for worker in self.workers['remote_workers']:
            worker.set_weights.remote(weights)
            self.steps_since_update[worker] = 0
            for _ in range(WORKER_DEPTH):
                self.sample_tasks.add(worker, worker.random_sample_with_count.remote())

    def _set_learners(self):
        """Push current weights to all remote learners."""
        weights = self.local_worker.get_weights()
        for learner in self.learners:
            learner.set_weights.remote(weights)

    def get_stats(self):
        """Refresh the cached stats dict from the counters and return it."""
        self.stats.update(dict(iteration=self.iteration,
                               num_sampled_steps=self.num_sampled_steps,
                               num_updates=self.num_updates,
                               step_timer=self.step_timer.mean,
                               )
                          )
        return self.stats

    def step(self):
        # Lazily put weights into the object store only once per step,
        # and only if at least one worker is due for a sync.
        weights = None
        # sampling
        with self.timers['sampling_timer']:
            for worker, objID in self.sample_tasks.completed():
                sample_batch, count, count_costs = ray.get(objID)
                random.choice(self.replay_buffers).add_batch.remote(sample_batch)
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.steps_since_update[worker] += count
                if self.steps_since_update[worker] >= self.max_weight_sync_delay:
                    if weights is None:
                        weights = ray.put(self.local_worker.get_weights())
                    worker.set_weights.remote(weights)
                    self.steps_since_update[worker] = 0
                self.sample_tasks.add(worker, worker.sample_with_count.remote())
        # replay and learning: one replay batch per learner, gradients
        # computed remotely in parallel, then averaged on the driver.
        with self.timers["learning_timer"]:
            batch_grads = []
            for learner in self.learners:
                samples = random.choice(self.replay_buffers).replay.remote()
                mb_grads = learner.compute_gradient.remote(samples, self.iteration, ascent=True)
                batch_grads.append(mb_grads)
            batch_grads = ray.get(batch_grads)
            grads = np.array(batch_grads).mean(axis=0).tolist()
            try:
                judge_is_nan(grads)
            except ValueError:
                # Drop a NaN update entirely rather than poisoning the weights.
                grads = [tf.zeros_like(grad) for grad in grads]
                logger.info('Grad is nan!, zero it')
        # update
        with self.timers['grad_apply_timer']:
            qc_grad, lam_grad = self.local_worker.apply_gradients(self.iteration, grads)
            if self.iteration > 50000:  # todo: change to proportional definition wrt max iter
                self.local_worker.apply_ascent_gradients(self.iteration, qc_grad, lam_grad)
            weights = ray.put(self.local_worker.get_weights())
            for learner in self.learners:
                learner.set_weights.remote(weights)
        # log
        learner_stats = ray.get(random.choice(self.learners).get_stats.remote())  # TODO: change to all reduce?
        if self.iteration % self.args.log_interval == 0:
            logger.info('updating {} in total'.format(self.iteration))
            logger.info('sampling {} in total'.format(self.num_sampled_steps))
            with self.writer.as_default():
                for key, val in learner_stats.items():
                    if not isinstance(val, list):
                        tf.summary.scalar('optimizer/learner_stats/scalar/{}'.format(key), val,
                                          step=self.iteration)
                    else:
                        for i, v in enumerate(val):
                            tf.summary.scalar('optimizer/learner_stats/list/{}/{}'.format(key, i), v,
                                              step=self.iteration)
                for key, val in self.stats.items():
                    tf.summary.scalar('optimizer/{}'.format(key), val, step=self.iteration)
                self.writer.flush()
        # evaluate  # TODO: parallel evaluate?
        if self.iteration % self.args.eval_interval == 0:
            self.evaluator.set_weights.remote(self.local_worker.get_weights())
            self.evaluator.set_ppc_params.remote(self.workers['remote_workers'][0].get_ppc_params.remote())
            self.evaluator.run_evaluation.remote(self.iteration)
        # save
        if self.iteration % self.args.save_interval == 0:
            self.workers['local_worker'].save_weights(self.model_dir, self.iteration)
            self.workers['remote_workers'][0].save_ppc_params.remote(self.args.model_dir)
        self.iteration += 1
        self.num_sampled_steps += self.args.sample_batch_size * len(self.workers['remote_workers'])
        # NOTE(review): this accumulates iteration * epoch * minibatches each
        # step, which grows quadratically over time — looks suspicious, but
        # preserved as-is; confirm intended semantics before changing.
        self.num_updates += self.iteration * self.args.epoch * int(self.args.sample_batch_size / self.args.mini_batch_size)
        self.get_stats()

    def stop(self):
        # Nothing to tear down: sample tasks are fire-and-forget ray calls.
        pass
class SingleProcessOffPolicyOptimizer(object):
    """Single-process off-policy trainer: sample -> replay -> learn -> apply.

    Runs the worker, learner, replay buffer and (optional) evaluator inline
    in the driver process — no ray — which is convenient for debugging the
    distributed optimizers.

    Fix relative to the previous revision: ``__init__`` stored the learner
    under ``self.learners`` while ``step()`` read ``self.learner``, raising
    AttributeError on the first step. Both names are now bound (``learners``
    is a single learner object here) so existing external readers of either
    attribute keep working.
    """

    def __init__(self, worker, learners, replay_buffer, evaluator, args):
        self.args = args
        self.worker = worker
        self.learners = learners
        # Alias used by step(); `learners` is a single learner object here.
        self.learner = learners
        self.replay_buffer = replay_buffer
        self.evaluator = evaluator
        self.num_sampled_steps = 0
        self.num_sampled_costs = 0
        self.iteration = 0
        self.timers = {k: TimerStat() for k in ["sampling_timer", "replay_timer", "learning_timer", "grad_apply_timer"]}
        self.stats = {}
        self.log_dir = self.args.log_dir
        self.model_dir = self.args.model_dir
        if not os.path.exists(self.log_dir):
            os.makedirs(self.log_dir)
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        # NOTE(review): these overwrite whatever intervals the caller passed
        # in args — presumably deliberate for the debug configuration.
        self.args.log_interval = 10
        self.args.eval_interval = 3000
        self.args.save_interval = 3000
        # Pre-fill the replay buffer up to replay_starts before learning.
        logger.info('start filling the replay')
        while len(self.replay_buffer) < self.args.replay_starts:
            sample_batch, count, costs_count = self.worker.sample_with_count()
            self.num_sampled_steps += count
            self.num_sampled_costs += costs_count
            self.replay_buffer.add_batch(sample_batch)
        logger.info('end filling the replay')
        self.writer = tf.summary.create_file_writer(self.log_dir + '/optimizer')
        logger.info('Optimizer initialized')
        self.get_stats()

    def get_stats(self):
        """Refresh the cached stats dict from the counters/timers and return it."""
        # Guard against division by zero before any samples were collected.
        if self.num_sampled_steps:
            cost_rate = self.num_sampled_costs / self.num_sampled_steps
        else:
            cost_rate = 0.0
        self.stats.update(dict(num_sampled_steps=self.num_sampled_steps,
                               num_sampled_costs=self.num_sampled_costs,
                               cost_rate=cost_rate,
                               iteration=self.iteration,
                               sampling_time=self.timers['sampling_timer'].mean,
                               replay_time=self.timers["replay_timer"].mean,
                               learning_time=self.timers['learning_timer'].mean,
                               grad_apply_timer=self.timers['grad_apply_timer'].mean
                               )
                          )
        return self.stats

    def step(self):
        # sampling: only every `sampling_interval` iterations, since a single
        # environment step is expensive relative to one gradient update.
        sampling_interval = 10
        if self.iteration % sampling_interval == 0:
            with self.timers['sampling_timer']:
                sample_batch, count, count_costs = self.worker.sample_with_count()
                self.num_sampled_steps += count
                self.num_sampled_costs += count_costs
                self.replay_buffer.add_batch(sample_batch)
        # replay
        with self.timers["replay_timer"]:
            samples = self.replay_buffer.replay()
        # learning
        with self.timers['learning_timer']:
            self.learner.set_weights(self.worker.get_weights())
            if self.args.obs_ptype == 'normalize' or \
                    self.args.rew_ptype == 'normalize':
                self.learner.set_ppc_params(self.worker.get_ppc_params())
            grads = self.learner.compute_gradient(samples[:-1], self.replay_buffer, samples[-1], self.iteration)
            learner_stats = self.learner.get_stats()
            if self.args.buffer_type == 'priority':
                info_for_buffer = self.learner.get_info_for_buffer()
                info_for_buffer['rb'].update_priorities(info_for_buffer['indexes'], info_for_buffer['td_error'])
            if self.args.buffer_type == 'priority_cost':
                info_for_buffer = self.learner.get_info_for_buffer()
                info_for_buffer['rb'].update_priorities(info_for_buffer['indexes'], info_for_buffer['cost_td_error'])
        # apply grad
        with self.timers['grad_apply_timer']:
            try:
                judge_is_nan(grads)
            except ValueError:
                # Drop a NaN update entirely rather than poisoning the weights.
                grads = [tf.zeros_like(grad) for grad in grads]
                logger.info('Grad is nan!, zero it')
            self.worker.apply_gradients(self.iteration, grads)
        # log
        if self.iteration % self.args.log_interval == 0:
            logger.info('updating {} in total'.format(self.iteration))
            logger.info('sampling {} in total'.format(self.stats['num_sampled_steps']))
            with self.writer.as_default():
                for key, val in learner_stats.items():
                    if not isinstance(val, list):
                        tf.summary.scalar('optimizer/learner_stats/scalar/{}'.format(key), val,
                                          step=self.iteration)
                    else:
                        for i, v in enumerate(val):
                            tf.summary.scalar('optimizer/learner_stats/list/{}/{}'.format(key, i), v,
                                              step=self.iteration)
                for key, val in self.stats.items():
                    tf.summary.scalar('optimizer/{}'.format(key), val, step=self.iteration)
                self.writer.flush()
        # evaluate
        if self.iteration % self.args.eval_interval == 0 and self.evaluator is not None:
            self.evaluator.set_weights(self.worker.get_weights())
            self.evaluator.set_ppc_params(self.worker.get_ppc_params())
            self.evaluator.run_evaluation(self.iteration)
        # save
        if self.iteration % self.args.save_interval == 0:
            self.worker.save_weights(self.model_dir, self.iteration)
            self.worker.save_ppc_params(self.model_dir)
        self.get_stats()
        self.iteration += 1

    def stop(self):
        # Nothing to tear down: everything runs in-process.
        pass
| 46.980583
| 129
| 0.57547
| 5,467
| 48,390
| 4.849277
| 0.047009
| 0.025951
| 0.026593
| 0.017917
| 0.926597
| 0.910528
| 0.898231
| 0.892083
| 0.883067
| 0.875712
| 0
| 0.004398
| 0.328002
| 48,390
| 1,029
| 130
| 47.026239
| 0.810874
| 0.047427
| 0
| 0.88411
| 0
| 0
| 0.051464
| 0.009024
| 0
| 0
| 0
| 0.001944
| 0.011947
| 1
| 0.046595
| false
| 0.002389
| 0.014337
| 0
| 0.075269
| 0.001195
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e52810e3afe9bb6877317e2886f79bf83daf885
| 438
|
py
|
Python
|
ArticutAPI/__init__.py
|
Joshua0128/ArticutAPI
|
5521e9228263f8e0e918fd7e1db8367d81161ca8
|
[
"MIT"
] | 375
|
2019-05-01T15:15:35.000Z
|
2022-03-18T00:54:33.000Z
|
ArticutAPI/__init__.py
|
lpluo54/ArticutAPI
|
366dd9956d0ee3bd81b422844015101168b4bd34
|
[
"MIT"
] | 1
|
2019-07-05T06:50:09.000Z
|
2020-08-16T10:02:15.000Z
|
ArticutAPI/__init__.py
|
lpluo54/ArticutAPI
|
366dd9956d0ee3bd81b422844015101168b4bd34
|
[
"MIT"
] | 30
|
2019-09-01T11:17:37.000Z
|
2021-12-28T23:05:06.000Z
|
# Prefer package-relative imports; fall back to top-level imports when this
# module is executed outside the ArticutAPI package context (e.g. run as a
# script). Catch only ImportError — the previous bare `except:` also hid
# unrelated failures (SyntaxError in a submodule, KeyboardInterrupt, ...).
try:
    from .ArticutAPI import Articut
    from .Toolkit.analyse import AnalyseManager
    from .Toolkit.localRE import TaiwanAddressAnalizer
    from .Toolkit.toolkits import *
    from .Toolkit.NER import GenericNER
except ImportError:
    from ArticutAPI import Articut
    from Toolkit.analyse import AnalyseManager
    from Toolkit.localRE import TaiwanAddressAnalizer
    from Toolkit.toolkits import *
    from Toolkit.NER import GenericNER
| 33.692308
| 54
| 0.773973
| 48
| 438
| 7.0625
| 0.291667
| 0.259587
| 0.117994
| 0.159292
| 0.973451
| 0.973451
| 0.973451
| 0.973451
| 0.973451
| 0.973451
| 0
| 0
| 0.187215
| 438
| 12
| 55
| 36.5
| 0.952247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
2e6549de549c7a822293f45f129325c6fe1a40bf
| 86
|
py
|
Python
|
src/handlers/__init__.py
|
egor-muindor/yandere_bot
|
468ae765d72f4bcc8bd28b0f3f1faa9f261b7919
|
[
"MIT"
] | null | null | null |
src/handlers/__init__.py
|
egor-muindor/yandere_bot
|
468ae765d72f4bcc8bd28b0f3f1faa9f261b7919
|
[
"MIT"
] | null | null | null |
src/handlers/__init__.py
|
egor-muindor/yandere_bot
|
468ae765d72f4bcc8bd28b0f3f1faa9f261b7919
|
[
"MIT"
] | 1
|
2020-12-19T15:01:52.000Z
|
2020-12-19T15:01:52.000Z
|
from . import image_handler
from . import admin_handler
from . import default_handler
| 21.5
| 29
| 0.825581
| 12
| 86
| 5.666667
| 0.5
| 0.441176
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 86
| 3
| 30
| 28.666667
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e7496165ac9d80ed7e94e48a328532fcbf25590
| 49,105
|
py
|
Python
|
capParser/ByteCodes.py
|
MallikarjunTirlapur/JavaCard-Binary-Parser
|
abb897d6403fec94148afd63bb9bf924e37d9a9c
|
[
"MIT"
] | null | null | null |
capParser/ByteCodes.py
|
MallikarjunTirlapur/JavaCard-Binary-Parser
|
abb897d6403fec94148afd63bb9bf924e37d9a9c
|
[
"MIT"
] | null | null | null |
capParser/ByteCodes.py
|
MallikarjunTirlapur/JavaCard-Binary-Parser
|
abb897d6403fec94148afd63bb9bf924e37d9a9c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 14 10:38:58 2019
@author: Tirlapur
"""
from MyUtil import Util
class ByteCodes():
def __init__(self, binData):
self.ByteCodeTable = dict()
self.ByteCodeLengthTable = dict()
self.binData = binData
# self.stack[500] = 0
# self.frame[500] = 0
# self.stackTop = 0
# self.frameIndex = 0
def getByteCodeTable(self):
return self.ByteCodeTable
# def pushToStack(self, data):
# self.stackTop += 1
# self.stack[self.stackTop] = data
#
# def popFromStack(self):
# data = self.stack[self.stackTop]
# self.stackTop -= 1
# return data
#
# def writeToFrame(self, data):
# self.frame[self.frameIndex] = data
#
# def readFromFrame(self):
# return self.frame[self.frameIndex]
def nop(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aconst_null(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_m1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_4(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sconst_5(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_m1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_4(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iconst_5(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def bspush(self, index, byteCode, start, prnt):
#index into the biData - start+2 and length 2(for one byte)
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def sspush(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 4), 16))), prnt)
return 3
def bipush(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def sipush(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 4), 16))), prnt)
return 3
def iipush(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 8), 16))), prnt)
return 5
def aload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def sload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def iload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def aload_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aload_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aload_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aload_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sload_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sload_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sload_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sload_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iload_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iload_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iload_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iload_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aaload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def baload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def saload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iaload(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def astore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def sstore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def istore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def astore_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def astore_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def astore_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def astore_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sstore_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sstore_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sstore_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sstore_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def istore_0(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def istore_1(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def istore_2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def istore_3(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def aastore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def bastore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sastore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iastore(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def pop(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def pop2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def dup(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def dup2(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def dup_x(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def swap_x(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def sadd(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iadd(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ssub(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def isub(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def smul(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def imul(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sdiv(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def idiv(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def srem(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def irem(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sneg(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ineg(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sshl(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ishl(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sshr(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ishr(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sushr(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iushr(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sand(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def iand(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sor(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ior(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sxor(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ixor(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sinc(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 4), 16))), prnt)
return 3
def iinc(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 4), 16))), prnt)
return 3
def s2b(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def s2i(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def i2b(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def i2s(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def icmp(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ifeq(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifne(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def iflt(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifge(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifgt(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifle(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifnull(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def ifnonnull(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_acmpeq(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_acmpne(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmpeq(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmpne(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmplt(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmpge(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmpgt(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def if_scmple(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def goto(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def jsr(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 4), 16))), prnt)
return 3
def ret(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))), prnt)
return 2
def stableswitch(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
start += 2
length = 4
start += length
length = 4
low = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
start += length
length = 4
high = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
arrsize = high - low + 1;
return (7 + (arrsize*2))
def itableswitch(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
start += 2
length = 4
start += length
length = 8
low = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
start += length
length = 8
high = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
arrsize = high - low + 1;
return (11 + (arrsize*2))
def slookupswitch(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
start += 2
length = 4
start += length
length = 4
npairs = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
return (5 + (npairs*4))
def ilookupswitch(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
start += 2
length = 4
start += length
length = 4
npairs = (int)(Util.convertDataToPrint(self.binData, start, length), 16)
return (5 + (npairs*6))
def areturn(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def sreturn(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def ireturn(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]), prnt)
return 1
def j_return(self, index, byteCode, start, prnt):
Util.printOnConsole((' '+ 'return'), prnt)
return 1
def getstatic_a(self, index, byteCode, start, prnt):
    """'getstatic_a': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getstatic_b(self, index, byteCode, start, prnt):
    """'getstatic_b': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getstatic_s(self, index, byteCode, start, prnt):
    """'getstatic_s': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getstatic_i(self, index, byteCode, start, prnt):
    """'getstatic_i': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putstatic_a(self, index, byteCode, start, prnt):
    """'putstatic_a': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putstatic_b(self, index, byteCode, start, prnt):
    """'putstatic_b': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putstatic_s(self, index, byteCode, start, prnt):
    """'putstatic_s': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putstatic_i(self, index, byteCode, start, prnt):
    """'putstatic_i': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3
def getfield_a(self, index, byteCode, start, prnt):
    """'getfield_a': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_b(self, index, byteCode, start, prnt):
    """'getfield_b': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_s(self, index, byteCode, start, prnt):
    """'getfield_s': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_i(self, index, byteCode, start, prnt):
    """'getfield_i': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_a(self, index, byteCode, start, prnt):
    """'putfield_a': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_b(self, index, byteCode, start, prnt):
    """'putfield_b': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_s(self, index, byteCode, start, prnt):
    """'putfield_s': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_i(self, index, byteCode, start, prnt):
    """'putfield_i': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2
def invokevirtual(self, index, byteCode, start, prnt):
    """'invokevirtual': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def invokespecial(self, index, byteCode, start, prnt):
    """'invokespecial': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def invokestatic(self, index, byteCode, start, prnt):
    """'invokestatic': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3
def invokeinterface(self, index, byteCode, start, prnt):
    """Disassemble 'invokeinterface': prints the mnemonic followed by its
    three operands — a 1-byte value, a 2-byte value and another 1-byte value
    (read as 2, 4 and 2 hex characters of self.binData at offsets +2, +4
    and +8) — and returns the instruction length, 5 bytes.

    NOTE(review): the operand meanings (presumably nargs / interface
    reference / method token per the Java Card spec) are not established by
    this code — confirm before relying on them.
    """
    Util.printOnConsole((' '+ self.ByteCodeTable[byteCode]+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 2), 2), 16))+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 4), 4), 16))+' '+str((int)(Util.convertDataToPrint(self.binData, (start + 8), 2), 16))), prnt)
    return 5
def new(self, index, byteCode, start, prnt):
    """'new': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def newarray(self, index, byteCode, start, prnt):
    """'newarray': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def anewarray(self, index, byteCode, start, prnt):
    """'anewarray': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def arraylength(self, index, byteCode, start, prnt):
    """'arraylength' (no operands); instruction length is 1 byte."""
    mnemonic = self.ByteCodeTable[byteCode]
    Util.printOnConsole(' ' + mnemonic, prnt)
    return 1

def athrow(self, index, byteCode, start, prnt):
    """'athrow' (no operands); instruction length is 1 byte."""
    mnemonic = self.ByteCodeTable[byteCode]
    Util.printOnConsole(' ' + mnemonic, prnt)
    return 1
def checkcast(self, index, byteCode, start, prnt):
    """'checkcast': mnemonic + 3 operand bytes printed as one value (6 hex chars); length 4."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 6), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 4

def instanceof(self, index, byteCode, start, prnt):
    """'instanceof': mnemonic + 3 operand bytes printed as one value (6 hex chars); length 4."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 6), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 4

def sinc_w(self, index, byteCode, start, prnt):
    """'sinc_w': mnemonic + 3 operand bytes printed as one value (6 hex chars); length 4."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 6), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 4

def iinc_w(self, index, byteCode, start, prnt):
    """'iinc_w': mnemonic + 3 operand bytes printed as one value (6 hex chars); length 4."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 6), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 4
def ifeq_w(self, index, byteCode, start, prnt):
    """'ifeq_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifne_w(self, index, byteCode, start, prnt):
    """'ifne_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def iflt_w(self, index, byteCode, start, prnt):
    """'iflt_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifge_w(self, index, byteCode, start, prnt):
    """'ifge_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifgt_w(self, index, byteCode, start, prnt):
    """'ifgt_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifle_w(self, index, byteCode, start, prnt):
    """'ifle_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifnull_w(self, index, byteCode, start, prnt):
    """'ifnull_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def ifnonnull_w(self, index, byteCode, start, prnt):
    """'ifnonnull_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_acmpeq_w(self, index, byteCode, start, prnt):
    """'if_acmpeq_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_acmpne_w(self, index, byteCode, start, prnt):
    """'if_acmpne_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmpeq_w(self, index, byteCode, start, prnt):
    """'if_scmpeq_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmpne_w(self, index, byteCode, start, prnt):
    """'if_scmpne_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmplt_w(self, index, byteCode, start, prnt):
    """'if_scmplt_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmpge_w(self, index, byteCode, start, prnt):
    """'if_scmpge_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmpgt_w(self, index, byteCode, start, prnt):
    """'if_scmpgt_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def if_scmple_w(self, index, byteCode, start, prnt):
    """'if_scmple_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def goto_w(self, index, byteCode, start, prnt):
    """'goto_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3
def getfield_a_w(self, index, byteCode, start, prnt):
    """'getfield_a_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getfield_b_w(self, index, byteCode, start, prnt):
    """'getfield_b_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getfield_s_w(self, index, byteCode, start, prnt):
    """'getfield_s_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getfield_i_w(self, index, byteCode, start, prnt):
    """'getfield_i_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def getfield_a_this(self, index, byteCode, start, prnt):
    """'getfield_a_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_b_this(self, index, byteCode, start, prnt):
    """'getfield_b_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_s_this(self, index, byteCode, start, prnt):
    """'getfield_s_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def getfield_i_this(self, index, byteCode, start, prnt):
    """'getfield_i_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_a_w(self, index, byteCode, start, prnt):
    """'putfield_a_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putfield_b_w(self, index, byteCode, start, prnt):
    """'putfield_b_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putfield_s_w(self, index, byteCode, start, prnt):
    """'putfield_s_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putfield_i_w(self, index, byteCode, start, prnt):
    """'putfield_i_w': mnemonic + 2-byte operand (4 hex chars); length 3."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 4), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 3

def putfield_a_this(self, index, byteCode, start, prnt):
    """'putfield_a_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_b_this(self, index, byteCode, start, prnt):
    """'putfield_b_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_s_this(self, index, byteCode, start, prnt):
    """'putfield_s_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2

def putfield_i_this(self, index, byteCode, start, prnt):
    """'putfield_i_this': mnemonic + 1-byte operand (2 hex chars); length 2."""
    operand = int(Util.convertDataToPrint(self.binData, start + 2, 2), 16)
    Util.printOnConsole(' ' + self.ByteCodeTable[byteCode] + ' ' + str(operand), prnt)
    return 2
def execByteCode(self, byteCode, index, start, prnt):
    """Dispatch a bytecode to its handler method, looked up by mnemonic name.

    Returns whatever the handler returns (the instruction length in bytes).
    Raises KeyError if byteCode is not in self.ByteCodeTable.

    Bug fix: the original fallback was `lambda: "Invalid method"`, which
    takes no arguments but is invoked with four — so a missing handler
    raised TypeError instead of returning the "Invalid method" sentinel.
    The fallback now accepts any arguments.
    """
    method = getattr(self, self.ByteCodeTable[byteCode],
                     lambda *args: "Invalid method")
    return method(index, byteCode, start, prnt)
def updateByteCodeTable(self):
    """Populate self.ByteCodeTable with the opcode -> mnemonic mapping.

    Covers every opcode 0x00-0xB8 (185 entries); existing entries with
    the same keys are overwritten, others are left untouched.
    """
    self.ByteCodeTable.update({
        0x00: 'nop', 0x01: 'aconst_null', 0x02: 'sconst_m1', 0x03: 'sconst_0',
        0x04: 'sconst_1', 0x05: 'sconst_2', 0x06: 'sconst_3', 0x07: 'sconst_4',
        0x08: 'sconst_5', 0x09: 'iconst_m1', 0x0A: 'iconst_0', 0x0B: 'iconst_1',
        0x0C: 'iconst_2', 0x0D: 'iconst_3', 0x0E: 'iconst_4', 0x0F: 'iconst_5',
        0x10: 'bspush', 0x11: 'sspush', 0x12: 'bipush', 0x13: 'sipush',
        0x14: 'iipush', 0x15: 'aload', 0x16: 'sload', 0x17: 'iload',
        0x18: 'aload_0', 0x19: 'aload_1', 0x1A: 'aload_2', 0x1B: 'aload_3',
        0x1C: 'sload_0', 0x1D: 'sload_1', 0x1E: 'sload_2', 0x1F: 'sload_3',
        0x20: 'iload_0', 0x21: 'iload_1', 0x22: 'iload_2', 0x23: 'iload_3',
        0x24: 'aaload', 0x25: 'baload', 0x26: 'saload', 0x27: 'iaload',
        0x28: 'astore', 0x29: 'sstore', 0x2A: 'istore', 0x2B: 'astore_0',
        0x2C: 'astore_1', 0x2D: 'astore_2', 0x2E: 'astore_3', 0x2F: 'sstore_0',
        0x30: 'sstore_1', 0x31: 'sstore_2', 0x32: 'sstore_3', 0x33: 'istore_0',
        0x34: 'istore_1', 0x35: 'istore_2', 0x36: 'istore_3', 0x37: 'aastore',
        0x38: 'bastore', 0x39: 'sastore', 0x3A: 'iastore', 0x3B: 'pop',
        0x3C: 'pop2', 0x3D: 'dup', 0x3E: 'dup2', 0x3F: 'dup_x',
        0x40: 'swap_x', 0x41: 'sadd', 0x42: 'iadd', 0x43: 'ssub',
        0x44: 'isub', 0x45: 'smul', 0x46: 'imul', 0x47: 'sdiv',
        0x48: 'idiv', 0x49: 'srem', 0x4A: 'irem', 0x4B: 'sneg',
        0x4C: 'ineg', 0x4D: 'sshl', 0x4E: 'ishl', 0x4F: 'sshr',
        0x50: 'ishr', 0x51: 'sushr', 0x52: 'iushr', 0x53: 'sand',
        0x54: 'iand', 0x55: 'sor', 0x56: 'ior', 0x57: 'sxor',
        0x58: 'ixor', 0x59: 'sinc', 0x5A: 'iinc', 0x5B: 's2b',
        0x5C: 's2i', 0x5D: 'i2b', 0x5E: 'i2s', 0x5F: 'icmp',
        0x60: 'ifeq', 0x61: 'ifne', 0x62: 'iflt', 0x63: 'ifge',
        0x64: 'ifgt', 0x65: 'ifle', 0x66: 'ifnull', 0x67: 'ifnonnull',
        0x68: 'if_acmpeq', 0x69: 'if_acmpne', 0x6A: 'if_scmpeq', 0x6B: 'if_scmpne',
        0x6C: 'if_scmplt', 0x6D: 'if_scmpge', 0x6E: 'if_scmpgt', 0x6F: 'if_scmple',
        0x70: 'goto', 0x71: 'jsr', 0x72: 'ret', 0x73: 'stableswitch',
        0x74: 'itableswitch', 0x75: 'slookupswitch', 0x76: 'ilookupswitch', 0x77: 'areturn',
        0x78: 'sreturn', 0x79: 'ireturn', 0x7A: 'j_return', 0x7B: 'getstatic_a',
        0x7C: 'getstatic_b', 0x7D: 'getstatic_s', 0x7E: 'getstatic_i', 0x7F: 'putstatic_a',
        0x80: 'putstatic_b', 0x81: 'putstatic_s', 0x82: 'putstatic_i', 0x83: 'getfield_a',
        0x84: 'getfield_b', 0x85: 'getfield_s', 0x86: 'getfield_i', 0x87: 'putfield_a',
        0x88: 'putfield_b', 0x89: 'putfield_s', 0x8A: 'putfield_i', 0x8B: 'invokevirtual',
        0x8C: 'invokespecial', 0x8D: 'invokestatic', 0x8E: 'invokeinterface', 0x8F: 'new',
        0x90: 'newarray', 0x91: 'anewarray', 0x92: 'arraylength', 0x93: 'athrow',
        0x94: 'checkcast', 0x95: 'instanceof', 0x96: 'sinc_w', 0x97: 'iinc_w',
        0x98: 'ifeq_w', 0x99: 'ifne_w', 0x9A: 'iflt_w', 0x9B: 'ifge_w',
        0x9C: 'ifgt_w', 0x9D: 'ifle_w', 0x9E: 'ifnull_w', 0x9F: 'ifnonnull_w',
        0xA0: 'if_acmpeq_w', 0xA1: 'if_acmpne_w', 0xA2: 'if_scmpeq_w', 0xA3: 'if_scmpne_w',
        0xA4: 'if_scmplt_w', 0xA5: 'if_scmpge_w', 0xA6: 'if_scmpgt_w', 0xA7: 'if_scmple_w',
        0xA8: 'goto_w', 0xA9: 'getfield_a_w', 0xAA: 'getfield_b_w', 0xAB: 'getfield_s_w',
        0xAC: 'getfield_i_w', 0xAD: 'getfield_a_this', 0xAE: 'getfield_b_this', 0xAF: 'getfield_s_this',
        0xB0: 'getfield_i_this', 0xB1: 'putfield_a_w', 0xB2: 'putfield_b_w', 0xB3: 'putfield_s_w',
        0xB4: 'putfield_i_w', 0xB5: 'putfield_a_this', 0xB6: 'putfield_b_this', 0xB7: 'putfield_s_this',
        0xB8: 'putfield_i_this',
    })
| 48.331693
| 306
| 0.590164
| 5,141
| 49,105
| 5.587629
| 0.072554
| 0.220149
| 0.116549
| 0.142449
| 0.744308
| 0.729479
| 0.729479
| 0.727947
| 0.724814
| 0.724814
| 0
| 0.032394
| 0.260075
| 49,105
| 1,016
| 307
| 48.331693
| 0.758215
| 0.012402
| 0
| 0.501282
| 0
| 0
| 0.085145
| 0
| 0
| 0
| 0.015267
| 0
| 0
| 1
| 0.242308
| false
| 0
| 0.001282
| 0.001282
| 0.484615
| 0.237179
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5cf0413daa30d2fb8572d9914511ec1bb0dd5e44
| 2,193
|
py
|
Python
|
tests/test_pytest_fixtures.py
|
FrNecas/requre
|
110ad5c42b6bbb087a28bcaf7d7b7834825ec65a
|
[
"MIT"
] | 4
|
2019-09-11T10:39:19.000Z
|
2020-01-26T14:46:04.000Z
|
tests/test_pytest_fixtures.py
|
FrNecas/requre
|
110ad5c42b6bbb087a28bcaf7d7b7834825ec65a
|
[
"MIT"
] | 134
|
2020-08-04T06:56:25.000Z
|
2022-03-28T19:59:10.000Z
|
tests/test_pytest_fixtures.py
|
FrNecas/requre
|
110ad5c42b6bbb087a28bcaf7d7b7834825ec65a
|
[
"MIT"
] | 8
|
2019-09-11T09:52:01.000Z
|
2020-05-15T07:49:20.000Z
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import pytest
from requre.storage import PersistentObjectStorage
from tests.testbase import network_connection_available
pytest_plugins = ["requre.pytest_fixtures"]
@pytest.mark.skipif(not network_connection_available(), reason="No network connection")
def test_record_requests_fixture(record_requests_fixture):
    """The recording fixture uses its own per-test storage file, named
    after the test function, distinct from the Singleton's cassette file."""
    import requests

    # ensure that storage_file is not propagated to Singleton
    assert (
        PersistentObjectStorage().cassette.storage_file
        != record_requests_fixture.storage_file
    )
    # Storage file is named after this test function.
    assert (
        record_requests_fixture.storage_file.name == "test_record_requests_fixture.yaml"
    )
    requests.get("https://google.com")


@pytest.mark.skipif(not network_connection_available(), reason="No network connection")
def test_record_requests_fixture_different_call(record_requests_fixture):
    """A different test function gets a differently named storage file."""
    import requests

    assert (
        record_requests_fixture.storage_file.name
        == "test_record_requests_fixture_different_call.yaml"
    )
    requests.get("https://fedoraproject.org")


@pytest.mark.skipif(not network_connection_available(), reason="No network connection")
def test_record_requests_fixture_write(
    remove_storage_file, record_requests_fixture, remove_storage_file_after
):
    """The remove_storage_file / remove_storage_file_after fixtures point at
    the same file the recording fixture writes to."""
    import requests

    assert remove_storage_file == record_requests_fixture.storage_file
    assert remove_storage_file == remove_storage_file_after
    assert (
        record_requests_fixture.storage_file.name
        == "test_record_requests_fixture_write.yaml"
    )
    requests.get("https://google.com")


@pytest.mark.skipif(not network_connection_available(), reason="No network connection")
def test_record_requests_fixture_different_call_write(
    remove_storage_file, record_requests_fixture, remove_storage_file_after
):
    """Same as test_record_requests_fixture_write but with a different test
    name, hence a different storage file."""
    import requests

    assert remove_storage_file == record_requests_fixture.storage_file
    assert remove_storage_file == remove_storage_file_after
    assert (
        record_requests_fixture.storage_file.name
        == "test_record_requests_fixture_different_call_write.yaml"
    )
    requests.get("https://fedoraproject.org")
| 33.227273
| 88
| 0.781122
| 260
| 2,193
| 6.192308
| 0.207692
| 0.165217
| 0.247826
| 0.124224
| 0.821739
| 0.775155
| 0.730435
| 0.727329
| 0.700621
| 0.700621
| 0
| 0
| 0.143183
| 2,193
| 65
| 89
| 33.738462
| 0.856839
| 0.059736
| 0
| 0.595745
| 0
| 0
| 0.177843
| 0.095238
| 0
| 0
| 0
| 0
| 0.191489
| 1
| 0.085106
| false
| 0
| 0.148936
| 0
| 0.234043
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf009ad77055b7835503478b8411f256dde4a9d9
| 160
|
py
|
Python
|
apps/beneficary/admin.py
|
MiloshBogdanovic/Auri-Soft
|
ab9f1540f3a9c91463bfb965d7399779f97717c0
|
[
"MIT"
] | null | null | null |
apps/beneficary/admin.py
|
MiloshBogdanovic/Auri-Soft
|
ab9f1540f3a9c91463bfb965d7399779f97717c0
|
[
"MIT"
] | null | null | null |
apps/beneficary/admin.py
|
MiloshBogdanovic/Auri-Soft
|
ab9f1540f3a9c91463bfb965d7399779f97717c0
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from apps.app.admin import MyAdminSite
from django.contrib import admin
from .models import *
| 16
| 38
| 0.79375
| 23
| 160
| 5.521739
| 0.521739
| 0.15748
| 0.267717
| 0.362205
| 0.440945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 160
| 9
| 39
| 17.777778
| 0.940741
| 0.1625
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf22a45e9cf946383a26db5abc69d46ffce1c82d
| 44,584
|
py
|
Python
|
tests/integration_tests/test_upcoming_events.py
|
Marhdz/django-happenings
|
5699889e972ae03f6fca480909cec1e46eff35af
|
[
"BSD-2-Clause"
] | 2
|
2019-12-11T14:39:05.000Z
|
2020-06-23T10:28:30.000Z
|
tests/integration_tests/test_upcoming_events.py
|
Marhdz/django-happenings
|
5699889e972ae03f6fca480909cec1e46eff35af
|
[
"BSD-2-Clause"
] | 1
|
2017-07-31T10:59:15.000Z
|
2017-08-01T10:32:30.000Z
|
tests/integration_tests/test_upcoming_events.py
|
Marhdz/django-happenings
|
5699889e972ae03f6fca480909cec1e46eff35af
|
[
"BSD-2-Clause"
] | 2
|
2018-12-25T00:34:11.000Z
|
2020-06-23T10:28:45.000Z
|
from __future__ import unicode_literals
from datetime import datetime, date, timedelta
from django.utils.timezone import make_aware, utc
from django.test.utils import override_settings
from happenings.utils.upcoming import UpcomingEvents
from happenings.templatetags.happenings_tags import current_happenings as haps
from .event_factory import create_event, SetMeUp
@override_settings(TIME_ZONE='UTC')
class UpcomingEventsTest(SetMeUp):
def upcoming_events(self, event, d, fin, num=5):
    """Return the upcoming occurrences of *event* between d and fin (max num)."""
    finder = UpcomingEvents(event, d, fin, num)
    return finder.get_upcoming_events()
def test_yearly(self):
    """
    Test yearly repeat w/ 'now' month same as event.start_date.month, and
    day b4 event.start_date.day.
    """
    event = create_event(
        start_date=(2014, 3, 31),
        end_date=(2014, 3, 31),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    # 'now' = 2014-03-03 (same month, before the 31st); window ~5.5 years.
    d = make_aware(datetime(2014, 3, 3), utc)
    fin = d + timedelta(days=2000)
    events = self.upcoming_events(event, d, fin)
    # Capped at the default maximum of 5 occurrences.
    self.assertEqual(len(events), 5)
    for i in range(5):
        # Each entry pairs an occurrence datetime with the event itself.
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    # Occurrences fall on March 31st of consecutive years.
    self.assertEqual(events[0][0].date(), date(2014, 3, 31))
    self.assertEqual(events[1][0].date(), date(2015, 3, 31))
    self.assertEqual(events[2][0].date(), date(2016, 3, 31))

def test_yearly2(self):
    """
    Test yearly repeat w/ 'now' month same as event.start_date.month, and
    day after event.start_date.day.
    """
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    # 'now' = 2015-03-03, just after that year's occurrence (2015-03-01).
    d = make_aware(datetime(2015, 3, 3), utc)
    fin = d + timedelta(days=4000)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    # The 2015 occurrence already passed, so the list starts at 2016.
    self.assertEqual(events[0][0].date(), date(2016, 3, 1))
    self.assertEqual(events[1][0].date(), date(2017, 3, 1))
    self.assertEqual(events[2][0].date(), date(2018, 3, 1))

def test_yearly3(self):
    """Test yearly repeat w/ 'now' month b4 event.start_date.month."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    # 'now' = 2015-01-01, before that year's occurrence (2015-03-01).
    d = make_aware(datetime(2015, 1, 1), utc)
    fin = d + timedelta(days=4000)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    # The current year's occurrence is still upcoming, so it is included.
    self.assertEqual(events[0][0].date(), date(2015, 3, 1))
    self.assertEqual(events[1][0].date(), date(2016, 3, 1))
    self.assertEqual(events[2][0].date(), date(2017, 3, 1))

def test_yearly4(self):
    """Test yearly repeat w/ 'now' month after event.start_date.month."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    # 'now' = 2015-04-04, after that year's occurrence has passed.
    d = make_aware(datetime(2015, 4, 4), utc)
    fin = d + timedelta(days=4000)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    # List starts with next year's occurrence.
    self.assertEqual(events[0][0].date(), date(2016, 3, 1))
    self.assertEqual(events[1][0].date(), date(2017, 3, 1))
    self.assertEqual(events[2][0].date(), date(2018, 3, 1))
def test_yearly_future_event(self):
    """Test yearly repeat w/ event that hasn't started yet."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    # 'now' = 2013-01-01, more than a year before the event's start date.
    d = make_aware(datetime(2013, 1, 1), utc)
    fin = d + timedelta(days=4000)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    # First occurrence is the event's own (future) start date.
    self.assertEqual(events[0][0].date(), date(2014, 3, 1))
    self.assertEqual(events[1][0].date(), date(2015, 3, 1))
    self.assertEqual(events[2][0].date(), date(2016, 3, 1))

def test_yearly_on_leap_day(self):
    """
    Test yearly repeat event w/ start day on leap day.
    """
    event = create_event(
        start_date=(2012, 2, 29),
        end_date=(2012, 2, 29),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="YEARLY",
        utc=True
    )
    d = make_aware(datetime(2014, 3, 3), utc)
    fin = d + timedelta(days=4000)
    events = self.upcoming_events(event, d, fin)
    # Only leap years within the ~11-year window yield occurrences: 3 hits.
    self.assertEqual(len(events), 3)
    # NOTE(review): range(1) only checks the first entry even though three
    # occurrences are asserted below — presumably intentional, but confirm.
    for i in range(1):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    self.assertEqual(events[0][0].date(), date(2016, 2, 29))
    self.assertEqual(events[1][0].date(), date(2020, 2, 29))
    self.assertEqual(events[2][0].date(), date(2024, 2, 29))
def test_monthly(self):
event = create_event(
start_date=(2014, 3, 31),
end_date=(2014, 3, 31),
created_by=self.user,
title="Veronika",
description="Testing 1 2 3",
repeat="MONTHLY",
utc=True
)
d = make_aware(datetime(2014, 6, 6), utc)
fin = d + timedelta(days=365)
events = self.upcoming_events(event, d, fin)
self.assertEqual(len(events), 5)
for i in range(5):
self.assertEqual(events[i][1].title, event.title)
self.assertEqual(events[i][1].start_date, event.start_date)
self.assertEqual(events[0][0].date(), date(2014, 7, 31))
self.assertEqual(events[1][0].date(), date(2014, 8, 31))
self.assertEqual(events[2][0].date(), date(2014, 10, 31))
def test_monthly2(self):
    """Test with monthly repeating event that started last year."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        utc=True
    )
    window_start = make_aware(datetime(2015, 4, 6), utc)
    window_end = window_start + timedelta(days=365)
    events = self.upcoming_events(event, window_start, window_end)
    self.assertEqual(len(events), 5)
    # Each occurrence carries the original event object.
    for _, occ in events:
        self.assertEqual(occ.title, event.title)
        self.assertEqual(occ.start_date, event.start_date)
    # First three occurrences land on the 1st of the following months.
    expected = (date(2015, 5, 1), date(2015, 6, 1), date(2015, 7, 1))
    for idx, exp in enumerate(expected):
        self.assertEqual(events[idx][0].date(), exp)
def test_monthly_with_end_repeat(self):
    """Monthly repeat stops at end_repeat: only Mar 31 and May 31 fall
    inside [start, end_repeat] (April has no 31st)."""
    event = create_event(
        start_date=(2014, 3, 31),
        end_date=(2014, 3, 31),
        created_by=self.user,
        title="Mondo",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        end_repeat=date(2014, 5, 31),
        utc=True
    )
    d = make_aware(datetime(2014, 3, 3), utc)
    fin = d + timedelta(days=365)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 2)
def test_monthly_future_event(self):
    """Monthly repeat that starts after the query window opens:
    first occurrence is the event's own start date."""
    event = create_event(
        start_date=(2015, 3, 10),
        end_date=(2015, 3, 10),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        utc=True
    )
    d = make_aware(datetime(2015, 1, 6), utc)
    fin = d + timedelta(days=365)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    self.assertEqual(events[0][0].date(), date(2015, 3, 10))
def test_weekly(self):
    """Weekly repeat: occurrences fall every 7 days after the start."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 3),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        utc=True
    )
    start = make_aware(datetime(2014, 3, 6), utc)
    finish = start + timedelta(days=90)
    events = self.upcoming_events(event, start, finish)
    self.assertEqual(len(events), 5)
    for _, occ in events:
        self.assertEqual(occ.title, event.title)
        self.assertEqual(occ.start_date, event.start_date)
    # Saturday repeats: Mar 8 then Mar 15.
    for idx, exp in enumerate((date(2014, 3, 8), date(2014, 3, 15))):
        self.assertEqual(events[idx][0].date(), exp)
def test_weekly_future_event(self):
    """Weekly repeat queried from before the event starts: the first
    occurrence is the start date itself, then every 7 days."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 3),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        utc=True
    )
    d = make_aware(datetime(2013, 12, 1), utc)
    fin = d + timedelta(days=900)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    self.assertEqual(events[0][0].date(), date(2014, 3, 1))
    self.assertEqual(events[1][0].date(), date(2014, 3, 8))
    self.assertEqual(events[2][0].date(), date(2014, 3, 15))
def test_biweekly(self):
    """Biweekly repeat: occurrences are 14 days apart."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 3),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="BIWEEKLY",
        utc=True
    )
    d = make_aware(datetime(2014, 5, 8), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    self.assertEqual(events[0][0].date(), date(2014, 5, 10))
    self.assertEqual(events[1][0].date(), date(2014, 5, 24))
def test_daily(self):
    """Daily repeat: occurrences begin on the query date and continue
    one per day."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="DAILY",
        utc=True
    )
    d = make_aware(datetime(2014, 5, 6), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    self.assertEqual(events[0][0].date(), date(2014, 5, 6))
    self.assertEqual(events[1][0].date(), date(2014, 5, 7))
def test_daily_future_event(self):
    """Daily repeat queried from before the event starts: the first
    occurrence is the event's start date."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="DAILY",
        utc=True
    )
    d = make_aware(datetime(2013, 11, 6), utc)
    fin = d + timedelta(days=900)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    self.assertEqual(events[0][0].date(), date(2014, 3, 1))
    self.assertEqual(events[1][0].date(), date(2014, 3, 2))
def test_weekday_start_on_weekend(self):
    """WEEKDAY repeat queried from a Saturday (2014-05-03): the first
    occurrences are the following Monday and Tuesday."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        utc=True
    )
    d = make_aware(datetime(2014, 5, 3), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    self.assertEqual(events[0][0].date(), date(2014, 5, 5))
    self.assertEqual(events[1][0].date(), date(2014, 5, 6))
def test_weekday_start_on_weekday(self):
    """WEEKDAY repeat queried from a Wednesday: Wed/Thu/Fri, then the
    weekend is skipped, then Mon/Tue."""
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 1),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        utc=True
    )
    start = make_aware(datetime(2014, 5, 7), utc)
    finish = start + timedelta(days=90)
    events = self.upcoming_events(event, start, finish)
    self.assertEqual(len(events), 5)
    for _, occ in events:
        self.assertEqual(occ.title, event.title)
        self.assertEqual(occ.start_date, event.start_date)
    expected_dates = (
        date(2014, 5, 7),
        date(2014, 5, 8),
        date(2014, 5, 9),
        date(2014, 5, 12),  # weekend of May 10-11 skipped
        date(2014, 5, 13),
    )
    for idx, exp in enumerate(expected_dates):
        self.assertEqual(events[idx][0].date(), exp)
def test_weekday_with_end_repeat(self):
    """WEEKDAY repeat with end_repeat one day after start: only
    Mar 31 (Mon) and Apr 1 (Tue) occur."""
    event = create_event(
        start_date=(2014, 3, 31),
        end_date=(2014, 3, 31),
        created_by=self.user,
        title="Groove",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        end_repeat=date(2014, 4, 1),
        utc=True
    )
    d = make_aware(datetime(2014, 3, 3), utc)
    fin = d + timedelta(days=365)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 2)
def test_weekday_event_in_future(self):
    """WEEKDAY repeat queried before the event starts: the first
    occurrence is the start date itself."""
    event = create_event(
        start_date=(2014, 3, 10),
        end_date=(2014, 3, 10),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        utc=True
    )
    d = make_aware(datetime(2014, 2, 7), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 5)
    for i in range(5):
        self.assertEqual(events[i][1].title, event.title)
        self.assertEqual(events[i][1].start_date, event.start_date)
    self.assertEqual(events[0][0].date(), date(2014, 3, 10))
def test_single_day_event_in_future(self):
    """A non-repeating future event appears exactly once, on its
    start date."""
    event = create_event(
        start_date=(2014, 3, 7),
        end_date=(2014, 3, 7),
        created_by=self.user,
        title="Vera",
        description="Testing 1 2 3",
        utc=True
    )
    d = make_aware(datetime(2014, 3, 3), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0][0], event.start_date)
    self.assertEqual(events[0][1].title, event.title)
    self.assertEqual(events[0][1].start_date, event.start_date)
def test_event_in_past(self):
    """
    Sending an event that has already started and ended, and that
    doesn't have an end repeat, should return an empty list.
    """
    event = create_event(
        start_date=(2014, 3, 7),
        end_date=(2014, 3, 7),
        created_by=self.user,
        title="Elvira",
        description="Testing 1 2 3",
        utc=True
    )
    # Query window opens roughly a year after the event ended.
    d = make_aware(datetime(2015, 3, 4), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_end_repeat(self):
    """Test that an event's end_repeat is respected.

    Only the Mar 8 occurrence falls between the query start (Mar 6)
    and end_repeat (Mar 10).
    """
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 3),
        created_by=self.user,
        title="Veronika",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        end_repeat=date(2014, 3, 10),
        utc=True
    )
    d = make_aware(datetime(2014, 3, 6), utc)
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0][0].date(), date(2014, 3, 8))
    self.assertEqual(events[0][1].title, event.title)
def test_event_starts_outside_of_bound(self):
    """
    Passing an event that starts outside of 'finish' argument
    should return an empty list.
    """
    event = create_event(
        start_date=(2015, 3, 1),
        end_date=(2015, 3, 3),
        created_by=self.user,
        title="Veronica",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        utc=True
    )
    d = make_aware(datetime(2014, 3, 6), utc)
    # set finish arg to 90 days from d -- still a year before the event.
    fin = d + timedelta(days=90)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_different_num(self):
    """
    If the 'num' argument is supplied, should return 'num' number of
    events.
    """
    num = 8
    event = create_event(
        start_date=(2014, 3, 1),
        end_date=(2014, 3, 3),
        created_by=self.user,
        title="Veronica",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        utc=True
    )
    d = make_aware(datetime(2014, 3, 6), utc)
    # Window is wide enough (900 days) that 'num' is the binding limit.
    fin = d + timedelta(days=900)
    events = self.upcoming_events(event, d, fin, num)
    self.assertEqual(len(events), num)
def test_events_over_for_day_weekday(self):
    """
    Tests that an event that is over for the day doesn't show up,
    but that an event not over for the day, does.
    """
    event = create_event(
        start_date=(2014, 3, 1, 21),
        end_date=(2014, 3, 1, 22),
        created_by=self.user,
        title="Jill",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        end_repeat=date(2014, 3, 10),
        utc=True
    )
    # Querying at 20:00, before the 21:00 occurrence -> still upcoming.
    d = make_aware(datetime(2014, 3, 6, 20), utc)
    # NOTE(review): microsecond=999 looks like a typo for 999999 -- confirm.
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0][0].date(), date(2014, 3, 6))
    self.assertEqual(events[0][1].title, event.title)
    # Querying at 23:00, after the event ended for the day -> gone.
    d = make_aware(datetime(2014, 3, 6, 23), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_events_over_for_day_monthly(self):
    """
    Tests that an event that is over for the day doesn't show up,
    but that an event not over for the day, does.
    """
    event = create_event(
        start_date=(2015, 5, 1, 21),
        end_date=(2015, 5, 1, 22),
        created_by=self.user,
        title="Albert",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # 20:00 -- occurrence at 21:00 is still ahead.
    d = make_aware(datetime(2015, 5, 1, 20), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    # 23:00 -- the day's occurrence is over.
    d = make_aware(datetime(2015, 5, 1, 23), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_events_over_for_day_yearly(self):
    """
    Tests that an event that is over for the day doesn't show up,
    but that an event not over for the day, does.
    """
    event = create_event(
        start_date=(2015, 5, 1, 21),
        end_date=(2015, 5, 1, 22),
        created_by=self.user,
        title="Chelsea",
        description="Testing 1 2 3",
        repeat="YEARLY",
        end_repeat=date(2017, 8, 10),
        utc=True
    )
    # 20:00 -- occurrence at 21:00 still ahead.
    d = make_aware(datetime(2015, 5, 1, 20), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    # 23:00 -- over for the day.
    d = make_aware(datetime(2015, 5, 1, 23), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_events_over_for_day_weekly(self):
    """
    Tests that an event that is over for the day doesn't show up,
    but that an event not over for the day, does.
    """
    event = create_event(
        start_date=(2015, 5, 1, 21),
        end_date=(2015, 5, 1, 22),
        created_by=self.user,
        title="Leon",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # One week after the start, at 20:00 -- occurrence still ahead.
    d = make_aware(datetime(2015, 5, 8, 20), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    # 23:00 -- over for the day.
    d = make_aware(datetime(2015, 5, 8, 23), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_events_over_for_day_daily(self):
    """
    Tests that an event that is over for the day doesn't show up,
    but that an event not over for the day, does.
    """
    event = create_event(
        start_date=(2015, 5, 1, 21),
        end_date=(2015, 5, 1, 22),
        created_by=self.user,
        title="Leon",
        description="Testing 1 2 3",
        repeat="DAILY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # 20:00 -- the day's 21:00 occurrence is still ahead.
    d = make_aware(datetime(2015, 5, 6, 20), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
    # 23:00 -- over for the day.
    d = make_aware(datetime(2015, 5, 6, 23), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
def test_multiday_event_weekly_repeat(self):
    """
    Tests that a multiday event (an event 'chunk') doesn't
    show up in upcoming events if the event already 'started'.
    """
    event = create_event(
        start_date=(2015, 5, 19),
        end_date=(2015, 5, 21),
        created_by=self.user,
        title="Ada",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # May 27: the weekly chunk started on the 26th -> not upcoming.
    d = make_aware(datetime(2015, 5, 27), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
    # May 26 is the chunk's own start day -> upcoming.
    d = make_aware(datetime(2015, 5, 26), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
def test_multiday_event_biweekly_repeat(self):
    """
    Tests that a multiday event (an event 'chunk') doesn't
    show up in upcoming events if the event already 'started'.
    """
    event = create_event(
        start_date=(2015, 5, 1),
        end_date=(2015, 5, 2),
        created_by=self.user,
        title="Claire",
        description="Testing 1 2 3",
        repeat="BIWEEKLY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # May 16: the biweekly chunk started on the 15th -> not upcoming.
    d = make_aware(datetime(2015, 5, 16), utc)
    fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 0)
    # May 15 is the chunk's start day -> upcoming.
    d = make_aware(datetime(2015, 5, 15), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
def test_multiday_event_monthly_repeat(self):
    """
    Tests that a multiday event (an event 'chunk') doesn't
    show up in upcoming events if the event already 'started'.
    """
    event = create_event(
        start_date=(2015, 5, 1),
        end_date=(2015, 5, 3),
        created_by=self.user,
        title="Chris",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        end_repeat=date(2015, 8, 10),
        utc=True
    )
    # June 2 and 3 fall inside the already-started June chunk.
    for day in (2, 3):
        d = make_aware(datetime(2015, 6, day), utc)
        fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
        events = self.upcoming_events(event, d, fin)
        self.assertEqual(len(events), 0)
    # June 1 is the chunk's start day -> upcoming.
    # NOTE(review): 'fin' is intentionally reused from the loop's last
    # iteration (end of June 3) -- confirm that is the intended window.
    d = make_aware(datetime(2015, 6, 1), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
def test_multiday_event_yearly_repeat(self):
    """
    Tests that a multiday event (an event 'chunk') doesn't
    show up in upcoming events if the event already 'started'.
    """
    event = create_event(
        start_date=(2015, 5, 1),
        end_date=(2015, 5, 3),
        created_by=self.user,
        title="Barry",
        description="Testing 1 2 3",
        repeat="YEARLY",
        end_repeat=date(2018, 8, 10),
        utc=True
    )
    # May 2 and 3 of the next year fall inside the started chunk.
    for day in (2, 3):
        d = make_aware(datetime(2016, 5, day), utc)
        fin = d.replace(hour=23, minute=59, second=59, microsecond=999)
        events = self.upcoming_events(event, d, fin)
        self.assertEqual(len(events), 0)
    # May 1 is the chunk's start day -> upcoming.
    # NOTE(review): 'fin' carries over from the loop's final iteration.
    d = make_aware(datetime(2016, 5, 1), utc)
    events = self.upcoming_events(event, d, fin)
    self.assertEqual(len(events), 1)
# ------------------------Current Happenings Test -------------------------- #
# Kept getting invocation error when trying to create a new file for these
# tests so I'm putting them here for now
def hap(self, event, d):
    """Return how many events are happening at datetime ``d``.

    ``event`` is unused directly; it exists so test bodies read
    naturally (the event was created into the DB by the caller).
    """
    # need to know the length of the returned generator, so used a little
    # hack found here: http://stackoverflow.com/a/7223557/3188521
    return sum(1 for _ in haps(now=d)['events_happening_now'])
def test_hap_no_repeat(self):
    """A non-repeating event is 'happening' during 20:00-22:00 on its
    one day, and not at 19:00 or 23:00."""
    event = create_event(
        start_date=(2015, 6, 14, 20),
        end_date=(2015, 6, 14, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        utc=True
    )
    for hr in (20, 21, 22):
        d = make_aware(datetime(2015, 6, 14, hr), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
    for hr in (19, 23):
        d = make_aware(datetime(2015, 6, 14, hr), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 0)
def test_hap_weekly_repeat(self):
    """Weekly repeat is happening during its 20:00-22:00 slot on the
    start day and one week later, not outside the slot, and not after
    end_repeat."""
    event = create_event(
        start_date=(2015, 6, 14, 20),
        end_date=(2015, 6, 14, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        end_repeat=date(2015, 6, 23),
        utc=True
    )
    # In progress on June 14 and June 21 at 21:00, 20:00 and 21:59.
    for day in (14, 21):
        for hr, mn in ((21, 0), (20, 0), (21, 59)):
            d = make_aware(datetime(2015, 6, day, hr, mn), utc)
            self.assertEqual(self.hap(event, d), 1)
    # Outside the slot on those same days.
    for day, hr in ((21, 19), (21, 22), (21, 18), (14, 19), (14, 22), (14, 18)):
        d = make_aware(datetime(2015, 6, day, hr), utc)
        self.assertEqual(self.hap(event, d), 0)
    # June 28 is past end_repeat (June 23) -- never happening.
    for hr, mn in ((20, 0), (21, 0), (21, 44)):
        d = make_aware(datetime(2015, 6, 28, hr, mn), utc)
        self.assertEqual(self.hap(event, d), 0)
def test_hap_biweekly_repeat(self):
    """Biweekly repeat is happening during its 20:00-22:00 slot on
    June 1 and June 15, not on the off week (June 8), and not after
    end_repeat (June 16 -> June 29 is silent).

    Fixes a duplicated ``events = self.hap(event, d)`` call in the
    original (the second call immediately overwrote the first).
    """
    event = create_event(
        start_date=(2015, 6, 1, 20),
        end_date=(2015, 6, 1, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="BIWEEKLY",
        end_repeat=date(2015, 6, 16),
        utc=True
    )
    # day -> expected count at 21:00, 20:00 and 21:59.
    for day, expected in ((1, 1), (8, 0), (15, 1), (29, 0)):
        for hr, mn in ((21, 0), (20, 0), (21, 59)):
            d = make_aware(datetime(2015, 6, day, hr, mn), utc)
            self.assertEqual(self.hap(event, d), expected)
def test_hap_weekday_repeat(self):
    """WEEKDAY repeat is happening during its slot on weekdays
    (June 1, June 8), not on Sunday June 7, not outside the slot,
    and not after end_repeat (June 16 -> June 17 is silent)."""
    event = create_event(
        start_date=(2015, 6, 1, 20),
        end_date=(2015, 6, 1, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        end_repeat=date(2015, 6, 16),
        utc=True
    )
    # day -> expected count at 21:00, 20:00 and 21:59.
    for day, expected in ((1, 1), (7, 0), (8, 1), (17, 0)):
        for hr, mn in ((21, 0), (20, 0), (21, 59)):
            d = make_aware(datetime(2015, 6, day, hr, mn), utc)
            self.assertEqual(self.hap(event, d), expected)
    # not happening: outside the 20:00-22:00 slot on June 1.
    for hr, mn in ((19, 0), (23, 0), (22, 2)):
        d = make_aware(datetime(2015, 6, 1, hr, mn), utc)
        self.assertEqual(self.hap(event, d), 0)
def test_hap_weekday_repeat_future(self):
    """An event whose start date is a year in the future is never
    happening now."""
    event = create_event(
        start_date=(2016, 6, 1, 20),
        end_date=(2016, 6, 1, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="WEEKDAY",
        utc=True
    )
    # Same times of day, but a year before the event starts.
    for hr, mn in ((21, 0), (20, 0), (21, 59)):
        d = make_aware(datetime(2015, 6, 1, hr, mn), utc)
        self.assertEqual(self.hap(event, d), 0)
def test_hap_monthly_repeat(self):
    """Monthly repeat is happening during its slot on the 2nd of
    June and July, not on other days, not after end_repeat (Aug 1),
    and not in the year before the event started."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 2, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        end_repeat=date(2015, 8, 1),
        utc=True
    )
    # (year, month, day) -> expected count at 21:00, 20:00 and 21:59.
    cases = (
        (2015, 6, 2, 1),   # first occurrence
        (2015, 6, 3, 0),   # wrong day
        (2015, 7, 2, 1),   # next month
        (2015, 9, 2, 0),   # past end_repeat
        (2014, 6, 2, 0),   # previous year, before the event existed
    )
    for year, month, day, expected in cases:
        for hr, mn in ((21, 0), (20, 0), (21, 59)):
            d = make_aware(datetime(year, month, day, hr, mn), utc)
            self.assertEqual(self.hap(event, d), expected)
def test_hap_yearly_repeat(self):
    """Yearly repeat is happening during its slot on June 2 of 2015
    and 2016, not on other days, and not outside the slot."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 2, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="YEARLY",
        end_repeat=date(2017, 8, 1),
        utc=True
    )
    # (year, day) -> expected count at 21:00, 20:00 and 21:59.
    for year, day, expected in ((2015, 2, 1), (2015, 3, 0), (2016, 2, 1)):
        for hr, mn in ((21, 0), (20, 0), (21, 59)):
            d = make_aware(datetime(year, 6, day, hr, mn), utc)
            self.assertEqual(self.hap(event, d), expected)
    # Outside the 20:00-22:00 slot on the occurrence day itself.
    for hr, mn in ((19, 0), (22, 2)):
        d = make_aware(datetime(2015, 6, 2, hr, mn), utc)
        self.assertEqual(self.hap(event, d), 0)
def test_hap_yearly_repeat_multi(self):
    """A multiday (June 2-4) yearly repeat is happening during its slot
    on each spanned day in both 2015 and 2016, and not in other months."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 4, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="YEARLY",
        end_repeat=date(2017, 8, 1),
        utc=True
    )
    for year in (2015, 2016):
        for day in (2, 3, 4):
            # Inside the 20:00-22:00 slot.
            d = make_aware(datetime(year, 6, day, 21), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            d = make_aware(datetime(year, 6, day, 20), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            d = make_aware(datetime(year, 6, day, 21, 59), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            # Outside the slot.
            d = make_aware(datetime(year, 6, day, 19), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 0)
            d = make_aware(datetime(year, 6, day, 22, 2), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 0)
    # Other months are silent.
    d = make_aware(datetime(2015, 7, 2, 21), utc)
    events = self.hap(event, d)
    self.assertEqual(events, 0)
    d = make_aware(datetime(2015, 5, 2, 20), utc)
    events = self.hap(event, d)
    self.assertEqual(events, 0)
def test_hap_monthly_repeat_multi(self):
    """A multiday (2nd-4th) monthly repeat is happening during its slot
    on each spanned day in June and July."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 4, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="MONTHLY",
        end_repeat=date(2017, 8, 1),
        utc=True
    )
    for month in (6, 7):
        for day in (2, 3, 4):
            # Inside the 20:00-22:00 slot.
            d = make_aware(datetime(2015, month, day, 21), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            d = make_aware(datetime(2015, month, day, 20), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            d = make_aware(datetime(2015, month, day, 21, 59), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 1)
            # Outside the slot.
            d = make_aware(datetime(2015, month, day, 19), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 0)
            d = make_aware(datetime(2015, month, day, 22, 2), utc)
            events = self.hap(event, d)
            self.assertEqual(events, 0)
def test_hap_weekly_repeat_multi(self):
    """A multiday (June 2-4) weekly repeat is happening during its slot
    on each spanned day of two consecutive weeks (2-4 and 9-11)."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 4, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="WEEKLY",
        end_repeat=date(2017, 8, 1),
        utc=True
    )
    for day in (2, 3, 4, 9, 10, 11):
        # Inside the 20:00-22:00 slot.
        d = make_aware(datetime(2015, 6, day, 21), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        d = make_aware(datetime(2015, 6, day, 20), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        d = make_aware(datetime(2015, 6, day, 21, 59), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        # Outside the slot.
        d = make_aware(datetime(2015, 6, day, 19), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 0)
        d = make_aware(datetime(2015, 6, day, 22, 2), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 0)
def test_hap_no_repeat_multi(self):
    """A multiday (June 2-4) non-repeating event is happening during its
    slot on each spanned day, and never on June 1 or June 5."""
    event = create_event(
        start_date=(2015, 6, 2, 20),
        end_date=(2015, 6, 4, 22),
        created_by=self.user,
        title="Happening",
        description="Testing 1 2 3",
        repeat="NEVER",
        end_repeat=date(2017, 8, 1),
        utc=True
    )
    for day in (2, 3, 4):
        # Inside the 20:00-22:00 slot.
        d = make_aware(datetime(2015, 6, day, 21), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        d = make_aware(datetime(2015, 6, day, 20), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        d = make_aware(datetime(2015, 6, day, 21, 59), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 1)
        # Outside the slot.
        d = make_aware(datetime(2015, 6, day, 19), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 0)
        d = make_aware(datetime(2015, 6, day, 22, 2), utc)
        events = self.hap(event, d)
        self.assertEqual(events, 0)
    # Days just outside the event span.
    d = make_aware(datetime(2015, 6, 1, 21), utc)
    events = self.hap(event, d)
    self.assertEqual(events, 0)
    d = make_aware(datetime(2015, 6, 5, 21), utc)
    events = self.hap(event, d)
    self.assertEqual(events, 0)
| 38.040956
| 78
| 0.546272
| 5,909
| 44,584
| 4.023862
| 0.03977
| 0.137528
| 0.155444
| 0.105228
| 0.921016
| 0.911595
| 0.907558
| 0.893721
| 0.877403
| 0.85385
| 0
| 0.09102
| 0.320362
| 44,584
| 1,171
| 79
| 38.073442
| 0.693673
| 0.051857
| 0
| 0.740741
| 0
| 0
| 0.028292
| 0
| 0
| 0
| 0
| 0
| 0.212476
| 1
| 0.044834
| false
| 0
| 0.006823
| 0.001949
| 0.054581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf26b7a81dae7efdf7ec1bea4e6de8c7914920da
| 67
|
py
|
Python
|
python/desc/sims/GCRCatSimInterface/validation/__init__.py
|
jchiang87/sims_GCRCatSimInterface
|
320ddc07432bcaa05723944738a6e02b6841b69e
|
[
"BSD-3-Clause"
] | 1
|
2020-11-02T21:08:39.000Z
|
2020-11-02T21:08:39.000Z
|
python/desc/sims/GCRCatSimInterface/validation/__init__.py
|
jchiang87/sims_GCRCatSimInterface
|
320ddc07432bcaa05723944738a6e02b6841b69e
|
[
"BSD-3-Clause"
] | 71
|
2018-01-12T17:12:50.000Z
|
2021-02-26T23:54:37.000Z
|
python/desc/sims/GCRCatSimInterface/validation/__init__.py
|
jchiang87/sims_GCRCatSimInterface
|
320ddc07432bcaa05723944738a6e02b6841b69e
|
[
"BSD-3-Clause"
] | 5
|
2018-01-11T18:42:42.000Z
|
2019-11-15T17:41:22.000Z
|
from .ic_mags import *
from .ic_pos import *
from .ic_agn import *
| 16.75
| 22
| 0.731343
| 12
| 67
| 3.833333
| 0.5
| 0.391304
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179104
| 67
| 3
| 23
| 22.333333
| 0.836364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf50ec9b04eda072d1f277b83e7591f9fb4d3b41
| 2,210
|
py
|
Python
|
tests/strategies/test_net_error_strategy.py
|
megaz/pycircuitbreaker
|
5af7f75b706630d44227ee52a7ddb2346238bfdd
|
[
"MIT"
] | 3
|
2020-09-17T18:38:10.000Z
|
2022-03-04T22:19:08.000Z
|
tests/strategies/test_net_error_strategy.py
|
megaz/pycircuitbreaker
|
5af7f75b706630d44227ee52a7ddb2346238bfdd
|
[
"MIT"
] | 6
|
2020-04-03T05:14:55.000Z
|
2021-06-30T21:45:12.000Z
|
tests/strategies/test_net_error_strategy.py
|
megaz/pycircuitbreaker
|
5af7f75b706630d44227ee52a7ddb2346238bfdd
|
[
"MIT"
] | 3
|
2020-04-10T22:20:00.000Z
|
2021-06-30T20:53:39.000Z
|
from time import sleep
import pytest
from pycircuitbreaker import CircuitBreaker, CircuitBreakerState
from pycircuitbreaker.strategies import CircuitBreakerStrategy
def test_net_error_strategy(error_func, success_func):
    """NET_ERROR strategy: a success offsets an error, so the breaker
    opens only once (errors - successes) reaches the threshold (3)."""
    breaker = CircuitBreaker(
        strategy=CircuitBreakerStrategy.NET_ERROR,
        error_threshold=3,
    )
    # Two errors: net count 2, still below threshold.
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.CLOSED
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.CLOSED
    # One success brings the net count back down to 1.
    breaker.call(success_func)
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.CLOSED
    # Net count hits 3 -> breaker opens.
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.OPEN
def test_net_error_strategy_half_open_to_closed(error_func, success_func):
    """After the recovery timeout the breaker goes half-open, and a
    successful call closes it again."""
    breaker = CircuitBreaker(
        strategy=CircuitBreakerStrategy.NET_ERROR,
        error_threshold=1,
        recovery_timeout=1,
    )
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.OPEN
    # Wait out the 1-second recovery timeout.
    sleep(1)
    assert breaker.state == CircuitBreakerState.HALF_OPEN
    breaker.call(success_func)
    assert breaker.state == CircuitBreakerState.CLOSED
def test_net_error_strategy_half_open_to_open(error_func, success_func):
    """A failing call while HALF_OPEN re-opens the breaker."""
    breaker = CircuitBreaker(
        recovery_timeout=1,
        error_threshold=1,
        strategy=CircuitBreakerStrategy.NET_ERROR,
    )

    # Trip the breaker with a single error (threshold is 1).
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.OPEN

    # After the recovery timeout the breaker probes in the half-open state.
    sleep(1)
    assert breaker.state == CircuitBreakerState.HALF_OPEN

    # A failure during the probe snaps the breaker straight back to OPEN.
    with pytest.raises(IOError):
        breaker.call(error_func)
    assert breaker.state == CircuitBreakerState.OPEN
def test_net_error_strategy_count_never_negative(success_func):
    """A success on a fresh breaker must not push the net error count below zero."""
    breaker = CircuitBreaker(strategy=CircuitBreakerStrategy.NET_ERROR)
    breaker.call(success_func)
    # Reach into the strategy internals: the counter is clamped at zero.
    assert breaker._strategy._net_error_count == 0
| 26
| 75
| 0.70905
| 231
| 2,210
| 6.545455
| 0.160173
| 0.094577
| 0.119048
| 0.244709
| 0.853836
| 0.838624
| 0.772487
| 0.772487
| 0.697751
| 0.697751
| 0
| 0.00464
| 0.21991
| 2,210
| 84
| 76
| 26.309524
| 0.87239
| 0
| 0
| 0.745455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.072727
| false
| 0
| 0.072727
| 0
| 0.145455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf7f297746c1506dc50bfa5fa5372690efbf012d
| 7,581
|
py
|
Python
|
tests/test_template.py
|
nkakouros/py-doq
|
542b19d9a046584029a9ce26c4b16adc8b86c034
|
[
"BSD-3-Clause"
] | 39
|
2020-02-25T09:55:02.000Z
|
2022-02-21T17:26:27.000Z
|
tests/test_template.py
|
nkakouros/py-doq
|
542b19d9a046584029a9ce26c4b16adc8b86c034
|
[
"BSD-3-Clause"
] | 25
|
2020-03-14T16:40:05.000Z
|
2022-03-04T09:04:05.000Z
|
tests/test_template.py
|
nkakouros/py-doq
|
542b19d9a046584029a9ce26c4b16adc8b86c034
|
[
"BSD-3-Clause"
] | 6
|
2020-03-14T15:37:53.000Z
|
2022-02-15T09:05:08.000Z
|
import os
from unittest import TestCase
from doq import Template
class SphinxTestCase(TestCase):
    """Render the bundled sphinx templates and check the generated docstrings.

    All cases share one ``Template`` instance pointed at
    ``<repo root>/doq/templates/sphinx``; each test builds a ``params``
    payload and compares the rendered output line-by-line.
    """

    @classmethod
    def setUpClass(cls):
        # Resolve <repo root>/doq/templates/sphinx relative to this test file.
        cls.path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'doq',
            'templates',
            'sphinx',
        )
        cls.template = Template(paths=cls.path)

    @staticmethod
    def _arg(argument, annotation=None, default=None):
        # One entry of the ``params`` list understood by the templates.
        return {
            'argument': argument,
            'annotation': annotation,
            'default': default,
        }

    def _assert_rendered(self, params, filename, expected_lines):
        # Render *filename* with *params* and compare against the
        # newline-joined expected lines.
        actual = self.template.load(params=params, filename=filename)
        self.assertEqual('\n'.join(expected_lines), actual)

    def test_without_argument(self):
        params = {'name': 'foo', 'params': [], 'return_type': None}
        self._assert_rendered(params, 'noarg.txt', ['"""foo."""'])

    def test_without_argument_and_return_type(self):
        params = {'name': 'foo', 'params': [], 'return_type': 'str'}
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':rtype: str', '"""'],
        )

    def test_with_one_argument(self):
        params = {
            'name': 'foo',
            'params': [self._arg('arg1')],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':param arg1:', '"""'],
        )

    def test_with_one_argument_and_default(self):
        params = {
            'name': 'foo',
            'params': [self._arg('arg1', default='\'foo\'')],
            'return_type': None,
        }
        # A default value does not change the rendered docstring.
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':param arg1:', '"""'],
        )

    def test_with_one_argument_and_annotaion(self):
        params = {
            'name': 'foo',
            'params': [self._arg('arg1', annotation='str')],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':param arg1:', ':type arg1: str', '"""'],
        )

    def test_with_one_argument_annotation_and_return_type(self):
        params = {
            'name': 'foo',
            'params': [self._arg('arg1', annotation='str')],
            'return_type': 'str',
        }
        self._assert_rendered(
            params,
            'def.txt',
            [
                '"""foo.',
                '',
                ':param arg1:',
                ':type arg1: str',
                ':rtype: str',
                '"""',
            ],
        )

    def test_with_two_arguments(self):
        params = {
            'name': 'foo',
            'params': [self._arg('arg1'), self._arg('arg2')],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':param arg1:', ':param arg2:', '"""'],
        )

    def test_with_two_arguments_and_defaults(self):
        params = {
            'name': 'foo',
            'params': [
                self._arg('arg1', default='\'foo\''),
                self._arg('arg2', default='None'),
            ],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            ['"""foo.', '', ':param arg1:', ':param arg2:', '"""'],
        )

    def test_with_two_arguments_and_annotation(self):
        params = {
            'name': 'foo',
            'params': [
                self._arg('arg1', annotation='str'),
                self._arg('arg2', annotation='int'),
            ],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            [
                '"""foo.',
                '',
                ':param arg1:',
                ':type arg1: str',
                ':param arg2:',
                ':type arg2: int',
                '"""',
            ],
        )

    def test_with_two_arguments_annotation_and_defaults(self):
        params = {
            'name': 'foo',
            'params': [
                self._arg('arg1', annotation='str', default='\'foo\''),
                self._arg('arg2', annotation='int', default='None'),
            ],
            'return_type': None,
        }
        self._assert_rendered(
            params,
            'def.txt',
            [
                '"""foo.',
                '',
                ':param arg1:',
                ':type arg1: str',
                ':param arg2:',
                ':type arg2: int',
                '"""',
            ],
        )

    def test_with_two_arguments_annotation_defaults_and_return_type(self):
        params = {
            'name': 'foo',
            'params': [
                self._arg('arg1', annotation='str', default='\'foo\''),
                self._arg('arg2', annotation='int', default='None'),
            ],
            'return_type': 'str',
        }
        self._assert_rendered(
            params,
            'def.txt',
            [
                '"""foo.',
                '',
                ':param arg1:',
                ':type arg1: str',
                ':param arg2:',
                ':type arg2: int',
                ':rtype: str',
                '"""',
            ],
        )

    def test_class(self):
        params = {'name': 'foo', 'defs': [{'params': []}]}
        self._assert_rendered(params, 'class.txt', ['"""foo."""\n'])
| 28.393258
| 74
| 0.391373
| 557
| 7,581
| 5.201077
| 0.098743
| 0.048326
| 0.057991
| 0.070418
| 0.876079
| 0.865033
| 0.849845
| 0.827408
| 0.799793
| 0.777356
| 0
| 0.008702
| 0.454294
| 7,581
| 266
| 75
| 28.5
| 0.691564
| 0
| 0
| 0.744
| 0
| 0
| 0.172141
| 0
| 0
| 0
| 0
| 0
| 0.048
| 1
| 0.052
| false
| 0
| 0.012
| 0
| 0.068
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b179dd9f5263c7d0c9373a12b0c0a8ccd7b450d
| 9,879
|
py
|
Python
|
intelligent-optimization-programs/PSO/sopt/Optimizers/Newton.py
|
taotao1234abcd/machine-learning-and-artificial-intelligence-python
|
04095d03a9bbe6b6189824a6a0f63b939ea04b65
|
[
"Apache-2.0"
] | 40
|
2018-06-15T00:11:44.000Z
|
2022-01-22T07:35:27.000Z
|
intelligent-optimization-programs/PSO/sopt/Optimizers/Newton.py
|
taotao1234abcd/machine-learning-and-artificial-intelligence-python
|
04095d03a9bbe6b6189824a6a0f63b939ea04b65
|
[
"Apache-2.0"
] | 3
|
2019-01-03T15:00:52.000Z
|
2019-01-03T17:56:41.000Z
|
intelligent-optimization-programs/PSO/sopt/Optimizers/Newton.py
|
taotao1234abcd/machine-learning-and-artificial-intelligence-python
|
04095d03a9bbe6b6189824a6a0f63b939ea04b65
|
[
"Apache-2.0"
] | 14
|
2019-01-10T12:34:10.000Z
|
2021-07-06T16:04:10.000Z
|
#!/usr/bin/env python3
# encoding: utf-8
"""
@version: 0.1
@author: lyrichu
@license: Apache Licence
@contact: 919987476@qq.com
@site: http://www.github.com/Lyrichu
@file: Newton.py
@time: 2018/09/11 20:29
@description:
newton based optimization method,like:dfp and bfgs
"""
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
from scipy import linalg
from sopt.util.newton_config import newton_config
from sopt.Optimizers.Gradients import gradients
class DFP:
    """Quasi-Newton optimizer using the Davidon-Fletcher-Powell (DFP) update.

    ``D`` approximates the *inverse* Hessian, so the search direction is
    obtained directly as ``D.dot(g)`` without a linear solve.
    """

    def __init__(self,
                 func,
                 variables_num,
                 func_type=newton_config.func_type_min,
                 eps=newton_config.eps,
                 init_variables=newton_config.init_variables,
                 epochs=newton_config.epochs,
                 min_step=newton_config.min_step,
                 max_step=newton_config.max_step,
                 step_size=newton_config.step_size
                 ):
        '''
        newton based optimization method of dfp
        :param func: the target function
        :param variables_num: the number of variables
        :param func_type: 'min' or 'max'
        :param eps: the min stop eps (threshold on the gradient norm)
        :param init_variables: the initial variables
        :param epochs: iteration numbers
        :param min_step: the minimize step of the line search
        :param max_step: the maximize step of the line search
        :param step_size: the step size of the line search grid
        '''
        self.func = func
        self.variables_num = variables_num
        self.func_type = func_type
        self.eps = eps
        self.init_variables = init_variables
        if self.init_variables is None:
            # Fall back to a random start point in [-1, 1)^n.
            self.init_variables = np.random.uniform(-1, 1, self.variables_num)
        assert type(self.init_variables) == np.ndarray and len(self.init_variables) == self.variables_num, \
            "init_variables should be int or float ndarray of size %d !" % self.variables_num
        self.epochs = epochs
        self.min_step = min_step
        self.max_step = max_step
        self.step_size = step_size
        self.generations_targets = []  # target value recorded per iteration
        self.generations_points = []   # point recorded per iteration
        self.global_best_target = 0
        self.global_best_point = None
        self.global_best_index = 0

    def find_best_step(self, x, d):
        """Grid line search: the best step in [min_step, max_step) along -d from x."""
        best_step = self.min_step
        best_res = self.func(x - best_step * d)
        for step in np.arange(self.min_step, self.max_step, self.step_size):
            cur_res = self.func(x - step * d)
            if self.func_type == newton_config.func_type_min:
                if cur_res < best_res:
                    best_step = step
                    best_res = cur_res
            else:
                if cur_res > best_res:
                    best_step = step
                    best_res = cur_res
        return best_step

    def run(self):
        """Iterate the DFP update, recording the trajectory and the best point."""
        D = np.eye(self.variables_num, self.variables_num)
        x = self.init_variables.reshape(-1, 1)
        for _ in range(self.epochs):
            g1 = gradients(self.func, x.flatten()).reshape((self.variables_num, 1))
            # D approximates the INVERSE Hessian, so no linear solve is needed.
            d = D.dot(g1)
            best_step = self.find_best_step(x, d)
            s = best_step * d
            x -= s
            g2 = gradients(self.func, x.flatten()).reshape((self.variables_num, 1))
            if np.sqrt((g2 ** 2).sum()) < self.eps:
                # Gradient norm small enough: converged.
                break
            y = g2 - g1
            # DFP rank-2 update of the inverse-Hessian approximation.
            D += s.dot(s.T) / ((s.T).dot(y)) - D.dot(y).dot(y.T).dot(D) / ((y.T).dot(D).dot(y))
            self.generations_points.append(x.flatten())
            self.generations_targets.append(self.func(x.flatten()))
        if self.func_type == newton_config.func_type_min:
            self.global_best_target = np.min(np.array(self.generations_targets))
            self.global_best_index = np.argmin(np.array(self.generations_targets))
        else:
            self.global_best_target = np.max(np.array(self.generations_targets))
            self.global_best_index = np.argmax(np.array(self.generations_targets))
        self.global_best_point = self.generations_points[int(self.global_best_index)]

    def save_plot(self, save_name="dfp.png"):
        """Plot the target-value history and save the figure to *save_name*."""
        plt.plot(self.generations_targets, 'r-')
        plt.xlabel("epochs")
        plt.ylabel("target function value")
        # BUG FIX: the title string was previously passed to plt.plot(),
        # which does not set a title; plt.title() is the intended call.
        plt.title("newton dfp with %d epochs" % self.epochs)
        plt.savefig(save_name)

    def show_result(self):
        """Print the optimizer configuration and the best point/target found."""
        print("-" * 20, "newton dfp config is:", "-" * 20)
        for k, v in self.__dict__.items():
            # Skip bulky arrays/lists and derived result fields.
            if k not in ['init_variables', 'generations_targets', 'generations_points', 'm',
                         'global_best_target', 'global_best_point', 'global_best_index']:
                print("%s:%s" % (k, v))
        print("-" * 20, "newton dfp calculation result is:", "-" * 20)
        print("global best epoch/total epochs:%s/%s" % (self.global_best_index, self.epochs))
        print("global best point:", self.global_best_point)
        print("global best target:", self.global_best_target)
class BFGS:
    """Quasi-Newton optimizer using the BFGS update.

    ``B`` approximates the Hessian itself, so each search direction requires
    solving ``B d = g`` (done here via ``linalg.inv``).
    """

    def __init__(self,
                 func,
                 variables_num,
                 func_type=newton_config.func_type_min,
                 eps=newton_config.eps,
                 init_variables=newton_config.init_variables,
                 epochs=newton_config.epochs,
                 min_step=newton_config.min_step,
                 max_step=newton_config.max_step,
                 step_size=newton_config.step_size
                 ):
        '''
        newton based optimization method of bfgs
        :param func: the target function
        :param variables_num: the number of variables
        :param func_type: 'min' or 'max'
        :param eps: the min stop eps (threshold on the gradient norm)
        :param init_variables: the initial variables
        :param epochs: iteration numbers
        :param min_step: the minimize step of the line search
        :param max_step: the maximize step of the line search
        :param step_size: the step size of the line search grid
        '''
        self.func = func
        self.variables_num = variables_num
        self.func_type = func_type
        self.eps = eps
        self.init_variables = init_variables
        if self.init_variables is None:
            # Fall back to a random start point in [-1, 1)^n.
            self.init_variables = np.random.uniform(-1, 1, self.variables_num)
        assert type(self.init_variables) == np.ndarray and len(self.init_variables) == self.variables_num, \
            "init_variables should be int or float ndarray of size %d !" % self.variables_num
        self.epochs = epochs
        self.min_step = min_step
        self.max_step = max_step
        self.step_size = step_size
        self.generations_targets = []  # target value recorded per iteration
        self.generations_points = []   # point recorded per iteration
        self.global_best_target = 0
        self.global_best_point = None
        self.global_best_index = 0

    def find_best_step(self, x, d):
        """Grid line search: the best step in [min_step, max_step) along -d from x."""
        best_step = self.min_step
        best_res = self.func(x - best_step * d)
        for step in np.arange(self.min_step, self.max_step, self.step_size):
            cur_res = self.func(x - step * d)
            if self.func_type == newton_config.func_type_min:
                if cur_res < best_res:
                    best_step = step
                    best_res = cur_res
            else:
                if cur_res > best_res:
                    best_step = step
                    best_res = cur_res
        return best_step

    def run(self):
        """Iterate the BFGS update, recording the trajectory and the best point."""
        B = np.eye(self.variables_num, self.variables_num)
        x = self.init_variables.reshape(-1, 1)
        for _ in range(self.epochs):
            g1 = gradients(self.func, x.flatten()).reshape((self.variables_num, 1))
            # B approximates the Hessian, so solve B d = g via the inverse.
            d = linalg.inv(B).dot(g1)
            best_step = self.find_best_step(x, d)
            s = best_step * d
            x -= s
            g2 = gradients(self.func, x.flatten()).reshape((self.variables_num, 1))
            if np.sqrt((g2 ** 2).sum()) < self.eps:
                # Gradient norm small enough: converged.
                break
            y = g2 - g1
            # BFGS rank-2 update of the Hessian approximation.
            B += y.dot(y.T) / ((y.T).dot(s)) - (B.dot(s).dot(s.T).dot(B)) / (s.T.dot(B).dot(s))
            self.generations_points.append(x.flatten())
            self.generations_targets.append(self.func(x.flatten()))
        if self.func_type == newton_config.func_type_min:
            self.global_best_target = np.min(np.array(self.generations_targets))
            self.global_best_index = np.argmin(np.array(self.generations_targets))
        else:
            self.global_best_target = np.max(np.array(self.generations_targets))
            self.global_best_index = np.argmax(np.array(self.generations_targets))
        self.global_best_point = self.generations_points[int(self.global_best_index)]

    def save_plot(self, save_name="bfgs.png"):
        """Plot the target-value history and save the figure to *save_name*."""
        plt.plot(self.generations_targets, 'r-')
        plt.xlabel("epochs")
        plt.ylabel("target function value")
        # BUG FIX: the title string was previously passed to plt.plot(),
        # which does not set a title; plt.title() is the intended call.
        plt.title("newton bfgs with %d epochs" % self.epochs)
        plt.savefig(save_name)

    def show_result(self):
        """Print the optimizer configuration and the best point/target found."""
        print("-" * 20, "newton bfgs config is:", "-" * 20)
        for k, v in self.__dict__.items():
            # Skip bulky arrays/lists and derived result fields.
            if k not in ['init_variables', 'generations_targets', 'generations_points', 'm',
                         'global_best_target', 'global_best_point', 'global_best_index']:
                print("%s:%s" % (k, v))
        print("-" * 20, "newton bfgs calculation result is:", "-" * 20)
        print("global best epoch/total epochs:%s/%s" % (self.global_best_index, self.epochs))
        print("global best point:", self.global_best_point)
        print("global best target:", self.global_best_target)
| 40.322449
| 108
| 0.598846
| 1,297
| 9,879
| 4.339244
| 0.125675
| 0.071073
| 0.069652
| 0.040512
| 0.90796
| 0.90796
| 0.90796
| 0.90796
| 0.90796
| 0.90796
| 0
| 0.010763
| 0.294665
| 9,879
| 244
| 109
| 40.487705
| 0.796929
| 0.109829
| 0
| 0.871508
| 0
| 0
| 0.084217
| 0
| 0
| 0
| 0
| 0
| 0.011173
| 1
| 0.055866
| false
| 0
| 0.03352
| 0
| 0.111732
| 0.067039
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b343f8398f73c1fef44bbb55cdc4f1b85ea3e6d
| 20,765
|
py
|
Python
|
helper/generate_plots.py
|
DarkStar1997/EcosystemDocgen
|
701d0466dccc231d1e2be26f2bf45fd3a900bf2d
|
[
"MIT"
] | null | null | null |
helper/generate_plots.py
|
DarkStar1997/EcosystemDocgen
|
701d0466dccc231d1e2be26f2bf45fd3a900bf2d
|
[
"MIT"
] | null | null | null |
helper/generate_plots.py
|
DarkStar1997/EcosystemDocgen
|
701d0466dccc231d1e2be26f2bf45fd3a900bf2d
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
class Organism:
    """Base interface for per-species plot generators.

    Subclasses implement each ``generate_*_graphs`` hook; ``generate_graphs``
    drives all of them in a fixed order against the same DataFrame.
    """

    @classmethod
    def generate_graphs(cls, df, savepath='outputs/img/'):
        # Run every plot category in a fixed, documented order.
        for hook in (
            cls.generate_mortality_graphs,
            cls.generate_demographic_graphs,
            cls.generate_copulation_graphs,
            cls.generate_dependency_graphs,
            cls.generate_average_graphs,
            cls.generate_theoretical_graphs,
        ):
            hook(df, savepath)

    @classmethod
    def generate_mortality_graphs(cls, df, savepath):
        raise NotImplementedError

    @classmethod
    def generate_demographic_graphs(cls, df, savepath):
        raise NotImplementedError

    @classmethod
    def generate_copulation_graphs(cls, df, savepath):
        raise NotImplementedError

    @classmethod
    def generate_dependency_graphs(cls, df, savepath):
        raise NotImplementedError

    @classmethod
    def generate_average_graphs(cls, df, savepath):
        raise NotImplementedError

    @classmethod
    def generate_theoretical_graphs(cls, df, savepath):
        raise NotImplementedError
class Plant(Organism):
    """Plot generator for plant statistics DataFrames.

    Every hook delegates to ``_plot_metric``, which renders one figure per
    call (suptitle, series, optional y-label/legend, rotated y-ticks) and
    saves it under *savepath*.
    """

    @classmethod
    def _plot_metric(cls, df, savepath, title, series, filename,
                     ylabel=None, legend=False):
        """Plot one or more columns of *df* against its index and save the figure.

        *series* is a sequence of ``(column, fmt, label)`` tuples; *label* is
        ``None`` for unlabeled single-series plots.
        """
        plt.suptitle(title)
        for column, fmt, label in series:
            if label is None:
                plt.plot(df.index, df[column], fmt)
            else:
                plt.plot(df.index, df[column], fmt, label=label)
        if ylabel is not None:
            plt.ylabel(ylabel)
        if legend:
            plt.legend(loc="upper right")
        plt.yticks(rotation=45)
        plt.savefig(savepath + filename)
        plt.clf()

    @classmethod
    def generate_mortality_graphs(cls, df, savepath):
        """AGE_DTH, FIT_DTH, AFR_DTH, MX_AGE."""
        for title, column, fmt, ylabel in (
            ('Age affecting Death', 'AGE_DTH', '-r', 'Factor'),
            ('Fitness affecting Death', 'FIT_DTH', '-b', 'Factor'),
            ('Age vs. Fitness affecting Death', 'AFR_DTH', '-b', 'Ratio'),
            ('Max age with time', 'MX_AGE', '-b', 'Max age'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)

    @classmethod
    def generate_demographic_graphs(cls, df, savepath):
        """POP."""
        cls._plot_metric(df, savepath, 'Population', [('POP', '-b', None)],
                         'POP.png', ylabel='Population')

    @classmethod
    def generate_copulation_graphs(cls, df, savepath):
        """M_POP, M_AGE_START, M_AGE_END, C_PROB, MT_PROB, OF_FACTOR."""
        for title, column, fmt, ylabel in (
            ('Matable population', 'M_POP', '-r', 'Matable Population'),
            ('Mating Start', 'M_AGE_START', '-b', 'Age'),
            ('Mating End', 'M_AGE_END', '-r', 'Age'),
            ('Mutation', 'MT_PROB', '-r', 'Probability'),
            ('Conceive', 'C_PROB', '-b', 'Probability'),
            ('Multiple offprings', 'OF_FACTOR', '-b', 'Factor'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)

    @classmethod
    def generate_dependency_graphs(cls, df, savepath):
        """HT_VT, WT_VT."""
        cls._plot_metric(
            df, savepath, 'Factors affecting Vitality',
            [('HT_VT', '-r', 'Height'), ('WT_VT', '-b', 'Weight')],
            'X_VT.png', ylabel='Factor', legend=True,
        )

    @classmethod
    def generate_average_graphs(cls, df, savepath):
        """AVG_GEN, AVG_AGE, AVG_HT, AVG_WT, AVG_SFIT, AVG_IMM, AVG_DTHF, AVGMA_VT."""
        # Average plots intentionally carry no y-axis label.
        for title, column, fmt in (
            ('Generation', 'AVG_GEN', '-g'),
            ('Age', 'AVG_AGE', '-m'),
            ('Height', 'AVG_HT', '-b'),
            ('Weight', 'AVG_WT', '-r'),
            ('Static Fitness', 'AVG_SFIT', '-b'),
            ('Immunity', 'AVG_IMM', '-r'),
            ('Death Factor', 'AVG_DTHF', '-m'),
            ('Max vitality at age', 'AVGMA_VT', '-c'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png')

    @classmethod
    def generate_theoretical_graphs(cls, df, savepath):
        """TMB_HT, TMB_WT, TM_HT, TM_WT, TMB_VT, TMM_VT, TMM_HT, TMM_WT."""
        for title, column, fmt, ylabel in (
            ('Max base height', 'TMB_HT', '-b', 'meters'),
            ('Max height', 'TM_HT', '-b', 'meters'),
            ('Max base weight', 'TMB_WT', '-r', 'kg'),
            ('Max weight', 'TM_WT', '-r', 'kg'),
            ('Max base vitality', 'TMB_VT', '-g', None),
            ('Max vitality multiplier', 'TMM_VT', '-m', None),
            ('Max height multiplier', 'TMM_HT', '-r', None),
            ('Max weight multiplier', 'TMM_WT', '-b', None),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)
class Animal(Organism):
    """Plot generator for animal statistics DataFrames.

    Every hook delegates to ``_plot_metric``, which renders one figure per
    call (suptitle, series, optional y-label/legend, rotated y-ticks) and
    saves it under *savepath*.
    """

    @classmethod
    def _plot_metric(cls, df, savepath, title, series, filename,
                     ylabel=None, legend=False):
        """Plot one or more columns of *df* against its index and save the figure.

        *series* is a sequence of ``(column, fmt, label)`` tuples; *label* is
        ``None`` for unlabeled single-series plots.
        """
        plt.suptitle(title)
        for column, fmt, label in series:
            if label is None:
                plt.plot(df.index, df[column], fmt)
            else:
                plt.plot(df.index, df[column], fmt, label=label)
        if ylabel is not None:
            plt.ylabel(ylabel)
        if legend:
            plt.legend(loc="upper right")
        plt.yticks(rotation=45)
        plt.savefig(savepath + filename)
        plt.clf()

    @classmethod
    def generate_mortality_graphs(cls, df, savepath):
        """AGE_DTH, FIT_DTH, AFR_DTH, MX_AGE."""
        for title, column, fmt, ylabel in (
            ('Age affecting Death', 'AGE_DTH', '-r', 'Factor'),
            ('Fitness affecting Death', 'FIT_DTH', '-b', 'Factor'),
            ('Age vs. Fitness affecting Death', 'AFR_DTH', '-b', 'Ratio'),
            ('Max age with time', 'MX_AGE', '-b', 'Max age'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)

    @classmethod
    def generate_demographic_graphs(cls, df, savepath):
        """MALE, FEMALE."""
        cls._plot_metric(
            df, savepath, 'Population',
            [('MALE', '-b', 'Male'), ('FEMALE', '-r', 'Female')],
            'POP.png', ylabel='Population', legend=True,
        )

    @classmethod
    def generate_copulation_graphs(cls, df, savepath):
        """M_MALE, M_FEMALE, M_AGE_START, M_AGE_END, C_PROB, MT_PROB, OF_FACTOR."""
        cls._plot_metric(
            df, savepath, 'Matable population',
            [('M_MALE', '-b', 'Male'), ('M_FEMALE', '-r', 'Female')],
            'M_POP.png', ylabel='Matable Population', legend=True,
        )
        for title, column, fmt, ylabel in (
            ('Mating Start', 'M_AGE_START', '-b', 'Age'),
            ('Mating End', 'M_AGE_END', '-r', 'Age'),
            ('Mutation', 'MT_PROB', '-r', 'Probability'),
            ('Conceive', 'C_PROB', '-b', 'Probability'),
            ('Multiple offprings', 'OF_FACTOR', '-b', 'Factor'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)

    @classmethod
    def generate_dependency_graphs(cls, df, savepath):
        """HT_ST, WT_ST, HT_VT, WT_VT, HT_SP, WT_SP, ST_SP, VT_SP, VT_AP, ST_AP."""
        for title, series, filename in (
            ('Factors affecting Stamina',
             [('HT_ST', '-r', 'Height'), ('WT_ST', '-b', 'Weight')],
             'X_ST.png'),
            ('Factors affecting Vitality',
             [('HT_VT', '-r', 'Height'), ('WT_VT', '-b', 'Weight')],
             'X_VT.png'),
            ('Factors affecting Speed',
             [('HT_SP', '-r', 'Height'), ('WT_SP', '-b', 'Weight'),
              ('ST_SP', '-c', 'Stamina'), ('VT_SP', '-g', 'Vitality')],
             'X_SP.png'),
            ('Factors affecting Appetite',
             [('VT_AP', '-g', 'Vitality'), ('ST_AP', '-c', 'Stamina')],
             'X_AP.png'),
        ):
            cls._plot_metric(df, savepath, title, series, filename,
                             ylabel='Factor', legend=True)

    @classmethod
    def generate_average_graphs(cls, df, savepath):
        """AVG_* and AVGMA_* averages plus AVG_VIS."""
        # Average plots intentionally carry no y-axis label.
        for title, column, fmt in (
            ('Generation', 'AVG_GEN', '-g'),
            ('Age', 'AVG_AGE', '-m'),
            ('Height', 'AVG_HT', '-b'),
            ('Weight', 'AVG_WT', '-r'),
            ('Static Fitness', 'AVG_SFIT', '-b'),
            ('Immunity', 'AVG_IMM', '-r'),
            ('Death Factor', 'AVG_DTHF', '-m'),
            ('Max speed at age', 'AVGMA_SP', '-g'),
            ('Max appetite at age', 'AVGMA_AP', '-b'),
            ('Max stamina at age', 'AVGMA_ST', '-r'),
            ('Max vitality at age', 'AVGMA_VT', '-c'),
            ('Max vision radius', 'AVG_VIS', '-m'),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png')

    @classmethod
    def generate_theoretical_graphs(cls, df, savepath):
        """TMB_*, TM_* and TMM_* theoretical maxima."""
        for title, column, fmt, ylabel in (
            ('Max base height', 'TMB_HT', '-b', 'meters'),
            ('Max height', 'TM_HT', '-b', 'meters'),
            ('Max base weight', 'TMB_WT', '-r', 'kg'),
            ('Max weight', 'TM_WT', '-r', 'kg'),
            ('Max base speed', 'TMB_SP', '-b', None),
            ('Max base appetite', 'TMB_AP', '-r', None),
            ('Max base stamina', 'TMB_ST', '-c', None),
            ('Max base vitality', 'TMB_VT', '-g', None),
            ('Max speed', 'TM_SP', '-b', None),
            ('Max speed multiplier', 'TMM_SP', '-g', None),
            ('Max stamina multiplier', 'TMM_ST', '-c', None),
            ('Max vitality multiplier', 'TMM_VT', '-m', None),
            ('Max height multiplier', 'TMM_HT', '-r', None),
            ('Max weight multiplier', 'TMM_WT', '-b', None),
        ):
            cls._plot_metric(df, savepath, title, [(column, fmt, None)],
                             column + '.png', ylabel=ylabel)
| 23.922811
| 58
| 0.496557
| 2,621
| 20,765
| 3.827928
| 0.04235
| 0.023323
| 0.062195
| 0.069969
| 0.961427
| 0.944583
| 0.922257
| 0.875212
| 0.847105
| 0.814711
| 0
| 0.010378
| 0.359595
| 20,765
| 867
| 59
| 23.950404
| 0.744097
| 0.027883
| 0
| 0.89604
| 1
| 0
| 0.150139
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031353
| false
| 0
| 0.00165
| 0
| 0.037954
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9921be7e084e8f6f50cde0fb895d21230b49f097
| 10,976
|
py
|
Python
|
tests/tasks/tasks/util/test_array_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | null | null | null |
tests/tasks/tasks/util/test_array_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | 34
|
2015-12-14T22:06:57.000Z
|
2018-02-07T08:40:47.000Z
|
tests/tasks/tasks/util/test_array_tasks.py
|
Exopy/ecpy_hqc_legacy
|
3e31a8865d130907a82005e6cd78d99c6da7a951
|
[
"BSD-3-Clause"
] | 6
|
2018-04-20T14:48:54.000Z
|
2021-06-23T22:25:17.000Z
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyHqcLegacy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Test the tasks operating on numpy.arrays.
"""
from multiprocessing import Event
import pytest
import enaml
import numpy as np
from exopy.tasks.api import RootTask
from exopy.testing.util import show_and_close_widget
from exopy_hqc_legacy.tasks.tasks.util.array_tasks import (ArrayExtremaTask,
ArrayFindValueTask)
with enaml.imports():
from exopy_hqc_legacy.tasks.tasks.util.views.array_views\
import ArrayExtremaView, ArrayFindValueView
class TestArrayExtremaTask(object):
    """Tests for ArrayExtremaTask run against a small structured numpy array."""
    def setup(self):
        # Root task with a 5-element structured array in its database whose
        # 'var1' column has its minimum at index 1 and its maximum at index 3.
        self.root = RootTask(should_stop=Event(), should_pause=Event())
        self.task = ArrayExtremaTask(name='Test')
        self.root.add_child_task(0, self.task)
        array = np.zeros((5,), dtype={'names': ['var1', 'var2'],
                                      'formats': ['f8', 'f8']})
        array['var1'][1] = -1
        array['var1'][3] = 1
        self.root.write_in_database('array', array)
    def test_mode_observation(self):
        """Check that the database is correctly updated when the mode changes.
        """
        # 'Min' mode exposes only the min entries.
        self.task.mode = 'Min'
        assert self.task.get_from_database('Test_min_ind') == 0
        assert self.task.get_from_database('Test_min_value') == 1.0
        aux = self.task.list_accessible_database_entries()
        assert 'Test_max_ind' not in aux
        assert 'Test_max_value' not in aux
        # 'Max' mode swaps to the max entries only.
        self.task.mode = 'Max'
        assert self.task.get_from_database('Test_max_ind') == 0
        assert self.task.get_from_database('Test_max_value') == 2.0
        aux = self.task.list_accessible_database_entries()
        assert 'Test_min_ind' not in aux
        assert 'Test_min_value' not in aux
        # 'Max & min' exposes both sets of entries.
        self.task.mode = 'Max & min'
        assert self.task.get_from_database('Test_min_ind') == 0
        assert self.task.get_from_database('Test_min_value') == 1.0
        assert self.task.get_from_database('Test_max_ind') == 0
        assert self.task.get_from_database('Test_max_value') == 2.0
    def test_check1(self):
        """Simply test that everything is ok if the array exists in the
        database.
        """
        # Plain (unstructured) array, so no column name is required.
        self.root.write_in_database('array', np.zeros((5,)))
        self.task.target_array = '{array}'
        test, traceback = self.task.check()
        assert test
        assert not traceback
    def test_check2(self):
        """Simply test that everything is ok if the array exists in the
        database and the column name is ok.
        """
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        test, traceback = self.task.check()
        assert test
        assert not traceback
    def test_check3(self):
        """Test handling a wrong array name.
        """
        # '*{array}' is not a valid formula, so the check must fail on the
        # target_array member itself.
        self.task.target_array = '*{array}'
        self.task.column_name = 'var3'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test-target_array' in traceback
    def test_check4(self):
        """Test handling an array without names when a name is given.
        """
        self.root.write_in_database('array', np.zeros((5,)))
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check5(self):
        """Test handling an array with names when no name is given.
        """
        self.task.target_array = '{array}'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check6(self):
        """Test handling a wrong column name.
        """
        self.task.target_array = '{array}'
        self.task.column_name = 'var3'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check7(self):
        """Test handling a 2d array without names.
        """
        self.task.target_array = '{array}'
        array = np.zeros((5, 5))
        self.root.write_in_database('array', array)
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_perform1(self):
        """Test performing when mode is 'Max'.
        """
        self.task.mode = 'Max'
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.root.prepare()
        self.task.perform()
        # Max of 'var1' (1.0) sits at index 3, per setup().
        assert self.task.get_from_database('Test_max_ind') == 3
        assert self.task.get_from_database('Test_max_value') == 1.0
    def test_perform2(self):
        """Test performing when mode is 'Min'.
        """
        self.task.mode = 'Min'
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.root.prepare()
        self.task.perform()
        # Min of 'var1' (-1.0) sits at index 1, per setup().
        assert self.task.get_from_database('Test_min_ind') == 1
        assert self.task.get_from_database('Test_min_value') == -1.0
    def test_perform3(self):
        """Test performing when mode is 'Max & min'.
        """
        self.task.mode = 'Max & min'
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.root.prepare()
        self.task.perform()
        assert self.task.get_from_database('Test_max_ind') == 3
        assert self.task.get_from_database('Test_max_value') == 1.0
        assert self.task.get_from_database('Test_min_ind') == 1
        assert self.task.get_from_database('Test_min_value') == -1.0
    def test_perform4(self):
        """Test performing when no column name is given.
        """
        # All-zero plain array: the extremum is trivially 0.0 at index 0.
        self.root.write_in_database('array', np.zeros((5,)))
        self.task.mode = 'Max'
        self.task.target_array = '{array}'
        self.root.prepare()
        self.task.perform()
        assert self.task.get_from_database('Test_max_ind') == 0
        assert self.task.get_from_database('Test_max_value') == 0.0
@pytest.mark.ui
def test_array_extrema_view(exopy_qtbot):
    """Check that the ArrayExtremaView widget can be shown and closed."""
    root_task = RootTask(should_stop=Event(), should_pause=Event())
    extrema_task = ArrayExtremaTask(name='Test')
    root_task.children.append(extrema_task)
    view = ArrayExtremaView(task=extrema_task)
    show_and_close_widget(exopy_qtbot, view)
class TestArrayFindValueTask(object):
    """Tests for ArrayFindValueTask locating a value inside a structured array."""
    def setup(self):
        # The searched value (1.6359) appears twice, at indices 3 and 4, so
        # perform() is expected to report the first occurrence.
        self.root = RootTask(should_stop=Event(), should_pause=Event())
        self.task = ArrayFindValueTask(name='Test')
        self.root.add_child_task(0, self.task)
        array = np.zeros((5,), dtype={'names': ['var1', 'var2'],
                                      'formats': ['f8', 'f8']})
        array['var1'][1] = -1.5
        array['var1'][3] = 1.6359
        array['var1'][4] = 1.6359
        self.root.write_in_database('array', array)
    def test_check1(self):
        """Simply test that everything is ok if the array exists in the
        database and value can be evaluated.
        """
        self.root.write_in_database('array', np.zeros((5,)))
        self.task.target_array = '{array}'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert test
        assert not traceback
    def test_check2(self):
        """Simply test that everything is ok if the array exists in the
        database the column name is ok, and value can be evaluated.
        """
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert test
        assert not traceback
    def test_check3(self):
        """Test handling a wrong array name.
        """
        # Invalid target formula: the error is attributed to target_array.
        self.task.target_array = '*{array}'
        self.task.column_name = 'var3'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test-target_array' in traceback
    def test_check4(self):
        """Test handling an array without names when a name is given.
        """
        self.root.write_in_database('array', np.zeros((5,)))
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check5(self):
        """Test handling an array with names when no name is given.
        """
        self.task.target_array = '{array}'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check6(self):
        """Test handling a wrong column name.
        """
        self.task.target_array = '{array}'
        self.task.column_name = 'var3'
        self.task.value = '1.6359'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_check7(self):
        """Test handling a wrong value.
        """
        # Invalid value formula: the error is attributed to the value member.
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.task.value = '*1.6359'
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test-value' in traceback
    def test_check8(self):
        """Test handling a 2d array value.
        """
        self.task.target_array = '{array}'
        self.task.value = '1.6359'
        array = np.zeros((5, 5))
        self.root.write_in_database('array', array)
        test, traceback = self.task.check()
        assert not test
        assert len(traceback) == 1
        assert 'root/Test' in traceback
    def test_perform1(self):
        """Test performing.
        """
        self.task.value = '1.6359'
        self.task.target_array = '{array}'
        self.task.column_name = 'var1'
        self.root.prepare()
        self.task.perform()
        # 1.6359 occurs at indices 3 and 4; the first match wins.
        assert self.task.get_from_database('Test_index') == 3
@pytest.mark.ui
def test_array_find_value_view(exopy_qtbot):
    """Test the array find value view.

    The docstring previously said "array extrema view" — a copy-paste error;
    this test exercises ArrayFindValueView.
    """
    root = RootTask(should_stop=Event(), should_pause=Event())
    task = ArrayFindValueTask(name='Test')
    root.children.append(task)
    show_and_close_widget(exopy_qtbot, ArrayFindValueView(task=task))
| 29.907357
| 79
| 0.594206
| 1,387
| 10,976
| 4.556597
| 0.111752
| 0.118987
| 0.044304
| 0.060127
| 0.854905
| 0.838608
| 0.808544
| 0.791456
| 0.781013
| 0.769462
| 0
| 0.021666
| 0.272504
| 10,976
| 366
| 80
| 29.989071
| 0.769818
| 0.16035
| 0
| 0.817757
| 0
| 0
| 0.092967
| 0.004881
| 0
| 0
| 0
| 0
| 0.299065
| 1
| 0.116822
| false
| 0
| 0.042056
| 0
| 0.168224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
994f5dedb5c3cfb7d9d91da1a0075c7431a010c7
| 20,579
|
py
|
Python
|
test/test_run_backtest.py
|
jrmeier/fast_trade
|
2ecfcd281be7690c9d876d4c280d09970f95d550
|
[
"MIT"
] | null | null | null |
test/test_run_backtest.py
|
jrmeier/fast_trade
|
2ecfcd281be7690c9d876d4c280d09970f95d550
|
[
"MIT"
] | 1
|
2020-01-19T19:21:19.000Z
|
2020-01-20T13:50:50.000Z
|
test/test_run_backtest.py
|
jrmeier/fast_trade
|
2ecfcd281be7690c9d876d4c280d09970f95d550
|
[
"MIT"
] | null | null | null |
from numpy import nan
from fast_trade.run_backtest import (
prepare_new_backtest,
process_logic_and_generate_actions,
run_backtest,
take_action,
clean_field_type,
process_single_logic,
process_single_frame,
determine_action,
apply_backtest_to_df,
)
from collections import namedtuple
import pandas as pd
def test_take_action_greater_than():
    """take_action is True when close > short on the current frame."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112)
    logic = [["close", ">", "short"]]
    assert take_action(frame, logic, last_frames=[frame]) is True
def test_take_action_less_than():
    """take_action is False when close < short does not hold."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112)
    logic = [["close", "<", "short"]]
    assert take_action(frame, logic, last_frames=[frame]) is False
def test_take_action_not_equal():
    """take_action supports '!=' against a literal number."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112)
    logic = [["close", "!=", 0.0219]]
    assert take_action(frame, logic, last_frames=[frame]) is True
def test_take_action_no_res():
    """With no logics and no previous frames the result defaults to False."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112)
    assert take_action(frame, [], last_frames=[]) is False
def test_take_action_many_frames():
    """A 2-frame confirmation passes when both frames match the value."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112)
    logic = [["close", "=", 0.0212, 2]]
    assert take_action(frame, logic, last_frames=[frame, frame]) is True
def test_take_action_many_frames_fail():
    """A 3-frame confirmation fails when a middle frame breaks the streak."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    shared = dict(open=0.01, high=0.025, low=0.01, volume=36898, short=0.0112)
    frames = [
        MockRow(date=1523937963, close=0.0212, **shared),
        MockRow(date=1523937966, close=0.0219, **shared),  # breaks the run
        MockRow(date=1523937999, close=0.0212, **shared),
    ]
    logic = [["close", "=", 0.0212, 3]]
    assert take_action(frames[0], logic, last_frames=frames) is False
# Like the previous test, but the current frame's close (0.0214) also
# misses the target value, so the 3-frame confirmation cannot pass.
def test_take_action_many_frames_diff():
    """A 3-frame confirmation fails when the current frame misses the target."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    shared = dict(open=0.01, high=0.025, low=0.01, volume=36898, short=0.0112)
    frames = [
        MockRow(date=1523937963, close=0.0214, **shared),  # != 0.0212
        MockRow(date=1523937966, close=0.0219, **shared),
        MockRow(date=1523937999, close=0.0212, **shared),
    ]
    logic = [["close", "=", 0.0212, 3]]
    assert take_action(frames[0], logic, last_frames=frames) is False
def test_clean_field_type_num():
    """A field naming a numeric row attribute resolves to that number."""
    MockRow = namedtuple("MockRow", "date close open high low volume short mock_f1")
    row = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112, 50)
    assert clean_field_type("mock_f1", row) == 50
def test_clean_field_type_num_str():
    """An integer-looking string field is coerced to the number 50."""
    MockRow = namedtuple("MockRow", "date close open high low volume short mock_f1")
    row = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112, 50)
    assert clean_field_type("50", row) == 50
def test_clean_field_type_float_str():
    """A float-looking string field is coerced to a number equal to 50."""
    MockRow = namedtuple("MockRow", "date close open high low volume short mock_f1")
    row = MockRow(1523937963, 0.0212, 0.01, 0.025, 0.01, 36898, 0.0112, 50)
    assert clean_field_type("50.0", row) == 50
def test_clean_field_type_float():
    """A float field passes through unchanged."""
    MockRow = namedtuple("MockRow", "date close open high low volume short mock_f1")
    row = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0112, 50.04)
    assert clean_field_type(50.04, row) == 50.04
def test_clean_field_type_no_row_bool():
    """Booleans pass through unchanged when no row is supplied."""
    assert clean_field_type(True) is True
def test_clean_field_type_no_row():
    """None passes through unchanged when no row is supplied."""
    assert clean_field_type(None) is None
def test_process_single_logic():
    """close < short holds for this frame, so the clause is True."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    assert process_single_logic(["close", "<", "short"], frame) is True
def test_process_single_logic_false():
    """close = short does not hold, so the clause is False."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    assert process_single_logic(["close", "=", "short"], frame) is False
def test_process_single_logic_false_greater():
    """close > short does not hold, so the clause is False."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    assert process_single_logic(["close", ">", "short"], frame) is False
def process_single_frame_2(logics, row, require_any):
    """Reference implementation mirroring process_single_frame.

    Evaluates every logic clause against ``row`` and combines the results
    with ``any`` when ``require_any`` is truthy, ``all`` otherwise.

    Returns False for an empty ``logics`` list — the explicit guard matters
    because ``all([])`` would otherwise be True.
    """
    results = [process_single_logic(logic, row) for logic in logics]
    if not results:
        return False
    return any(results) if require_any else all(results)
def test_process_single_frame_require_any_false():
    """With require_any False, one failing clause makes the frame False."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    logics = [["close", ">", "short"], ["close", ">", "low"]]
    assert process_single_frame(logics, frame, False) is False
def test_process_single_frame_require_any_true():
    """With require_any True, one passing clause makes the frame True."""
    MockRow = namedtuple("MockRow", "date close open high low volume short")
    frame = MockRow(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    logics = [["close", ">", "short"], ["close", ">", "low"]]
    assert process_single_frame(logics, frame, True) is True
def test_determine_action_1():
    """A matching 'exit' logic yields the exit action 'x'."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [["close", "<", "short"]],
        "any_exit": [],
        "enter": [],
        "any_enter": [],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "x"
def test_determine_action_2():
    """A non-matching 'exit' logic yields the hold action 'h'."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [["close", ">", "short"]],
        "any_exit": [],
        "enter": [],
        "any_enter": [],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "h"
def test_determine_action_2_any_exit():
    """A matching 'any_exit' logic yields the any-exit action 'ax'."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [],
        "any_exit": [["close", "<", "short"]],
        "enter": [],
        "any_enter": [],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "ax"
def test_determine_action_3_any_exit():
    """'any_exit' fires even when the plain 'exit' logics do not match."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [["close", ">", "short"]],
        "any_exit": [["close", "<", "short"]],
        "enter": [],
        "any_enter": [],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "ax"
def test_determine_action_1_trailing_stop_loss():
    """The trailing stop loss overrides the other logics, yielding 'tsl'."""
    MockFrame = namedtuple(
        "MockFrame", "date close open high low volume short trailing_stop_loss"
    )
    frame = MockFrame(1523937963, 0.0212, 0.0133, 0.025, 0.01, 36898, 0.0312, 10)
    strategy = {
        "exit": [["close", ">", "short"]],
        "any_exit": [["close", ">", "short"]],
        "enter": [],
        "any_enter": [],
        "trailing_stop_loss": 0.0213,
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "tsl"
def test_determine_action_enter_1_mult():
    """All 'enter' clauses matching yields the enter action 'e'."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0588, 0.0133, 0.125, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [],
        "any_exit": [],
        "enter": [["close", ">", "short"], ["high", ">", "close"]],
        "any_enter": [],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "e"
def test_determine_action_any_enter():
    """One matching 'any_enter' clause yields the any-enter action 'ae'."""
    MockFrame = namedtuple("MockFrame", "date close open high low volume short")
    frame = MockFrame(1523937963, 0.0588, 0.0133, 0.025, 0.01, 36898, 0.0312)
    strategy = {
        "exit": [],
        "any_exit": [],
        "enter": [["close", ">", "19"]],
        "any_enter": [["close", ">", "short"], ["close", ">", 100]],
    }
    assert determine_action(frame, strategy, last_frames=[frame]) == "ae"
def test_proccess_logic_and_actions_no_logics():
    """With no logics at all, every frame produces a hold action."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    strategy = {"exit": [], "any_exit": [], "enter": [], "any_enter": []}
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert "h" in actions.action.unique()
def test_proccess_logic_and_actions_single_enter():
    """A single 'enter' logic flips frames with high volume to 'e'."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    strategy = {
        "exit": [],
        "any_exit": [],
        "enter": [["volume", ">", 171000]],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == ["h", "h", "h", "h", "h", "e", "e", "e", "e"]
def test_proccess_logic_and_actions_multi_enter():
    """Two 'enter' logics must both match for a frame to become 'e'."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]  # fake an indicator
    strategy = {
        "exit": [],
        "any_exit": [],
        "enter": [["ind_1", "<", 9], ["ind_1", ">", 7]],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    # Only ind_1 == 8 satisfies both clauses.
    assert list(actions.action.values) == [
        "h", "h", "h", "h", "h", "h", "h", "e", "h"]
def test_proccess_logic_and_actions_single_any_enter():
    """A matching 'any_enter' logic produces the 'ae' action."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]  # fake an indicator
    strategy = {
        "exit": [],
        "any_exit": [],
        "enter": [],
        "any_enter": [["ind_1", ">", 8]],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == [
        "h", "h", "h", "h", "h", "h", "h", "h", "ae"]
def test_proccess_logic_and_actions_single_exit():
    """A single 'exit' logic flips matching frames to 'x'."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]  # fake an indicator
    strategy = {
        "exit": [["ind_1", ">", 7]],
        "any_exit": [],
        "enter": [],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == [
        "h", "h", "h", "h", "h", "h", "h", "x", "x"]
def test_proccess_logic_and_actions_single_any_exit():
    """A matching 'any_exit' logic produces the 'ax' action."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 0, 0, 0, 1, 1, 1, 0, 0]  # fake an indicator
    strategy = {
        "exit": [],
        "any_exit": [["ind_1", "=", 1]],
        "enter": [],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action) == [
        "h", "h", "h", "h", "ax", "ax", "ax", "h", "h"]
def test_proccess_logic_and_actions_exit_enter():
    """Exit and enter logics interleave correctly over the series."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 5, 4, 4, 3]  # fake an indicator
    strategy = {
        "exit": [["ind_1", "=", 4]],
        "any_exit": [],
        "enter": [["ind_1", "=", 5]],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == [
        "h", "h", "h", "x", "e", "e", "x", "x", "h"]
def test_proccess_logic_and_actions_multi_enter_exit():
    """Multi-clause enter and exit logics both resolve on the same series."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]  # fake an indicator
    strategy = {
        "exit": [["ind_1", ">", 8], ["ind_1", "<", 10]],
        "any_exit": [],
        "enter": [["ind_1", "<", 9], ["ind_1", ">", 7]],
        "any_enter": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == [
        "h", "h", "h", "h", "h", "h", "h", "e", "x"]
def test_proccess_logic_and_actions_enter_exit_confirmations():
    """Enter without confirmation and exit with a 2-frame confirmation.

    Removed a leftover debug ``print(res)`` that cluttered test output.
    """
    mock_df = pd.read_csv("./test/ohlcv_data.csv.txt", parse_dates=True).set_index(
        "date"
    )
    # fake an indicator
    mock_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]
    mock_df["ind_2"] = [5, 5, 4, 2, 6, 4, 9, 9, 1]
    mock_backtest = {
        "enter": [["ind_1", "<", 2]],
        "any_enter": [],
        "exit": [["ind_2", ">", 8, 2]],  # requires 2 consecutive matches
        "any_exit": [],
    }
    res = process_logic_and_generate_actions(mock_df, mock_backtest)
    assert list(res.action.values) == [
        "e",
        "e",
        "h",
        "h",
        "h",
        "h",
        "h",
        "x",
        "h",
    ]
def test_proccess_logic_and_actions_enter_exit_confirmations_multi():
    """Multiple confirmation-window logics combine across indicators."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    # fake three indicators
    frame_df["ind_1"] = [0, 1, 2, 4, 5, 6, 7, 8, 9]
    frame_df["ind_2"] = [5, 5, 4, 2, 6, 4, 9, 9, 1]
    frame_df["ind_3"] = [1, 1, 2, 3, 5, 9, 9, 9, 8]
    strategy = {
        "enter": [["ind_1", "<", 2, 2]],
        "any_enter": [],
        "exit": [["ind_2", ">", 8, 2], ["ind_3", "<", 8, 3]],
        "any_exit": [],
    }
    actions = process_logic_and_generate_actions(frame_df, strategy)
    assert list(actions.action.values) == [
        "h", "e", "h", "h", "h", "h", "h", "h", "h"]
def test_proccess_logic_and_actions_enter_exit_confirmations_multi_2():
    """Confirmation windows of different lengths on a single indicator.

    Removed a leftover debug ``print(res)`` that cluttered test output.
    """
    mock_df = pd.read_csv("./test/ohlcv_data.csv.txt", parse_dates=True).set_index(
        "date"
    )
    # fake an indicator
    mock_df["ind_1"] = [5, 5, 5, 2, 6, 7, 9, 9, 1]
    mock_backtest = {
        "enter": [["ind_1", "=", 5, 2]],  # 2-frame confirmation
        "any_enter": [],
        "exit": [["ind_1", ">", 6, 3]],  # 3-frame confirmation
        "any_exit": [],
    }
    res = process_logic_and_generate_actions(mock_df, mock_backtest)
    assert list(res.action.values) == [
        "h",
        "e",
        "e",
        "h",
        "h",
        "h",
        "h",
        "x",
        "h",
    ]
def test_prepare_new_backtest_simple():
    """User-supplied backtest settings survive preparation."""
    raw = {"base_balance": 126, "exit_on_end": False, "comission": 0.022}
    prepared = prepare_new_backtest(raw)
    assert prepared["exit_on_end"] is False
    assert prepared["base_balance"] == 126
    assert prepared["comission"] != 0
def test_apply_backtest_to_df():
    """apply_backtest_to_df adds the adjusted account-value columns."""
    frame_df = pd.read_csv(
        "./test/ohlcv_data.csv.txt", parse_dates=True).set_index("date")
    frame_df["ind_1"] = [5, 5, 5, 2, 6, 7, 9, 9, 1]  # fake an indicator
    strategy = {
        "base_balance": 1000,
        "exit_on_end": False,
        "comission": 0.01,
        "lot_size_perc": 1,
        "enter": [["ind_1", "=", 5, 2]],
        "any_enter": [],
        "exit": [["ind_1", ">", 6, 3]],
        "any_exit": [],
    }
    result = apply_backtest_to_df(frame_df, strategy)
    columns = list(result.columns)
    assert "adj_account_value_change_perc" in columns
    assert "adj_account_value_change" in columns
| 23.385227
| 84
| 0.551922
| 2,654
| 20,579
| 4.017709
| 0.05991
| 0.061896
| 0.01041
| 0.030385
| 0.892807
| 0.863172
| 0.841508
| 0.820876
| 0.782894
| 0.771078
| 0
| 0.088254
| 0.297974
| 20,579
| 879
| 85
| 23.411832
| 0.649823
| 0.010059
| 0
| 0.714286
| 1
| 0
| 0.130354
| 0.017338
| 0
| 0
| 0
| 0
| 0.058571
| 1
| 0.055714
| false
| 0
| 0.005714
| 0
| 0.062857
| 0.002857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
996c4cbbd6657576ba963f934782bed45240bd49
| 27,245
|
py
|
Python
|
metalacc/api/tests/test_journal_entry.py
|
stricoff92/metalaccounting
|
6c9f650b3dd3c74c3ebbe847e0c05bb233e14153
|
[
"MIT"
] | null | null | null |
metalacc/api/tests/test_journal_entry.py
|
stricoff92/metalaccounting
|
6c9f650b3dd3c74c3ebbe847e0c05bb233e14153
|
[
"MIT"
] | 3
|
2021-03-30T14:01:37.000Z
|
2021-06-10T19:46:42.000Z
|
metalacc/api/tests/test_journal_entry.py
|
stricoff92/metalaccounting
|
6c9f650b3dd3c74c3ebbe847e0c05bb233e14153
|
[
"MIT"
] | null | null | null |
import datetime as dt
import json
from django.urls import reverse
from rest_framework import status
from .base import BaseTestBase
from api.models import JournalEntry, JournalEntryLine, Account
class JournalEntryViewTests(BaseTestBase):
    def setUp(self):
        """Log in a user and create a company + period for them, plus a
        second company/period owned by another user for isolation tests."""
        super().setUp()
        self.client.force_login(self.user)
        self.company = self.factory.create_company(self.user)
        self.period = self.factory.create_period(
            self.company, dt.date(2020, 1, 1), dt.date(2020, 3, 31))
        # A second user's company/period, used to verify cross-user isolation.
        self.other_company = self.factory.create_company(self.other_user)
        self.other_period = self.factory.create_period(
            self.other_company, dt.date(2020, 1, 1), dt.date(2020, 3, 31))
    def tearDown(self):
        """No extra cleanup needed beyond the base class."""
        super().tearDown()
def test_user_can_create_entry(self):
""" Test that a user can create journal entry with 1 DR account and 1 CR account
"""
Account.objects.create_default_accounts(self.company)
url = reverse('je-new')
data = {
'date':"2020-01-15",
'memo':'investing in biz with common stock',
'period':self.period.slug,
'journal_entry_lines':[
{
"type":JournalEntryLine.TYPE_DEBIT,
"amount":50000,
"account":Account.objects.get(name='Cash').slug,
}, {
"type":JournalEntryLine.TYPE_CREDIT,
"amount":50000,
"account":Account.objects.get(name='Common Stock').slug,
}
],
}
response = self.client.post(url, json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
journal_entry = JournalEntry.objects.get(slug=response.data['slug'])
self.assertEqual(journal_entry.date, dt.date(2020, 1, 15))
self.assertEqual(journal_entry.memo, 'investing in biz with common stock')
self.assertEqual(journal_entry.period, self.period)
self.assertFalse(journal_entry.is_adjusting_entry)
self.assertFalse(journal_entry.is_closing_entry)
self.assertEqual(journal_entry.dr_total, 50000)
self.assertEqual(journal_entry.cr_total, 50000)
self.assertEqual(journal_entry.display_id, 1)
self.assertEqual(journal_entry.lines.count(), 2)
cr_line = journal_entry.lines.get(type=JournalEntryLine.TYPE_CREDIT)
dr_line = journal_entry.lines.get(type=JournalEntryLine.TYPE_DEBIT)
self.assertEqual(cr_line.amount, 50000)
self.assertEqual(dr_line.amount, 50000)
self.assertEqual(cr_line.account, Account.objects.get(name='Common Stock'))
self.assertEqual(dr_line.account, Account.objects.get(name='Cash'))
def test_creating_a_new_journal_entry_updates_the_periods_version_hash(self):
""" Test that creating a new journal entry resets the periods version hash
"""
hash_before = self.period.version_hash
Account.objects.create_default_accounts(self.company)
url = reverse('je-new')
data = {
'date':"2020-01-15",
'memo':'investing in biz with common stock',
'period':self.period.slug,
'journal_entry_lines':[
{
"type":JournalEntryLine.TYPE_DEBIT,
"amount":50000,
"account":Account.objects.get(name='Cash').slug,
}, {
"type":JournalEntryLine.TYPE_CREDIT,
"amount":50000,
"account":Account.objects.get(name='Common Stock').slug,
}
],
}
response = self.client.post(url, json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.period.refresh_from_db()
self.assertNotEqual(self.period.version_hash, hash_before)
def test_journal_entry_display_ids_increment_by_1(self):
""" Test that a new journal entry increments it's display ID by 1 VS the previous entry
"""
Account.objects.create_default_accounts(self.company)
url = reverse('je-new')
data = {
'date':"2020-01-15",
'memo':'investing in biz with common stock',
'period':self.period.slug,
'journal_entry_lines':[
{
"type":JournalEntryLine.TYPE_DEBIT,
"amount":50000,
"account":Account.objects.get(name='Cash').slug,
}, {
"type":JournalEntryLine.TYPE_CREDIT,
"amount":50000,
"account":Account.objects.get(name='Common Stock').slug,
}
],
}
response = self.client.post(url, json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
journal_entry = JournalEntry.objects.get(slug=response.data['slug'])
self.assertEqual(journal_entry.display_id, 1)
response = self.client.post(url, json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
journal_entry = JournalEntry.objects.get(slug=response.data['slug'])
self.assertEqual(journal_entry.display_id, 2)
def test_journal_entry_display_ids_can_collide_between_companies(self):
    """Display IDs are scoped per company, so two companies can both own entry #1."""
    # Second company/period (and account set) for the same user.
    another_company = self.factory.create_company(self.user)
    another_period = self.factory.create_period(
        another_company, dt.date(2020, 1, 1), dt.date(2020, 3, 31))
    Account.objects.create_default_accounts(self.company)
    Account.objects.create_default_accounts(another_company)
    url = reverse('je-new')
    # Create one entry per company; both should be assigned display_id == 1.
    for company, period in ((self.company, self.period),
                            (another_company, another_period)):
        payload = {
            'date': "2020-01-15",
            'memo': 'investing in biz with common stock',
            'period': period.slug,
            'journal_entry_lines': [
                {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000,
                 "account": Account.objects.get(name='Cash', company=company).slug},
                {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000,
                 "account": Account.objects.get(name='Common Stock', company=company).slug},
            ],
        }
        resp = self.client.post(url, json.dumps(payload), content_type='application/json')
        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
        entry = JournalEntry.objects.get(slug=resp.data['slug'])
        # Same display ID in both companies -- collisions across companies are fine.
        self.assertEqual(entry.display_id, 1)
def test_user_can_create_adjustment_entry(self):
    """A balanced entry posted with is_adjusting_entry=True keeps that flag set."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'is_adjusting_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
    entry = JournalEntry.objects.get(slug=resp.data['slug'])
    self.assertTrue(entry.is_adjusting_entry)
    self.assertFalse(entry.is_closing_entry)
def test_user_cant_create_entry_with_diplicate_account(self):
    """Using the same account on more than one line of an entry is rejected with 400."""
    # NOTE: "diplicate" in the method name is a typo for "duplicate"; kept as-is
    # so test discovery and any external references remain stable.
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'is_adjusting_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 45000, "account": cash.slug},
            # 'Common Stock' appears twice below: once debited, once credited.
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 500, "account": stock.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {'account': 'cannot use more than once'})
def test_user_can_create_closing_entry(self):
    """A balanced entry posted with is_closing_entry=True keeps that flag set."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'is_closing_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
    entry = JournalEntry.objects.get(slug=resp.data['slug'])
    self.assertFalse(entry.is_adjusting_entry)
    self.assertTrue(entry.is_closing_entry)
def test_user_can_create_entry_multiple_dr_and_cr_accounts(self):
    """An entry may carry two or more debit lines and two or more credit lines."""
    Account.objects.create_default_accounts(self.company)
    accounts = {
        name: Account.objects.get(name=name)
        for name in ('Cash', 'Prepaid Expenses', 'Common Stock', 'APIC')
    }
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock and prepaid services',
        'period': self.period.slug,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 40000,
             "account": accounts['Cash'].slug},
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 20000,
             "account": accounts['Prepaid Expenses'].slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000,
             "account": accounts['Common Stock'].slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 10000,
             "account": accounts['APIC'].slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
    entry = JournalEntry.objects.get(slug=resp.data['slug'])
    self.assertEqual(entry.date, dt.date(2020, 1, 15))
    self.assertEqual(entry.memo, 'investing in biz with common stock and prepaid services')
    self.assertEqual(entry.period, self.period)
    self.assertFalse(entry.is_adjusting_entry)
    self.assertFalse(entry.is_closing_entry)
    self.assertEqual(entry.dr_total, 60000)
    self.assertEqual(entry.cr_total, 60000)
    self.assertEqual(entry.lines.count(), 4)
    # Every posted line should be attached to exactly the account it named.
    expected_lines = [
        (JournalEntryLine.TYPE_CREDIT, 50000, 'Common Stock'),
        (JournalEntryLine.TYPE_CREDIT, 10000, 'APIC'),
        (JournalEntryLine.TYPE_DEBIT, 40000, 'Cash'),
        (JournalEntryLine.TYPE_DEBIT, 20000, 'Prepaid Expenses'),
    ]
    for line_type, amount, account_name in expected_lines:
        line = entry.lines.get(type=line_type, amount=amount)
        self.assertEqual(line.account, Account.objects.get(name=account_name))
def test_user_cant_create_an_entry_using_another_users_period(self):
    """Referencing another user's period slug must 404 instead of creating."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.other_period.slug,  # period owned by a different user
        'is_closing_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(resp.data, {'period': 'period not found'})
def test_user_cant_create_an_entry_using_another_users_account(self):
    """Referencing another user's account slugs must 404 instead of creating."""
    # Only the OTHER company gets accounts, so the slugs below belong to another user.
    Account.objects.create_default_accounts(self.other_company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'is_closing_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
    self.assertEqual(resp.data, {'account': 'account not found'})
def test_user_cant_create_an_entry_using_another_companys_account(self):
    """Accounts from a different company of the SAME user must be rejected with 400."""
    # The accounts belong to a second company the user owns, not to self.company.
    another_company = self.factory.create_company(self.user)
    Account.objects.create_default_accounts(another_company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'is_closing_entry': True,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 50000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 50000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {'account': 'account belongs to another company'})
def test_user_cant_create_entry_with_0_dollar_amounts(self):
    """An entry whose lines all carry zero amounts is rejected with 400."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 0, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 0, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {'dr/cr balance': 'zero changes in balance'})
def test_user_cant_create_entry_with_negative_dollar_amounts(self):
    """Negative line amounts are rejected with 400."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": -5000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": -5000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
def test_user_cant_create_entry_with_unequal_dr_cr_amounts(self):
    """An unbalanced entry (debit total != credit total) is rejected with 400."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'journal_entry_lines': [
            # 5000 debited vs 6000 credited -- deliberately out of balance.
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 5000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 6000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {'dr/cr balance': 'debits dont match credits'})
def test_user_cant_create_entry_with_date_outside_period(self):
    """An entry dated outside the period's boundaries is rejected with 400."""
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-04-15",  # outside the Q1 period used by these tests
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 5000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 5000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {"date": "entry date does not fall within period"})
def test_user_cant_create_entry_if_user_is_at_journal_entry_object_limit(self):
    """Creation is rejected with 400 once the per-period entry quota is exhausted."""
    # A limit of zero means the very first entry already violates the quota.
    self.user_profile.object_limit_entries_per_period = 0
    self.user_profile.save(update_fields=['object_limit_entries_per_period'])
    Account.objects.create_default_accounts(self.company)
    cash = Account.objects.get(name='Cash')
    stock = Account.objects.get(name='Common Stock')
    payload = {
        'date': "2020-01-15",
        'memo': 'investing in biz with common stock',
        'period': self.period.slug,
        'journal_entry_lines': [
            {"type": JournalEntryLine.TYPE_DEBIT, "amount": 5000, "account": cash.slug},
            {"type": JournalEntryLine.TYPE_CREDIT, "amount": 5000, "account": stock.slug},
        ],
    }
    resp = self.client.post(
        reverse('je-new'), json.dumps(payload), content_type='application/json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertEqual(resp.data, {"object limit": "cannot create additional entries for this period"})
def test_user_can_delete_own_journal_entries(self):
    """Deleting one's own entry removes the entry together with all of its lines."""
    Account.objects.create_default_accounts(self.company)
    entry = self.factory.create_journal_entry(self.period, dt.date(2020, 1, 15))
    # NOTE(review): 'd' is the raw debit type code -- presumably equal to
    # JournalEntryLine.TYPE_DEBIT; kept verbatim to preserve behavior.
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Cash'), 'd', 5000)
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Common Stock'), JournalEntryLine.TYPE_CREDIT, 5000)
    self.assertEqual(JournalEntry.objects.count(), 1)
    self.assertEqual(JournalEntryLine.objects.count(), 2)
    resp = self.client.post(reverse("je-delete", kwargs={'slug': entry.slug}))
    self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
    self.assertEqual(JournalEntry.objects.count(), 0)
    self.assertEqual(JournalEntryLine.objects.count(), 0)
def test_that_deleting_a_journal_entry_resets_the_periods_version_hash(self):
    """Deleting a journal entry must change the owning period's version hash."""
    # Capture the hash before any mutation so the final comparison is meaningful.
    original_hash = self.period.version_hash
    Account.objects.create_default_accounts(self.company)
    entry = self.factory.create_journal_entry(self.period, dt.date(2020, 1, 15))
    # NOTE(review): 'd' is the raw debit type code -- presumably equal to
    # JournalEntryLine.TYPE_DEBIT; kept verbatim to preserve behavior.
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Cash'), 'd', 5000)
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Common Stock'), JournalEntryLine.TYPE_CREDIT, 5000)
    resp = self.client.post(reverse("je-delete", kwargs={'slug': entry.slug}))
    self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
    self.period.refresh_from_db()
    self.assertNotEqual(self.period.version_hash, original_hash)
def test_user_cant_delete_another_users_journal_entry(self):
    """Attempting to delete another user's entry 404s and leaves the data intact."""
    Account.objects.create_default_accounts(self.other_company)
    entry = self.factory.create_journal_entry(self.other_period, dt.date(2020, 1, 15))
    # NOTE(review): 'd' is the raw debit type code -- presumably equal to
    # JournalEntryLine.TYPE_DEBIT; kept verbatim to preserve behavior.
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Cash'), 'd', 5000)
    self.factory.create_journal_entry_line(
        entry, Account.objects.get(name='Common Stock'), JournalEntryLine.TYPE_CREDIT, 5000)
    self.assertEqual(JournalEntry.objects.count(), 1)
    self.assertEqual(JournalEntryLine.objects.count(), 2)
    resp = self.client.post(reverse("je-delete", kwargs={'slug': entry.slug}))
    self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
    # Nothing was deleted.
    self.assertEqual(JournalEntry.objects.count(), 1)
    self.assertEqual(JournalEntryLine.objects.count(), 2)
def test_user_cant_cant_list_journal_entries_for_another_users_period(self):
    """Listing entries for another user's period 404s; the owner still gets 200."""
    # NOTE: the doubled "cant_cant" in the method name is kept for reference stability.
    url = reverse("je-list", kwargs={'slug': self.other_period.slug})
    # A non-owner is told the period does not exist.
    self.assertEqual(self.client.get(url).status_code, status.HTTP_404_NOT_FOUND)
    # The owner can list their own entries.
    self.client.force_login(self.other_user)
    self.assertEqual(self.client.get(url).status_code, status.HTTP_200_OK)
| 43.872786
| 112
| 0.595008
| 2,978
| 27,245
| 5.257555
| 0.071525
| 0.061314
| 0.053203
| 0.065721
| 0.906687
| 0.880118
| 0.846011
| 0.811969
| 0.772115
| 0.747142
| 0
| 0.028843
| 0.292457
| 27,245
| 620
| 113
| 43.943548
| 0.783369
| 0.0686
| 0
| 0.706931
| 0
| 0
| 0.13266
| 0.001229
| 0
| 0
| 0
| 0
| 0.146535
| 1
| 0.043564
| false
| 0
| 0.011881
| 0
| 0.057426
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
999d306b585e534f023ac8d795efc880e993e3af
| 98
|
py
|
Python
|
groot-backend/django/code/groot/models/__init__.py
|
ayushmantripathy9/We-Are-Groot
|
db919dbe24363d02ee08ea79520813e9dc9af174
|
[
"MIT"
] | null | null | null |
groot-backend/django/code/groot/models/__init__.py
|
ayushmantripathy9/We-Are-Groot
|
db919dbe24363d02ee08ea79520813e9dc9af174
|
[
"MIT"
] | null | null | null |
groot-backend/django/code/groot/models/__init__.py
|
ayushmantripathy9/We-Are-Groot
|
db919dbe24363d02ee08ea79520813e9dc9af174
|
[
"MIT"
] | null | null | null |
from groot.models.user import *
from groot.models.room import *
from groot.models.message import *
| 32.666667
| 34
| 0.795918
| 15
| 98
| 5.2
| 0.466667
| 0.346154
| 0.576923
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 98
| 3
| 34
| 32.666667
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
999d6270e31fdb7b4a0a73dc04bedcbe4a37ca05
| 1,568
|
py
|
Python
|
djangoBackend/payment_module/migrations/0004_auto_20210924_0009.py
|
muhanzi/Django-REST-API
|
08b8b2bbd08a74589cca7b5fd4e1d604d9a6d7eb
|
[
"Apache-2.0"
] | null | null | null |
djangoBackend/payment_module/migrations/0004_auto_20210924_0009.py
|
muhanzi/Django-REST-API
|
08b8b2bbd08a74589cca7b5fd4e1d604d9a6d7eb
|
[
"Apache-2.0"
] | null | null | null |
djangoBackend/payment_module/migrations/0004_auto_20210924_0009.py
|
muhanzi/Django-REST-API
|
08b8b2bbd08a74589cca7b5fd4e1d604d9a6d7eb
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.2 on 2021-09-23 21:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Switch createdAt/updatedAt on the payment models to auto_now_add timestamps."""

    dependencies = [
        ('payment_module', '0003_auto_20210924_0005'),
    ]

    # Every affected model receives the same AlterField on both timestamp columns;
    # the comprehension preserves the original operation order exactly
    # (commission, employer, invoice, payment x createdAt, updatedAt).
    operations = [
        migrations.AlterField(
            model_name=model,
            name=field,
            field=models.DateTimeField(auto_now_add=True),
        )
        for model in ('commission', 'employer', 'invoice', 'payment')
        for field in ('createdAt', 'updatedAt')
    ]
| 29.037037
| 58
| 0.566327
| 142
| 1,568
| 6.056338
| 0.274648
| 0.186047
| 0.232558
| 0.269767
| 0.818605
| 0.818605
| 0.761628
| 0.761628
| 0.761628
| 0.706977
| 0
| 0.029301
| 0.325255
| 1,568
| 53
| 59
| 29.584906
| 0.783554
| 0.028699
| 0
| 0.851064
| 1
| 0
| 0.113741
| 0.015122
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
999fe6ead97220d309dece9c569832b818a3a3e6
| 84
|
py
|
Python
|
nami/tf/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | 2
|
2020-08-03T10:37:45.000Z
|
2020-10-11T14:55:05.000Z
|
nami/tf/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
nami/tf/__init__.py
|
namirinz/nami
|
b74b3d302aa241470ce6ae210c5a2f0e09dd8bf2
|
[
"MIT"
] | null | null | null |
from nami.tf.datasets import ImageDataframe
from nami.tf.datasets import ImageFolder
| 42
| 43
| 0.869048
| 12
| 84
| 6.083333
| 0.583333
| 0.219178
| 0.273973
| 0.493151
| 0.657534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 84
| 2
| 44
| 42
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
99a6a2f76bb4d3558abf121a0961c40c3600a5fb
| 227
|
py
|
Python
|
controller/controllers/__init__.py
|
JinneGeelen/ML-MoCap
|
abaea5abb35f939e93ec76086cd15bb660948ae1
|
[
"BSD-3-Clause"
] | null | null | null |
controller/controllers/__init__.py
|
JinneGeelen/ML-MoCap
|
abaea5abb35f939e93ec76086cd15bb660948ae1
|
[
"BSD-3-Clause"
] | null | null | null |
controller/controllers/__init__.py
|
JinneGeelen/ML-MoCap
|
abaea5abb35f939e93ec76086cd15bb660948ae1
|
[
"BSD-3-Clause"
] | null | null | null |
from controllers.cameras import *
from controllers.diagnostic_results import *
from controllers.diagnostics import *
from controllers.participants import *
from controllers.recordings import *
from controllers.studies import *
| 32.428571
| 44
| 0.84141
| 25
| 227
| 7.6
| 0.4
| 0.473684
| 0.552632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105727
| 227
| 6
| 45
| 37.833333
| 0.935961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
512fc471af71e1dd6bc085a9ccbd6822ef32baac
| 163
|
py
|
Python
|
admin.py
|
contraslash/scheduler_engine-django
|
588637e0526895dead04e43aa8fa18d2552bd825
|
[
"MIT"
] | null | null | null |
admin.py
|
contraslash/scheduler_engine-django
|
588637e0526895dead04e43aa8fa18d2552bd825
|
[
"MIT"
] | null | null | null |
admin.py
|
contraslash/scheduler_engine-django
|
588637e0526895dead04e43aa8fa18d2552bd825
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models as scheduler_engine_models
# Register the scheduler Event model so it appears in the Django admin site.
admin.site.register(scheduler_engine_models.Event)
| 23.285714
| 50
| 0.834356
| 23
| 163
| 5.73913
| 0.608696
| 0.227273
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110429
| 163
| 6
| 51
| 27.166667
| 0.910345
| 0.159509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5abb128fa5598da761d76133a135aa85039a141f
| 14,753
|
py
|
Python
|
tests/test_modelelement.py
|
Informasjonsforvaltning/modelldcatnotordf
|
995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1
|
[
"Apache-2.0"
] | 1
|
2020-11-29T18:36:21.000Z
|
2020-11-29T18:36:21.000Z
|
tests/test_modelelement.py
|
Informasjonsforvaltning/modelldcatnotordf
|
995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1
|
[
"Apache-2.0"
] | 142
|
2020-10-07T08:52:55.000Z
|
2021-11-18T15:09:31.000Z
|
tests/test_modelelement.py
|
Informasjonsforvaltning/modelldcatnotordf
|
995129ff9f6fb95f9a9d875b27f3aa14bac9b7f1
|
[
"Apache-2.0"
] | null | null | null |
"""Test cases for the model element module."""
from typing import List, Union
from concepttordf import Concept
from datacatalogtordf import URI
import pytest
from pytest_mock import MockFixture
from rdflib import Graph
from skolemizer.testutils import skolemization, SkolemUtils
from modelldcatnotordf.modelldcatno import (
ModelElement,
ModelProperty,
Module,
ObjectType,
Role,
)
from tests.testutils import assert_isomorphic
"""
A test class for testing the class ModelElement.
"""
def test_instantiate_resource_should_fail_with_typeerror() -> None:
    """It raises a TypeError when ModelElement is instantiated directly."""
    with pytest.raises(TypeError):
        _ = ModelElement()  # type: ignore
def test_to_graph_should_return_title_and_identifier() -> None:
    """It returns a title and identifier graph isomorphic to spec."""
    # The original had a second bare string after the docstring -- a no-op
    # expression statement; merged into the single docstring above.
    modelelement = ObjectType()
    modelelement.identifier = "http://example.com/modelelements/1"
    modelelement.title = {"nb": "Tittel 1", "en": "Title 1"}
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/modelelements/1> a modelldcatno:ObjectType;
        dct:title "Title 1"@en, "Tittel 1"@nb ;
    .
    """
    g1 = Graph().parse(data=modelelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")

    assert_isomorphic(g1, g2)
def test_to_graph_should_return_title_and_skolemization(mocker: MockFixture) -> None:
    """It returns a title graph isomorphic to spec, with a skolemized identifier."""
    element = ObjectType()
    element.title = {"nb": "Tittel 1", "en": "Title 1"}
    # No identifier was set, so serialization asks the Skolemizer for one.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:ObjectType ;
        dct:title "Title 1"@en, "Tittel 1"@nb ;
    .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_dct_identifier_as_graph() -> None:
    """It returns a dct:identifier graph isomorphic to spec."""
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    element.dct_identifier = "123456789"
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/modelelements/1> a modelldcatno:ObjectType ;
        dct:identifier "123456789";
    .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_subject() -> None:
    """It returns a dct:subject graph isomorphic to spec."""
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    concept = Concept()
    concept.identifier = "https://example.com/subjects/1"
    element.subject = concept
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .

    <http://example.com/modelelements/1> a modelldcatno:ObjectType ;
        dct:subject <https://example.com/subjects/1> ;
    .
     <https://example.com/subjects/1> a skos:Concept .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_has_property_both_identifiers() -> None:
    """It returns a has_property graph isomorphic to spec (both sides identified)."""
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    prop = Role()
    prop.identifier = "http://example.com/properties/1"
    # Assign via a typed list to mirror the property's declared element type.
    properties: List[Union[ModelProperty, URI]] = [prop]
    element.has_property = properties
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/modelelements/1> a modelldcatno:ObjectType ;
        modelldcatno:hasProperty <http://example.com/properties/1> .

    <http://example.com/properties/1> a modelldcatno:Role ;
    .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_has_property_skolemized_modelelement_id(
    mocker: MockFixture,
) -> None:
    """It returns a has_property graph isomorphic to spec (property skolemized)."""
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    prop = Role()
    # The property itself has no identifier, so it gets a skolem IRI.
    element.has_property.append(prop)
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/modelelements/1> a modelldcatno:ObjectType ;
        modelldcatno:hasProperty
        <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
    .

    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:Role .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_has_property_skolemization_property_id(
    mocker: MockFixture,
) -> None:
    """It returns a has_property graph isomorphic to spec (element skolemized)."""
    # The element has no identifier, so it gets a skolem IRI; the property does.
    element = ObjectType()
    prop = Role()
    prop.identifier = "http://example.com/properties/1"
    element.has_property.append(prop)
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:ObjectType ;
        modelldcatno:hasProperty <http://example.com/properties/1>
    .

    <http://example.com/properties/1> a modelldcatno:Role .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_has_property_both_skolemizations(
    mocker: MockFixture,
) -> None:
    """It returns a has_property graph isomorphic to spec (both sides skolemized)."""
    # Neither the element nor its property carries an identifier, so the
    # Skolemizer is consulted twice; SkolemUtils yields the IRIs in order.
    element = ObjectType()
    prop = Role()
    element.has_property.append(prop)
    skolem_source = SkolemUtils()
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=skolem_source.get_skolemization,
    )
    expected_ttl = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .

    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:ObjectType ; modelldcatno:hasProperty
        <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
    .

    <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
        a modelldcatno:Role
    .
    """
    actual_graph = Graph().parse(data=element.to_rdf(), format="turtle")
    expected_graph = Graph().parse(data=expected_ttl, format="turtle")

    assert_isomorphic(actual_graph, expected_graph)
def test_to_graph_should_return_belongs_to_module_as_graph(mocker: MockFixture) -> None:
    """It returns a belongs_to_module graph isomorphic to spec."""
    # The module has no identifier and is therefore skolemized.
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    module = Module()
    module.title = {None: "core"}
    element.belongs_to_module = [module]
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
modelldcatno:belongsToModule
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
.
<http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
a modelldcatno:Module ;
dct:title "core"
.
"""
    actual = Graph().parse(data=element.to_rdf(), format="turtle")
    expected = Graph().parse(data=src, format="turtle")
    assert_isomorphic(actual, expected)
def test_to_graph_should_return_description() -> None:
    """It returns a description graph isomorphic to spec."""
    # NOTE: a second, contradictory docstring ("identifier graph") left over
    # from copy-paste was removed here; it was a no-op bare-string statement.
    modelelement = ObjectType()
    modelelement.identifier = "http://example.com/modelelements/1"
    modelelement.description = {"nb": "Beskrivelse", "en": "Description"}
    src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
dct:description "Description"@en, "Beskrivelse"@nb ;
.
"""
    g1 = Graph().parse(data=modelelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_belongs_to_module_str() -> None:
    """It returns a belongs_to_module graph isomorphic to spec."""
    # belongs_to_module accepts plain URI strings as well as Module objects.
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    modules: List[Union[Module, str]] = ["http://www.example.org/core"]
    element.belongs_to_module = modules
    src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
modelldcatno:belongsToModule <http://www.example.org/core> .
"""
    actual = Graph().parse(data=element.to_rdf(), format="turtle")
    expected = Graph().parse(data=src, format="turtle")
    assert_isomorphic(actual, expected)
def test_to_graph_should_return_has_property_as_uri() -> None:
    """It returns a has_property graph isomorphic to spec."""
    # has_property accepts plain URI strings as well as ModelProperty objects.
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    properties: List[Union[ModelProperty, URI]] = ["http://example.com/properties/1"]
    element.has_property = properties
    src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
modelldcatno:hasProperty <http://example.com/properties/1>
.
"""
    actual = Graph().parse(data=element.to_rdf(), format="turtle")
    expected = Graph().parse(data=src, format="turtle")
    assert_isomorphic(actual, expected)
def test_to_graph_should_return_subject_as_uri() -> None:
    """It returns a subject graph isomorphic to spec."""
    # subject accepts a plain URI string.
    element = ObjectType()
    element.identifier = "http://example.com/modelelements/1"
    element.subject = "https://example.com/subjects/1"
    src = """
@prefix dct: <http://purl.org/dc/terms/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix dcat: <http://www.w3.org/ns/dcat#> .
@prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
<http://example.com/modelelements/1> a modelldcatno:ObjectType ;
dct:subject <https://example.com/subjects/1> ;
.
"""
    actual = Graph().parse(data=element.to_rdf(), format="turtle")
    expected = Graph().parse(data=src, format="turtle")
    assert_isomorphic(actual, expected)
| 35.463942
| 88
| 0.663865
| 1,832
| 14,753
| 5.252183
| 0.076965
| 0.03201
| 0.039285
| 0.05238
| 0.889212
| 0.882769
| 0.873311
| 0.86666
| 0.856475
| 0.835585
| 0
| 0.043728
| 0.173795
| 14,753
| 415
| 89
| 35.549398
| 0.745672
| 0.050837
| 0
| 0.727891
| 0
| 0.12585
| 0.570141
| 0.048772
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.047619
| false
| 0
| 0.030612
| 0
| 0.078231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8506baf4338799718faf390a674992009aada5e8
| 1,163
|
py
|
Python
|
src/archtype/archtypes.py
|
illusion0001/py-patcher
|
77d08fec6bbf30f9d2820fb8e6baac5ef6a46933
|
[
"MIT"
] | 6
|
2022-03-06T14:20:09.000Z
|
2022-03-20T22:38:32.000Z
|
src/archtype/archtypes.py
|
illusion0001/py-patcher
|
77d08fec6bbf30f9d2820fb8e6baac5ef6a46933
|
[
"MIT"
] | null | null | null |
src/archtype/archtypes.py
|
illusion0001/py-patcher
|
77d08fec6bbf30f9d2820fb8e6baac5ef6a46933
|
[
"MIT"
] | null | null | null |
from src.archtype.types.types import types
class Cell:
    """Patch-data converter for the Cell platform: big-endian values,
    offsets rebased against 0x10000."""

    def __init__(self):
        self.baseAddr = 0x10000
        self.endian = 'big'

    def convertData(self, var_type, offset, value):
        """Return {'offset', 'value'} with the offset rebased and the value
        converted via the platform's endianness."""
        rebased_offset = offset - self.baseAddr
        converted_value = types(self.endian).convert(value, var_type)
        return {'offset': rebased_offset, 'value': converted_value}
class Generic:
    """Patch-data converter for generic targets: little-endian values,
    offsets passed through unchanged (no base address)."""

    def __init__(self):
        self.endian = 'little'

    def convertData(self, var_type, offset, value):
        """Return {'offset', 'value'} with the value converted via the
        platform's endianness; the offset is used as-is."""
        converted_value = types(self.endian).convert(value, var_type)
        return {'offset': converted_value and offset or offset, 'value': converted_value}
class GenericOrbis:
    """Patch-data converter for generic Orbis targets: little-endian values,
    offsets passed through unchanged (no base address)."""

    def __init__(self):
        self.endian = 'little'

    def convertData(self, var_type, offset, value):
        """Return {'offset', 'value'} with the value converted via the
        platform's endianness; the offset is used as-is."""
        converted_value = types(self.endian).convert(value, var_type)
        return {'offset': offset, 'value': converted_value}
class Orbis:
    """Patch-data converter for the Orbis platform: little-endian values,
    offsets rebased against 0x3FC000."""

    def __init__(self):
        self.baseAddr = 0x3FC000
        self.endian = 'little'

    def convertData(self, var_type, offset, value):
        """Return {'offset', 'value'} with the offset rebased and the value
        converted via the platform's endianness."""
        rebased_offset = offset - self.baseAddr
        converted_value = types(self.endian).convert(value, var_type)
        return {'offset': rebased_offset, 'value': converted_value}
| 24.229167
| 64
| 0.577816
| 125
| 1,163
| 5.184
| 0.208
| 0.123457
| 0.067901
| 0.092593
| 0.850309
| 0.779321
| 0.779321
| 0.779321
| 0.779321
| 0.779321
| 0
| 0.013597
| 0.304385
| 1,163
| 47
| 65
| 24.744681
| 0.787392
| 0.026655
| 0
| 0.657143
| 0
| 0
| 0.057573
| 0
| 0
| 0
| 0.013286
| 0
| 0
| 1
| 0.228571
| false
| 0
| 0.028571
| 0.114286
| 0.485714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
51fb16ead9769302b9b08caf759187f290f36023
| 6,005
|
py
|
Python
|
infrastructure/users.py
|
YossyMejia/api-hacienda_python
|
82c83fbf7673c37005ded12f1787b5d65247c4aa
|
[
"MIT"
] | null | null | null |
infrastructure/users.py
|
YossyMejia/api-hacienda_python
|
82c83fbf7673c37005ded12f1787b5d65247c4aa
|
[
"MIT"
] | null | null | null |
infrastructure/users.py
|
YossyMejia/api-hacienda_python
|
82c83fbf7673c37005ded12f1787b5d65247c4aa
|
[
"MIT"
] | null | null | null |
import json
from extensions import mysql
def save_user(id_user, password, name, idrol):
    """Create a user via the sp_createUser stored procedure.

    Returns True on success, or a dict with an 'error' key on failure.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_createUser', (id_user, password, name, idrol))
        data = cursor.fetchall()
        # Was `len(data) is 0`: identity comparison against an int literal
        # only works by CPython small-int caching and warns on 3.8+.
        if len(data) == 0:
            conn.commit()
            return True
        else:
            return {'error': str(data[0])}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def save_user_company(id_user, idcompany):
    """Link a user to a company via the sp_createUser_Company stored procedure.

    Returns a 'message' dict on success, or a dict with an 'error' key on failure.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_createUser_Company', (id_user, idcompany))
        data = cursor.fetchall()
        # Was `len(data) is 0`: use value equality, not identity.
        if len(data) == 0:
            conn.commit()
            return {'message': 'user and data created successfully '}
        else:
            return {'error': str(data[0])}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def get_user_data(id_user):
    """Fetch a user's info (plus their companies) via sp_getUserInfo.

    Returns a list of row dicts (first row augmented with a 'companies' key),
    or a dict with an 'error' key on failure / empty result.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_getUserInfo', (id_user,))
        row_headers = [x[0] for x in cursor.description]
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        if len(data) != 0:
            conn.commit()
            json_data = [dict(zip(row_headers, row)) for row in data]
            companies = {'companies': get_user_company_data(id_user)}
            json_data[0].update(companies)
            return json_data
        else:
            return {'error': 'Error: Not get information of the user'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def get_user_company_data(id_user):
    """Fetch the companies of a user via sp_getUserInfoCompanies.

    Returns a list of row dicts, or a dict with an 'error' key on
    failure / empty result.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_getUserInfoCompanies', (id_user,))
        row_headers = [x[0] for x in cursor.description]
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        if len(data) != 0:
            conn.commit()
            json_data = [dict(zip(row_headers, row)) for row in data]
            return json_data
        else:
            return {'error': 'Error: Not get information of the user'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def get_users():
    """Fetch all users via sp_getUsers.

    Returns a list of row dicts, or a dict with an 'error' key on
    failure / empty result.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_getUsers', ())
        row_headers = [x[0] for x in cursor.description]
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        if len(data) != 0:
            conn.commit()
            json_data = [dict(zip(row_headers, row)) for row in data]
            return json_data
        else:
            return {'error': 'Error: Not get information of your company'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def modify_user(id_user, password, name, idrol):
    """Update a user via the sp_ModifyUser stored procedure.

    Returns a 'message' dict when at least one row was affected, otherwise
    a dict with an 'error' key.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_ModifyUser', (id_user, password, name, idrol))
        data = cursor.rowcount
        if data != 0:
            conn.commit()
            return {'message': 'User data modify'}
        else:
            return {'error': 'The user can not be modify'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def verify_email(id_user):
    """Check whether a user exists via sp_getUserInfo.

    Returns True/False, or a dict with an 'error' key on failure.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_getUserInfo', (id_user,))
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        return len(data) != 0
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def verify_user_company(id_user, idcompany):
    """Check whether a user/company link exists via sp_getUserCompany_info.

    Returns True/False, or a dict with an 'error' key on failure.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_getUserCompany_info', (id_user, idcompany,))
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        return len(data) != 0
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def delete_user_data(id_user):
    """Delete a user via the sp_deleteUser stored procedure.

    Returns a 'message' dict when at least one row was affected, otherwise
    a dict with an 'error' key.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_deleteUser', (id_user,))
        data = cursor.rowcount
        if data != 0:
            conn.commit()
            return {'message': 'The user has been deleted'}
        else:
            return {'error': 'The user can not be deleted'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def delete_user_company(id_user, idcompany):
    """Remove a user/company link via the sp_deleteUserCompany stored procedure.

    Returns True when at least one row was affected, otherwise a dict with
    an 'error' key.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_deleteUserCompany', (id_user, idcompany,))
        data = cursor.rowcount
        if data != 0:
            conn.commit()
            return True
        else:
            return {'error': 'The user company can not be deleted'}
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def check_user(email, password):
    """Authenticate a user via the sp_CheckUser stored procedure.

    Returns a list of row dicts on a match, a dict with an 'error' key on
    failure. NOTE(review): an empty result falls through and returns None —
    this looks unintended; confirm how callers handle it before changing.
    """
    conn = None
    cursor = None
    try:
        conn = mysql.connect()
        cursor = conn.cursor()
        cursor.callproc('sp_CheckUser', (email, password,))
        row_headers = [x[0] for x in cursor.description]
        data = cursor.fetchall()
        # Was `len(data) is not 0`: use value comparison, not identity.
        if len(data) != 0:
            conn.commit()
            json_data = [dict(zip(row_headers, row)) for row in data]
            return json_data
    except Exception as e:
        return {'error': str(e)}
    finally:
        # Guard against NameError when connect()/cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
| 27.545872
| 74
| 0.545878
| 696
| 6,005
| 4.603448
| 0.117816
| 0.035581
| 0.056804
| 0.065231
| 0.898564
| 0.891698
| 0.877341
| 0.862984
| 0.839576
| 0.826779
| 0
| 0.004537
| 0.339384
| 6,005
| 217
| 75
| 27.672811
| 0.803126
| 0
| 0
| 0.838542
| 0
| 0
| 0.097102
| 0.010993
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057292
| false
| 0.03125
| 0.010417
| 0
| 0.234375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf9386271a71bdb9fb59ab73f6a8e379b8ba182e
| 1,912
|
py
|
Python
|
OCBO/strategies/random_strat.py
|
zhaofeng-shu33/OCBO
|
a8e132f3ab77da4a6f58c630b27bfe7e37cc0380
|
[
"MIT"
] | null | null | null |
OCBO/strategies/random_strat.py
|
zhaofeng-shu33/OCBO
|
a8e132f3ab77da4a6f58c630b27bfe7e37cc0380
|
[
"MIT"
] | null | null | null |
OCBO/strategies/random_strat.py
|
zhaofeng-shu33/OCBO
|
a8e132f3ab77da4a6f58c630b27bfe7e37cc0380
|
[
"MIT"
] | null | null | null |
"""
Strategy that picks which function and which point randomly.
"""
from __future__ import division
import numpy as np
from OCBO.strategies.multi_opt import MultiOpt
from OCBO.strategies.joint_opt import JointOpt
class RandomOpt(MultiOpt):
    """Multi-function strategy that picks both the function and the query
    point uniformly at random."""

    def decide_next_query(self):
        """Choose function index and query point randomly."""
        fcn_idx = np.random.randint(len(self.fcns))
        lows, highs = zip(*self.domains[fcn_idx])
        query_pt = np.random.uniform(lows, highs)
        return fcn_idx, query_pt

    @staticmethod
    def get_opt_method_name():
        return 'Random'

    # GP bookkeeping is only needed in the risk-neutral setting; otherwise
    # each hook is a no-op (implicitly returns None, as before).

    def _draw_next_gps(self):
        if not self.risk_neutral:
            return None
        return super(RandomOpt, self)._draw_next_gps()

    def _update_models(self):
        if not self.risk_neutral:
            return None
        return super(RandomOpt, self)._update_models()

    def _add_point_to_gp(self, f_idx, pt, val):
        if not self.risk_neutral:
            return None
        return super(RandomOpt, self)._add_point_to_gp(f_idx, pt, val)
class JointRandom(JointOpt):
    """Joint-model strategy that picks both the function and the query
    point uniformly at random."""

    def decide_next_query(self):
        """Choose function index and query point randomly."""
        fcn_idx = np.random.randint(len(self.fcns))
        lows, highs = zip(*self.domains[fcn_idx])
        query_pt = np.random.uniform(lows, highs)
        return fcn_idx, query_pt

    @staticmethod
    def get_opt_method_name():
        return 'joint-rand'

    # GP bookkeeping is only needed in the risk-neutral setting; otherwise
    # each hook is a no-op (implicitly returns None, as before).

    def _draw_next_gps(self):
        if not self.risk_neutral:
            return None
        return super(JointRandom, self)._draw_next_gps()

    def _update_models(self):
        if not self.risk_neutral:
            return None
        return super(JointRandom, self)._update_models()

    def _add_point_to_gp(self, f_idx, pt, val):
        if not self.risk_neutral:
            return None
        return super(JointRandom, self)._add_point_to_gp(f_idx, pt, val)
| 26.555556
| 76
| 0.61977
| 246
| 1,912
| 4.565041
| 0.260163
| 0.035619
| 0.053428
| 0.090828
| 0.794301
| 0.794301
| 0.794301
| 0.794301
| 0.776492
| 0.740873
| 0
| 0
| 0.287134
| 1,912
| 71
| 77
| 26.929577
| 0.823918
| 0.08159
| 0
| 0.730769
| 0
| 0
| 0.009206
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192308
| false
| 0.115385
| 0.076923
| 0.038462
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
cf942cd9e3b6d3489e1254c17c720b1658e07a63
| 2,227
|
py
|
Python
|
tests/test_provider_tozny_tozny.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_tozny_tozny.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_tozny_tozny.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_tozny_tozny.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:28:59 UTC)
def test_provider_import():
    """The generated tozny provider module is importable."""
    import terrascript.provider.tozny.tozny
def test_resource_import():
    """Every generated tozny resource class is importable."""
    from terrascript.resource.tozny.tozny import tozny_account
    from terrascript.resource.tozny.tozny import tozny_client_registration_token
    from terrascript.resource.tozny.tozny import tozny_realm
    from terrascript.resource.tozny.tozny import tozny_realm_application
    from terrascript.resource.tozny.tozny import tozny_realm_application_client_secret
    from terrascript.resource.tozny.tozny import tozny_realm_application_mapper
    from terrascript.resource.tozny.tozny import tozny_realm_application_role
    from terrascript.resource.tozny.tozny import tozny_realm_broker_delegation
    from terrascript.resource.tozny.tozny import tozny_realm_broker_identity
    from terrascript.resource.tozny.tozny import tozny_realm_default_groups
    from terrascript.resource.tozny.tozny import tozny_realm_group
    from terrascript.resource.tozny.tozny import tozny_realm_group_role_mappings
    from terrascript.resource.tozny.tozny import tozny_realm_identity
    from terrascript.resource.tozny.tozny import tozny_realm_identity_group_membership
    from terrascript.resource.tozny.tozny import tozny_realm_provider
    from terrascript.resource.tozny.tozny import tozny_realm_provider_mapper
    from terrascript.resource.tozny.tozny import tozny_realm_role
def test_datasource_import():
    """Every generated tozny data-source class is importable."""
    from terrascript.data.tozny.tozny import tozny_realm_application
    from terrascript.data.tozny.tozny import tozny_realm_application_role
    from terrascript.data.tozny.tozny import tozny_realm_application_saml_description
    from terrascript.data.tozny.tozny import tozny_realm_role
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.tozny.tozny
#
# t = terrascript.provider.tozny.tozny.tozny()
# s = str(t)
#
# assert 'https://github.com/tozny/terraform-provider-tozny' in s
# assert '0.14.0' in s
| 33.238806
| 86
| 0.809609
| 294
| 2,227
| 5.901361
| 0.255102
| 0.149856
| 0.19366
| 0.254179
| 0.714697
| 0.674352
| 0.669741
| 0.61902
| 0.564842
| 0
| 0
| 0.008269
| 0.131118
| 2,227
| 66
| 87
| 33.742424
| 0.888372
| 0.214639
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0
| 1
| 0.12
| true
| 0
| 1
| 0
| 1.12
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cfcda3d89be5a744d2344823848e28b956cb6132
| 108
|
py
|
Python
|
flask/HTTPBasicAuth/generate_password.py
|
custa/python-skeleton
|
81f335046d254ea40e137a35fea433b1c9d9ddbb
|
[
"Apache-2.0"
] | null | null | null |
flask/HTTPBasicAuth/generate_password.py
|
custa/python-skeleton
|
81f335046d254ea40e137a35fea433b1c9d9ddbb
|
[
"Apache-2.0"
] | null | null | null |
flask/HTTPBasicAuth/generate_password.py
|
custa/python-skeleton
|
81f335046d254ea40e137a35fea433b1c9d9ddbb
|
[
"Apache-2.0"
] | null | null | null |
"""Print a Werkzeug password hash for the password given on the command line."""
import sys

from werkzeug.security import generate_password_hash

# Fail with a usage hint instead of an opaque IndexError traceback when the
# password argument is missing.
if len(sys.argv) < 2:
    sys.exit("usage: generate_password.py <password>")

print(generate_password_hash(sys.argv[1]))
| 21.6
| 52
| 0.851852
| 16
| 108
| 5.5
| 0.6875
| 0.363636
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0.074074
| 108
| 4
| 53
| 27
| 0.87
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.666667
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
321a74cea39342e434b03eac18fb757a8b35a5b0
| 2,507
|
py
|
Python
|
test/py/architectural/test01.py
|
cvdlab/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 1
|
2016-09-20T04:48:12.000Z
|
2016-09-20T04:48:12.000Z
|
test/py/architectural/test01.py
|
Ahdhn/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 1
|
2018-02-20T21:57:07.000Z
|
2018-02-21T07:18:11.000Z
|
test/py/architectural/test01.py
|
Ahdhn/lar-cc
|
7092965acf7c0c78a5fab4348cf2c2aa01c4b130
|
[
"MIT",
"Unlicense"
] | 7
|
2016-11-04T10:47:42.000Z
|
2018-04-10T17:32:50.000Z
|
""" test file """
from larlib import *
V = [[3,-3],
[9,-3],[0,0],[3,0],[9,0],[15,0],
[3,3],[6,3],[9,3],[15,3],[21,3],
[0,9],[6,9],[15,9],[18,9],[0,13],
[6,13],[9,13],[15,13],[18,10],[21,10],
[18,13],[6,16],[9,16],[9,17],[15,17],
[18,17],[-3,24],[6,24],[15,24],[-3,13]]
FV = [
[22,23,24,25,29,28], [15,16,22,28,27,30], [18,21,26,25],
[13,14,19,21,18], [16,17,23,22], [11,12,16,15],
[9,10,20,19,14,13], [2,3,6,7,12,11], [0,1,4,8,7,6,3],
[4,5,9,13,18,17,16,12,7,8],[17,18,25,24,23]]
dwelling = [V,FV]
bU = AA(SOLIDIFY)(AA(POLYLINE)(lar2polylines (dwelling)))
EV = face2edge(FV)
VIEW(EXPLODE(1.2,1.2,1)(MKPOLS((V,EV))))
eE,iP = bUnit_to_eEiP(FV,EV)
modEe1D = V, [EV[e] for e in eE]
modIp1D = V, [EV[e] for e in iP]
eE1D = AA(COLOR(RED))(MKPOLS(modEe1D))
iP1D = AA(COLOR(GREEN))(MKPOLS(modIp1D))
VIEW(EXPLODE(1.2,1.2,1)(eE1D))
VIEW(EXPLODE(1.2,1.2,1)(iP1D))
VIEW(STRUCT(bU + iP1D + eE1D))
VIEW(EXPLODE(1.2,1.2,1)(bU + iP1D + eE1D))
floorHeight = larIntervals([1])([4])
modIp2D = larModelProduct([ modIp1D, floorHeight ])
modEe2D = larModelProduct([ modEe1D, floorHeight ])
VIEW(EXPLODE(1.2,1.2,1)(bU + MKPOLS(modIp2D) + eE1D))
VIEW(EXPLODE(1.2,1.2,1)(bU + iP1D + MKPOLS(modEe2D)))
VIEW(EXPLODE(1.2,1.2,1)(bU + MKPOLS(modIp2D) + MKPOLS(modEe2D)))
""" Concept design """
from larlib import *
V = [[3,-3],
[9,-3],[0,0],[3,0],[9,0],[15,0],
[3,3],[6,3],[9,3],[15,3],[21,3],
[0,9],[6,9],[15,9],[18,9],[0,13],
[6,13],[9,13],[15,13],[18,10],[21,10],
[18,13],[6,16],[9,16],[9,17],[15,17],
[18,17],[-3,24],[6,24],[15,24],[-3,13]]
FV = [
[22,23,24,25,29,28], [15,16,22,28,27,30], [18,21,26,25],
[13,14,19,21,18], [16,17,23,22], [11,12,16,15],
[9,10,20,19,14,13], [2,3,6,7,12,11], [0,1,4,8,7,6,3],
[4,5,9,13,18,17,16,12,7,8],[17,18,25,24,23]]
dwelling = [V,FV]
bU = AA(SOLIDIFY)(AA(POLYLINE)(lar2polylines (dwelling)))
EV = face2edge(FV)
VIEW(EXPLODE(1.2,1.2,1)(MKPOLS((V,EV))))
eE,iP = bUnit_to_eEiP(FV,EV)
modEe1D = V, [EV[e] for e in eE]
modIp1D = V, [EV[e] for e in iP]
eE1D = AA(COLOR(RED))(MKPOLS(modEe1D))
iP1D = AA(COLOR(GREEN))(MKPOLS(modIp1D))
VIEW(EXPLODE(1.2,1.2,1)(eE1D))
VIEW(EXPLODE(1.2,1.2,1)(iP1D))
VIEW(STRUCT(bU + iP1D + eE1D))
VIEW(EXPLODE(1.2,1.2,1)(bU + iP1D + eE1D))
floorHeight = larIntervals([1])([4])
modIp2D = larModelProduct([ modIp1D, floorHeight ])
modEe2D = larModelProduct([ modEe1D, floorHeight ])
VIEW(EXPLODE(1.2,1.2,1)(bU + MKPOLS(modIp2D) + eE1D))
VIEW(EXPLODE(1.2,1.2,1)(bU + iP1D + MKPOLS(modEe2D)))
VIEW(EXPLODE(1.2,1.2,1)(bU + MKPOLS(modIp2D) + MKPOLS(modEe2D)))
| 30.950617
| 64
| 0.590746
| 534
| 2,507
| 2.765918
| 0.134831
| 0.037915
| 0.056872
| 0.123223
| 0.985782
| 0.985782
| 0.985782
| 0.985782
| 0.985782
| 0.985782
| 0
| 0.228974
| 0.094136
| 2,507
| 80
| 65
| 31.3375
| 0.4214
| 0.00359
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
322522e8a50aae4fc41dbbc8292910b350ab134d
| 475,867
|
py
|
Python
|
openconfig/ydk/models/openconfig/openconfig_interfaces.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
openconfig/ydk/models/openconfig/openconfig_interfaces.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
openconfig/ydk/models/openconfig/openconfig_interfaces.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" openconfig_interfaces
Model for managing network interfaces.
This model reuses data items defined in the IETF YANG model for
interfaces described by RFC 7223 with an alternate structure
(particularly for operational state data) and with additional
configuration items.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class Interfaces(object):
"""
Top level container for interfaces, including configuration
and state data.
.. attribute:: interface
The list of named interfaces on the device
**type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
"""
_prefix = 'ocif'
_revision = '2015-11-20'
def __init__(self):
    # YList of Interface entries; parent/name wire the list into the YDK
    # object hierarchy so it can derive its YANG path.
    self.interface = YList()
    self.interface.parent = self
    self.interface.name = 'interface'
class Interface(object):
"""
The list of named interfaces on the device.
.. attribute:: name <key>
References the configured name of the interface
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Config>`
.. attribute:: aggregation
Options for logical interfaces representing aggregates
**type**\: :py:class:`Aggregation <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation>`
**presence node**\: True
.. attribute:: config
Configurable items at the global, physical interface level
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Config>`
.. attribute:: ethernet
Top\-level container for ethernet configuration and state
**type**\: :py:class:`Ethernet <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Ethernet>`
.. attribute:: hold_time
Top\-level container for hold\-time settings to enable dampening advertisements of interface transitions
**type**\: :py:class:`HoldTime <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.HoldTime>`
.. attribute:: routed_vlan
Top\-level container for routed vlan interfaces. These logical interfaces are also known as SVI (switched virtual interface), IRB (integrated routing and bridging), RVI (routed VLAN interface)
**type**\: :py:class:`RoutedVlan <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan>`
.. attribute:: state
Operational state data at the global interface level
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.State>`
.. attribute:: subinterfaces
Enclosing container for the list of subinterfaces associated with a physical interface
**type**\: :py:class:`Subinterfaces <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces>`
"""
_prefix = 'ocif'
_revision = '2015-11-20'
def __init__(self):
    # parent is set by the enclosing YList; name is this list entry's key.
    self.parent = None
    self.name = None
    # aggregation is a presence container: left None until explicitly created.
    self.aggregation = None
    # Child containers are instantiated eagerly and linked back to self so
    # the YDK runtime can walk the hierarchy.
    self.config = Interfaces.Interface.Config()
    self.config.parent = self
    self.ethernet = Interfaces.Interface.Ethernet()
    self.ethernet.parent = self
    self.hold_time = Interfaces.Interface.HoldTime()
    self.hold_time.parent = self
    self.routed_vlan = Interfaces.Interface.RoutedVlan()
    self.routed_vlan.parent = self
    self.state = Interfaces.Interface.State()
    self.state.parent = self
    self.subinterfaces = Interfaces.Interface.Subinterfaces()
    self.subinterfaces.parent = self
class Config(object):
"""
Configurable items at the global, physical interface
level
.. attribute:: description
[adapted from IETF interfaces model (RFC 7223)] A textual description of the interface. A server implementation MAY map this leaf to the ifAlias MIB object. Such an implementation needs to use some mechanism to handle the differences in size and characters allowed between this leaf and ifAlias. The definition of such a mechanism is outside the scope of this document. Since ifAlias is defined to be stored in non\-volatile storage, the MIB implementation MUST map ifAlias to the value of 'description' in the persistently stored datastore. Specifically, if the device supports '\:startup', when ifAlias is read the device MUST return the value of 'description' in the 'startup' datastore, and when it is written, it MUST be written to the 'running' and 'startup' datastores. Note that it is up to the implementation to decide whether to modify this single leaf in 'startup' or perform an implicit copy\-config from 'running' to 'startup'. If the device does not support '\:startup', ifAlias MUST be mapped to the 'description' leaf in the 'running' datastore
**type**\: str
.. attribute:: enabled
[adapted from IETF interfaces model (RFC 7223)] This leaf contains the configured, desired state of the interface. Systems that implement the IF\-MIB use the value of this leaf in the 'running' datastore to set IF\-MIB.ifAdminStatus to 'up' or 'down' after an ifEntry has been initialized, as described in RFC 2863. Changes in this leaf in the 'running' datastore are reflected in ifAdminStatus, but if ifAdminStatus is changed over SNMP, this leaf is not affected
**type**\: bool
**default value**\: true
.. attribute:: mtu
Set the max transmission unit size in octets for the physical interface. If this is not set, the mtu is set to the operational default \-\- e.g., 1514 bytes on an Ethernet interface
**type**\: int
**range:** 0..65535
.. attribute:: name
[adapted from IETF interfaces model (RFC 7223)] The name of the interface. A device MAY restrict the allowed values for this leaf, possibly depending on the type of the interface. For system\-controlled interfaces, this leaf is the device\-specific name of the interface. The 'config false' list interfaces/interface[name]/state contains the currently existing interfaces on the device. If a client tries to create configuration for a system\-controlled interface that is not present in the corresponding state list, the server MAY reject the request if the implementation does not support pre\-provisioning of interfaces or if the name refers to an interface that can never exist in the system. A NETCONF server MUST reply with an rpc\-error with the error\-tag 'invalid\-value' in this case. The IETF model in RFC 7223 provides YANG features for the following (i.e., pre\-provisioning and arbitrary\-names), however they are omitted here\: If the device supports pre\-provisioning of interface configuration, the 'pre\-provisioning' feature is advertised. If the device allows arbitrarily named user\-controlled interfaces, the 'arbitrary\-names' feature is advertised. When a configured user\-controlled interface is created by the system, it is instantiated with the same name in the /interfaces/interface[name]/state list
**type**\: str
.. attribute:: type
[adapted from IETF interfaces model (RFC 7223)] The type of the interface. When an interface entry is created, a server MAY initialize the type leaf with a valid value, e.g., if it is possible to derive the type from the name of the interface. If a client tries to set the type of an interface to a value that can never be used by the system, e.g., if the type is not supported or if the type does not match the name of the interface, the server MUST reject the request. A NETCONF server MUST reply with an rpc\-error with the error\-tag 'invalid\-value' in this case
**type**\: :py:class:`InterfaceTypeIdentity <ydk.models.ietf.ietf_interfaces.InterfaceTypeIdentity>`
**mandatory**\: True
"""
_prefix = 'ocif'
_revision = '2015-11-20'
def __init__(self):
    # All leaves start unset (None); parent is assigned by the container
    # that owns this Config instance.
    self.parent = None
    self.description = None
    self.enabled = None
    self.mtu = None
    self.name = None
    self.type = None
@property
def _common_path(self):
    # YANG path of this container, derived from the parent's path; a
    # detached instance cannot compute a path.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')

    return self.parent._common_path +'/openconfig-interfaces:config'
def is_config(self):
    """Return True: this node holds configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether any leaf of this config container has been set."""
    if not self.is_config():
        return False
    leaves = (self.description, self.enabled, self.mtu, self.name, self.type)
    return any(leaf is not None for leaf in leaves)
@staticmethod
def _meta_info():
    """Return the generated meta information record for this container."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Config']['meta_info']
class State(object):
    """
    Operational state data at the global interface level.

    Leafs (adapted from the IETF interfaces model, RFC 7223, unless noted):

    .. attribute:: admin_status
        Desired (administrative) state of the interface, as set by
        enabling/disabling it (read semantics of ifAdminStatus).
        **type**\\: :py:class:`AdminStatusEnum`  **mandatory**\\: True
    .. attribute:: counters
        Child container of interface-related statistics objects.
        **type**\\: :py:class:`Counters`
    .. attribute:: description
        Textual description of the interface (maps to ifAlias).
        **type**\\: str
    .. attribute:: enabled
        Configured, desired state of the interface (see IF-MIB
        ifAdminStatus semantics, RFC 2863).
        **type**\\: bool  **default value**\\: true
    .. attribute:: hardware_port
        References the hardware port in the device inventory.
        **type**\\: str (leafref to a platform component name)
    .. attribute:: ifindex
        System-assigned number for the interface (SNMP ifIndex).
        **type**\\: int  **range:** 0..4294967295
    .. attribute:: last_change
        Date and time of the last state change (ifLastChange).
        **type**\\: int  **range:** 0..4294967295
    .. attribute:: mtu
        Max transmission unit size in octets for the physical interface.
        **type**\\: int  **range:** 0..65535
    .. attribute:: name
        Device-specific name of the interface.
        **type**\\: str
    .. attribute:: oper_status
        Current operational state (same semantics as ifOperStatus).
        **type**\\: :py:class:`OperStatusEnum`  **mandatory**\\: True
    .. attribute:: type
        Type of the interface.
        **type**\\: :py:class:`InterfaceTypeIdentity`  **mandatory**\\: True
    """
    # YANG module prefix and revision date for this generated container.
    _prefix = 'ocif'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # All scalar leafs start out unset.
        for leaf in ('admin_status', 'description', 'enabled',
                     'hardware_port', 'ifindex', 'last_change', 'mtu',
                     'name', 'oper_status', 'type'):
            setattr(self, leaf, None)
        # Always-present statistics child container, wired back to this node.
        self.counters = Interfaces.Interface.State.Counters()
        self.counters.parent = self

    class AdminStatusEnum(Enum):
        """
        Administrative state of the interface; reflects the configured
        enable/disable (read semantics of ifAdminStatus, RFC 7223).

        .. data:: UP = 0
            Ready to pass packets.
        .. data:: DOWN = 1
            Not ready to pass packets and not in some test mode.
        .. data:: TESTING = 2
            In some test mode.
        """
        UP = 0
        DOWN = 1
        TESTING = 2

        @staticmethod
        def _meta_info():
            """Return the generated meta information record for this enum."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.State.AdminStatusEnum']

    class OperStatusEnum(Enum):
        """
        Current operational state of the interface; same semantics as
        ifOperStatus (RFC 7223).

        .. data:: UP = 1
            Ready to pass packets.
        .. data:: DOWN = 2
            The interface does not pass any packets.
        .. data:: TESTING = 3
            In some test mode; no operational packets can be passed.
        .. data:: UNKNOWN = 4
            Status cannot be determined for some reason.
        .. data:: DORMANT = 5
            Waiting for some external event.
        .. data:: NOT_PRESENT = 6
            Some component (typically hardware) is missing.
        .. data:: LOWER_LAYER_DOWN = 7
            Down due to state of lower-layer interface(s).
        """
        UP = 1
        DOWN = 2
        TESTING = 3
        UNKNOWN = 4
        DORMANT = 5
        NOT_PRESENT = 6
        LOWER_LAYER_DOWN = 7

        @staticmethod
        def _meta_info():
            """Return the generated meta information record for this enum."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.State.OperStatusEnum']

    class Counters(object):
        """
        A collection of interface-related statistics objects (adapted from
        the IETF interfaces model, RFC 7223).

        All ``in_*``/``out_*`` leafs are uint64 packet/octet counters
        (``in_unknown_protos`` is uint32); discontinuities can occur at
        re-initialization of the management system. ``last_clear`` is a
        date-and-time string indicating when the counters were last
        cleared.
        """
        # YANG module prefix and revision date for this generated container.
        _prefix = 'ocif'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            # Every counter leaf starts out unset.
            for leaf in ('in_broadcast_pkts', 'in_discards', 'in_errors',
                         'in_multicast_pkts', 'in_octets', 'in_unicast_pkts',
                         'in_unknown_protos', 'last_clear',
                         'out_broadcast_pkts', 'out_discards', 'out_errors',
                         'out_multicast_pkts', 'out_octets',
                         'out_unicast_pkts'):
                setattr(self, leaf, None)

        @property
        def _common_path(self):
            """Return the absolute XPath of this node via the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-interfaces:counters'

        def is_config(self):
            """Return False: counters are operational (read-only) data."""
            return False

        def _has_data(self):
            """Report whether any counter leaf has been set."""
            # Operational nodes never report data to send (is_config False).
            if not self.is_config():
                return False
            return any(
                getattr(self, leaf) is not None
                for leaf in ('in_broadcast_pkts', 'in_discards', 'in_errors',
                             'in_multicast_pkts', 'in_octets',
                             'in_unicast_pkts', 'in_unknown_protos',
                             'last_clear', 'out_broadcast_pkts',
                             'out_discards', 'out_errors',
                             'out_multicast_pkts', 'out_octets',
                             'out_unicast_pkts'))

        @staticmethod
        def _meta_info():
            """Return the generated meta information record for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.State.Counters']['meta_info']

    @property
    def _common_path(self):
        """Return the absolute XPath of this node via the parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-interfaces:state'

    def is_config(self):
        """Return False: this node holds operational state, not config."""
        return False

    def _has_data(self):
        """Report whether any leaf or the counters child has been set."""
        # Operational nodes never report data to send (is_config False).
        if not self.is_config():
            return False
        if self.counters is not None and self.counters._has_data():
            return True
        return any(
            getattr(self, leaf) is not None
            for leaf in ('admin_status', 'description', 'enabled',
                         'hardware_port', 'ifindex', 'last_change', 'mtu',
                         'name', 'oper_status', 'type'))

    @staticmethod
    def _meta_info():
        """Return the generated meta information record for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.State']['meta_info']
class HoldTime(object):
    """
    Top-level container for hold-time settings used to dampen
    advertisement of interface transitions.

    .. attribute:: config
        Configuration data for interface hold-time settings.
        **type**\\: :py:class:`Config`
    .. attribute:: state
        Operational state data for interface hold-time.
        **type**\\: :py:class:`State`
    """
    # YANG module prefix and revision date for this generated container.
    _prefix = 'ocif'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Always-present children, wired back to this node.
        cfg = Interfaces.Interface.HoldTime.Config()
        cfg.parent = self
        self.config = cfg
        st = Interfaces.Interface.HoldTime.State()
        st.parent = self
        self.state = st

    class Config(object):
        """
        Configuration data for interface hold-time settings.

        .. attribute:: down
            Dampening (milliseconds) applied when the interface goes from
            up to down; 0 disables dampening (immediate notification).
            **type**\\: int  **range:** 0..4294967295  **default value**\\: 0
        .. attribute:: up
            Dampening (milliseconds) applied when the interface goes from
            down to up; 0 disables dampening (immediate notification).
            **type**\\: int  **range:** 0..4294967295  **default value**\\: 0
        """
        # YANG module prefix and revision date for this generated container.
        _prefix = 'ocif'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            # Both leafs start out unset.
            self.down = None
            self.up = None

        @property
        def _common_path(self):
            """Return the absolute XPath of this node via the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-interfaces:config'

        def is_config(self):
            """Return True: this node holds configuration data."""
            return True

        def _has_data(self):
            """Report whether either hold-time leaf has been set."""
            if not self.is_config():
                return False
            return self.down is not None or self.up is not None

        @staticmethod
        def _meta_info():
            """Return the generated meta information record for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.HoldTime.Config']['meta_info']

    class State(object):
        """
        Operational state data for interface hold-time.

        .. attribute:: down
            Dampening (milliseconds) applied when the interface goes from
            up to down; 0 disables dampening (immediate notification).
            **type**\\: int  **range:** 0..4294967295  **default value**\\: 0
        .. attribute:: up
            Dampening (milliseconds) applied when the interface goes from
            down to up; 0 disables dampening (immediate notification).
            **type**\\: int  **range:** 0..4294967295  **default value**\\: 0
        """
        # YANG module prefix and revision date for this generated container.
        _prefix = 'ocif'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            # Both leafs start out unset.
            self.down = None
            self.up = None

        @property
        def _common_path(self):
            """Return the absolute XPath of this node via the parent."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-interfaces:state'

        def is_config(self):
            """Return False: this node holds operational state, not config."""
            return False

        def _has_data(self):
            """Report whether either hold-time leaf has been set."""
            # Operational nodes never report data to send (is_config False).
            if not self.is_config():
                return False
            return self.down is not None or self.up is not None

        @staticmethod
        def _meta_info():
            """Return the generated meta information record for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.HoldTime.State']['meta_info']

    @property
    def _common_path(self):
        """Return the absolute XPath of this node via the parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-interfaces:hold-time'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Report whether either child container has data."""
        if not self.is_config():
            return False
        for child in (self.config, self.state):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta information record for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.HoldTime']['meta_info']
class Subinterfaces(object):
"""
Enclosing container for the list of subinterfaces associated
with a physical interface
.. attribute:: subinterface
The list of subinterfaces (logical interfaces) associated with a physical interface
**type**\: list of :py:class:`Subinterface <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface>`
"""
# YANG module prefix and revision date for this generated container.
_prefix = 'ocif'
_revision = '2015-11-20'

def __init__(self):
    """Create the container with an empty keyed list of subinterfaces."""
    self.parent = None
    sub_list = YList()
    sub_list.parent = self
    sub_list.name = 'subinterface'
    self.subinterface = sub_list
class Subinterface(object):
"""
The list of subinterfaces (logical interfaces) associated
with a physical interface
.. attribute:: index <key>
The index number of the subinterface \-\- used to address the logical interface
**type**\: int
**range:** 0..4294967295
**refers to**\: :py:class:`index <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Config>`
.. attribute:: config
Configurable items at the subinterface level
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Config>`
.. attribute:: ipv4
Parameters for the IPv4 address family
**type**\: :py:class:`Ipv4 <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4>`
**presence node**\: True
.. attribute:: ipv6
Parameters for the IPv6 address family
**type**\: :py:class:`Ipv6 <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6>`
**presence node**\: True
.. attribute:: state
Operational state data for logical interfaces
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.State>`
.. attribute:: vlan
Enclosing container for VLAN interface\-specific data on subinterfaces
**type**\: :py:class:`Vlan <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Vlan>`
"""
# YANG module prefix and revision date for this generated list entry.
_prefix = 'ocif'
_revision = '2015-11-20'

def __init__(self):
    self.parent = None
    # List key; must be set by the caller before use.
    self.index = None
    # Presence containers stay absent (None) until explicitly created.
    self.ipv4 = None
    self.ipv6 = None
    # Always-present children, wired back to this node.
    self.config = Interfaces.Interface.Subinterfaces.Subinterface.Config()
    self.config.parent = self
    self.state = Interfaces.Interface.Subinterfaces.Subinterface.State()
    self.state.parent = self
    self.vlan = Interfaces.Interface.Subinterfaces.Subinterface.Vlan()
    self.vlan.parent = self
class Config(object):
    """
    Configurable items at the subinterface level.

    Leafs (adapted from the IETF interfaces model, RFC 7223, unless noted):

    .. attribute:: description
        Textual description of the interface (maps to ifAlias).
        **type**\\: str
    .. attribute:: enabled
        Configured, desired state of the interface (see IF-MIB
        ifAdminStatus semantics, RFC 2863).
        **type**\\: bool  **default value**\\: true
    .. attribute:: index
        Index of the subinterface (logical interface number); defaults
        to 0 on systems without subinterface support.
        **type**\\: int  **range:** 0..4294967295  **default value**\\: 0
    .. attribute:: name
        Device-specific name of the interface.
        **type**\\: str
    .. attribute:: unnumbered
        Marks the subinterface as unnumbered; references the
        subinterface index that supplies the IP address information.
        **type**\\: int  **range:** 0..4294967295
    """
    # YANG module prefix and revision date for this generated container.
    _prefix = 'ocif'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # All scalar leafs start out unset.
        for leaf in ('description', 'enabled', 'index', 'name',
                     'unnumbered'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        """Return the absolute XPath of this node via the parent."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-interfaces:config'

    def is_config(self):
        """Return True: this node holds configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf of this config container has been set."""
        if not self.is_config():
            return False
        leaves = (self.description, self.enabled, self.index, self.name,
                  self.unnumbered)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta information record for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Config']['meta_info']
class State(object):
    """
    Operational state data for logical interfaces.

    .. attribute:: admin_status
        Administrative (desired) state of the interface as set by
        enabling or disabling it; semantics adapted from the RFC 7223
        ifAdminStatus leaf.
        **type**: :py:class:`AdminStatusEnum`  **mandatory**: True
    .. attribute:: counters
        Collection of interface-related statistics objects.
        **type**: :py:class:`Counters`
    .. attribute:: description
        Textual description of the interface; implementations MAY map
        it to the ifAlias MIB object (RFC 7223).
        **type**: str
    .. attribute:: enabled
        Configured, desired state of the interface; mirrored into
        IF-MIB.ifAdminStatus by IF-MIB implementations (RFC 7223).
        **type**: bool  **default value**: true
    .. attribute:: ifindex
        System-assigned number for the interface (ifIndex in the SNMP
        Interface MIB).
        **type**: int  **range**: 0..4294967295
    .. attribute:: index
        Index (logical interface number) of the subinterface; should
        default to 0 on systems without subinterface support.
        **type**: int  **range**: 0..4294967295  **default value**: 0
    .. attribute:: last_change
        Date and time of the last state change of the interface
        (ifLastChange in the standard interface MIB).
        **type**: int  **range**: 0..4294967295
    .. attribute:: name
        Device-specific name of the interface (RFC 7223).
        **type**: str
    .. attribute:: oper_status
        Current operational state of the interface, with the same
        semantics as ifOperStatus.
        **type**: :py:class:`OperStatusEnum`  **mandatory**: True
    .. attribute:: unnumbered
        Reference to the subinterface that provides the IP address
        information when this subinterface is unnumbered.
        **type**: int  **range**: 0..4294967295
    """

    _prefix = 'ocif'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Scalar leaves start out unset.
        for leaf in ('admin_status', 'description', 'enabled', 'ifindex',
                     'index', 'last_change', 'name', 'oper_status',
                     'unnumbered'):
            setattr(self, leaf, None)
        # The counters child container is always instantiated.
        self.counters = Interfaces.Interface.Subinterfaces.Subinterface.State.Counters()
        self.counters.parent = self

    class AdminStatusEnum(Enum):
        """
        Administrative (desired) state of the interface, adapted from
        the RFC 7223 ifAdminStatus leaf.

        .. data:: UP = 0
            Ready to pass packets.
        .. data:: DOWN = 1
            Not ready to pass packets and not in some test mode.
        .. data:: TESTING = 2
            In some test mode.
        """

        UP = 0
        DOWN = 1
        TESTING = 2

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this enum."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.State.AdminStatusEnum']

    class OperStatusEnum(Enum):
        """
        Current operational state of the interface, with the same
        semantics as ifOperStatus (RFC 7223).

        .. data:: UP = 1
            Ready to pass packets.
        .. data:: DOWN = 2
            The interface does not pass any packets.
        .. data:: TESTING = 3
            In some test mode; no operational packets can be passed.
        .. data:: UNKNOWN = 4
            Status cannot be determined for some reason.
        .. data:: DORMANT = 5
            Waiting for some external event.
        .. data:: NOT_PRESENT = 6
            Some component (typically hardware) is missing.
        .. data:: LOWER_LAYER_DOWN = 7
            Down due to state of lower-layer interface(s).
        """

        UP = 1
        DOWN = 2
        TESTING = 3
        UNKNOWN = 4
        DORMANT = 5
        NOT_PRESENT = 6
        LOWER_LAYER_DOWN = 7

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this enum."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.State.OperStatusEnum']

    class Counters(object):
        """
        A collection of interface-related statistics objects, adapted
        from RFC 7223.  Counters are 64-bit unless noted otherwise;
        discontinuities can occur at re-initialization of the
        management system and at other times indicated by
        'discontinuity-time'.

        .. attribute:: in_broadcast_pkts
            Packets delivered to a higher (sub-)layer addressed to a
            broadcast address at this sub-layer.
        .. attribute:: in_discards
            Inbound packets discarded even though no errors had been
            detected (e.g. to free buffer space).
        .. attribute:: in_errors
            Inbound packets/transmission units with errors preventing
            delivery to a higher-layer protocol.
        .. attribute:: in_multicast_pkts
            Packets delivered to a higher (sub-)layer addressed to a
            multicast address at this sub-layer.
        .. attribute:: in_octets
            Total octets received on the interface, including framing
            characters.
        .. attribute:: in_unicast_pkts
            Packets delivered to a higher (sub-)layer that were not
            addressed to a multicast or broadcast address.
        .. attribute:: in_unknown_protos
            Packets discarded because of an unknown or unsupported
            protocol (32-bit counter in this model).
        .. attribute:: last_clear
            Timestamp string for the last time the interface counters
            were cleared.
        .. attribute:: out_broadcast_pkts
            Packets requested for transmission addressed to a
            broadcast address, including discarded/unsent ones.
        .. attribute:: out_discards
            Outbound packets discarded even though no errors had been
            detected.
        .. attribute:: out_errors
            Outbound packets/transmission units that could not be
            transmitted because of errors.
        .. attribute:: out_multicast_pkts
            Packets requested for transmission addressed to a
            multicast address, including discarded/unsent ones.
        .. attribute:: out_octets
            Total octets transmitted out of the interface, including
            framing characters.
        .. attribute:: out_unicast_pkts
            Packets requested for transmission that were not addressed
            to a multicast or broadcast address.
        """

        _prefix = 'ocif'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            # Every counter leaf starts out unset.
            for leaf in ('in_broadcast_pkts', 'in_discards', 'in_errors',
                         'in_multicast_pkts', 'in_octets',
                         'in_unicast_pkts', 'in_unknown_protos',
                         'last_clear', 'out_broadcast_pkts',
                         'out_discards', 'out_errors',
                         'out_multicast_pkts', 'out_octets',
                         'out_unicast_pkts'):
                setattr(self, leaf, None)

        @property
        def _common_path(self):
            """Absolute XML path, derived from the parent node."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-interfaces:counters'

        def is_config(self):
            """Return False: this container holds operational state data."""
            return False

        def _has_data(self):
            """Report whether any counter leaf has been populated."""
            if not self.is_config():
                return False
            values = (self.in_broadcast_pkts, self.in_discards,
                      self.in_errors, self.in_multicast_pkts,
                      self.in_octets, self.in_unicast_pkts,
                      self.in_unknown_protos, self.last_clear,
                      self.out_broadcast_pkts, self.out_discards,
                      self.out_errors, self.out_multicast_pkts,
                      self.out_octets, self.out_unicast_pkts)
            return any(value is not None for value in values)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.State.Counters']['meta_info']

    @property
    def _common_path(self):
        """Absolute XML path of this container, derived from the parent."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-interfaces:state'

    def is_config(self):
        """Return False: this container holds operational state data."""
        return False

    def _has_data(self):
        """Report whether any child leaf or container has been populated."""
        if not self.is_config():
            return False
        if self.counters is not None and self.counters._has_data():
            return True
        leaves = (self.admin_status, self.description, self.enabled,
                  self.ifindex, self.index, self.last_change, self.name,
                  self.oper_status, self.unnumbered)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.State']['meta_info']
class Vlan(object):
    """
    Enclosing container for VLAN interface-specific data on
    subinterfaces.

    .. attribute:: config
        Configuration parameters for VLANs.
        **type**: :py:class:`Config`
    .. attribute:: state
        State variables for VLANs.
        **type**: :py:class:`State`
    """

    _prefix = 'vlan'
    _revision = '2015-10-09'

    def __init__(self):
        self.parent = None
        # Instantiate both child containers, then attach them.
        self.config = Interfaces.Interface.Subinterfaces.Subinterface.Vlan.Config()
        self.state = Interfaces.Interface.Subinterfaces.Subinterface.Vlan.State()
        self.config.parent = self
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters for VLANs.

        .. attribute:: global_vlan_id
            VLAN id for the subinterface -- references a global VLAN
            by name or id.
            **type**: int (1..4094) or str
        .. attribute:: vlan_id
            VLAN id for the subinterface -- specified inline for the
            case of a local VLAN: either an id in 1..4094 or a QinQ
            "outer.inner" string.
            **type**: int (1..4094) or str
        """

        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.global_vlan_id = None
            self.vlan_id = None

        @property
        def _common_path(self):
            """Absolute XML path, derived from the parent node."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-vlan:config'

        def is_config(self):
            """Return True: this container holds configuration data."""
            return True

        def _has_data(self):
            """Report whether either VLAN id leaf has been populated."""
            if not self.is_config():
                return False
            return (self.global_vlan_id is not None
                    or self.vlan_id is not None)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Vlan.Config']['meta_info']

    class State(object):
        """
        State variables for VLANs.

        .. attribute:: global_vlan_id
            VLAN id for the subinterface -- references a global VLAN
            by name or id.
            **type**: int (1..4094) or str
        .. attribute:: vlan_id
            VLAN id for the subinterface -- specified inline for the
            case of a local VLAN: either an id in 1..4094 or a QinQ
            "outer.inner" string.
            **type**: int (1..4094) or str
        """

        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.global_vlan_id = None
            self.vlan_id = None

        @property
        def _common_path(self):
            """Absolute XML path, derived from the parent node."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-vlan:state'

        def is_config(self):
            """Return False: this container holds operational state data."""
            return False

        def _has_data(self):
            """Report whether either VLAN id leaf has been populated."""
            if not self.is_config():
                return False
            return (self.global_vlan_id is not None
                    or self.vlan_id is not None)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this container."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Vlan.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute XML path of this container, derived from the parent."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-vlan:vlan'

    def is_config(self):
        """Return True: this container holds configuration data."""
        return True

    def _has_data(self):
        """Report whether either child container has been populated."""
        if not self.is_config():
            return False
        for child in (self.config, self.state):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Vlan']['meta_info']
class Ipv4(object):
"""
Parameters for the IPv4 address family.
.. attribute:: address
The list of configured IPv4 addresses on the interface
**type**\: list of :py:class:`Address <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address>`
.. attribute:: config
Top\-level IPv4 configuration data for the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Config>`
.. attribute:: neighbor
A list of mappings from IPv4 addresses to link\-layer addresses. Entries in this list are used as static entries in the ARP Cache
**type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor>`
.. attribute:: state
Top level IPv4 operational state data
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.State>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Build the child containers and keyed lists of this presence container."""
    self.parent = None
    # Presence container: its existence carries meaning on its own.
    self._is_presence = True
    self.config = Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Config()
    self.config.parent = self
    self.state = Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.State()
    self.state.parent = self
    # The keyed lists are modelled as YList containers.
    for list_name in ('address', 'neighbor'):
        ylist = YList()
        ylist.parent = self
        ylist.name = list_name
        setattr(self, list_name, ylist)
class Address(object):
"""
The list of configured IPv4 addresses on the interface.
.. attribute:: ip <key>
References the configured IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**refers to**\: :py:class:`ip <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Config>`
.. attribute:: config
Configuration data for each configured IPv4 address on the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Config>`
.. attribute:: state
Operational state data for each IPv4 address configured on the interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.State>`
.. attribute:: vrrp
Enclosing container for VRRP groups handled by this IP interface
**type**\: :py:class:`Vrrp <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize the list key and attach the child containers."""
    self.parent = None
    self.ip = None  # list key: the configured IPv4 address
    subif = Interfaces.Interface.Subinterfaces.Subinterface
    for child_name, child_cls in (('config', subif.Ipv4.Address.Config),
                                  ('state', subif.Ipv4.Address.State),
                                  ('vrrp', subif.Ipv4.Address.Vrrp)):
        child = child_cls()
        child.parent = self
        setattr(self, child_name, child)
class Config(object):
    """
    Configuration data for each configured IPv4 address on the
    interface.

    .. attribute:: ip
        The IPv4 address on the interface, in dotted-quad form with
        an optional zone suffix (adapted from the IETF IP model,
        RFC 7277).
        **type**: str
    .. attribute:: prefix_length
        The length of the subnet prefix (RFC 7277).
        **type**: int  **range**: 0..32
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.prefix_length = None

    @property
    def _common_path(self):
        """Absolute XML path, derived from the parent node."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        """Return True: this container holds configuration data."""
        return True

    def _has_data(self):
        """Report whether either leaf has been populated."""
        if not self.is_config():
            return False
        return self.ip is not None or self.prefix_length is not None

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Config']['meta_info']
class State(object):
    """
    Operational state data for each IPv4 address configured on the
    interface.

    .. attribute:: ip
        The IPv4 address on the interface, in dotted-quad form with
        an optional zone suffix (adapted from the IETF IP model,
        RFC 7277).
        **type**: str
    .. attribute:: origin
        The origin of this address, e.g. statically configured or
        assigned by DHCP.
        **type**: :py:class:`IpAddressOriginEnum <ydk.models.openconfig.openconfig_if_ip.IpAddressOriginEnum>`
    .. attribute:: prefix_length
        The length of the subnet prefix (RFC 7277).
        **type**: int  **range**: 0..32
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.origin = None
        self.prefix_length = None

    @property
    def _common_path(self):
        """Absolute XML path, derived from the parent node."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this container holds operational state data."""
        return False

    def _has_data(self):
        """Report whether any leaf has been populated."""
        if not self.is_config():
            return False
        leaves = (self.ip, self.origin, self.prefix_length)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this container."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.State']['meta_info']
class Vrrp(object):
"""
Enclosing container for VRRP groups handled by this
IP interface
.. attribute:: vrrp_group
List of VRRP groups, keyed by virtual router id
**type**\: list of :py:class:`VrrpGroup <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Create the keyed list of VRRP groups for this address."""
    self.parent = None
    groups = YList()
    groups.parent = self
    groups.name = 'vrrp_group'
    self.vrrp_group = groups
class VrrpGroup(object):
"""
List of VRRP groups, keyed by virtual router id
.. attribute:: virtual_router_id <key>
References the configured virtual router id for this VRRP group
**type**\: int
**range:** 1..255
**refers to**\: :py:class:`virtual_router_id <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: config
Configuration data for the VRRP group
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: interface_tracking
Top\-level container for VRRP interface tracking
**type**\: :py:class:`InterfaceTracking <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking>`
.. attribute:: state
Operational state data for the VRRP group
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.State>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize the list key and attach the child containers."""
    self.parent = None
    self.virtual_router_id = None  # list key
    group_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup
    for child_name, child_cls in (
            ('config', group_cls.Config),
            ('interface_tracking', group_cls.InterfaceTracking),
            ('state', group_cls.State)):
        child = child_cls()
        child.parent = self
        setattr(self, child_name, child)
class Config(object):
"""
Configuration data for the VRRP group
.. attribute:: accept_mode
Configure whether packets destined for virtual addresses are accepted even when the virtual address is not owned by the router interface
**type**\: bool
**default value**\: false
.. attribute:: advertisement_interval
Sets the interval between successive VRRP advertisements \-\- RFC 5798 defines this as a 12\-bit value expressed as 0.1 seconds, with default 100, i.e., 1 second. Several implementation express this in units of seconds
**type**\: int
**range:** 1..4095
**units**\: centiseconds
**default value**\: 100
.. attribute:: preempt
When set to true, enables preemption by a higher priority backup router of a lower priority master router
**type**\: bool
**default value**\: true
.. attribute:: preempt_delay
Set the delay the higher priority router waits before preempting
**type**\: int
**range:** 0..3600
**default value**\: 0
.. attribute:: priority
Specifies the sending VRRP interface's priority for the virtual router. Higher values equal higher priority
**type**\: int
**range:** 1..254
**default value**\: 100
.. attribute:: virtual_address
Configure one or more virtual addresses for the VRRP group
**type**\: one of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_router_id
Set the virtual router id for use by the VRRP group. This usually also determines the virtual MAC address that is generated for the VRRP group
**type**\: int
**range:** 1..255
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.accept_mode = None
self.advertisement_interval = None
self.preempt = None
self.preempt_delay = None
self.priority = None
self.virtual_address = YLeafList()
self.virtual_address.parent = self
self.virtual_address.name = 'virtual_address'
self.virtual_router_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
    """Return True: this node models configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether any leaf or leaf-list entry of this node is populated."""
    if not self.is_config():
        return False
    leaves = (
        self.accept_mode,
        self.advertisement_interval,
        self.preempt,
        self.preempt_delay,
        self.priority,
        self.virtual_router_id,
    )
    if any(leaf is not None for leaf in leaves):
        return True
    if self.virtual_address is not None:
        return any(entry is not None for entry in self.virtual_address)
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
    table = meta_module._meta_table
    return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.Config']['meta_info']
class State(object):
    """
    Operational state data for the VRRP group.

    .. attribute:: accept_mode
        Whether packets destined for virtual addresses are accepted even
        when the virtual address is not owned by the router interface.
        **type**: bool, **default**: false
    .. attribute:: advertisement_interval
        Interval between successive VRRP advertisements, in centiseconds
        (RFC 5798 12-bit value; default 100, i.e. 1 second). Several
        implementations express this in units of seconds.
        **type**: int, **range**: 1..4095
    .. attribute:: current_priority
        Operational value of the priority for the interface in the VRRP
        group. **type**: int, **range**: 0..255
    .. attribute:: preempt
        When true, enables preemption by a higher priority backup router
        of a lower priority master router. **type**: bool, **default**: true
    .. attribute:: preempt_delay
        Delay the higher priority router waits before preempting.
        **type**: int, **range**: 0..3600, **default**: 0
    .. attribute:: priority
        Sending VRRP interface's priority for the virtual router; higher
        values equal higher priority.
        **type**: int, **range**: 1..254, **default**: 100
    .. attribute:: virtual_address
        One or more virtual addresses for the VRRP group.
        **type**: list of str (IPv4 dotted-quad or IPv6 textual form,
        optional zone suffix)
    .. attribute:: virtual_router_id
        Virtual router id for the VRRP group; usually also determines the
        generated virtual MAC address. **type**: int, **range**: 1..255
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.accept_mode = None
        self.advertisement_interval = None
        self.current_priority = None
        self.preempt = None
        self.preempt_delay = None
        self.priority = None
        # Leaf-list needs a parent/name so it can serialize itself later.
        virtual_address = YLeafList()
        virtual_address.parent = self
        virtual_address.name = 'virtual_address'
        self.virtual_address = virtual_address
        self.virtual_router_id = None

    @property
    def _common_path(self):
        """Absolute XPath of this state node, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this node models operational (state) data."""
        return False

    def _has_data(self):
        """Report whether any leaf or leaf-list entry of this node is populated."""
        if not self.is_config():
            return False
        leaves = (
            self.accept_mode,
            self.advertisement_interval,
            self.current_priority,
            self.preempt,
            self.preempt_delay,
            self.priority,
            self.virtual_router_id,
        )
        if any(leaf is not None for leaf in leaves):
            return True
        if self.virtual_address is not None:
            return any(entry is not None for entry in self.virtual_address)
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.State']['meta_info']
class InterfaceTracking(object):
    """
    Top-level container for VRRP interface tracking.

    .. attribute:: config
        Configuration data for VRRP interface tracking.
        **type**: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.Config>`
    .. attribute:: state
        Operational state data for VRRP interface tracking.
        **type**: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.State>`
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Child containers are created eagerly and re-parented so XPath
        # derivation can walk back up the tree.
        tracking_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking
        self.config = tracking_cls.Config()
        self.config.parent = self
        self.state = tracking_cls.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration data for VRRP interface tracking.

        .. attribute:: priority_decrement
            Value subtracted from priority when the tracked interface goes
            down. **type**: int, **range**: 0..254, **default**: 0
        .. attribute:: track_interface
            Interface tracked for up/down events to dynamically change the
            priority of the VRRP group, potentially changing mastership if
            the tracked interface going down lowers the priority enough.
            **type**: str (leafref to an interface name)
        """
        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.priority_decrement = None
            self.track_interface = None

        @property
        def _common_path(self):
            """Absolute XPath of this config node, derived from the parent's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-if-ip:config'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether any leaf of this node is populated."""
            if not self.is_config():
                return False
            return (self.priority_decrement is not None
                    or self.track_interface is not None)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
            table = meta_module._meta_table
            return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.Config']['meta_info']

    class State(object):
        """
        Operational state data for VRRP interface tracking.

        .. attribute:: priority_decrement
            Value subtracted from priority when the tracked interface goes
            down. **type**: int, **range**: 0..254, **default**: 0
        .. attribute:: track_interface
            Interface tracked for up/down events to dynamically change the
            priority of the VRRP group, potentially changing mastership if
            the tracked interface going down lowers the priority enough.
            **type**: str (leafref to an interface name)
        """
        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.priority_decrement = None
            self.track_interface = None

        @property
        def _common_path(self):
            """Absolute XPath of this state node, derived from the parent's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-if-ip:state'

        def is_config(self):
            """Return False: this node models operational (state) data."""
            return False

        def _has_data(self):
            """Report whether any leaf of this node is populated."""
            if not self.is_config():
                return False
            return (self.priority_decrement is not None
                    or self.track_interface is not None)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
            table = meta_module._meta_table
            return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this container, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:interface-tracking'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether either child container carries data."""
        if not self.is_config():
            return False
        children = (self.config, self.state)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this list entry; requires parent and the key leaf."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.virtual_router_id is None:
        raise YPYModelError('Key property virtual_router_id is None')
    return '{0}/openconfig-if-ip:vrrp-group[openconfig-if-ip:virtual-router-id = {1}]'.format(
        self.parent._common_path, self.virtual_router_id)
def is_config(self):
    """Return True: this node models configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether the key leaf or any child container carries data."""
    if not self.is_config():
        return False
    if self.virtual_router_id is not None:
        return True
    children = (self.config, self.interface_tracking, self.state)
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    """Return the generated meta information for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
    table = meta_module._meta_table
    return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp.VrrpGroup']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this container, derived from the parent's path."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-if-ip:vrrp'
def is_config(self):
    """Return True: this node models configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether any VRRP group in the list carries data."""
    if not self.is_config():
        return False
    groups = self.vrrp_group
    if groups is None:
        return False
    return any(entry._has_data() for entry in groups)
@staticmethod
def _meta_info():
    """Return the generated meta information for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
    table = meta_module._meta_table
    return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address.Vrrp']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this list entry; requires parent and the key leaf."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.ip is None:
        raise YPYModelError('Key property ip is None')
    return '{0}/openconfig-if-ip:address[openconfig-if-ip:ip = {1}]'.format(
        self.parent._common_path, self.ip)
def is_config(self):
    """Return True: this node models configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether the key leaf or any child container carries data."""
    if not self.is_config():
        return False
    if self.ip is not None:
        return True
    children = (self.config, self.state, self.vrrp)
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    """Return the generated meta information for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
    table = meta_module._meta_table
    return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Address']['meta_info']
class Neighbor(object):
    """
    A mapping from an IPv4 address to a link-layer address. Entries in
    this list are used as static entries in the ARP cache.

    .. attribute:: ip <key>
        References the configured IP address.
        **type**: str (IPv4 dotted-quad, optional zone suffix)
        **refers to**: :py:class:`ip <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor.Config>`
    .. attribute:: config
        Configuration data for each configured IPv4 address on the interface.
        **type**: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor.Config>`
    .. attribute:: state
        Operational state data for each IPv4 address configured on the interface.
        **type**: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor.State>`
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        # Child containers are created eagerly and re-parented so XPath
        # derivation can walk back up the tree.
        neighbor_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor
        self.config = neighbor_cls.Config()
        self.config.parent = self
        self.state = neighbor_cls.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration data for each configured IPv4 address on the interface.

        .. attribute:: ip
            The IPv4 address of the neighbor node.
            **type**: str (IPv4 dotted-quad, optional zone suffix)
        .. attribute:: link_layer_address
            The link-layer address of the neighbor node.
            **type**: str (colon-separated hex octets), **mandatory**: true
        """
        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None

        @property
        def _common_path(self):
            """Absolute XPath of this config node, derived from the parent's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-if-ip:config'

        def is_config(self):
            """Return True: this node models configuration data."""
            return True

        def _has_data(self):
            """Report whether any leaf of this node is populated."""
            if not self.is_config():
                return False
            return self.ip is not None or self.link_layer_address is not None

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
            table = meta_module._meta_table
            return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor.Config']['meta_info']

    class State(object):
        """
        Operational state data for each IPv4 address configured on the interface.

        .. attribute:: ip
            The IPv4 address of the neighbor node.
            **type**: str (IPv4 dotted-quad, optional zone suffix)
        .. attribute:: link_layer_address
            The link-layer address of the neighbor node.
            **type**: str (colon-separated hex octets), **mandatory**: true
        .. attribute:: origin
            The origin of this neighbor entry, static or dynamic.
            **type**: :py:class:`NeighborOriginEnum <ydk.models.openconfig.openconfig_if_ip.NeighborOriginEnum>`
        """
        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None
            self.origin = None

        @property
        def _common_path(self):
            """Absolute XPath of this state node, derived from the parent's path."""
            parent = self.parent
            if parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return parent._common_path + '/openconfig-if-ip:state'

        def is_config(self):
            """Return False: this node models operational (state) data."""
            return False

        def _has_data(self):
            """Report whether any leaf of this node is populated."""
            if not self.is_config():
                return False
            leaves = (self.ip, self.link_layer_address, self.origin)
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this class."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
            table = meta_module._meta_table
            return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute XPath of this list entry; requires parent and the key leaf."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.ip is None:
            raise YPYModelError('Key property ip is None')
        return '{0}/openconfig-if-ip:neighbor[openconfig-if-ip:ip = {1}]'.format(
            self.parent._common_path, self.ip)

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether the key leaf or any child container carries data."""
        if not self.is_config():
            return False
        if self.ip is not None:
            return True
        children = (self.config, self.state)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Neighbor']['meta_info']
class Config(object):
    """
    Top-level IPv4 configuration data for the interface.

    .. attribute:: enabled
        Whether IPv4 is enabled on this interface; when enabled the
        interface is connected to an IPv4 stack and can send and receive
        IPv4 packets. **type**: bool, **default**: true
    .. attribute:: mtu
        Largest IPv4 packet, in octets, the interface will send and
        receive; the server may restrict the allowed values depending on
        the interface type. **type**: int, **range**: 68..65535
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        """Absolute XPath of this config node, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf of this node is populated."""
        if not self.is_config():
            return False
        return self.enabled is not None or self.mtu is not None

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.Config']['meta_info']
class State(object):
    """
    Top-level IPv4 operational state data.

    .. attribute:: enabled
        Whether IPv4 is enabled on this interface; when enabled the
        interface is connected to an IPv4 stack and can send and receive
        IPv4 packets. **type**: bool, **default**: true
    .. attribute:: mtu
        Largest IPv4 packet, in octets, the interface will send and
        receive; the server may restrict the allowed values depending on
        the interface type. **type**: int, **range**: 68..65535
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        """Absolute XPath of this state node, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this node models operational (state) data."""
        return False

    def _has_data(self):
        """Report whether any leaf of this node is populated."""
        if not self.is_config():
            return False
        return self.enabled is not None or self.mtu is not None

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4.State']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this container, derived from the parent's path."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-if-ip:ipv4'
def is_config(self):
    """Return True: this node models configuration (not operational) data."""
    return True
def _has_data(self):
    """Report whether this presence container, any list entry, or any
    child container carries data."""
    if not self.is_config():
        return False
    # A presence container has data by virtue of existing.
    if self._is_presence:
        return True
    for entries in (self.address, self.neighbor):
        if entries is not None and any(e._has_data() for e in entries):
            return True
    for child in (self.config, self.state):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
    table = meta_module._meta_table
    return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv4']['meta_info']
class Ipv6(object):
"""
Parameters for the IPv6 address family.
.. attribute:: address
The list of configured IPv6 addresses on the interface
**type**\: list of :py:class:`Address <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address>`
.. attribute:: autoconf
Top\-level container for IPv6 autoconf
**type**\: :py:class:`Autoconf <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf>`
.. attribute:: config
Top\-level config data for the IPv6 interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Config>`
.. attribute:: neighbor
A list of mappings from IPv6 addresses to link\-layer addresses. Entries in this list are used as static entries in the Neighbor Cache
**type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor>`
.. attribute:: state
Top\-level operational state data for the IPv6 interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.State>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize child lists/containers; this is a presence container."""
    self.parent = None
    self._is_presence = True
    ipv6_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6
    # YLists need a parent/name so they can serialize themselves later.
    address = YList()
    address.parent = self
    address.name = 'address'
    self.address = address
    self.autoconf = ipv6_cls.Autoconf()
    self.autoconf.parent = self
    self.config = ipv6_cls.Config()
    self.config.parent = self
    neighbor = YList()
    neighbor.parent = self
    neighbor.name = 'neighbor'
    self.neighbor = neighbor
    self.state = ipv6_cls.State()
    self.state.parent = self
class Address(object):
"""
The list of configured IPv6 addresses on the interface.
.. attribute:: ip <key>
References the configured IP address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**refers to**\: :py:class:`ip <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Config>`
.. attribute:: config
Configuration data for each IPv6 address on the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Config>`
.. attribute:: state
State data for each IPv6 address on the interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.State>`
.. attribute:: vrrp
Enclosing container for VRRP groups handled by this IP interface
**type**\: :py:class:`Vrrp <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize the key leaf and eagerly create child containers."""
    self.parent = None
    self.ip = None
    addr_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address
    self.config = addr_cls.Config()
    self.config.parent = self
    self.state = addr_cls.State()
    self.state.parent = self
    self.vrrp = addr_cls.Vrrp()
    self.vrrp.parent = self
class Config(object):
    """
    Configuration data for each IPv6 address on the interface.

    .. attribute:: ip
        [adapted from IETF IP model RFC 7277] The IPv6 address on the
        interface. **type**: str (RFC 4291 textual form, optional zone)
    .. attribute:: prefix_length
        [adapted from IETF IP model RFC 7277] The length of the subnet
        prefix. **type**: int, **range**: 0..128, **mandatory**: true
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.prefix_length = None

    @property
    def _common_path(self):
        """Absolute XPath of this config node, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        """Return True: this node models configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf of this node is populated."""
        if not self.is_config():
            return False
        return self.ip is not None or self.prefix_length is not None

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Config']['meta_info']
class State(object):
    """
    State data for each IPv6 address on the interface.

    .. attribute:: ip
        [adapted from IETF IP model RFC 7277] The IPv6 address on the
        interface. **type**: str (RFC 4291 textual form, optional zone)
    .. attribute:: origin
        [adapted from IETF IP model RFC 7277] The origin of this address,
        e.g. static, dhcp.
        **type**: :py:class:`IpAddressOriginEnum <ydk.models.openconfig.openconfig_if_ip.IpAddressOriginEnum>`
    .. attribute:: prefix_length
        [adapted from IETF IP model RFC 7277] The length of the subnet
        prefix. **type**: int, **range**: 0..128, **mandatory**: true
    .. attribute:: status
        [adapted from IETF IP model RFC 7277] The status of an address;
        most states correspond to the IPv6 Stateless Address
        Autoconfiguration protocol.
        **type**: :py:class:`StatusEnum <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.State.StatusEnum>`
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.origin = None
        self.prefix_length = None
        self.status = None

    class StatusEnum(Enum):
        """
        The status of an IPv6 address [adapted from IETF IP model RFC
        7277]; most states correspond to the IPv6 Stateless Address
        Autoconfiguration protocol.

        .. data:: PREFERRED = 0
            Valid; may appear as source or destination of a packet.
        .. data:: DEPRECATED = 1
            Valid but should no longer source new communications;
            packets addressed to it are processed as expected.
        .. data:: INVALID = 2
            Not valid; must not appear as source or destination.
        .. data:: INACCESSIBLE = 3
            Owning interface is not operational.
        .. data:: UNKNOWN = 4
            Status cannot be determined.
        .. data:: TENTATIVE = 5
            Uniqueness on the link is being verified; not for general use.
        .. data:: DUPLICATE = 6
            Determined non-unique on the link; must not be used.
        .. data:: OPTIMISTIC = 7
            Available for use, subject to restrictions, while uniqueness
            on the link is being verified.
        """
        PREFERRED = 0
        DEPRECATED = 1
        INVALID = 2
        INACCESSIBLE = 3
        UNKNOWN = 4
        TENTATIVE = 5
        DUPLICATE = 6
        OPTIMISTIC = 7

        @staticmethod
        def _meta_info():
            """Return the generated meta information for this enum."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
            return meta_module._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.State.StatusEnum']

    @property
    def _common_path(self):
        """Absolute XPath of this state node, derived from the parent's path."""
        parent = self.parent
        if parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this node models operational (state) data."""
        return False

    def _has_data(self):
        """Report whether any leaf of this node is populated."""
        if not self.is_config():
            return False
        leaves = (self.ip, self.origin, self.prefix_length, self.status)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta information for this class."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta_module
        table = meta_module._meta_table
        return table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.State']['meta_info']
class Vrrp(object):
"""
Enclosing container for VRRP groups handled by this
IP interface
.. attribute:: vrrp_group
List of VRRP groups, keyed by virtual router id
**type**\: list of :py:class:`VrrpGroup <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize the vrrp-group YList with parent/name for serialization."""
    self.parent = None
    vrrp_group = YList()
    vrrp_group.parent = self
    vrrp_group.name = 'vrrp_group'
    self.vrrp_group = vrrp_group
class VrrpGroup(object):
"""
List of VRRP groups, keyed by virtual router id
.. attribute:: virtual_router_id <key>
References the configured virtual router id for this VRRP group
**type**\: int
**range:** 1..255
**refers to**\: :py:class:`virtual_router_id <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: config
Configuration data for the VRRP group
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: interface_tracking
Top\-level container for VRRP interface tracking
**type**\: :py:class:`InterfaceTracking <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking>`
.. attribute:: state
Operational state data for the VRRP group
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.State>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    """Initialize the key leaf and eagerly create child containers."""
    self.parent = None
    self.virtual_router_id = None
    group_cls = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup
    self.config = group_cls.Config()
    self.config.parent = self
    self.interface_tracking = group_cls.InterfaceTracking()
    self.interface_tracking.parent = self
    self.state = group_cls.State()
    self.state.parent = self
class Config(object):
"""
Configuration data for the VRRP group
.. attribute:: accept_mode
Configure whether packets destined for virtual addresses are accepted even when the virtual address is not owned by the router interface
**type**\: bool
**default value**\: false
.. attribute:: advertisement_interval
Sets the interval between successive VRRP advertisements \-\- RFC 5798 defines this as a 12\-bit value expressed as 0.1 seconds, with default 100, i.e., 1 second. Several implementation express this in units of seconds
**type**\: int
**range:** 1..4095
**units**\: centiseconds
**default value**\: 100
.. attribute:: preempt
When set to true, enables preemption by a higher priority backup router of a lower priority master router
**type**\: bool
**default value**\: true
.. attribute:: preempt_delay
Set the delay the higher priority router waits before preempting
**type**\: int
**range:** 0..3600
**default value**\: 0
.. attribute:: priority
Specifies the sending VRRP interface's priority for the virtual router. Higher values equal higher priority
**type**\: int
**range:** 1..254
**default value**\: 100
.. attribute:: virtual_address
Configure one or more virtual addresses for the VRRP group
**type**\: one of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_link_local
For VRRP on IPv6 interfaces, sets the virtual link local address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_router_id
Set the virtual router id for use by the VRRP group. This usually also determines the virtual MAC address that is generated for the VRRP group
**type**\: int
**range:** 1..255
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.accept_mode = None
self.advertisement_interval = None
self.preempt = None
self.preempt_delay = None
self.priority = None
self.virtual_address = YLeafList()
self.virtual_address.parent = self
self.virtual_address.name = 'virtual_address'
self.virtual_link_local = None
self.virtual_router_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.accept_mode is not None:
return True
if self.advertisement_interval is not None:
return True
if self.preempt is not None:
return True
if self.preempt_delay is not None:
return True
if self.priority is not None:
return True
if self.virtual_address is not None:
for child in self.virtual_address:
if child is not None:
return True
if self.virtual_link_local is not None:
return True
if self.virtual_router_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.Config']['meta_info']
class State(object):
"""
Operational state data for the VRRP group
.. attribute:: accept_mode
Configure whether packets destined for virtual addresses are accepted even when the virtual address is not owned by the router interface
**type**\: bool
**default value**\: false
.. attribute:: advertisement_interval
Sets the interval between successive VRRP advertisements \-\- RFC 5798 defines this as a 12\-bit value expressed as 0.1 seconds, with default 100, i.e., 1 second. Several implementation express this in units of seconds
**type**\: int
**range:** 1..4095
**units**\: centiseconds
**default value**\: 100
.. attribute:: current_priority
Operational value of the priority for the interface in the VRRP group
**type**\: int
**range:** 0..255
.. attribute:: preempt
When set to true, enables preemption by a higher priority backup router of a lower priority master router
**type**\: bool
**default value**\: true
.. attribute:: preempt_delay
Set the delay the higher priority router waits before preempting
**type**\: int
**range:** 0..3600
**default value**\: 0
.. attribute:: priority
Specifies the sending VRRP interface's priority for the virtual router. Higher values equal higher priority
**type**\: int
**range:** 1..254
**default value**\: 100
.. attribute:: virtual_address
Configure one or more virtual addresses for the VRRP group
**type**\: one of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_link_local
For VRRP on IPv6 interfaces, sets the virtual link local address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_router_id
Set the virtual router id for use by the VRRP group. This usually also determines the virtual MAC address that is generated for the VRRP group
**type**\: int
**range:** 1..255
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.accept_mode = None
self.advertisement_interval = None
self.current_priority = None
self.preempt = None
self.preempt_delay = None
self.priority = None
self.virtual_address = YLeafList()
self.virtual_address.parent = self
self.virtual_address.name = 'virtual_address'
self.virtual_link_local = None
self.virtual_router_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.accept_mode is not None:
return True
if self.advertisement_interval is not None:
return True
if self.current_priority is not None:
return True
if self.preempt is not None:
return True
if self.preempt_delay is not None:
return True
if self.priority is not None:
return True
if self.virtual_address is not None:
for child in self.virtual_address:
if child is not None:
return True
if self.virtual_link_local is not None:
return True
if self.virtual_router_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.State']['meta_info']
class InterfaceTracking(object):
"""
Top\-level container for VRRP interface tracking
.. attribute:: config
Configuration data for VRRP interface tracking
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.Config>`
.. attribute:: state
Operational state data for VRRP interface tracking
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.State>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.config = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.Config()
self.config.parent = self
self.state = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.State()
self.state.parent = self
class Config(object):
"""
Configuration data for VRRP interface tracking
.. attribute:: priority_decrement
Set the value to subtract from priority when the tracked interface goes down
**type**\: int
**range:** 0..254
**default value**\: 0
.. attribute:: track_interface
Sets an interface that should be tracked for up/down events to dynamically change the priority state of the VRRP group, and potentially change the mastership if the tracked interface going down lowers the priority sufficiently
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.priority_decrement = None
self.track_interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.priority_decrement is not None:
return True
if self.track_interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.Config']['meta_info']
class State(object):
"""
Operational state data for VRRP interface tracking
.. attribute:: priority_decrement
Set the value to subtract from priority when the tracked interface goes down
**type**\: int
**range:** 0..254
**default value**\: 0
.. attribute:: track_interface
Sets an interface that should be tracked for up/down events to dynamically change the priority state of the VRRP group, and potentially change the mastership if the tracked interface going down lowers the priority sufficiently
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.priority_decrement = None
self.track_interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.priority_decrement is not None:
return True
if self.track_interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.State']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:interface-tracking'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.config is not None and self.config._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.virtual_router_id is None:
raise YPYModelError('Key property virtual_router_id is None')
return self.parent._common_path +'/openconfig-if-ip:vrrp-group[openconfig-if-ip:virtual-router-id = ' + str(self.virtual_router_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.virtual_router_id is not None:
return True
if self.config is not None and self.config._has_data():
return True
if self.interface_tracking is not None and self.interface_tracking._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp.VrrpGroup']['meta_info']
@property
def _common_path(self):
    """Absolute XPath of the vrrp container, derived from the parent node."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-if-ip:vrrp'
def is_config(self):
    """Return True: this subtree represents configuration data."""
    return True
def _has_data(self):
    """Return True when any vrrp-group child entry carries data."""
    if not self.is_config():
        return False
    groups = self.vrrp_group
    if groups is None:
        return False
    return any(entry._has_data() for entry in groups)
@staticmethod
def _meta_info():
    """Return the generated meta information entry for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    key = 'Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address.Vrrp'
    return meta._meta_table[key]['meta_info']
@property
def _common_path(self):
    """Absolute XPath of this address list entry, keyed by ip."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.ip is None:
        raise YPYModelError('Key property ip is None')
    key = str(self.ip)
    return self.parent._common_path + '/openconfig-if-ip:address[openconfig-if-ip:ip = ' + key + ']'
def is_config(self):
    """Return True: this subtree represents configuration data."""
    return True
def _has_data(self):
    """Return True when the key leaf or any child container has data."""
    if not self.is_config():
        return False
    if self.ip is not None:
        return True
    # Check children in the generator's order: config, state, vrrp.
    for child in (self.config, self.state, self.vrrp):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information entry for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    key = 'Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Address'
    return meta._meta_table[key]['meta_info']
class Neighbor(object):
    """
    A list of mappings from IPv6 addresses to link-layer addresses.
    Entries in this list are used as static entries in the Neighbor
    Cache.

    .. attribute:: ip <key>
        References the configured IP neighbor address
        **type**: str (IPv6 address, optional %zone suffix)
    .. attribute:: config
        Configuration data for each IPv6 address on the interface
    .. attribute:: state
        State data for each IPv6 address on the interface
    """

    # YANG module prefix and revision of the generating model.
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        # Child containers are wired back to this node so that
        # _common_path can be derived by walking the parent chain.
        self.config = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration data for each IPv6 address on the interface.

        .. attribute:: ip
            [adapted from IETF IP model RFC 7277] The IPv6 address of the
            neighbor node
            **type**: str (IPv6 address, optional %zone suffix)
        .. attribute:: link_layer_address
            [adapted from IETF IP model RFC 7277] The link-layer address
            of the neighbor node
            **type**: str (colon-separated hex octets),
            **mandatory**: True
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None

        @property
        def _common_path(self):
            # Absolute schema path, derived from the parent node's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any leaf of this container is set.
            if not self.is_config():
                return False
            if self.ip is not None:
                return True
            if self.link_layer_address is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Deferred import avoids loading the meta tables until needed.
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor.Config']['meta_info']

    class State(object):
        """
        State data for each IPv6 address on the interface.

        .. attribute:: ip
            [adapted from IETF IP model RFC 7277] The IPv6 address of the
            neighbor node
            **type**: str (IPv6 address, optional %zone suffix)
        .. attribute:: is_router
            [adapted from IETF IP model RFC 7277] Indicates that the
            neighbor node acts as a router
            **type**: Empty
        .. attribute:: link_layer_address
            [adapted from IETF IP model RFC 7277] The link-layer address
            of the neighbor node
            **type**: str (colon-separated hex octets),
            **mandatory**: True
        .. attribute:: neighbor_state
            [adapted from IETF IP model RFC 7277] The Neighbor
            Unreachability Detection state of this entry
            **type**: NeighborStateEnum
        .. attribute:: origin
            [adapted from IETF IP model RFC 7277] The origin of this
            neighbor entry
            **type**: NeighborOriginEnum
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.is_router = None
            self.link_layer_address = None
            self.neighbor_state = None
            self.origin = None

        class NeighborStateEnum(Enum):
            """
            NeighborStateEnum

            [adapted from IETF IP model RFC 7277]
            The Neighbor Unreachability Detection state of this entry.

            .. data:: INCOMPLETE = 0
                Address resolution is in progress, and the link-layer
                address of the neighbor has not yet been determined.
            .. data:: REACHABLE = 1
                Roughly speaking, the neighbor is known to have been
                reachable recently (within tens of seconds ago).
            .. data:: STALE = 2
                The neighbor is no longer known to be reachable, but
                until traffic is sent to the neighbor no attempt
                should be made to verify its reachability.
            .. data:: DELAY = 3
                The neighbor is no longer known to be reachable, and
                traffic has recently been sent to the neighbor.
                Rather than probe the neighbor immediately, however,
                delay sending probes for a short while in order to
                give upper-layer protocols a chance to provide
                reachability confirmation.
            .. data:: PROBE = 4
                The neighbor is no longer known to be reachable, and
                unicast Neighbor Solicitation probes are being sent
                to verify reachability.
            """

            INCOMPLETE = 0
            REACHABLE = 1
            STALE = 2
            DELAY = 3
            PROBE = 4

            @staticmethod
            def _meta_info():
                # NOTE: enums return the meta-table entry directly, without
                # the ['meta_info'] lookup used by container classes.
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor.State.NeighborStateEnum']

        @property
        def _common_path(self):
            # Absolute schema path, derived from the parent node's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True when any leaf of this container is set.
            if not self.is_config():
                return False
            if self.ip is not None:
                return True
            if self.is_router is not None:
                return True
            if self.link_layer_address is not None:
                return True
            if self.neighbor_state is not None:
                return True
            if self.origin is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Deferred import avoids loading the meta tables until needed.
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor.State']['meta_info']

    @property
    def _common_path(self):
        # List entry path: requires both a parent and the key leaf.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.ip is None:
            raise YPYModelError('Key property ip is None')
        return self.parent._common_path + '/openconfig-if-ip:neighbor[openconfig-if-ip:ip = ' + str(self.ip) + ']'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when the key leaf is set or any child container carries data.
        if not self.is_config():
            return False
        if self.ip is not None:
            return True
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the meta tables until needed.
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Neighbor']['meta_info']
class Config(object):
    """
    Top-level config data for the IPv6 interface.

    .. attribute:: dup_addr_detect_transmits
        [adapted from IETF IP model RFC 7277] The number of consecutive
        Neighbor Solicitation messages sent while performing Duplicate
        Address Detection on a tentative address. Zero disables DAD on
        tentative addresses; one means a single transmission with no
        follow-up retransmissions
        **type**: int, **range**: 0..4294967295, **default**: 1
    .. attribute:: enabled
        [adapted from IETF IP model RFC 7277] Controls whether IPv6 is
        enabled or disabled on this interface. When enabled, the
        interface is connected to an IPv6 stack and can send and
        receive IPv6 packets
        **type**: bool, **default**: true
    .. attribute:: mtu
        [adapted from IETF IP model RFC 7277] The size, in octets, of
        the largest IPv6 packet that the interface will send and
        receive. The server may restrict the allowed values depending
        on the interface's type
        **type**: int, **range**: 1280..4294967295, **units**: octets
    """

    # YANG module prefix and revision of the generating model.
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.dup_addr_detect_transmits = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        # Absolute schema path, derived from the parent node's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf of this container is set.
        if not self.is_config():
            return False
        if self.dup_addr_detect_transmits is not None:
            return True
        if self.enabled is not None:
            return True
        if self.mtu is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the meta tables until needed.
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Config']['meta_info']
class State(object):
    """
    Top-level operational state data for the IPv6 interface.

    .. attribute:: dup_addr_detect_transmits
        [adapted from IETF IP model RFC 7277] The number of consecutive
        Neighbor Solicitation messages sent while performing Duplicate
        Address Detection on a tentative address. Zero disables DAD on
        tentative addresses; one means a single transmission with no
        follow-up retransmissions
        **type**: int, **range**: 0..4294967295, **default**: 1
    .. attribute:: enabled
        [adapted from IETF IP model RFC 7277] Controls whether IPv6 is
        enabled or disabled on this interface. When enabled, the
        interface is connected to an IPv6 stack and can send and
        receive IPv6 packets
        **type**: bool, **default**: true
    .. attribute:: mtu
        [adapted from IETF IP model RFC 7277] The size, in octets, of
        the largest IPv6 packet that the interface will send and
        receive. The server may restrict the allowed values depending
        on the interface's type
        **type**: int, **range**: 1280..4294967295, **units**: octets
    """

    # YANG module prefix and revision of the generating model.
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.dup_addr_detect_transmits = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        # Absolute schema path, derived from the parent node's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # True when any leaf of this container is set.
        if not self.is_config():
            return False
        if self.dup_addr_detect_transmits is not None:
            return True
        if self.enabled is not None:
            return True
        if self.mtu is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the meta tables until needed.
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.State']['meta_info']
class Autoconf(object):
    """
    Top-level container for IPv6 autoconf.

    .. attribute:: config
        [adapted from IETF IP model RFC 7277] Parameters to control the
        autoconfiguration of IPv6 addresses, as described in RFC 4862
    .. attribute:: state
        Operational state data
    """

    # YANG module prefix and revision of the generating model.
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Child containers are wired back to this node so that
        # _common_path can be derived by walking the parent chain.
        self.config = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf.State()
        self.state.parent = self

    class Config(object):
        """
        [adapted from IETF IP model RFC 7277]
        Parameters to control the autoconfiguration of IPv6
        addresses, as described in RFC 4862.

        .. attribute:: create_global_addresses
            If enabled, the host creates global addresses as described
            in RFC 4862
            **type**: bool, **default**: true
        .. attribute:: create_temporary_addresses
            If enabled, the host creates temporary addresses as
            described in RFC 4941
            **type**: bool, **default**: false
        .. attribute:: temporary_preferred_lifetime
            The time period during which the temporary address is
            preferred
            **type**: int, **range**: 0..4294967295, **units**: seconds,
            **default**: 86400
        .. attribute:: temporary_valid_lifetime
            The time period during which the temporary address is valid
            **type**: int, **range**: 0..4294967295, **units**: seconds,
            **default**: 604800
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.create_global_addresses = None
            self.create_temporary_addresses = None
            self.temporary_preferred_lifetime = None
            self.temporary_valid_lifetime = None

        @property
        def _common_path(self):
            # Absolute schema path, derived from the parent node's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any leaf of this container is set.
            if not self.is_config():
                return False
            if self.create_global_addresses is not None:
                return True
            if self.create_temporary_addresses is not None:
                return True
            if self.temporary_preferred_lifetime is not None:
                return True
            if self.temporary_valid_lifetime is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Deferred import avoids loading the meta tables until needed.
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf.Config']['meta_info']

    class State(object):
        """
        Operational state data.

        .. attribute:: create_global_addresses
            [adapted from IETF IP model RFC 7277] If enabled, the host
            creates global addresses as described in RFC 4862
            **type**: bool, **default**: true
        .. attribute:: create_temporary_addresses
            [adapted from IETF IP model RFC 7277] If enabled, the host
            creates temporary addresses as described in RFC 4941
            **type**: bool, **default**: false
        .. attribute:: temporary_preferred_lifetime
            [adapted from IETF IP model RFC 7277] The time period during
            which the temporary address is preferred
            **type**: int, **range**: 0..4294967295, **units**: seconds,
            **default**: 86400
        .. attribute:: temporary_valid_lifetime
            [adapted from IETF IP model RFC 7277] The time period during
            which the temporary address is valid
            **type**: int, **range**: 0..4294967295, **units**: seconds,
            **default**: 604800
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.create_global_addresses = None
            self.create_temporary_addresses = None
            self.temporary_preferred_lifetime = None
            self.temporary_valid_lifetime = None

        @property
        def _common_path(self):
            # Absolute schema path, derived from the parent node's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True when any leaf of this container is set.
            if not self.is_config():
                return False
            if self.create_global_addresses is not None:
                return True
            if self.create_temporary_addresses is not None:
                return True
            if self.temporary_preferred_lifetime is not None:
                return True
            if self.temporary_valid_lifetime is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Deferred import avoids loading the meta tables until needed.
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf.State']['meta_info']

    @property
    def _common_path(self):
        # Absolute schema path, derived from the parent node's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:autoconf'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when either child container carries data.
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the meta tables until needed.
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6.Autoconf']['meta_info']
@property
def _common_path(self):
    """Absolute schema path of the ipv6 container."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-if-ip:ipv6'

def is_config(self):
    """Return True if this instance represents config data, else False."""
    return True

def _has_data(self):
    """True when this presence container exists or any child holds data."""
    if not self.is_config():
        return False
    # A presence container reports data merely by existing.
    if self._is_presence:
        return True
    if self.address is not None and any(entry._has_data() for entry in self.address):
        return True
    if self.autoconf is not None and self.autoconf._has_data():
        return True
    if self.config is not None and self.config._has_data():
        return True
    if self.neighbor is not None and any(entry._has_data() for entry in self.neighbor):
        return True
    return self.state is not None and self.state._has_data()

@staticmethod
def _meta_info():
    """Look up the generated meta information for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface.Ipv6']['meta_info']
@property
def _common_path(self):
    """Absolute schema path of this keyed list entry."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    # The list key must be known before a keyed path can be formed.
    if self.index is None:
        raise YPYModelError('Key property index is None')
    return '%s/openconfig-interfaces:subinterface[openconfig-interfaces:index = %s]' % (
        self.parent._common_path, self.index)

def is_config(self):
    """Return True if this instance represents config data, else False."""
    return True

def _has_data(self):
    """True when the key leaf or any child container holds data."""
    if not self.is_config():
        return False
    if self.index is not None:
        return True
    containers = (self.config, self.ipv4, self.ipv6, self.state, self.vlan)
    return any(c is not None and c._has_data() for c in containers)

@staticmethod
def _meta_info():
    """Look up the generated meta information for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Subinterfaces.Subinterface']['meta_info']
@property
def _common_path(self):
    """Absolute schema path of the subinterfaces container."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-interfaces:subinterfaces'

def is_config(self):
    """Return True if this instance represents config data, else False."""
    return True

def _has_data(self):
    """True when any subinterface list entry holds data."""
    if not self.is_config():
        return False
    entries = self.subinterface
    return entries is not None and any(entry._has_data() for entry in entries)

@staticmethod
def _meta_info():
    """Look up the generated meta information for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Subinterfaces']['meta_info']
class Ethernet(object):
    """
    Top\-level container for ethernet configuration
    and state
    .. attribute:: config
    Configuration data for ethernet interfaces
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Ethernet.Config>`
    .. attribute:: state
    State variables for Ethernet interfaces
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Ethernet.State>`
    .. attribute:: vlan
    Enclosing container for VLAN interface\-specific data on Ethernet interfaces
    **type**\: :py:class:`Vlan <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Ethernet.Vlan>`
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'eth'
    _revision = '2015-11-20'
    def __init__(self):
        # Child containers are created eagerly and re-parented to this node
        # so that _common_path can be derived through the parent chain.
        self.parent = None
        self.config = Interfaces.Interface.Ethernet.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.Ethernet.State()
        self.state.parent = self
        self.vlan = Interfaces.Interface.Ethernet.Vlan()
        self.vlan.parent = self
class Config(object):
    """
    Configuration data for ethernet interfaces.

    .. attribute:: aggregate_id
        Logical aggregate interface this interface belongs to
        (leafref to an interface name). **type**: str
    .. attribute:: auto_negotiate
        Whether transmission parameters are auto-negotiated with the
        peer; when False they are set manually.
        **type**: bool, **default**: true
    .. attribute:: duplex_mode
        Duplex mode advertised (when auto-negotiating) or forced on the
        interface. **type**: DuplexModeEnum
    .. attribute:: enable_flow_control
        Whether Ethernet PAUSE-frame flow control is enabled; overrides
        any auto-negotiated setting. **type**: bool, **default**: false
    .. attribute:: mac_address
        MAC address assigned to the interface.
        **type**: str, pattern [0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}
    .. attribute:: port_speed
        Port speed advertised (when auto-negotiating) or fixed.
        **type**: EthernetSpeedIdentity
    """
    _prefix = 'eth'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.aggregate_id = None
        self.auto_negotiate = None
        self.duplex_mode = None
        self.enable_flow_control = None
        self.mac_address = None
        self.port_speed = None

    class DuplexModeEnum(Enum):
        """
        Duplex mode advertised to, or forced on, the interface.

        .. data:: FULL = 0
            Full duplex mode
        .. data:: HALF = 1
            Half duplex mode
        """
        FULL = 0
        HALF = 1

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Ethernet.Config.DuplexModeEnum']

    @property
    def _common_path(self):
        """Absolute schema path of this config container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ethernet:config'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return True

    def _has_data(self):
        """True when any leaf of this container is set."""
        if not self.is_config():
            return False
        leafs = (self.aggregate_id, self.auto_negotiate, self.duplex_mode,
                 self.enable_flow_control, self.mac_address, self.port_speed)
        return any(leaf is not None for leaf in leafs)

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Ethernet.Config']['meta_info']
class State(object):
    """
    State variables for Ethernet interfaces.

    .. attribute:: aggregate_id
        Logical aggregate interface this interface belongs to
        (leafref to an interface name). **type**: str
    .. attribute:: auto_negotiate
        Whether transmission parameters are auto-negotiated with the
        peer. **type**: bool, **default**: true
    .. attribute:: counters
        Ethernet interface counters. **type**: Counters
    .. attribute:: duplex_mode
        Duplex mode advertised or forced on the interface.
        **type**: DuplexModeEnum
    .. attribute:: enable_flow_control
        Whether Ethernet PAUSE-frame flow control is enabled.
        **type**: bool, **default**: false
    .. attribute:: hw_mac_address
        Represents the 'burned-in', or system-assigned, MAC address for
        the Ethernet interface.
        **type**: str, pattern [0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}
    .. attribute:: mac_address
        MAC address assigned to the interface; the system-assigned
        address when not explicitly configured.
        **type**: str, pattern [0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}
    .. attribute:: port_speed
        Port speed advertised or fixed on the interface.
        **type**: EthernetSpeedIdentity
    """
    _prefix = 'eth'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.aggregate_id = None
        self.auto_negotiate = None
        self.counters = Interfaces.Interface.Ethernet.State.Counters()
        self.counters.parent = self
        self.duplex_mode = None
        self.enable_flow_control = None
        self.hw_mac_address = None
        self.mac_address = None
        self.port_speed = None

    class DuplexModeEnum(Enum):
        """
        Duplex mode advertised to, or forced on, the interface.

        .. data:: FULL = 0
            Full duplex mode
        .. data:: HALF = 1
            Half duplex mode
        """
        FULL = 0
        HALF = 1

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Ethernet.State.DuplexModeEnum']

    class Counters(object):
        """
        Ethernet interface counters.  All leafs are uint64 counters
        (range 0..18446744073709551615).

        .. attribute:: in_8021q_frames
            802.1q tagged frames received.
        .. attribute:: in_crc_errors
            Receive error events due to FCS/CRC check failure.
        .. attribute:: in_fragment_frames
            Fragment frames received.
        .. attribute:: in_jabber_frames
            Jabber frames received (oversize frames with a bad CRC).
        .. attribute:: in_mac_control_frames
            MAC layer control frames received.
        .. attribute:: in_mac_pause_frames
            MAC layer PAUSE frames received.
        .. attribute:: in_oversize_frames
            Oversize frames received.
        .. attribute:: out_8021q_frames
            802.1q tagged frames sent.
        .. attribute:: out_mac_control_frames
            MAC layer control frames sent.
        .. attribute:: out_mac_pause_frames
            MAC layer PAUSE frames sent.
        """
        _prefix = 'eth'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.in_8021q_frames = None
            self.in_crc_errors = None
            self.in_fragment_frames = None
            self.in_jabber_frames = None
            self.in_mac_control_frames = None
            self.in_mac_pause_frames = None
            self.in_oversize_frames = None
            self.out_8021q_frames = None
            self.out_mac_control_frames = None
            self.out_mac_pause_frames = None

        @property
        def _common_path(self):
            """Absolute schema path of the counters container."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ethernet:counters'

        def is_config(self):
            """Return True if this instance represents config data, else False."""
            return False

        def _has_data(self):
            """True when any counter leaf is set."""
            if not self.is_config():
                return False
            counters = (
                self.in_8021q_frames, self.in_crc_errors,
                self.in_fragment_frames, self.in_jabber_frames,
                self.in_mac_control_frames, self.in_mac_pause_frames,
                self.in_oversize_frames, self.out_8021q_frames,
                self.out_mac_control_frames, self.out_mac_pause_frames,
            )
            return any(value is not None for value in counters)

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Ethernet.State.Counters']['meta_info']

    @property
    def _common_path(self):
        """Absolute schema path of this state container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ethernet:state'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return False

    def _has_data(self):
        """True when any leaf or the counters child holds data."""
        if not self.is_config():
            return False
        if self.counters is not None and self.counters._has_data():
            return True
        leafs = (self.aggregate_id, self.auto_negotiate, self.duplex_mode,
                 self.enable_flow_control, self.hw_mac_address,
                 self.mac_address, self.port_speed)
        return any(leaf is not None for leaf in leafs)

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Ethernet.State']['meta_info']
class Vlan(object):
    """
    Enclosing container for VLAN interface-specific data on Ethernet
    interfaces.

    .. attribute:: config
        Configuration parameters for VLANs. **type**: Config
    .. attribute:: state
        State variables for VLANs. **type**: State
    """
    _prefix = 'vlan'
    _revision = '2015-10-09'

    def __init__(self):
        self.parent = None
        self.config = Interfaces.Interface.Ethernet.Vlan.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.Ethernet.Vlan.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters for VLANs.

        .. attribute:: access_vlan
            Access VLAN assigned to the access port: a VLAN id
            (int, 1..4094) or a QinQ id string ("outer.inner", where
            inner may be "*").
        .. attribute:: interface_mode
            Access or trunk mode for VLANs. **type**: VlanModeTypeEnum
        .. attribute:: native_vlan
            Native VLAN id for untagged frames on a trunk interface;
            only valid on a trunk. Same union type as access_vlan.
        .. attribute:: trunk_vlans
            VLANs, or inclusive ranges thereof (form "x..y", x < y),
            that a trunk-mode interface may carry; all VLANs are
            allowed when unset. **type**: leaf-list of VLAN ids,
            ranges, or QinQ ids.
        """
        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.access_vlan = None
            self.interface_mode = None
            self.native_vlan = None
            self.trunk_vlans = YLeafList()
            self.trunk_vlans.parent = self
            self.trunk_vlans.name = 'trunk_vlans'

        @property
        def _common_path(self):
            """Absolute schema path of this config container."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-vlan:config'

        def is_config(self):
            """Return True if this instance represents config data, else False."""
            return True

        def _has_data(self):
            """True when any leaf or leaf-list entry is set."""
            if not self.is_config():
                return False
            leafs = (self.access_vlan, self.interface_mode, self.native_vlan)
            if any(leaf is not None for leaf in leafs):
                return True
            return self.trunk_vlans is not None and any(
                item is not None for item in self.trunk_vlans)

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Ethernet.Vlan.Config']['meta_info']

    class State(object):
        """
        State variables for VLANs.

        .. attribute:: access_vlan
            Access VLAN assigned to the access port: a VLAN id
            (int, 1..4094) or a QinQ id string ("outer.inner", where
            inner may be "*").
        .. attribute:: interface_mode
            Access or trunk mode for VLANs. **type**: VlanModeTypeEnum
        .. attribute:: native_vlan
            Native VLAN id for untagged frames on a trunk interface;
            only valid on a trunk. Same union type as access_vlan.
        .. attribute:: trunk_vlans
            VLANs, or inclusive ranges thereof (form "x..y", x < y),
            that a trunk-mode interface may carry; all VLANs are
            allowed when unset. **type**: leaf-list of VLAN ids,
            ranges, or QinQ ids.
        """
        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.access_vlan = None
            self.interface_mode = None
            self.native_vlan = None
            self.trunk_vlans = YLeafList()
            self.trunk_vlans.parent = self
            self.trunk_vlans.name = 'trunk_vlans'

        @property
        def _common_path(self):
            """Absolute schema path of this state container."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-vlan:state'

        def is_config(self):
            """Return True if this instance represents config data, else False."""
            return False

        def _has_data(self):
            """True when any leaf or leaf-list entry is set."""
            if not self.is_config():
                return False
            leafs = (self.access_vlan, self.interface_mode, self.native_vlan)
            if any(leaf is not None for leaf in leafs):
                return True
            return self.trunk_vlans is not None and any(
                item is not None for item in self.trunk_vlans)

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Ethernet.Vlan.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute schema path of the vlan container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-vlan:vlan'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return True

    def _has_data(self):
        """True when the config or state child container holds any data."""
        if not self.is_config():
            return False
        children = (self.config, self.state)
        return any(child is not None and child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Ethernet.Vlan']['meta_info']
@property
def _common_path(self):
    """Absolute schema path of the ethernet container."""
    parent = self.parent
    if parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return parent._common_path + '/openconfig-if-ethernet:ethernet'

def is_config(self):
    """Return True if this instance represents config data, else False."""
    return True

def _has_data(self):
    """True when any child container holds data."""
    if not self.is_config():
        return False
    children = (self.config, self.state, self.vlan)
    return any(child is not None and child._has_data() for child in children)

@staticmethod
def _meta_info():
    """Look up the generated meta information for this node."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Ethernet']['meta_info']
class Aggregation(object):
    """
    Options for logical interfaces representing
    aggregates
    .. attribute:: config
    Configuration variables for logical aggregate / LAG interfaces
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Config>`
    .. attribute:: lacp
    Configuration for LACP protocol operation on the aggregate interface
    **type**\: :py:class:`Lacp <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp>`
    **presence node**\: True
    .. attribute:: state
    Operational state variables for logical aggregate / LAG interfaces
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.State>`
    .. attribute:: vlan
    Enclosing container for VLAN interface\-specific data on Ethernet interfaces
    **type**\: :py:class:`Vlan <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Vlan>`
    .. attribute:: _is_presence
    Is present if this instance represents presence container else not
    **type**\: bool
    This class is a :ref:`presence class<presence-class>`
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'lag'
    _revision = '2015-11-20'
    def __init__(self):
        self.parent = None
        # Presence container: its existence alone carries meaning.
        self._is_presence = True
        self.config = Interfaces.Interface.Aggregation.Config()
        self.config.parent = self
        # lacp is itself a presence container, so it stays unset (None)
        # until explicitly assigned by the user.
        self.lacp = None
        self.state = Interfaces.Interface.Aggregation.State()
        self.state.parent = self
        self.vlan = Interfaces.Interface.Aggregation.Vlan()
        self.vlan.parent = self
class Config(object):
    """
    Configuration variables for logical aggregate / LAG interfaces.

    .. attribute:: lag_type
        How the LAG is configured / maintained.
        **type**: AggregationTypeEnum
    .. attribute:: min_links
        Minimum number of member interfaces that must be active for
        the aggregate interface to be available.
        **type**: int, range 0..65535
    """
    _prefix = 'lag'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.lag_type = None
        self.min_links = None

    @property
    def _common_path(self):
        """Absolute schema path of this config container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-aggregate:config'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return True

    def _has_data(self):
        """True when any leaf of this container is set."""
        if not self.is_config():
            return False
        return self.lag_type is not None or self.min_links is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.Config']['meta_info']
class State(object):
    """
    Operational state variables for logical aggregate / LAG
    interfaces.

    .. attribute:: lag_type
        How the LAG is configured / maintained.
        **type**: AggregationTypeEnum
    .. attribute:: members
        Current member interfaces of the aggregate, as references to
        existing interface names. **type**: leaf-list of str
    .. attribute:: min_links
        Minimum number of member interfaces that must be active for
        the aggregate interface to be available.
        **type**: int, range 0..65535
    """
    _prefix = 'lag'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.lag_type = None
        self.members = YLeafList()
        self.members.parent = self
        self.members.name = 'members'
        self.min_links = None

    @property
    def _common_path(self):
        """Absolute schema path of this state container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-aggregate:state'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return False

    def _has_data(self):
        """True when any leaf or leaf-list entry is set."""
        if not self.is_config():
            return False
        if self.lag_type is not None:
            return True
        if self.members is not None and any(m is not None for m in self.members):
            return True
        return self.min_links is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.State']['meta_info']
class Lacp(object):
    """
    Configuration for LACP protocol operation on the
    aggregate interface
    .. attribute:: config
    Configuration data for LACP
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp.Config>`
    .. attribute:: members
    Enclosing container for the list of members interfaces of the aggregate. This list is considered operational state only so is labeled config false and has no config container
    **type**\: :py:class:`Members <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp.Members>`
    .. attribute:: state
    Operational state data for LACP
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp.State>`
    .. attribute:: _is_presence
    Is present if this instance represents presence container else not
    **type**\: bool
    This class is a :ref:`presence class<presence-class>`
    """
    # YANG module prefix and revision this class was generated from.
    _prefix = 'lag'
    _revision = '2015-11-20'
    def __init__(self):
        self.parent = None
        # Presence container: its existence alone carries meaning.
        self._is_presence = True
        # Child containers are created eagerly and re-parented to this
        # node so that _common_path can be derived via the parent chain.
        self.config = Interfaces.Interface.Aggregation.Lacp.Config()
        self.config.parent = self
        self.members = Interfaces.Interface.Aggregation.Lacp.Members()
        self.members.parent = self
        self.state = Interfaces.Interface.Aggregation.Lacp.State()
        self.state.parent = self
class Config(object):
    """
    Configuration data for LACP.

    .. attribute:: interval
        Period between LACP messages (lacp-period-type enumeration).
        **type**: LacpPeriodTypeEnum, **default**: SLOW
    .. attribute:: lacp_mode
        ACTIVE initiates transmission of LACP packets; PASSIVE waits
        for the peer to initiate.
        **type**: LacpActivityTypeEnum, **default**: ACTIVE
    .. attribute:: system_id_mac
        MAC address portion of the node's System ID, combined with the
        system priority to form the 8-octet system-id.
        **type**: str, pattern [0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}
    .. attribute:: system_priority
        System priority used by the node on this LAG interface; a
        lower value is a higher priority for determining the
        controlling system. **type**: int, range 0..65535
    """
    _prefix = 'lag'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.interval = None
        self.lacp_mode = None
        self.system_id_mac = None
        self.system_priority = None

    @property
    def _common_path(self):
        """Absolute schema path of this config container."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-aggregate:config'

    def is_config(self):
        """Return True if this instance represents config data, else False."""
        return True

    def _has_data(self):
        """True when any leaf of this container is set."""
        if not self.is_config():
            return False
        leafs = (self.interval, self.lacp_mode,
                 self.system_id_mac, self.system_priority)
        return any(leaf is not None for leaf in leafs)

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.Config']['meta_info']
class State(object):
    """
    Operational state data for LACP.

    .. attribute:: interval
    Period between LACP messages, using the lacp-period-type enumeration
    **type**\: :py:class:`LacpPeriodTypeEnum <ydk.models.openconfig.openconfig_if_aggregate.LacpPeriodTypeEnum>`
    **default value**\: SLOW
    .. attribute:: lacp_mode
    ACTIVE initiates the transmission of LACP packets; PASSIVE waits for the peer to initiate
    **type**\: :py:class:`LacpActivityTypeEnum <ydk.models.openconfig.openconfig_if_aggregate.LacpActivityTypeEnum>`
    **default value**\: ACTIVE
    .. attribute:: system_id_mac
    MAC address portion of the node's System ID; combined with the system priority to form the 8-octet system-id
    **type**\: str (MAC address pattern)
    .. attribute:: system_priority
    System priority used by the node on this LAG interface; a lower value is a higher priority
    **type**\: int, range 0..65535
    """
    _prefix = 'lag'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Leaves are populated from the device; unset until then.
        for leaf in ('interval', 'lacp_mode', 'system_id_mac',
                     'system_priority'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-aggregate:state'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Operational state containers are never reported as carrying
        # config data: is_config() is False, so this is always False.
        if not self.is_config():
            return False
        leaves = (self.interval, self.lacp_mode, self.system_id_mac,
                  self.system_priority)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.State']['meta_info']
class Members(object):
    """
    Enclosing container for the list of member interfaces of the
    aggregate. This list is considered operational state only, so it
    is labeled config false and has no config container.

    .. attribute:: member
    List of member interfaces and their associated status for a LACP-controlled aggregate interface; the member list is not configurable here
    **type**\: list of :py:class:`Member <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp.Members.Member>`
    """
    _prefix = 'lag'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # YANG list of member entries; YList carries parent/name so
        # entries can resolve their paths.
        self.member = YList()
        self.member.parent = self
        self.member.name = 'member'

    class Member(object):
        """
        One member interface of a LACP-controlled aggregate together
        with its associated status.

        .. attribute:: interface <key>
        Reference to the aggregate member interface
        **type**\: str
        .. attribute:: state
        Operational state data for the aggregate member
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Lacp.Members.Member.State>`
        """
        _prefix = 'lag'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            # 'interface' is the list key for this entry.
            self.interface = None
            self.state = Interfaces.Interface.Aggregation.Lacp.Members.Member.State()
            self.state.parent = self

        class State(object):
            """
            Operational state data for an aggregate member: LACP
            activity, aggregatability, collecting/distributing flags,
            operational and partner keys, system and partner IDs,
            synchronization status, timeout type and protocol counters.
            """
            _prefix = 'lag'
            _revision = '2015-11-20'

            def __init__(self):
                self.parent = None
                self.activity = None
                self.aggregatable = None
                self.collecting = None
                self.counters = Interfaces.Interface.Aggregation.Lacp.Members.Member.State.Counters()
                self.counters.parent = self
                self.distributing = None
                self.interface = None
                self.oper_key = None
                self.partner_id = None
                self.partner_key = None
                self.synchronization = None
                self.system_id = None
                self.timeout = None

            class Counters(object):
                """
                LACP protocol counters (all uint64): lacp_in_pkts,
                lacp_out_pkts, lacp_rx_errors, lacp_tx_errors,
                lacp_unknown_errors and lacp_errors.
                """
                _prefix = 'lag'
                _revision = '2015-11-20'

                def __init__(self):
                    self.parent = None
                    # Counters are unset until populated from the device.
                    for leaf in ('lacp_errors', 'lacp_in_pkts',
                                 'lacp_out_pkts', 'lacp_rx_errors',
                                 'lacp_tx_errors', 'lacp_unknown_errors'):
                        setattr(self, leaf, None)

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path + '/openconfig-if-aggregate:counters'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return False

                def _has_data(self):
                    if not self.is_config():
                        return False
                    leaves = (self.lacp_errors, self.lacp_in_pkts,
                              self.lacp_out_pkts, self.lacp_rx_errors,
                              self.lacp_tx_errors, self.lacp_unknown_errors)
                    return any(leaf is not None for leaf in leaves)

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                    return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.Members.Member.State.Counters']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/openconfig-if-aggregate:state'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.counters is not None and self.counters._has_data():
                    return True
                leaves = (self.activity, self.aggregatable, self.collecting,
                          self.distributing, self.interface, self.oper_key,
                          self.partner_id, self.partner_key,
                          self.synchronization, self.system_id, self.timeout)
                return any(leaf is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.Members.Member.State']['meta_info']

        @property
        def _common_path(self):
            # List entries require the key leaf to build their path
            # predicate.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.interface is None:
                raise YPYModelError('Key property interface is None')
            return self.parent._common_path + '/openconfig-if-aggregate:member[openconfig-if-aggregate:interface = ' + str(self.interface) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            if not self.is_config():
                return False
            if self.interface is not None:
                return True
            return self.state is not None and self.state._has_data()

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.Members.Member']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-aggregate:members'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        if not self.is_config():
            return False
        if self.member is not None:
            return any(entry._has_data() for entry in self.member)
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.Lacp.Members']['meta_info']
@property
def _common_path(self):
    # Absolute XPath of this lacp container, derived from the parent
    # node's path; requires the parent back-reference to be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/openconfig-if-aggregate:lacp'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This container is config-true in the model.
    return True
def _has_data(self):
    # Data is present when this presence container exists or when any
    # child container carries data.
    if not self.is_config():
        return False
    if self._is_presence:
        return True
    for child in (self.config, self.members, self.state):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    # Meta table imported lazily -- presumably to avoid a circular
    # import with the generated _meta module at load time; confirm.
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Aggregation.Lacp']['meta_info']
class Vlan(object):
    """
    Enclosing container for VLAN interface-specific data on Ethernet
    interfaces.

    .. attribute:: config
    Configuration parameters for VLANs
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Vlan.Config>`
    .. attribute:: state
    State variables for VLANs
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.Aggregation.Vlan.State>`
    """
    _prefix = 'vlan'
    _revision = '2015-10-09'

    def __init__(self):
        self.parent = None
        self.config = Interfaces.Interface.Aggregation.Vlan.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.Aggregation.Vlan.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters for VLANs.

        .. attribute:: access_vlan
        Access VLAN assigned to the access port
        **type**\: int (1..4094) or QinQ vlan-id string
        .. attribute:: interface_mode
        Sets the interface to access or trunk mode for VLANs
        **type**\: :py:class:`VlanModeTypeEnum <ydk.models.openconfig.openconfig_vlan.VlanModeTypeEnum>`
        .. attribute:: native_vlan
        Native VLAN id for untagged frames arriving on a trunk
        interface; only valid on a trunk interface
        **type**\: int (1..4094) or QinQ vlan-id string
        .. attribute:: trunk_vlans
        VLANs, or inclusive ranges thereof (x..y), that the interface
        may carry in trunk mode; all VLANs are allowed if unspecified
        **type**\: list of int (1..4094) or of vlan-range/QinQ strings
        """
        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.access_vlan = None
            self.interface_mode = None
            self.native_vlan = None
            # Leaf-list of allowed trunk VLANs.
            self.trunk_vlans = YLeafList()
            self.trunk_vlans.parent = self
            self.trunk_vlans.name = 'trunk_vlans'

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-vlan:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if (self.access_vlan is not None
                    or self.interface_mode is not None
                    or self.native_vlan is not None):
                return True
            if self.trunk_vlans is not None:
                return any(entry is not None for entry in self.trunk_vlans)
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Aggregation.Vlan.Config']['meta_info']

    class State(object):
        """
        State variables for VLANs.

        .. attribute:: access_vlan
        Access VLAN assigned to the access port
        **type**\: int (1..4094) or QinQ vlan-id string
        .. attribute:: interface_mode
        Sets the interface to access or trunk mode for VLANs
        **type**\: :py:class:`VlanModeTypeEnum <ydk.models.openconfig.openconfig_vlan.VlanModeTypeEnum>`
        .. attribute:: native_vlan
        Native VLAN id for untagged frames arriving on a trunk
        interface; only valid on a trunk interface
        **type**\: int (1..4094) or QinQ vlan-id string
        .. attribute:: trunk_vlans
        VLANs, or inclusive ranges thereof (x..y), that the interface
        may carry in trunk mode; all VLANs are allowed if unspecified
        **type**\: list of int (1..4094) or of vlan-range/QinQ strings
        """
        _prefix = 'vlan'
        _revision = '2015-10-09'

        def __init__(self):
            self.parent = None
            self.access_vlan = None
            self.interface_mode = None
            self.native_vlan = None
            self.trunk_vlans = YLeafList()
            self.trunk_vlans.parent = self
            self.trunk_vlans.name = 'trunk_vlans'

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-vlan:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # is_config() is False for state containers, so this always
            # reports False.
            if not self.is_config():
                return False
            if (self.access_vlan is not None
                    or self.interface_mode is not None
                    or self.native_vlan is not None):
                return True
            if self.trunk_vlans is not None:
                return any(entry is not None for entry in self.trunk_vlans)
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.Aggregation.Vlan.State']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-vlan:vlan'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        for child in (self.config, self.state):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.Aggregation.Vlan']['meta_info']
@property
def _common_path(self):
    # Absolute XPath of this aggregation container, derived from the
    # parent node's path; requires the parent back-reference to be set.
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path +'/openconfig-if-aggregate:aggregation'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # This container is config-true in the model.
    return True
def _has_data(self):
    # Data is present when this presence container exists or when any
    # child container carries data.
    if not self.is_config():
        return False
    if self._is_presence:
        return True
    for child in (self.config, self.lacp, self.state, self.vlan):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    # Meta table imported lazily -- presumably to avoid a circular
    # import with the generated _meta module at load time; confirm.
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
    return meta._meta_table['Interfaces.Interface.Aggregation']['meta_info']
class RoutedVlan(object):
"""
Top\-level container for routed vlan interfaces. These
logical interfaces are also known as SVI (switched virtual
interface), IRB (integrated routing and bridging), RVI
(routed VLAN interface)
.. attribute:: config
Configuration data for routed vlan interfaces
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Config>`
.. attribute:: ipv4
Parameters for the IPv4 address family
**type**\: :py:class:`Ipv4 <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4>`
**presence node**\: True
.. attribute:: ipv6
Parameters for the IPv6 address family
**type**\: :py:class:`Ipv6 <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6>`
**presence node**\: True
.. attribute:: state
Operational state data
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.State>`
"""
_prefix = 'vlan'
_revision = '2015-10-09'
def __init__(self):
    self.parent = None
    self.config = Interfaces.Interface.RoutedVlan.Config()
    self.config.parent = self
    # ipv4/ipv6 are presence containers (see class docstring): they
    # remain None until explicitly instantiated by the user.
    self.ipv4 = None
    self.ipv6 = None
    self.state = Interfaces.Interface.RoutedVlan.State()
    self.state.parent = self
class Config(object):
    """
    Configuration data for routed vlan interfaces.

    .. attribute:: vlan
    References the VLAN for which this IP interface provides routing
    services -- similar to a switch virtual interface (SVI) or
    integrated routing and bridging (IRB) interface in some
    implementations
    **type**\: int (0..65535) or str
    """
    _prefix = 'vlan'
    _revision = '2015-10-09'

    def __init__(self):
        self.parent = None
        self.vlan = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-vlan:config'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        return self.is_config() and self.vlan is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Config']['meta_info']
class State(object):
    """
    Operational state data for routed vlan interfaces.

    .. attribute:: vlan
    References the VLAN for which this IP interface provides routing
    services -- similar to a switch virtual interface (SVI) or
    integrated routing and bridging (IRB) interface in some
    implementations
    **type**\: int (0..65535) or str
    """
    _prefix = 'vlan'
    _revision = '2015-10-09'

    def __init__(self):
        self.parent = None
        self.vlan = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-vlan:state'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Always False: is_config() is False for state containers.
        if not self.is_config():
            return False
        return self.vlan is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.State']['meta_info']
class Ipv4(object):
"""
Parameters for the IPv4 address family.
.. attribute:: address
The list of configured IPv4 addresses on the interface
**type**\: list of :py:class:`Address <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address>`
.. attribute:: config
Top\-level IPv4 configuration data for the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Config>`
.. attribute:: neighbor
A list of mappings from IPv4 addresses to link\-layer addresses. Entries in this list are used as static entries in the ARP Cache
**type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Neighbor>`
.. attribute:: state
Top level IPv4 operational state data
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.State>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    self.parent = None
    # Presence container: its existence alone carries meaning, so the
    # flag is set unconditionally here.
    self._is_presence = True
    # YANG list of configured IPv4 addresses.
    self.address = YList()
    self.address.parent = self
    self.address.name = 'address'
    self.config = Interfaces.Interface.RoutedVlan.Ipv4.Config()
    self.config.parent = self
    # YANG list of static ARP (IPv4 -> link-layer) entries.
    self.neighbor = YList()
    self.neighbor.parent = self
    self.neighbor.name = 'neighbor'
    self.state = Interfaces.Interface.RoutedVlan.Ipv4.State()
    self.state.parent = self
class Address(object):
"""
The list of configured IPv4 addresses on the interface.
.. attribute:: ip <key>
References the configured IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**refers to**\: :py:class:`ip <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Config>`
.. attribute:: config
Configuration data for each configured IPv4 address on the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Config>`
.. attribute:: state
Operational state data for each IPv4 address configured on the interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.State>`
.. attribute:: vrrp
Enclosing container for VRRP groups handled by this IP interface
**type**\: :py:class:`Vrrp <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    self.parent = None
    # 'ip' is the list key for this Address entry.
    self.ip = None
    self.config = Interfaces.Interface.RoutedVlan.Ipv4.Address.Config()
    self.config.parent = self
    self.state = Interfaces.Interface.RoutedVlan.Ipv4.Address.State()
    self.state.parent = self
    self.vrrp = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp()
    self.vrrp.parent = self
class Config(object):
    """
    Configuration data for each configured IPv4 address on the
    interface.

    .. attribute:: ip
    The IPv4 address on the interface (adapted from the IETF IP
    model, RFC 7277)
    **type**\: str (dotted-quad pattern)
    .. attribute:: prefix_length
    The length of the subnet prefix (adapted from the IETF IP model,
    RFC 7277)
    **type**\: int, range 0..32
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.prefix_length = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return any(leaf is not None for leaf in (self.ip, self.prefix_length))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Config']['meta_info']
class State(object):
    """
    Operational state data for each IPv4 address configured on the
    interface.

    .. attribute:: ip
    The IPv4 address on the interface (adapted from the IETF IP
    model, RFC 7277)
    **type**\: str (dotted-quad pattern)
    .. attribute:: origin
    The origin of this address, e.g. statically configured or
    assigned by DHCP
    **type**\: :py:class:`IpAddressOriginEnum <ydk.models.openconfig.openconfig_if_ip.IpAddressOriginEnum>`
    .. attribute:: prefix_length
    The length of the subnet prefix (adapted from the IETF IP model,
    RFC 7277)
    **type**\: int, range 0..32
    """
    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # Leaves are populated from the device; unset until then.
        for leaf in ('ip', 'origin', 'prefix_length'):
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Always False: is_config() is False for state containers.
        if not self.is_config():
            return False
        return any(leaf is not None
                   for leaf in (self.ip, self.origin, self.prefix_length))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.State']['meta_info']
class Vrrp(object):
"""
Enclosing container for VRRP groups handled by this
IP interface
.. attribute:: vrrp_group
List of VRRP groups, keyed by virtual router id
**type**\: list of :py:class:`VrrpGroup <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    self.parent = None
    # YANG list of VRRP groups, keyed by virtual_router_id.
    self.vrrp_group = YList()
    self.vrrp_group.parent = self
    self.vrrp_group.name = 'vrrp_group'
class VrrpGroup(object):
"""
List of VRRP groups, keyed by virtual router id
.. attribute:: virtual_router_id <key>
References the configured virtual router id for this VRRP group
**type**\: int
**range:** 1..255
**refers to**\: :py:class:`virtual_router_id <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: config
Configuration data for the VRRP group
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.Config>`
.. attribute:: interface_tracking
Top\-level container for VRRP interface tracking
**type**\: :py:class:`InterfaceTracking <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking>`
.. attribute:: state
Operational state data for the VRRP group
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.State>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
    self.parent = None
    # 'virtual_router_id' is the list key for this VrrpGroup entry.
    self.virtual_router_id = None
    self.config = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.Config()
    self.config.parent = self
    self.interface_tracking = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking()
    self.interface_tracking.parent = self
    self.state = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.State()
    self.state.parent = self
class Config(object):
"""
Configuration data for the VRRP group
.. attribute:: accept_mode
Configure whether packets destined for virtual addresses are accepted even when the virtual address is not owned by the router interface
**type**\: bool
**default value**\: false
.. attribute:: advertisement_interval
Sets the interval between successive VRRP advertisements \-\- RFC 5798 defines this as a 12\-bit value expressed as 0.1 seconds, with default 100, i.e., 1 second. Several implementation express this in units of seconds
**type**\: int
**range:** 1..4095
**units**\: centiseconds
**default value**\: 100
.. attribute:: preempt
When set to true, enables preemption by a higher priority backup router of a lower priority master router
**type**\: bool
**default value**\: true
.. attribute:: preempt_delay
Set the delay the higher priority router waits before preempting
**type**\: int
**range:** 0..3600
**default value**\: 0
.. attribute:: priority
Specifies the sending VRRP interface's priority for the virtual router. Higher values equal higher priority
**type**\: int
**range:** 1..254
**default value**\: 100
.. attribute:: virtual_address
Configure one or more virtual addresses for the VRRP group
**type**\: one of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_router_id
Set the virtual router id for use by the VRRP group. This usually also determines the virtual MAC address that is generated for the VRRP group
**type**\: int
**range:** 1..255
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.accept_mode = None
self.advertisement_interval = None
self.preempt = None
self.preempt_delay = None
self.priority = None
self.virtual_address = YLeafList()
self.virtual_address.parent = self
self.virtual_address.name = 'virtual_address'
self.virtual_router_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.accept_mode is not None:
return True
if self.advertisement_interval is not None:
return True
if self.preempt is not None:
return True
if self.preempt_delay is not None:
return True
if self.priority is not None:
return True
if self.virtual_address is not None:
for child in self.virtual_address:
if child is not None:
return True
if self.virtual_router_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.Config']['meta_info']
class State(object):
"""
Operational state data for the VRRP group
.. attribute:: accept_mode
Configure whether packets destined for virtual addresses are accepted even when the virtual address is not owned by the router interface
**type**\: bool
**default value**\: false
.. attribute:: advertisement_interval
Sets the interval between successive VRRP advertisements \-\- RFC 5798 defines this as a 12\-bit value expressed as 0.1 seconds, with default 100, i.e., 1 second. Several implementation express this in units of seconds
**type**\: int
**range:** 1..4095
**units**\: centiseconds
**default value**\: 100
.. attribute:: current_priority
Operational value of the priority for the interface in the VRRP group
**type**\: int
**range:** 0..255
.. attribute:: preempt
When set to true, enables preemption by a higher priority backup router of a lower priority master router
**type**\: bool
**default value**\: true
.. attribute:: preempt_delay
Set the delay the higher priority router waits before preempting
**type**\: int
**range:** 0..3600
**default value**\: 0
.. attribute:: priority
Specifies the sending VRRP interface's priority for the virtual router. Higher values equal higher priority
**type**\: int
**range:** 1..254
**default value**\: 100
.. attribute:: virtual_address
Configure one or more virtual addresses for the VRRP group
**type**\: one of the below types:
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: list of str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: virtual_router_id
Set the virtual router id for use by the VRRP group. This usually also determines the virtual MAC address that is generated for the VRRP group
**type**\: int
**range:** 1..255
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.accept_mode = None
self.advertisement_interval = None
self.current_priority = None
self.preempt = None
self.preempt_delay = None
self.priority = None
self.virtual_address = YLeafList()
self.virtual_address.parent = self
self.virtual_address.name = 'virtual_address'
self.virtual_router_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.accept_mode is not None:
return True
if self.advertisement_interval is not None:
return True
if self.current_priority is not None:
return True
if self.preempt is not None:
return True
if self.preempt_delay is not None:
return True
if self.priority is not None:
return True
if self.virtual_address is not None:
for child in self.virtual_address:
if child is not None:
return True
if self.virtual_router_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.State']['meta_info']
class InterfaceTracking(object):
"""
Top\-level container for VRRP interface tracking
.. attribute:: config
Configuration data for VRRP interface tracking
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.Config>`
.. attribute:: state
Operational state data for VRRP interface tracking
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.State>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.config = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.Config()
self.config.parent = self
self.state = Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.State()
self.state.parent = self
class Config(object):
"""
Configuration data for VRRP interface tracking
.. attribute:: priority_decrement
Set the value to subtract from priority when the tracked interface goes down
**type**\: int
**range:** 0..254
**default value**\: 0
.. attribute:: track_interface
Sets an interface that should be tracked for up/down events to dynamically change the priority state of the VRRP group, and potentially change the mastership if the tracked interface going down lowers the priority sufficiently
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.priority_decrement = None
self.track_interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.priority_decrement is not None:
return True
if self.track_interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.Config']['meta_info']
class State(object):
"""
Operational state data for VRRP interface tracking
.. attribute:: priority_decrement
Set the value to subtract from priority when the tracked interface goes down
**type**\: int
**range:** 0..254
**default value**\: 0
.. attribute:: track_interface
Sets an interface that should be tracked for up/down events to dynamically change the priority state of the VRRP group, and potentially change the mastership if the tracked interface going down lowers the priority sufficiently
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.priority_decrement = None
self.track_interface = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.priority_decrement is not None:
return True
if self.track_interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking.State']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:interface-tracking'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.config is not None and self.config._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup.InterfaceTracking']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.virtual_router_id is None:
raise YPYModelError('Key property virtual_router_id is None')
return self.parent._common_path +'/openconfig-if-ip:vrrp-group[openconfig-if-ip:virtual-router-id = ' + str(self.virtual_router_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.virtual_router_id is not None:
return True
if self.config is not None and self.config._has_data():
return True
if self.interface_tracking is not None and self.interface_tracking._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp.VrrpGroup']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:vrrp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.vrrp_group is not None:
for child_ref in self.vrrp_group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address.Vrrp']['meta_info']
@property
def _common_path(self):
    """XPath of this address list entry; requires parent and the ip key."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.ip is None:
        raise YPYModelError('Key property ip is None')
    key = str(self.ip)
    return (self.parent._common_path
            + '/openconfig-if-ip:address[openconfig-if-ip:ip = '
            + key + ']')
def is_config(self):
    """Return True: this node represents configuration data."""
    return True
def _has_data(self):
    """Report whether this address entry or any child subtree is populated."""
    if not self.is_config():
        return False
    if self.ip is not None:
        return True
    # Same child order as the generated binding: config, state, vrrp.
    return any(child is not None and child._has_data()
               for child in (self.config, self.state, self.vrrp))
@staticmethod
def _meta_info():
    """Look up this node's schema metadata in the generated meta table."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_mod
    entry = meta_mod._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Address']
    return entry['meta_info']
class Neighbor(object):
    """
    A static mapping from an IPv4 address to a link-layer address.
    Entries in this list are used as static entries in the ARP cache.
    Keyed by ip.

    .. attribute:: ip <key>
    References the configured IP address
    .. attribute:: config
    Configuration data for each configured IPv4 address on the interface
    .. attribute:: state
    Operational state data for each IPv4 address configured on the interface
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        cfg = Interfaces.Interface.RoutedVlan.Ipv4.Neighbor.Config()
        cfg.parent = self
        self.config = cfg
        oper = Interfaces.Interface.RoutedVlan.Ipv4.Neighbor.State()
        oper.parent = self
        self.state = oper

    class Config(object):
        """
        Configuration data for a neighbor entry: ip (the IPv4 address
        of the neighbor node) and link_layer_address (mandatory).
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:config'

        def is_config(self):
            """Return True: this node represents configuration data."""
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.ip is not None or self.link_layer_address is not None

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Neighbor.Config']['meta_info']

    class State(object):
        """
        Operational state data for a neighbor entry: ip,
        link_layer_address and origin (static or dynamic).
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None
            self.origin = None

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/openconfig-if-ip:state'

        def is_config(self):
            """Return False: this node represents operational state."""
            return False

        def _has_data(self):
            # Always False in practice, since is_config() is False.
            if not self.is_config():
                return False
            return (self.ip is not None
                    or self.link_layer_address is not None
                    or self.origin is not None)

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Neighbor.State']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.ip is None:
            raise YPYModelError('Key property ip is None')
        key = str(self.ip)
        return (self.parent._common_path
                + '/openconfig-if-ip:neighbor[openconfig-if-ip:ip = '
                + key + ']')

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.ip is not None:
            return True
        return any(child is not None and child._has_data()
                   for child in (self.config, self.state))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Neighbor']['meta_info']
class Config(object):
    """
    Top-level IPv4 configuration data for the interface.

    Leaves: enabled (bool, default true — whether IPv4 is enabled on
    this interface) and mtu (int, 68..65535 octets — largest IPv4
    packet the interface will send and receive).
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:config'

    def is_config(self):
        """Return True: this node represents configuration data."""
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return self.enabled is not None or self.mtu is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.Config']['meta_info']
class State(object):
    """
    Top-level IPv4 operational state data.

    Leaves: enabled (bool, default true) and mtu (int, 68..65535
    octets); same shape as the sibling Config class.
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this node represents operational state."""
        return False

    def _has_data(self):
        # Always False in practice, since is_config() is False.
        if not self.is_config():
            return False
        return self.enabled is not None or self.mtu is not None

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv4.State']['meta_info']
@property
def _common_path(self):
    """XPath of this ipv4 container, derived from the parent node."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    return self.parent._common_path + '/openconfig-if-ip:ipv4'
def is_config(self):
    """Return True: this node represents configuration data."""
    return True
def _has_data(self):
    """Report whether this presence container or any child is populated."""
    if not self.is_config():
        return False
    # A presence container counts as data even with no children set.
    if self._is_presence:
        return True
    if self.address is not None and any(
            entry._has_data() for entry in self.address):
        return True
    if self.config is not None and self.config._has_data():
        return True
    if self.neighbor is not None and any(
            entry._has_data() for entry in self.neighbor):
        return True
    return self.state is not None and self.state._has_data()
@staticmethod
def _meta_info():
    """Look up this node's schema metadata in the generated meta table."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_mod
    entry = meta_mod._meta_table['Interfaces.Interface.RoutedVlan.Ipv4']
    return entry['meta_info']
class Ipv6(object):
"""
Parameters for the IPv6 address family.
.. attribute:: address
The list of configured IPv6 addresses on the interface
**type**\: list of :py:class:`Address <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address>`
.. attribute:: autoconf
Top\-level container for IPv6 autoconf
**type**\: :py:class:`Autoconf <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Autoconf>`
.. attribute:: config
Top\-level config data for the IPv6 interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Config>`
.. attribute:: neighbor
A list of mappings from IPv6 addresses to link\-layer addresses. Entries in this list are used as static entries in the Neighbor Cache
**type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Neighbor>`
.. attribute:: state
Top\-level operational state data for the IPv6 interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.State>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self._is_presence = True
self.address = YList()
self.address.parent = self
self.address.name = 'address'
self.autoconf = Interfaces.Interface.RoutedVlan.Ipv6.Autoconf()
self.autoconf.parent = self
self.config = Interfaces.Interface.RoutedVlan.Ipv6.Config()
self.config.parent = self
self.neighbor = YList()
self.neighbor.parent = self
self.neighbor.name = 'neighbor'
self.state = Interfaces.Interface.RoutedVlan.Ipv6.State()
self.state.parent = self
class Address(object):
"""
The list of configured IPv6 addresses on the interface.
.. attribute:: ip <key>
References the configured IP address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**refers to**\: :py:class:`ip <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address.Config>`
.. attribute:: config
Configuration data for each IPv6 address on the interface
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address.Config>`
.. attribute:: state
State data for each IPv6 address on the interface
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address.State>`
.. attribute:: vrrp
Enclosing container for VRRP groups handled by this IP interface
**type**\: :py:class:`Vrrp <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.ip = None
self.config = Interfaces.Interface.RoutedVlan.Ipv6.Address.Config()
self.config.parent = self
self.state = Interfaces.Interface.RoutedVlan.Ipv6.Address.State()
self.state.parent = self
self.vrrp = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp()
self.vrrp.parent = self
class Config(object):
"""
Configuration data for each IPv6 address on
the interface
.. attribute:: ip
[adapted from IETF IP model RFC 7277] The IPv6 address on the interface
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: prefix_length
[adapted from IETF IP model RFC 7277] The length of the subnet prefix
**type**\: int
**range:** 0..128
**mandatory**\: True
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.ip = None
self.prefix_length = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.ip is not None:
return True
if self.prefix_length is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Config']['meta_info']
class State(object):
"""
State data for each IPv6 address on the
interface
.. attribute:: ip
[adapted from IETF IP model RFC 7277] The IPv6 address on the interface
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: origin
[adapted from IETF IP model RFC 7277] The origin of this address, e.g., static, dhcp, etc
**type**\: :py:class:`IpAddressOriginEnum <ydk.models.openconfig.openconfig_if_ip.IpAddressOriginEnum>`
.. attribute:: prefix_length
[adapted from IETF IP model RFC 7277] The length of the subnet prefix
**type**\: int
**range:** 0..128
**mandatory**\: True
.. attribute:: status
[adapted from IETF IP model RFC 7277] The status of an address. Most of the states correspond to states from the IPv6 Stateless Address Autoconfiguration protocol
**type**\: :py:class:`StatusEnum <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface.RoutedVlan.Ipv6.Address.State.StatusEnum>`
"""
_prefix = 'ocip'
_revision = '2015-11-20'
def __init__(self):
self.parent = None
self.ip = None
self.origin = None
self.prefix_length = None
self.status = None
class StatusEnum(Enum):
"""
StatusEnum
[adapted from IETF IP model RFC 7277]
The status of an address. Most of the states correspond
to states from the IPv6 Stateless Address
Autoconfiguration protocol.
.. data:: PREFERRED = 0
This is a valid address that can appear as the
destination or source address of a packet.
.. data:: DEPRECATED = 1
This is a valid but deprecated address that should
no longer be used as a source address in new
communications, but packets addressed to such an
address are processed as expected.
.. data:: INVALID = 2
This isn't a valid address, and it shouldn't appear
as the destination or source address of a packet.
.. data:: INACCESSIBLE = 3
The address is not accessible because the interface
to which this address is assigned is not
operational.
.. data:: UNKNOWN = 4
The status cannot be determined for some reason.
.. data:: TENTATIVE = 5
The uniqueness of the address on the link is being
verified. Addresses in this state should not be
used for general communication and should only be
used to determine the uniqueness of the address.
.. data:: DUPLICATE = 6
The address has been determined to be non-unique on
the link and so must not be used.
.. data:: OPTIMISTIC = 7
The address is available for use, subject to
restrictions, while its uniqueness on a link is
being verified.
"""
PREFERRED = 0
DEPRECATED = 1
INVALID = 2
INACCESSIBLE = 3
UNKNOWN = 4
TENTATIVE = 5
DUPLICATE = 6
OPTIMISTIC = 7
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.State.StatusEnum']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.ip is not None:
return True
if self.origin is not None:
return True
if self.prefix_length is not None:
return True
if self.status is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.State']['meta_info']
class Vrrp(object):
    """
    Enclosing container for the VRRP groups handled by this IP
    interface.

    Attributes:
        vrrp_group: ``YList`` of :class:`VrrpGroup` entries, keyed by
            virtual router id.
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        # The child list is wired back to this container so the
        # generated path/serialization machinery can walk upwards.
        self.vrrp_group = YList()
        self.vrrp_group.parent = self
        self.vrrp_group.name = 'vrrp_group'


    class VrrpGroup(object):
        """
        A single VRRP group, keyed by ``virtual_router_id`` (1..255).

        Attributes:
            virtual_router_id: list key; refers to the id leaf in the
                group's ``config`` container.
            config: intended configuration (:class:`Config`).
            interface_tracking: tracking container
                (:class:`InterfaceTracking`).
            state: operational data (:class:`State`).
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.virtual_router_id = None
            self.config = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.Config()
            self.config.parent = self
            self.interface_tracking = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking()
            self.interface_tracking.parent = self
            self.state = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.State()
            self.state.parent = self


        class Config(object):
            """
            Intended configuration of the VRRP group.

            Leaves: accept_mode (bool), advertisement_interval
            (centiseconds, 1..4095), preempt (bool), preempt_delay
            (0..3600 s), priority (1..254), virtual_address
            (leaf-list of IPv4/IPv6 strings), virtual_link_local
            (IP string) and virtual_router_id (1..255).
            """

            _prefix = 'ocip'
            _revision = '2015-11-20'

            def __init__(self):
                self.parent = None
                self.accept_mode = None
                self.advertisement_interval = None
                self.preempt = None
                self.preempt_delay = None
                self.priority = None
                self.virtual_address = YLeafList()
                self.virtual_address.parent = self
                self.virtual_address.name = 'virtual_address'
                self.virtual_link_local = None
                self.virtual_router_id = None

            @property
            def _common_path(self):
                """Absolute schema path, derived from the parent node."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/openconfig-if-ip:config'

            def is_config(self):
                """Return True: this node carries configuration data."""
                return True

            def _has_data(self):
                """Report whether any leaf of this container is populated."""
                if not self.is_config():
                    return False
                scalars = (
                    self.accept_mode,
                    self.advertisement_interval,
                    self.preempt,
                    self.preempt_delay,
                    self.priority,
                    self.virtual_link_local,
                    self.virtual_router_id,
                )
                if any(leaf is not None for leaf in scalars):
                    return True
                if self.virtual_address is not None:
                    return any(entry is not None for entry in self.virtual_address)
                return False

            @staticmethod
            def _meta_info():
                """Return the generated meta-model descriptor."""
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.Config']['meta_info']


        class State(object):
            """
            Operational state of the VRRP group: the same leaves as
            the config container plus current_priority (0..255).
            """

            _prefix = 'ocip'
            _revision = '2015-11-20'

            def __init__(self):
                self.parent = None
                self.accept_mode = None
                self.advertisement_interval = None
                self.current_priority = None
                self.preempt = None
                self.preempt_delay = None
                self.priority = None
                self.virtual_address = YLeafList()
                self.virtual_address.parent = self
                self.virtual_address.name = 'virtual_address'
                self.virtual_link_local = None
                self.virtual_router_id = None

            @property
            def _common_path(self):
                """Absolute schema path, derived from the parent node."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/openconfig-if-ip:state'

            def is_config(self):
                """Return False: this node carries operational data."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                scalars = (
                    self.accept_mode,
                    self.advertisement_interval,
                    self.current_priority,
                    self.preempt,
                    self.preempt_delay,
                    self.priority,
                    self.virtual_link_local,
                    self.virtual_router_id,
                )
                if any(leaf is not None for leaf in scalars):
                    return True
                if self.virtual_address is not None:
                    return any(entry is not None for entry in self.virtual_address)
                return False

            @staticmethod
            def _meta_info():
                """Return the generated meta-model descriptor."""
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.State']['meta_info']


        class InterfaceTracking(object):
            """
            Container for VRRP interface tracking, split into the
            usual config/state pair.
            """

            _prefix = 'ocip'
            _revision = '2015-11-20'

            def __init__(self):
                self.parent = None
                self.config = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.Config()
                self.config.parent = self
                self.state = Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.State()
                self.state.parent = self


            class Config(object):
                """
                Intended tracking configuration: priority_decrement
                (0..254, default 0) and track_interface (leafref to
                an interface name).
                """

                _prefix = 'ocip'
                _revision = '2015-11-20'

                def __init__(self):
                    self.parent = None
                    self.priority_decrement = None
                    self.track_interface = None

                @property
                def _common_path(self):
                    """Absolute schema path, derived from the parent node."""
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/openconfig-if-ip:config'

                def is_config(self):
                    """Return True: this node carries configuration data."""
                    return True

                def _has_data(self):
                    """Report whether either leaf is populated."""
                    if not self.is_config():
                        return False
                    return (self.priority_decrement is not None
                            or self.track_interface is not None)

                @staticmethod
                def _meta_info():
                    """Return the generated meta-model descriptor."""
                    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                    return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.Config']['meta_info']


            class State(object):
                """
                Operational tracking data: priority_decrement and
                track_interface, mirroring the config container.
                """

                _prefix = 'ocip'
                _revision = '2015-11-20'

                def __init__(self):
                    self.parent = None
                    self.priority_decrement = None
                    self.track_interface = None

                @property
                def _common_path(self):
                    """Absolute schema path, derived from the parent node."""
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path +'/openconfig-if-ip:state'

                def is_config(self):
                    """Return False: this node carries operational data."""
                    return False

                def _has_data(self):
                    """Report whether either leaf is populated (gated on is_config)."""
                    if not self.is_config():
                        return False
                    return (self.priority_decrement is not None
                            or self.track_interface is not None)

                @staticmethod
                def _meta_info():
                    """Return the generated meta-model descriptor."""
                    from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                    return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking.State']['meta_info']

            @property
            def _common_path(self):
                """Absolute schema path, derived from the parent node."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/openconfig-if-ip:interface-tracking'

            def is_config(self):
                """Return True: this node carries configuration data."""
                return True

            def _has_data(self):
                """Report whether either child container holds data."""
                if not self.is_config():
                    return False
                for child in (self.config, self.state):
                    if child is not None and child._has_data():
                        return True
                return False

            @staticmethod
            def _meta_info():
                """Return the generated meta-model descriptor."""
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup.InterfaceTracking']['meta_info']

        @property
        def _common_path(self):
            """Absolute schema path; requires the parent and the list key."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            if self.virtual_router_id is None:
                raise YPYModelError('Key property virtual_router_id is None')
            key_predicate = '[openconfig-if-ip:virtual-router-id = ' + str(self.virtual_router_id) + ']'
            return self.parent._common_path + '/openconfig-if-ip:vrrp-group' + key_predicate

        def is_config(self):
            """Return True: this node carries configuration data."""
            return True

        def _has_data(self):
            """Report whether the key or any child container holds data."""
            if not self.is_config():
                return False
            if self.virtual_router_id is not None:
                return True
            for child in (self.config, self.interface_tracking, self.state):
                if child is not None and child._has_data():
                    return True
            return False

        @staticmethod
        def _meta_info():
            """Return the generated meta-model descriptor."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp.VrrpGroup']['meta_info']

    @property
    def _common_path(self):
        """Absolute schema path, derived from the parent node."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-if-ip:vrrp'

    def is_config(self):
        """Return True: this node carries configuration data."""
        return True

    def _has_data(self):
        """Report whether any VRRP group entry holds data."""
        if not self.is_config():
            return False
        if self.vrrp_group is not None:
            return any(entry._has_data() for entry in self.vrrp_group)
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta-model descriptor."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address.Vrrp']['meta_info']
@property
def _common_path(self):
    """Absolute schema path of this list entry; requires parent and key."""
    if self.parent is None:
        raise YPYModelError('parent is not set . Cannot derive path.')
    if self.ip is None:
        raise YPYModelError('Key property ip is None')
    key_predicate = '[openconfig-if-ip:ip = ' + str(self.ip) + ']'
    return self.parent._common_path + '/openconfig-if-ip:address' + key_predicate
def is_config(self):
    """Return True: this node represents configuration data."""
    return True
def _has_data(self):
    """Report whether the ip key or any child container holds data."""
    if not self.is_config():
        return False
    if self.ip is not None:
        return True
    for child in (self.config, self.state, self.vrrp):
        if child is not None and child._has_data():
            return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta-model descriptor for this class."""
    from ydk.models.openconfig._meta import _openconfig_interfaces as meta_mod
    return meta_mod._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Address']['meta_info']
class Neighbor(object):
    """
    Static mapping from an IPv6 address to a link-layer address,
    used as an entry in the Neighbor Cache (RFC 7277).

    Attributes:
        ip: list key referencing the configured neighbor address.
        config: intended configuration (:class:`Config`).
        state: operational data (:class:`State`).
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.ip = None
        self.config = Interfaces.Interface.RoutedVlan.Ipv6.Neighbor.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.RoutedVlan.Ipv6.Neighbor.State()
        self.state.parent = self


    class Config(object):
        """
        Intended configuration of one neighbor entry: ip (IPv6
        address string) and link_layer_address (mandatory
        colon-separated hex string).
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.link_layer_address = None

        @property
        def _common_path(self):
            """Absolute schema path, derived from the parent node."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-if-ip:config'

        def is_config(self):
            """Return True: this node carries configuration data."""
            return True

        def _has_data(self):
            """Report whether either leaf is populated."""
            if not self.is_config():
                return False
            return self.ip is not None or self.link_layer_address is not None

        @staticmethod
        def _meta_info():
            """Return the generated meta-model descriptor."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Neighbor.Config']['meta_info']


    class State(object):
        """
        Operational data for one neighbor entry: ip,
        link_layer_address, is_router flag, NUD state and origin.
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.ip = None
            self.is_router = None
            self.link_layer_address = None
            self.neighbor_state = None
            self.origin = None

        class NeighborStateEnum(Enum):
            """
            Neighbor Unreachability Detection states (RFC 7277):
            INCOMPLETE, REACHABLE, STALE, DELAY and PROBE.
            """

            INCOMPLETE = 0
            REACHABLE = 1
            STALE = 2
            DELAY = 3
            PROBE = 4

            @staticmethod
            def _meta_info():
                """Return the generated meta-model descriptor."""
                from ydk.models.openconfig._meta import _openconfig_interfaces as meta
                return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Neighbor.State.NeighborStateEnum']

        @property
        def _common_path(self):
            """Absolute schema path, derived from the parent node."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-if-ip:state'

        def is_config(self):
            """Return False: this node carries operational data."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            leaves = (
                self.ip,
                self.is_router,
                self.link_layer_address,
                self.neighbor_state,
                self.origin,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            """Return the generated meta-model descriptor."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Neighbor.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute schema path; requires the parent and the ip key."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        if self.ip is None:
            raise YPYModelError('Key property ip is None')
        key_predicate = '[openconfig-if-ip:ip = ' + str(self.ip) + ']'
        return self.parent._common_path + '/openconfig-if-ip:neighbor' + key_predicate

    def is_config(self):
        """Return True: this node carries configuration data."""
        return True

    def _has_data(self):
        """Report whether the key or either child container holds data."""
        if not self.is_config():
            return False
        if self.ip is not None:
            return True
        for child in (self.config, self.state):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta-model descriptor."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Neighbor']['meta_info']
class Config(object):
    """
    Top-level intended configuration for the IPv6 interface
    (RFC 7277): DAD transmit count, enable flag and IPv6 MTU.
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.dup_addr_detect_transmits = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        """Absolute schema path, derived from the parent node."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-if-ip:config'

    def is_config(self):
        """Return True: this node carries configuration data."""
        return True

    def _has_data(self):
        """Report whether any leaf of this container is populated."""
        if not self.is_config():
            return False
        leaves = (self.dup_addr_detect_transmits, self.enabled, self.mtu)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta-model descriptor."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Config']['meta_info']
class State(object):
    """
    Top-level operational state for the IPv6 interface (RFC 7277):
    DAD transmit count, enable flag and IPv6 MTU.
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.dup_addr_detect_transmits = None
        self.enabled = None
        self.mtu = None

    @property
    def _common_path(self):
        """Absolute schema path, derived from the parent node."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-if-ip:state'

    def is_config(self):
        """Return False: this node carries operational data."""
        return False

    def _has_data(self):
        """Report whether any leaf is populated (gated on is_config)."""
        if not self.is_config():
            return False
        leaves = (self.dup_addr_detect_transmits, self.enabled, self.mtu)
        return any(leaf is not None for leaf in leaves)

    @staticmethod
    def _meta_info():
        """Return the generated meta-model descriptor."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.State']['meta_info']
class Autoconf(object):
    """
    Container for IPv6 stateless address autoconfiguration
    parameters (RFC 4862 / RFC 4941), split into config/state.
    """

    _prefix = 'ocip'
    _revision = '2015-11-20'

    def __init__(self):
        self.parent = None
        self.config = Interfaces.Interface.RoutedVlan.Ipv6.Autoconf.Config()
        self.config.parent = self
        self.state = Interfaces.Interface.RoutedVlan.Ipv6.Autoconf.State()
        self.state.parent = self


    class Config(object):
        """
        Intended autoconf settings: create_global_addresses (bool),
        create_temporary_addresses (bool),
        temporary_preferred_lifetime and temporary_valid_lifetime
        (seconds).
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.create_global_addresses = None
            self.create_temporary_addresses = None
            self.temporary_preferred_lifetime = None
            self.temporary_valid_lifetime = None

        @property
        def _common_path(self):
            """Absolute schema path, derived from the parent node."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-if-ip:config'

        def is_config(self):
            """Return True: this node carries configuration data."""
            return True

        def _has_data(self):
            """Report whether any leaf of this container is populated."""
            if not self.is_config():
                return False
            leaves = (
                self.create_global_addresses,
                self.create_temporary_addresses,
                self.temporary_preferred_lifetime,
                self.temporary_valid_lifetime,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            """Return the generated meta-model descriptor."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Autoconf.Config']['meta_info']


    class State(object):
        """
        Operational autoconf data, mirroring the config leaves.
        """

        _prefix = 'ocip'
        _revision = '2015-11-20'

        def __init__(self):
            self.parent = None
            self.create_global_addresses = None
            self.create_temporary_addresses = None
            self.temporary_preferred_lifetime = None
            self.temporary_valid_lifetime = None

        @property
        def _common_path(self):
            """Absolute schema path, derived from the parent node."""
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-if-ip:state'

        def is_config(self):
            """Return False: this node carries operational data."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            leaves = (
                self.create_global_addresses,
                self.create_temporary_addresses,
                self.temporary_preferred_lifetime,
                self.temporary_valid_lifetime,
            )
            return any(leaf is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            """Return the generated meta-model descriptor."""
            from ydk.models.openconfig._meta import _openconfig_interfaces as meta
            return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Autoconf.State']['meta_info']

    @property
    def _common_path(self):
        """Absolute schema path, derived from the parent node."""
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-if-ip:autoconf'

    def is_config(self):
        """Return True: this node carries configuration data."""
        return True

    def _has_data(self):
        """Report whether either child container holds data."""
        if not self.is_config():
            return False
        for child in (self.config, self.state):
            if child is not None and child._has_data():
                return True
        return False

    @staticmethod
    def _meta_info():
        """Return the generated meta-model descriptor."""
        from ydk.models.openconfig._meta import _openconfig_interfaces as meta
        return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6.Autoconf']['meta_info']
@property
def _common_path(self):
# Path is relative to the parent container; a detached node cannot be addressed.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-if-ip:ipv6'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
# A presence container reports data even when no child leaf is set.
if self._is_presence:
return True
# 'address' and 'neighbor' are YList children: any populated entry counts.
if self.address is not None:
for child_ref in self.address:
if child_ref._has_data():
return True
if self.autoconf is not None and self.autoconf._has_data():
return True
if self.config is not None and self.config._has_data():
return True
if self.neighbor is not None:
for child_ref in self.neighbor:
if child_ref._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
# Lazy import keeps the generated meta table out of module import time.
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan.Ipv6']['meta_info']
@property
def _common_path(self):
# Path is relative to the parent container; a detached node cannot be addressed.
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-vlan:routed-vlan'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when any of the config/ipv4/ipv6/state child containers carries data.
if not self.is_config():
return False
if self.config is not None and self.config._has_data():
return True
if self.ipv4 is not None and self.ipv4._has_data():
return True
if self.ipv6 is not None and self.ipv6._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
# Lazy import keeps the generated meta table out of module import time.
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface.RoutedVlan']['meta_info']
@property
def _common_path(self):
# This is a keyed list entry: the absolute path embeds the 'name' key,
# so the key must be set before the path can be built.
if self.name is None:
raise YPYModelError('Key property name is None')
return '/openconfig-interfaces:interfaces/openconfig-interfaces:interface[openconfig-interfaces:name = ' + str(self.name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
# The list key alone is enough to constitute data for this entry.
if self.name is not None:
return True
if self.aggregation is not None and self.aggregation._has_data():
return True
if self.config is not None and self.config._has_data():
return True
if self.ethernet is not None and self.ethernet._has_data():
return True
if self.hold_time is not None and self.hold_time._has_data():
return True
if self.routed_vlan is not None and self.routed_vlan._has_data():
return True
if self.state is not None and self.state._has_data():
return True
if self.subinterfaces is not None and self.subinterfaces._has_data():
return True
return False
@staticmethod
def _meta_info():
# Lazy import keeps the generated meta table out of module import time.
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces.Interface']['meta_info']
@property
def _common_path(self):
# Top-level container: its path is absolute and needs no parent.
return '/openconfig-interfaces:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
# True when any entry of the 'interface' list carries data.
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
# Lazy import keeps the generated meta table out of module import time.
from ydk.models.openconfig._meta import _openconfig_interfaces as meta
return meta._meta_table['Interfaces']['meta_info']
| 48.326089
| 1,361
| 0.416726
| 40,679
| 475,867
| 4.777281
| 0.019642
| 0.014948
| 0.020099
| 0.026182
| 0.976443
| 0.972187
| 0.966223
| 0.961165
| 0.954604
| 0.945486
| 0
| 0.032186
| 0.508232
| 475,867
| 9,846
| 1,362
| 48.330997
| 0.798249
| 0.33912
| 0
| 0.912929
| 0
| 0.001055
| 0.077945
| 0.044345
| 0
| 0
| 0
| 0
| 0
| 1
| 0.146438
| false
| 0
| 0.032718
| 0.000264
| 0.470185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5c6da0608d5293dc1b8ecac72da53ff9bc01d247
| 106
|
py
|
Python
|
montepython/likelihoods/Planck15_lowl/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 69
|
2018-04-20T07:38:33.000Z
|
2022-03-11T06:55:36.000Z
|
montepython/likelihoods/Planck15_lowl/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 263
|
2018-05-20T21:58:11.000Z
|
2022-03-30T21:45:48.000Z
|
montepython/likelihoods/Planck15_lowl/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 78
|
2018-04-21T13:11:54.000Z
|
2022-02-01T01:57:31.000Z
|
from montepython.likelihood_class import Likelihood_clik
class Planck15_lowl(Likelihood_clik):
"""Planck 2015 low-ell likelihood.

Everything (initialisation, loglkl computation) is inherited unchanged from
Likelihood_clik; this subclass exists only to give the likelihood its name
and data directory within the MontePython likelihood framework.
"""
pass
| 17.666667
| 56
| 0.839623
| 13
| 106
| 6.538462
| 0.692308
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021505
| 0.122642
| 106
| 5
| 57
| 21.2
| 0.892473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
5cce7432865b2790ea3633e9058052edf5f5baea
| 17,214
|
py
|
Python
|
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | 5
|
2018-06-03T05:19:40.000Z
|
2021-04-16T17:10:49.000Z
|
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
|
wbear2/ambari
|
a1891193984da47015cd5483b5b95e040677d7df
|
[
"Apache-2.0"
] | 6
|
2019-05-07T13:24:39.000Z
|
2021-02-15T14:12:37.000Z
|
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
class TestOozieServer(RMFTestCase):
"""Stack-definition tests for the Oozie server script (2.0.6 stack).

Each test replays one command (configure/start/stop) of oozie_server.py
against a canned JSON cluster config (default.json = non-secured,
secured.json = Kerberos) and asserts the exact ordered sequence of
resources (files, directories, Execute commands) the script declares.
"""
# --- non-secured scenarios (default.json) ---
def test_configure_default(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="default.json"
)
self.assert_configure_default()
self.assertNoMoreResources()
# 'start' = configure + DB init + HDFS sharelib upload + oozie-start.sh;
# every Execute is guarded by a pid-file liveness check (not_if).
def test_start_default(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "start",
config_file="default.json"
)
self.assert_configure_default()
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
ignore_failures = True,
user = 'oozie',
)
self.assertResourceCalled('Execute', ' hadoop dfs -put /usr/lib/oozie/share /user/oozie ; hadoop dfs -chmod -R 755 /user/oozie/share',
not_if = " hadoop dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
user = 'oozie',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
user = 'oozie',
)
self.assertNoMoreResources()
# 'stop' runs oozie-stop.sh and removes the pid file, but only when the
# pid file points at a live process (only_if guard).
def test_stop_default(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "stop",
config_file="default.json"
)
self.assertResourceCalled('Execute', "su - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
# --- secured (Kerberos) scenarios (secured.json) ---
def test_configure_secured(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="secured.json"
)
self.assert_configure_secured()
self.assertNoMoreResources()
# Same flow as test_start_default, but the HDFS sharelib commands are
# prefixed with a kinit using the oozie service keytab.
def test_start_secured(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "start",
config_file="secured.json"
)
self.assert_configure_secured()
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
ignore_failures = True,
user = 'oozie',
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop dfs -put /usr/lib/oozie/share /user/oozie ; hadoop dfs -chmod -R 755 /user/oozie/share',
not_if = "/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache.org@EXAMPLE.COM; hadoop dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
user = 'oozie',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
user = 'oozie',
)
self.assertNoMoreResources()
def test_stop_secured(self):
self.executeScript("2.0.6/services/OOZIE/package/scripts/oozie_server.py",
classname = "OozieServer",
command = "stop",
config_file="secured.json"
)
self.assertResourceCalled('Execute', "su - oozie -c 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-stop.sh' && rm -f /var/run/oozie/oozie.pid",
only_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertNoMoreResources()
# Shared expectations for the 'configure' phase, non-secured variant.
def assert_configure_default(self):
# Hack for oozie.py changing conf on fly
oozie_site = self.getConfig()['configurations']['oozie-site'].copy()
oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]
self.assertResourceCalled('HdfsDirectory', '/user/oozie',
security_enabled = False,
keytab = UnknownConfigurationMock(),
conf_dir = '/etc/hadoop/conf',
hdfs_user = 'hdfs',
kinit_path_local = '/usr/bin/kinit',
mode = 0775,
owner = 'oozie',
action = ['create'],
)
self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
owner = 'oozie',
group = 'hadoop',
mode = 0664,
conf_dir = '/etc/oozie/conf',
configurations = oozie_site,
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('TemplateConfig', '/etc/oozie/conf/oozie-env.sh',
owner = 'oozie',
)
self.assertResourceCalled('File', '/etc/oozie/conf/oozie-log4j.properties',
owner = 'oozie',
group = 'hadoop',
mode = 0644,
content = 'log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/oozie/conf/adminusers.txt',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/hadoop-config.xml',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/oozie-default.xml',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/action-conf/hive.xml',
owner = 'oozie',
group = 'hadoop',
)
# Stale pid file is removed when no matching process exists.
self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
action=["delete"],
not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
)
self.assertResourceCalled('Directory', '/var/run/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/log/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/tmp/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/hadoop/oozie/data',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/webapps/',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie && mkdir -p /usr/lib/oozie/libext && cp /usr/share/HDP-oozie/ext-2.2.zip /usr/lib/oozie/libext && cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
user = 'oozie',
)
# Shared expectations for the 'configure' phase, secured variant; identical
# to assert_configure_default except security_enabled/keytab on '/user/oozie'.
def assert_configure_secured(self):
# Hack for oozie.py changing conf on fly
oozie_site = self.getConfig()['configurations']['oozie-site'].copy()
oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]
self.assertResourceCalled('HdfsDirectory', '/user/oozie',
security_enabled = True,
keytab = '/etc/security/keytabs/hdfs.headless.keytab',
conf_dir = '/etc/hadoop/conf',
hdfs_user = 'hdfs',
kinit_path_local = '/usr/bin/kinit',
mode = 0775,
owner = 'oozie',
action = ['create'],
)
self.assertResourceCalled('XmlConfig', 'oozie-site.xml',
owner = 'oozie',
group = 'hadoop',
mode = 0664,
conf_dir = '/etc/oozie/conf',
configurations = oozie_site,
)
self.assertResourceCalled('Directory', '/etc/oozie/conf',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('TemplateConfig', '/etc/oozie/conf/oozie-env.sh',
owner = 'oozie',
)
self.assertResourceCalled('File', '/etc/oozie/conf/oozie-log4j.properties',
owner = 'oozie',
group = 'hadoop',
mode = 0644,
content = 'log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/oozie/conf/adminusers.txt',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/hadoop-config.xml',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/oozie-default.xml',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf',
owner = 'oozie',
group = 'hadoop',
)
self.assertResourceCalled('File', '/etc/oozie/conf/action-conf/hive.xml',
owner = 'oozie',
group = 'hadoop',
)
# Stale pid file is removed when no matching process exists.
self.assertResourceCalled('File', '/var/run/oozie/oozie.pid',
action=["delete"],
not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 2>&1)"
)
self.assertResourceCalled('Directory', '/var/run/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/log/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/tmp/oozie',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/hadoop/oozie/data',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/webapps/',
owner = 'oozie',
recursive = True,
mode = 0755,
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && mkdir -p /var/tmp/oozie',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /usr/lib/oozie && chown oozie:hadoop /var/tmp/oozie && mkdir -p /usr/lib/oozie/libext && cp /usr/share/HDP-oozie/ext-2.2.zip /usr/lib/oozie/libext && cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
)
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
not_if = 'ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1',
user = 'oozie',
)
| 53.962382
| 279
| 0.478215
| 1,656
| 17,214
| 4.922705
| 0.135266
| 0.147203
| 0.045878
| 0.062807
| 0.892296
| 0.882483
| 0.882483
| 0.882483
| 0.882483
| 0.863346
| 0
| 0.018304
| 0.397002
| 17,214
| 318
| 280
| 54.132075
| 0.767052
| 0.005693
| 0
| 0.733096
| 0
| 0.106762
| 0.352682
| 0.156585
| 0
| 0
| 0
| 0
| 0.220641
| 0
| null | null | 0
| 0.007117
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5cd9d0fac732ee474a81d2a514058d5da1160210
| 72
|
py
|
Python
|
dqrf/__init__.py
|
hhy-ee/PedestrianDetection-NohNMS
|
482078a6bd0ff8cf03fbf7f6988e475f75c56e57
|
[
"Apache-2.0"
] | null | null | null |
dqrf/__init__.py
|
hhy-ee/PedestrianDetection-NohNMS
|
482078a6bd0ff8cf03fbf7f6988e475f75c56e57
|
[
"Apache-2.0"
] | null | null | null |
dqrf/__init__.py
|
hhy-ee/PedestrianDetection-NohNMS
|
482078a6bd0ff8cf03fbf7f6988e475f75c56e57
|
[
"Apache-2.0"
] | null | null | null |
from .config import add_dqrf_config
from .config import add_dataset_path
| 36
| 36
| 0.875
| 12
| 72
| 4.916667
| 0.583333
| 0.338983
| 0.542373
| 0.644068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 72
| 2
| 36
| 36
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8f0e42d1ea6dca9689c2391ebce683eaedddc24d
| 29
|
py
|
Python
|
hha/__init__.py
|
alexanderlarin/3dgnn
|
b9162098851721fe1a3538f0857171ef97f69c73
|
[
"MIT"
] | 2
|
2019-05-30T14:26:03.000Z
|
2019-05-30T20:02:43.000Z
|
hha/__init__.py
|
alexanderlarin/3dgnn
|
b9162098851721fe1a3538f0857171ef97f69c73
|
[
"MIT"
] | null | null | null |
hha/__init__.py
|
alexanderlarin/3dgnn
|
b9162098851721fe1a3538f0857171ef97f69c73
|
[
"MIT"
] | null | null | null |
from .get_hha import get_hha
| 14.5
| 28
| 0.827586
| 6
| 29
| 3.666667
| 0.666667
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8f1416e5fd0411864273ab451d0de7f85afa04f7
| 16,077
|
py
|
Python
|
tools_replacecolors.py
|
elfosardo/PySprint
|
2d74b7944d92d041d0cfdf34140d2fe148a68d14
|
[
"CC0-1.0"
] | 21
|
2021-02-28T13:03:17.000Z
|
2022-02-23T05:21:11.000Z
|
tools_replacecolors.py
|
elfosardo/PySprint
|
2d74b7944d92d041d0cfdf34140d2fe148a68d14
|
[
"CC0-1.0"
] | 72
|
2021-02-28T09:12:02.000Z
|
2021-12-16T08:04:20.000Z
|
tools_replacecolors.py
|
elfosardo/PySprint
|
2d74b7944d92d041d0cfdf34140d2fe148a68d14
|
[
"CC0-1.0"
] | 2
|
2021-11-28T21:44:04.000Z
|
2021-12-06T09:09:17.000Z
|
import numpy as np
from PIL import Image
# Named colours used by the sprite assets (hex value noted alongside).
red_color = (238, 0, 34) #EE0022
blue_color = (68, 102, 238) # 4466EE
yellow_color = (238, 238, 102) #EEEE66
green_color = (34, 170, 102) #22AA66
green_secondary_color = (170, 204, 102) #AACC66
blue_secondary_color = (170, 204, 238) #AACCEE
red_secondary_color = (170, 0, 0) #AA0000
yellow_secondary_color = (170, 170, 0) #AAAA00
yellow_tertiary_color = (238, 204, 102) #EECC66

def _replace_color(image, old_rgb, new_rgb):
    """Return a copy of *image* with every pixel matching old_rgb set to new_rgb.

    Only the first three (RGB) channels are compared and rewritten; any extra
    channel (e.g. alpha) is left untouched, matching the original per-wheel code.
    """
    data = np.array(image)
    r1, g1, b1 = old_rgb
    red, green, blue = data[:, :, 0], data[:, :, 1], data[:, :, 2]
    mask = (red == r1) & (green == g1) & (blue == b1)
    data[:, :, :3][mask] = list(new_rgb)
    return Image.fromarray(data)

# Generate one tinted selection wheel per team colour by recolouring the
# black (0, 0, 0) placeholder pixels of the shared template. This replaces
# four copy-pasted blocks that differed only in target colour and filename;
# the output files and pixel data are identical to the original script's.
for _name, _rgb in (
    ('Yellow', yellow_color),
    ('Red', red_color),
    ('Green', green_color),
    ('Blue', blue_color),
):
    im = Image.open('Assets/SelectionWheel.png')
    im = _replace_color(im, (0, 0, 0), _rgb)
    im.save('Assets/{}SelectionWheel.png'.format(_name))
# #Yellow HElicopter Horizontal
# for i in range(0,4):
# im = Image.open('Assets/BlueHelicopter{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 68, 102, 238 # Original value
# r2, g2, b2 = 238, 238, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 170, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/YellowHelicopter{}.png'.format(i))
# #RED HElicopter Horizontal
# for i in range(0,4):
# im = Image.open('Assets/BlueHelicopter{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 68, 102, 238 # Original value
# r2, g2, b2 = 238, 0, 34 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 0, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/RedHelicopter{}.png'.format(i))
# #GREEN HElicopter Horizontal
# for i in range(0,4):
# im = Image.open('Assets/BlueHelicopter{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 68, 102, 238 # Original value
# r2, g2, b2 = 34, 170, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 204, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/GreenHelicopter{}.png'.format(i))
#GREEN Helicopter
# for i in range(0,4):
# im = Image.open('Assets/YellowHelicopterV{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 238, 238, 102 # Original value
# r2, g2, b2 = 170, 204, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 170, 0 # Original value
# r2, g2, b2 = 34, 170, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/GreenHelicopterV{}.png'.format(i))
# # #RED Helicopter
# for i in range(0,4):
# im = Image.open('Assets/YellowHelicopterV{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 238, 238, 102 # Original value
# r2, g2, b2 = 238, 0, 34 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 170, 0 # Original value
# r2, g2, b2 = 170, 0, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/RedHelicopterV{}.png'.format(i))
# #BLUE Helicopter
# for i in range(0,4):
# im = Image.open('Assets/YellowHelicopterV{}.png'.format(i))
# data = np.array(im)
# r1, g1, b1 = 238, 238, 102 # Original value
# r2, g2, b2 = 170, 204, 238 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 170, 170, 0 # Original value
# r2, g2, b2 = 68, 102, 238 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/BlueHelicopterV{}.png'.format(i))
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarGreenCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 238, 238, 0 # Original value
# r2, g2, b2 = 136, 136, 136 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarGreenCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarGreenCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 34, 170, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarGreenCar.png')
# #BLUE CARS
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarYellowCar.png')
# data = np.array(im)
# r1, g1, b1 = 238, 204, 102 # Original value
# r2, g2, b2 = 170, 204, 238 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 238, 238, 0 # Original value
# r2, g2, b2 = 136, 136, 136 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# r1, g1, b1 = 170, 136, 0 # Original value
# r2, g2, b2 = 102, 102, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarBlueCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarBlueCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 68, 102, 238 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarBlueCar.png')
# #RED CARS
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarYellowCar.png')
# data = np.array(im)
# r1, g1, b1 = 238, 204, 102 # Original value
# r2, g2, b2 = 170, 0, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# data = np.array(im)
# r1, g1, b1 = 238, 238, 0 # Original value
# r2, g2, b2 = 136, 136, 136 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# r1, g1, b1 = 170, 136, 0 # Original value
# r2, g2, b2 = 102, 102, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarRedCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarRedCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 238, 0, 34 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarRedCar.png')
# #YELLOW CARS
# im = Image.open('Assets/SuperSprintRacePodiumFourthCarYellowCar.png')
# data = np.array(im)
# r1, g1, b1 = 238, 238, 0 # Original value
# r2, g2, b2 = 136, 136, 136 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# r1, g1, b1 = 170, 136, 0 # Original value
# r2, g2, b2 = 102, 102, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumFourthCarYellowCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumSecondCarYellowCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 238, 238, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumSecondCarYellowCar.png')
# #THIRD CAR
# #GREEN CARS
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarBlueCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 204, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarGreenCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarGreenCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 34, 170, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarGreenCar.png')
# #BLUE CARS
# # im = Image.open('Assets/SuperSprintRacePodiumThirdCarBlueCarDrone.png')
# # data = np.array(im)
# # r1, g1, b1 = 170, 204, 238 # Original value
# # r2, g2, b2 = 170, 204, 238 # Value that we want to replace it with
# # red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# # mask = (red == r1) & (green == g1) & (blue == b1)
# # data[:,:,:3][mask] = [r2, g2, b2]
# # im = Image.fromarray(data)
# # im.save('Assets/SuperSprintRacePodiumSecondCarBlueCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarBlueCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 68, 102, 238 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarBlueCar.png')
# #RED CARS
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarBlueCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 0, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarRedCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarRedCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 238, 0, 34 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarRedCar.png')
# #YELLOW CARS
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarBlueCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 170, 204, 238 # Original value
# r2, g2, b2 = 170, 170, 0 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarYellowCarDrone.png')
# im = Image.open('Assets/SuperSprintRacePodiumThirdCarYellowCarDrone.png')
# data = np.array(im)
# r1, g1, b1 = 136, 136, 136 # Original value
# r2, g2, b2 = 238, 238, 102 # Value that we want to replace it with
# red, green, blue = data[:,:,0], data[:,:,1], data[:,:,2]
# mask = (red == r1) & (green == g1) & (blue == b1)
# data[:,:,:3][mask] = [r2, g2, b2]
# im = Image.fromarray(data)
# im.save('Assets/SuperSprintRacePodiumThirdCarYellowCar.png')
| 29.445055
| 76
| 0.582198
| 2,364
| 16,077
| 3.953469
| 0.043147
| 0.031671
| 0.047507
| 0.067302
| 0.848598
| 0.829125
| 0.829125
| 0.824417
| 0.824417
| 0.824417
| 0
| 0.098997
| 0.200162
| 16,077
| 546
| 77
| 29.445055
| 0.627809
| 0.841451
| 0
| 0.595745
| 0
| 0
| 0.10149
| 0.10149
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.042553
| 0
| 0.042553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f5df0661a9b582f97062da77e4e36cf1c3c2cb9
| 31,634
|
py
|
Python
|
lib/turkish_nltk/trnltk/morphology/contextless/parser/test/test_parser_with_copula_graph.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
lib/turkish_nltk/trnltk/morphology/contextless/parser/test/test_parser_with_copula_graph.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
lib/turkish_nltk/trnltk/morphology/contextless/parser/test/test_parser_with_copula_graph.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
Copyright 2012 Ali Ok (aliokATapacheDOTorg)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from copy import copy
import logging
import os
import unittest
from trnltk.morphology.contextless.parser.test.parser_test import ParserTest
from trnltk.morphology.model.lexeme import SyntacticCategory
from trnltk.morphology.lexicon.lexiconloader import LexiconLoader
from trnltk.morphology.lexicon.rootgenerator import RootGenerator, RootMapGenerator
from trnltk.morphology.morphotactics.basicsuffixgraph import BasicSuffixGraph
from trnltk.morphology.morphotactics.copulasuffixgraph import CopulaSuffixGraph
from trnltk.morphology.contextless.parser.parser import ContextlessMorphologicalParser, logger as parser_logger
from trnltk.morphology.contextless.parser.rootfinder import WordRootFinder
from trnltk.morphology.contextless.parser.suffixapplier import logger as suffix_applier_logger
from trnltk.morphology.morphotactics.predefinedpaths import PredefinedPaths
class ParserTestWithExtendedGraph(ParserTest):
@classmethod
def setUpClass(cls):
    """Load the master dictionary once per test class and build the shared
    root map used by every test (cloned per-test in setUp)."""
    super(ParserTestWithExtendedGraph, cls).setUpClass()
    dictionary_path = os.path.join(
        os.path.dirname(__file__),
        '../../../../resources/master_dictionary.txt')
    generated_roots = []
    for lexeme in LexiconLoader.load_from_file(dictionary_path):
        generated_roots.extend(RootGenerator.generate(lexeme))
    cls._org_root_map = RootMapGenerator().generate(generated_roots)
def setUp(self):
    """Build a fresh copula-aware contextless parser for each test.

    The class-level root map is shallow-copied so individual tests can
    remove roots (to simplify expected parse sets) without affecting
    other tests.
    """
    logging.basicConfig(level=logging.INFO)
    parser_logger.setLevel(logging.INFO)
    suffix_applier_logger.setLevel(logging.INFO)

    self.cloned_root_map = copy(self._org_root_map)

    # Copula graph decorates the basic suffix graph with zero-copula paths.
    copula_graph = CopulaSuffixGraph(BasicSuffixGraph())
    copula_graph.initialize()

    paths = PredefinedPaths(self.cloned_root_map, copula_graph)
    paths.create_predefined_paths()

    root_finder = WordRootFinder(self.cloned_root_map)
    self.parser = ContextlessMorphologicalParser(
        copula_graph, paths, [root_finder])
def test_should_parse_other_categories_to_verbs_zero_transition(self):
"""Non-verb stems (nouns, adjectives, pronouns) must accept the zero-copula
Verb+Zero transition, e.g. 'elmayım' = 'I am (an) apple'.

Each assertion lists the surface word followed by every expected parse;
the parser must produce exactly that set.
"""
#remove some roots for keeping the tests simple!
self.cloned_root_map['elmas'] = []
self.cloned_root_map['bent'] = []
self.cloned_root_map['bend'] = []
self.cloned_root_map['se'] = []
self.cloned_root_map['oy'] = []
# keep only the pronoun reading of 'ben' (drop the noun 'mole/I')
# NOTE(review): py2-style filter() — returns a lazy iterator on py3; confirm
# the project targets Python 2 before porting.
self.cloned_root_map['ben'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map['ben'])
# Present-tense zero copula on a noun, all persons:
self.assert_parse_correct_for_verb(u'elmayım', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[yım])')
self.assert_parse_correct_for_verb(u'elmasın', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A2sg(sIn[sın])')
self.assert_parse_correct_for_verb(u'elma', u'elma(elma)+Noun+A3sg+Pnon+Nom', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'elmayız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A1pl(+yIz[yız])')
self.assert_parse_correct_for_verb(u'elmasınız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A2pl(sInIz[sınız])')
self.assert_parse_correct_for_verb(u'elmalar', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A3pl(lAr[lar])', u'elma(elma)+Noun+A3pl(lAr[lar])+Pnon+Nom', u'elma(elma)+Noun+A3pl(lAr[lar])+Pnon+Nom+Verb+Zero+Pres+A3sg')
# Narrative (evidential) past copula (+ymIş), all persons:
self.assert_parse_correct_for_verb(u'elmaymışım', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A1sg(+yIm[ım])')
self.assert_parse_correct_for_verb(u'elmaymışsın', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A2sg(sIn[sın])')
self.assert_parse_correct_for_verb(u'elmaymış', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3sg')
self.assert_parse_correct_for_verb(u'elmaymışız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A1pl(+yIz[ız])')
self.assert_parse_correct_for_verb(u'elmaymışsınız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A2pl(sInIz[sınız])')
self.assert_parse_correct_for_verb(u'elmaymışlar', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])')
# Definite past copula (+ydI), all persons:
self.assert_parse_correct_for_verb(u'elmaydım', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A1sg(m[m])')
self.assert_parse_correct_for_verb(u'elmaydın', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A2sg(n[n])')
self.assert_parse_correct_for_verb(u'elmaydı', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A3sg')
self.assert_parse_correct_for_verb(u'elmaydık', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A1pl(k[k])')
self.assert_parse_correct_for_verb(u'elmaydınız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A2pl(nIz[nız])')
self.assert_parse_correct_for_verb(u'elmaydılar', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydı])+A3pl(lAr[lar])')
# Conditional copula (+ysA), all persons:
self.assert_parse_correct_for_verb(u'elmaysam', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A1sg(m[m])')
self.assert_parse_correct_for_verb(u'elmaysan', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A2sg(n[n])')
self.assert_parse_correct_for_verb(u'elmaysa', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A3sg')
self.assert_parse_correct_for_verb(u'elmaysak', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A1pl(k[k])')
self.assert_parse_correct_for_verb(u'elmaysanız', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A2pl(nIz[nız])')
self.assert_parse_correct_for_verb(u'elmaysalar', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[ysa])+A3pl(lAr[lar])')
# Copula after possessive suffixes:
self.assert_parse_correct_for_verb(u'elmansam', u'elma(elma)+Noun+A3sg+P2sg(+In[n])+Nom+Verb+Zero+Cond(+ysA[sa])+A1sg(m[m])')
self.assert_parse_correct_for_verb(u'elmamsa', u'elma(elma)+Noun+A3sg+P1sg(+Im[m])+Nom+Verb+Zero+Cond(+ysA[sa])+A3sg')
self.assert_parse_correct_for_verb(u'elmamdın', u'elma(elma)+Noun+A3sg+P1sg(+Im[m])+Nom+Verb+Zero+Past(+ydI[dı])+A2sg(n[n])')
self.assert_parse_correct_for_verb(u'elmanızdık', u'elma(elma)+Noun+A3sg+P2pl(+InIz[nız])+Nom+Verb+Zero+Past(+ydI[dı])+A1pl(k[k])')
self.assert_parse_correct_for_verb(u'elmamızmışsınız', u'elma(elma)+Noun+A3sg+P1pl(+ImIz[mız])+Nom+Verb+Zero+Narr(+ymIş[mış])+A2pl(sInIz[sınız])')
self.assert_parse_correct_for_verb(u'elmalarınızsalar', u'elma(elma)+Noun+A3pl(lAr[lar])+P2pl(+InIz[ınız])+Nom+Verb+Zero+Cond(+ysA[sa])+A3pl(lAr[lar])')
# Zero copula on adjective stems (ambiguous Adj/Adv/Noun derivations):
self.assert_parse_correct_for_verb(u'iyiyim', u'iyi(iyi)+Adj+Verb+Zero+Pres+A1sg(+yIm[yim])', u'iyi(iyi)+Adj+Adv+Zero+Verb+Zero+Pres+A1sg(+yIm[yim])', u'iyi(iyi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[yim])')
self.assert_parse_correct_for_verb(u'küçüğümüzdeyseler', u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+P1pl(+ImIz[ümüz])+Loc(dA[de])+Verb+Zero+Cond(+ysA[yse])+A3pl(lAr[ler])')
self.assert_parse_correct_for_verb(u'küçüklerimizindiler',u'küçük(küçük)+Adj+Noun+Zero+A3pl(lAr[ler])+P1pl(+ImIz[imiz])+Gen(+nIn[in])+Verb+Zero+Past(+ydI[di])+A3pl(lAr[ler])')
self.assert_parse_correct_for_verb(u'küçüğüm',
u'küçüğ(küçük)+Adj+Verb+Zero+Pres+A1sg(+yIm[üm])', # ben kucugum. ("I am small.")
u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+P1sg(+Im[üm])+Nom', # kucugum geldi. ("my little one came.")
u'küçüğ(küçük)+Adj+Adv+Zero+Verb+Zero+Pres+A1sg(+yIm[üm])', # TODO: sacma ("nonsensical" parse)
u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[üm])', # ben kucugum. ("I am small.")
u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+P1sg(+Im[üm])+Nom+Verb+Zero+Pres+A3sg') # -kim geldi? -kucugum ("-who came? -my little one")
# Zero copula on pronoun stems:
self.assert_parse_correct_for_verb(u'bendim', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+A1sg(m[m])')
self.assert_parse_correct_for_verb(u'benim',
u'ben(ben)+Pron+Pers+A1sg+Pnon+Gen(im[im])', # benim kitabim. ("my book.")
u'ben(ben)+Pron+Pers+A1sg+Pnon+Gen(im[im])+Verb+Zero+Pres+A3sg', # -kimin o? -benim (benim kitabim). ("-whose is that? -mine")
u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[im])' # -kim o? -benim (ben geldim). ("-who is it? -it's me")
)
self.assert_parse_correct_for_verb(u'sensin', u'sen(sen)+Pron+Pers+A2sg+Pnon+Nom+Verb+Zero+Pres+A2sg(sIn[sin])')
self.assert_parse_correct_for_verb(u'oydu', u'o(o)+Pron+Pers+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydu])+A3sg', u'o(o)+Pron+Demons+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[ydu])+A3sg')
# Highly ambiguous adjective derivation chain + copula (every combination
# of Equ/Quite/Ly/InTermsOf/By readings of 'ca' is expected):
self.assert_parse_correct_for_verb(u'hızlıcaymışlar',
u'hızlı(hızlı)+Adj+Adj+Equ(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adj+Quite(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adv+Ly(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adj+Equ(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adj+Quite(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Equ(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Quite(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adv+Ly(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+InTermsOf(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+By(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adj+Equ(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Adj+Quite(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Equ(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Quite(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Noun+Zero+A3sg+Pnon+Nom+Adv+InTermsOf(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Noun+Zero+A3sg+Pnon+Nom+Adv+By(cA[ca])+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Equ(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Adj+Quite(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hızlı(hızlı)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Adv+Zero+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])',
u'hız(hız)+Noun+A3sg+Pnon+Nom+Adj+With(lI[lı])+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ca])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[ymış])+A3pl(lAr[lar])'
)
def test_should_parse_copula_derivations(self):
"""The copular adverb derivation +yken ('while being ...') must attach
to nouns, possessed nouns, adjectives and pronouns, both as a plain
adverb and with a further zero-copula Pres+A3sg reading."""
# remove some roots for keeping the tests simple
self.cloned_root_map['elmas'] = []
self.cloned_root_map['on'] = []
self.cloned_root_map['se'] = []
# +yken on plain and possessed noun stems:
self.assert_parse_correct_for_verb(u'elmayken', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'elmasıyken', u'elma(elma)+Noun+A3sg+P3sg(+sI[sı])+Nom+Verb+Zero+Adv+While(+yken[yken])', u'elma(elma)+Noun+A3sg+P3sg(+sI[sı])+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'kitapken', u'kitap(kitap)+Noun+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])', u'kitap(kitap)+Noun+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'kitaplarıyken', u'kitap(kitap)+Noun+A3sg+P3pl(lArI![ları])+Nom+Verb+Zero+Adv+While(+yken[yken])', u'kitap(kitap)+Noun+A3pl(lAr[lar])+Pnon+Acc(+yI[ı])+Verb+Zero+Adv+While(+yken[yken])', u'kitap(kitap)+Noun+A3pl(lAr[lar])+P3sg(+sI[ı])+Nom+Verb+Zero+Adv+While(+yken[yken])', u'kitap(kitap)+Noun+A3pl(lAr[lar])+P3pl(I![ı])+Nom+Verb+Zero+Adv+While(+yken[yken])', u'kitap(kitap)+Noun+A3sg+P3pl(lArI![ları])+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg', u'kitap(kitap)+Noun+A3pl(lAr[lar])+Pnon+Acc(+yI[ı])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg', u'kitap(kitap)+Noun+A3pl(lAr[lar])+P3sg(+sI[ı])+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg', u'kitap(kitap)+Noun+A3pl(lAr[lar])+P3pl(I![ı])+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg')
# +yken on adjective stems (Adj/Adv/Noun zero-derivation ambiguity):
self.assert_parse_correct_for_verb(u'küçükken',
u'küçük(küçük)+Adj+Verb+Zero+Adv+While(+yken[ken])',
u'küçük(küçük)+Adj+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg',
u'küçük(küçük)+Adj+Adv+Zero+Verb+Zero+Adv+While(+yken[ken])',
u'küçük(küçük)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])',
u'küçük(küçük)+Adj+Adv+Zero+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg', # TODO: sacma ("nonsensical" parse)
u'küçük(küçük)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'küçüğümüzdeyken', u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+P1pl(+ImIz[ümüz])+Loc(dA[de])+Verb+Zero+Adv+While(+yken[yken])', u'küçüğ(küçük)+Adj+Noun+Zero+A3sg+P1pl(+ImIz[ümüz])+Loc(dA[de])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg')
# +yken after the ambiguous 'ce' derivation (Equ/Quite/Ly/InTermsOf/By):
self.assert_parse_correct_for_verb(u'maviceyken',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adv+Ly(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+InTermsOf(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+By(cA[ce])+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Adv+Ly(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+InTermsOf(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+By(cA[ce])+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])',
u'mavi(mavi)+Adj+Adj+Equ(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Adj+Quite(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Adv+Zero+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg',
u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Adv+While(+yken[yken])+Verb+Zero+Pres+A3sg')
# +yken on pronoun stems:
self.assert_parse_correct_for_verb(u'seninken', u'sen(sen)+Pron+Pers+A2sg+Pnon+Gen(in[in])+Verb+Zero+Adv+While(+yken[ken])', u'sen(sen)+Pron+Pers+A2sg+Pnon+Gen(in[in])+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg')
self.assert_parse_correct_for_verb(u'onlarken', u'o(o)+Pron+Pers+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])', u'o(o)+Pron+Demons+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])', u'o(o)+Pron+Pers+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg', u'o(o)+Pron+Demons+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Adv+While(+yken[ken])+Verb+Zero+Pres+A3sg')
def test_should_parse_verb_degil(self):
"""The negative copular verb 'değil' ('is not') must inflect like a verb
for person/tense/condition. Note the first expectation uses explicit
\\u011f escapes for 'ğ' — equal to the literal form at runtime."""
self.assert_parse_correct_for_verb(u'değil', u'de\u011fil(de\u011fil)+Conj', u'değil(değil)+Verb+Pres+A3sg')
self.assert_parse_correct_for_verb(u'değilim', u'değil(değil)+Verb+Pres+A1sg(+yIm[im])')
self.assert_parse_correct_for_verb(u'değilsin', u'değil(değil)+Verb+Pres+A2sg(sIn[sin])')
self.assert_parse_correct_for_verb(u'değildik', u'değil(değil)+Verb+Past(+ydI[di])+A1pl(k[k])')
self.assert_parse_correct_for_verb(u'değilmişsiniz', u'değil(değil)+Verb+Narr(+ymIş[miş])+A2pl(sInIz[siniz])')
self.assert_parse_correct_for_verb(u'değildiler', u'değil(değil)+Verb+Past(+ydI[di])+A3pl(lAr[ler])')
self.assert_parse_correct_for_verb(u'değilseler', u'değil(değil)+Verb+Cond(+ysA[se])+A3pl(lAr[ler])')
#TODO: degillerdi, degillerse, degillermis
def test_should_parse_verbs_with_explicit_copula(self):
    """Parse forms carrying the explicit copula suffix -DIr (+Cop).

    Covers nouns, adjectives, pronouns, question particles and fully
    inflected verbs, including the many derivational ambiguities that
    the -cA and zero-derivation suffixes introduce.
    """
    # remove some roots to keep tests simple
    self.cloned_root_map['on'] = []
    self.cloned_root_map['gelecek'] = []
    # Keep only the pronoun reading of 'ben' (drops the noun 'ben' = mole).
    self.cloned_root_map['ben'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map['ben'])
    # Noun + copula; note the vowel-harmony variants of dIr (dır/dür/dir).
    self.assert_parse_correct_for_verb(u'elmadır', u'elma(elma)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dır])')
    self.assert_parse_correct_for_verb(u'müdürdür', u'müdür(müdür)+Noun+A3sg+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dür])')
    # 'zilidir' is ambiguous: accusative vs. 3sg-possessive base.
    self.assert_parse_correct_for_verb(u'zilidir', u'zil(zil)+Noun+A3sg+Pnon+Acc(+yI[i])+Verb+Zero+Pres+A3sg+Cop(dIr[dir])', u'zil(zil)+Noun+A3sg+P3sg(+sI[i])+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dir])')
    # Adjective bases derive into Adv/Noun before taking the copula.
    self.assert_parse_correct_for_verb(u'mavidir', u'mavi(mavi)+Adj+Verb+Zero+Pres+A3sg+Cop(dIr[dir])', u'mavi(mavi)+Adj+Adv+Zero+Verb+Zero+Pres+A3sg+Cop(dIr[dir])', u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'mavisindir', u'mavi(mavi)+Adj+Verb+Zero+Pres+A2sg(sIn[sin])+Cop(dIr[dir])', u'mavi(mavi)+Adj+Adv+Zero+Verb+Zero+Pres+A2sg(sIn[sin])+Cop(dIr[dir])', u'mavi(mavi)+Adj+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A2sg(sIn[sin])+Cop(dIr[dir])')
    # Pronoun bases with copula.
    self.assert_parse_correct_for_verb(u'benimdir', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[im])+Cop(dIr[dir])', u'ben(ben)+Pron+Pers+A1sg+Pnon+Gen(im[im])+Verb+Zero+Pres+A3sg+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'onlardır', u'o(o)+Pron+Pers+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dır])', u'o(o)+Pron+Demons+A3pl(nlar[nlar])+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dır])')
    self.assert_parse_correct_for_verb(u'benimledir', u'ben(ben)+Pron+Pers+A1sg+Pnon+Ins(imle[imle])+Verb+Zero+Pres+A3sg+Cop(dIr[dir])')
    # Heavily ambiguous -cA derivation chain before the copula.
    self.assert_parse_correct_for_verb(u'sıcakçayımdır',
        u'sıcak(sıcak)+Adj+Adj+Equ(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adj+Quite(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adv+Ly(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adj+Equ(cA[ça])+Adv+Zero+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adj+Quite(cA[ça])+Adv+Zero+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+InTermsOf(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adv+By(cA[ça])+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adj+Equ(cA[ça])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Adj+Quite(cA[ça])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ça])+Adv+Zero+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])',
        u'sıcak(sıcak)+Adj+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ça])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[yım])+Cop(dIr[dır])'
    )
    # Fully inflected verbs + copula; note consonant assimilation (tir).
    self.assert_parse_correct_for_verb(u'gelmektedir', u'gel(gelmek)+Verb+Pos+Prog(mAktA[mekte])+A3sg+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Noun+Inf(mAk[mek])+A3sg+Pnon+Loc(dA[te])+Verb+Zero+Pres+A3sg+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'geliyorlardır', u'gel(gelmek)+Verb+Pos+Prog(Iyor[iyor])+A3pl(lAr[lar])+Cop(dIr[dır])')
    self.assert_parse_correct_for_verb(u'gelmiştir', u'gel(gelmek)+Verb+Pos+Narr(mIş[miş])+A3sg+Cop(dIr[tir])', u'gel(gelmek)+Verb+Pos+Narr(mIş[miş])+Adj+Zero+Verb+Zero+Pres+A3sg+Cop(dIr[tir])', u'gel(gelmek)+Verb+Pos+Narr(mIş[miş])+Adj+Zero+Adv+Zero+Verb+Zero+Pres+A3sg+Cop(dIr[tir])', u'gel(gelmek)+Verb+Pos+Narr(mIş[miş])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[tir])')
    self.assert_parse_correct_for_verb(u'geleceksinizdir', u'gel(gelmek)+Verb+Pos+Fut(+yAcAk[ecek])+A2pl(sInIz[siniz])+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Adj+FutPart(+yAcAk[ecek])+Pnon+Verb+Zero+Pres+A2pl(sInIz[siniz])+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Fut(+yAcAk[ecek])+Adj+Zero+Verb+Zero+Pres+A2pl(sInIz[siniz])+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Fut(+yAcAk[ecek])+Adj+Zero+Adv+Zero+Verb+Zero+Pres+A2pl(sInIz[siniz])+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Noun+FutPart(+yAcAk[ecek])+A3sg+Pnon+Nom+Verb+Zero+Pres+A2pl(sInIz[siniz])+Cop(dIr[dir])', u'gel(gelmek)+Verb+Pos+Fut(+yAcAk[ecek])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A2pl(sInIz[siniz])+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'gelmelilerdir',
        u'gel(gelmek)+Verb+Pos+Neces(mAlI![meli])+A3pl(lAr[ler])+Cop(dIr[dir])',
        u'gel(gelmek)+Verb+Pos+Noun+Inf(mA[me])+A3sg+Pnon+Nom+Adj+With(lI[li])+Verb+Zero+Pres+A3pl(lAr[ler])+Cop(dIr[dir])',
        u'gel(gelmek)+Verb+Pos+Noun+Inf(mA[me])+A3sg+Pnon+Nom+Adj+With(lI[li])+Adv+Zero+Verb+Zero+Pres+A3pl(lAr[ler])+Cop(dIr[dir])',
        u'gel(gelmek)+Verb+Pos+Noun+Inf(mA[me])+A3sg+Pnon+Nom+Adj+With(lI[li])+Noun+Zero+A3sg+Pnon+Nom+Verb+Zero+Pres+A3pl(lAr[ler])+Cop(dIr[dir])',
        u'gel(gelmek)+Verb+Pos+Noun+Inf(mA[me])+A3sg+Pnon+Nom+Adj+With(lI[li])+Noun+Zero+A3pl(lAr[ler])+Pnon+Nom+Verb+Zero+Pres+A3sg+Cop(dIr[dir])'
    )
    # Negative copula and question particle also take -DIr.
    self.assert_parse_correct_for_verb(u'değildir', u'değil(değil)+Verb+Pres+A3sg+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'değillerdir', u'değil(değil)+Verb+Pres+A3pl(lAr[ler])+Cop(dIr[dir])')
    self.assert_parse_correct_for_verb(u'mıdır', u'mı(mı)+Ques+Pres+A3sg+Cop(dIr[dır])')
    self.assert_parse_correct_for_verb(u'mıyımdır', u'mı(mı)+Ques+Pres+A1sg(yım[yım])+Cop(dIr[dır])')
def test_should_parse_adjectives_as_adverbs(self):
    """Adjectives (plain or derived) must admit a zero-derived adverb reading."""
    self.assert_parse_exists(u'mavi', u'mavi(mavi)+Adj+Adv+Zero')
    # Derived adjectives: present participle, agentive, JustLike, Equ.
    self.assert_parse_exists(u'yapan', u'yap(yapmak)+Verb+Pos+Adj+PresPart(+yAn[an])+Adv+Zero')
    self.assert_parse_exists(u'kesici', u'kes(kesmek)+Verb+Pos+Adj+Agt(+yIcI[ici])+Adv+Zero')
    self.assert_parse_exists(u'pembemsi', u'pembe(pembe)+Adj+Adj+JustLike(+ImsI[msi])+Adv+Zero')
    self.assert_parse_exists(u'delice', u'deli(deli)+Adj+Adj+Equ(cA[ce])+Adv+Zero')
def test_should_parse_pronoun_tenses(self):
    """Parse personal and question pronouns combined with zero-copula tenses.

    Exercises past/narr/cond copula chains on case-inflected pronoun
    bases ('ben' paradigm and the question pronoun 'kim').
    """
    # remove some roots to make the test simple
    self.cloned_root_map[u'bend'] = []
    self.cloned_root_map[u'kimi'] = []
    self.cloned_root_map[u'kimse'] = []
    # Keep only the pronoun readings; 'ban'/'san' are suppletive stems of
    # 'ben'/'sen', and 'biz' also exists as a noun root.
    self.cloned_root_map[u'ben'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map[u'ben'])
    self.cloned_root_map[u'ban'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map[u'ban'])
    self.cloned_root_map[u'san'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map[u'san'])
    self.cloned_root_map[u'biz'] = filter(lambda root : root.lexeme.syntactic_category==SyntacticCategory.PRONOUN, self.cloned_root_map[u'biz'])
    # 'ben' + zero copula in present/past/narrative/conditional.
    self.assert_parse_exists(u'benim', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[im])')
    self.assert_parse_correct_for_verb(u'bendim', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'benmişim', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[miş])+A1sg(+yIm[im])')
    self.assert_parse_correct_for_verb(u'bensem', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Cond(+ysA[se])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'bense', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Cond(+ysA[se])+A3sg')
    # Stacked copular suffixes: past + conditional.
    self.assert_parse_correct_for_verb(u'bendiyse', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A3sg')
    # self.assert_parse_correct_for_verb(u'bendimse', u'xxxx') TODO
    self.assert_parse_correct_for_verb(u'bendiysem', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'benmişsem', u'ben(ben)+Pron+Pers+A1sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[miş])+Cond(+ysA[se])+A1sg(m[m])')
    # Case-inflected pronoun bases before the copular chain.
    self.assert_parse_correct_for_verb(u'beniyse', u'ben(ben)+Pron+Pers+A1sg+Pnon+Acc(i[i])+Verb+Zero+Cond(+ysA[yse])+A3sg')
    self.assert_parse_correct_for_verb(u'banaymışsa', u'ban(ben)+Pron+Pers+A1sg+Pnon+Dat(a[a])+Verb+Zero+Narr(+ymIş[ymış])+Cond(+ysA[sa])+A3sg')
    self.assert_parse_correct_for_verb(u'bendeymişseler', u'ben(ben)+Pron+Pers+A1sg+Pnon+Loc(de[de])+Verb+Zero+Narr(+ymIş[ymiş])+Cond(+ysA[se])+A3pl(lAr[ler])')
    self.assert_parse_correct_for_verb(u'bendendiyse', u'ben(ben)+Pron+Pers+A1sg+Pnon+Abl(den[den])+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A3sg')
    self.assert_parse_correct_for_verb(u'benimleydiysen', u'ben(ben)+Pron+Pers+A1sg+Pnon+Ins(imle[imle])+Verb+Zero+Past(+ydI[ydi])+Cond(+ysA[yse])+A2sg(n[n])')
    self.assert_parse_correct_for_verb(u'benimleymişseler', u'ben(ben)+Pron+Pers+A1sg+Pnon+Ins(imle[imle])+Verb+Zero+Narr(+ymIş[ymiş])+Cond(+ysA[se])+A3pl(lAr[ler])')
    # self.assert_parse_correct_for_verb(u'benimleymişlerse', u'xxxx') TODO
    # Question pronoun 'kim' — note the extra possessive ambiguity of 'kimim'.
    self.assert_parse_correct_for_verb(u'kimim', u'kim(kim)+Pron+Ques+A3sg+P1sg(+Im[im])+Nom', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Pres+A1sg(+yIm[im])', u'kim(kim)+Pron+Ques+A3sg+P1sg(+Im[im])+Nom+Verb+Zero+Pres+A3sg')
    self.assert_parse_correct_for_verb(u'kimdim', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'kimmişim', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[miş])+A1sg(+yIm[im])')
    self.assert_parse_correct_for_verb(u'kimsem', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[se])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'kimse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Cond(+ysA[se])+A3sg')
    self.assert_parse_correct_for_verb(u'kimdiyse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A3sg')
    # self.assert_parse_correct_for_verb(u'kimdimse', u'xxxx') TODO
    self.assert_parse_correct_for_verb(u'kimdiysem', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'kimmişsem', u'kim(kim)+Pron+Ques+A3sg+Pnon+Nom+Verb+Zero+Narr(+ymIş[miş])+Cond(+ysA[se])+A1sg(m[m])')
    self.assert_parse_correct_for_verb(u'kimiyse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Acc(+yI[i])+Verb+Zero+Cond(+ysA[yse])+A3sg', u'kim(kim)+Pron+Ques+A3sg+P3sg(+sI[i])+Nom+Verb+Zero+Cond(+ysA[yse])+A3sg')
    self.assert_parse_correct_for_verb(u'kimeymişse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Dat(+yA[e])+Verb+Zero+Narr(+ymIş[ymiş])+Cond(+ysA[se])+A3sg')
    self.assert_parse_correct_for_verb(u'kimdeymişse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Loc(dA[de])+Verb+Zero+Narr(+ymIş[ymiş])+Cond(+ysA[se])+A3sg')
    self.assert_parse_correct_for_verb(u'kimdendiyse', u'kim(kim)+Pron+Ques+A3sg+Pnon+Abl(dAn[den])+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A3sg')
    self.assert_parse_correct_for_verb(u'kimlerdendiyse', u'kim(kim)+Pron+Ques+A3pl(lAr[ler])+Pnon+Abl(dAn[den])+Verb+Zero+Past(+ydI[di])+Cond(+ysA[yse])+A3sg')
    self.assert_parse_correct_for_verb(u'kimimleydiysen', u'kim(kim)+Pron+Ques+A3sg+P1sg(+Im[im])+Ins(+ylA[le])+Verb+Zero+Past(+ydI[ydi])+Cond(+ysA[yse])+A2sg(n[n])')
    # self.assert_parse_correct_for_verb(u'kimimleymişlerse', u'xxxx') TODO
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    unittest.main()
| 102.707792
| 830
| 0.667826
| 5,238
| 31,634
| 3.925353
| 0.085338
| 0.084821
| 0.080249
| 0.111279
| 0.819124
| 0.786586
| 0.768202
| 0.727056
| 0.711152
| 0.691892
| 0
| 0.014379
| 0.129418
| 31,634
| 307
| 831
| 103.042345
| 0.732208
| 0.039104
| 0
| 0.039841
| 0
| 0.76494
| 0.607105
| 0.572666
| 0
| 0
| 0
| 0.003257
| 0.422311
| 1
| 0.031873
| false
| 0
| 0.055777
| 0
| 0.091633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
8533cd6995819da45815f02693b4b880718eb2a8
| 106
|
py
|
Python
|
random_forest/__init__.py
|
geekyJock8/random_forest_from_scratch
|
460e68b9b6999c2e1c2337f6f05f3e5dba5c910d
|
[
"MIT"
] | null | null | null |
random_forest/__init__.py
|
geekyJock8/random_forest_from_scratch
|
460e68b9b6999c2e1c2337f6f05f3e5dba5c910d
|
[
"MIT"
] | null | null | null |
random_forest/__init__.py
|
geekyJock8/random_forest_from_scratch
|
460e68b9b6999c2e1c2337f6f05f3e5dba5c910d
|
[
"MIT"
] | null | null | null |
from random_forest.decision_tree import DecisionTree
from random_forest.random_forest import RandomForest
| 35.333333
| 52
| 0.90566
| 14
| 106
| 6.571429
| 0.571429
| 0.391304
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 106
| 2
| 53
| 53
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8576a094387defef8046a17290b34d3aff9b4bbd
| 21,792
|
py
|
Python
|
cape_frontend/webapp/tests/test_mocks/test_api.py
|
edwardmjackson/cape-frontend
|
4204f50304ee5cf8808a564b6f8bf969a5bf4043
|
[
"Apache-2.0"
] | 5
|
2018-08-01T16:44:23.000Z
|
2018-08-15T14:19:58.000Z
|
cape_frontend/webapp/tests/test_mocks/test_api.py
|
edwardmjackson/cape-frontend
|
4204f50304ee5cf8808a564b6f8bf969a5bf4043
|
[
"Apache-2.0"
] | null | null | null |
cape_frontend/webapp/tests/test_mocks/test_api.py
|
edwardmjackson/cape-frontend
|
4204f50304ee5cf8808a564b6f8bf969a5bf4043
|
[
"Apache-2.0"
] | 7
|
2018-09-27T14:02:30.000Z
|
2020-06-29T03:45:16.000Z
|
# Copyright 2018 BLEMUNDSBURY AI LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import requests
import hashlib
from cape_frontend.webapp.tests.tests_settings import UI_URL
from cape_api_helpers.text_responses import *
from cape_frontend.webapp.mocks import mock_data
# Base URLs of the four mock API flavours served by the frontend:
#   full    - every endpoint answers successfully with fixture data
#   unlucky - mutating calls fail, listings come back empty
#   error   - every call returns a generic server error
#   timeout - never answers within the client deadline
# (Spacing around '+' made consistent; only one of the four lines had it.)
MOCK_FULL_BASE_URL = UI_URL + '/mock/full/api/0.1'
MOCK_UNLUCKY_BASE_URL = UI_URL + '/mock/unlucky/api/0.1'
MOCK_ERROR_BASE_URL = UI_URL + '/mock/error/api/0.1'
MOCK_TIMEOUT_BASE_URL = UI_URL + '/mock/timeout/api/0.1'
def test_mock_full():
    """Walk every endpoint of the 'full' mock API and verify its fixture data.

    Bug fix: the post-logout check on /user/get-user-token previously
    evaluated its ``success``/``message`` comparison WITHOUT ``assert``,
    so the expression was computed and silently discarded — the check
    could never fail.  It is now asserted.  The repetitive
    status-200/success-True pattern is consolidated into a local helper
    (which also asserts success on delete-document, previously only
    status-checked).
    """
    session = requests.Session()

    def get_ok(path):
        # GET an endpoint that the full mock must answer successfully and
        # return its 'result' payload for further assertions.
        response = session.get(MOCK_FULL_BASE_URL + path)
        assert response.status_code == 200
        body = response.json()
        assert body['success'] == True
        return body['result']

    # --- login / session ---------------------------------------------------
    assert get_ok('/user/login?login=bla&password=blu')['message'] == VALID_CREDENTIALS_TEXT
    assert get_ok('/user/get-user-token')['userToken'] == mock_data.user_token

    # --- inbox -------------------------------------------------------------
    assert len(get_ok('/inbox/get-inbox')['items']) == 30
    assert len(get_ok('/inbox/get-inbox?answered=true')['items']) == 30
    assert len(get_ok('/inbox/get-inbox?read=true')['items']) == 30
    assert len(get_ok('/inbox/get-inbox?searchTerm=How')['items']) == 1
    assert get_ok('/inbox/mark-inbox-read?inboxId=123')['inboxId'] == "123"
    assert get_ok('/inbox/archive-inbox?inboxId=111')['inboxId'] == "111"

    # --- saved replies -----------------------------------------------------
    assert len(get_ok('/saved-replies/get-saved-replies')['items']) == 30
    assert len(get_ok('/saved-replies/get-saved-replies?searchTerm=old')['items']) == 1
    assert 'replyId' in get_ok('/saved-replies/add-saved-reply?question=Test&answer=Testing')
    assert 'questionId' in get_ok('/saved-replies/add-paraphrase-question?replyId=14&question=Test2')
    assert 'questionId' in get_ok('/saved-replies/edit-paraphrase-question?questionId=14&question=Test3')
    assert 'replyId' in get_ok('/saved-replies/edit-canonical-question?replyId=14&question=Test4')
    assert 'questionId' in get_ok('/saved-replies/delete-paraphrase-question?questionId=14')
    assert 'answerId' in get_ok('/saved-replies/add-answer?replyId=14&answer=Testing2')
    assert 'answerId' in get_ok('/saved-replies/edit-answer?answerId=14&answer=Testing3')
    assert 'answerId' in get_ok('/saved-replies/delete-answer?answerId=14')
    assert get_ok('/saved-replies/delete-saved-reply?replyId=14')['replyId'] == "14"

    # --- annotations -------------------------------------------------------
    assert len(get_ok('/annotations/get-annotations')['items']) == 30
    assert len(get_ok('/annotations/get-annotations?searchTerm=old')['items']) == 1
    assert len(get_ok('/annotations/get-annotations?pages=1,2,3')['items']) == 0
    assert len(get_ok('/annotations/get-annotations?pages=5,6')['items']) == 30
    assert 'annotationId' in get_ok('/annotations/add-annotation?question=Test&answer=Testing'
                                    '&documentId=test&startOffset=0&endOffset=12')
    assert 'questionId' in get_ok('/annotations/add-paraphrase-question?annotationId=testid&question=Test2')
    assert 'questionId' in get_ok('/annotations/edit-paraphrase-question?questionId=test&question=Test3')
    assert 'annotationId' in get_ok('/annotations/edit-canonical-question?annotationId=testid&question=Test4')
    assert 'questionId' in get_ok('/annotations/delete-paraphrase-question?questionId=testid')
    assert 'answerId' in get_ok('/annotations/add-answer?annotationId=testid&answer=Testing2')
    assert 'answerId' in get_ok('/annotations/edit-answer?answerId=testid&answer=Testing3')
    assert 'answerId' in get_ok('/annotations/delete-answer?answerId=testid')
    assert get_ok('/annotations/delete-annotation?annotationId=testid')['annotationId'] == "testid"

    # --- documents ---------------------------------------------------------
    assert len(get_ok('/documents/get-documents')['items']) == 30
    assert len(get_ok('/documents/get-documents?documentIds=custom_id_2')['items']) == 1
    assert len(get_ok('/documents/get-documents?searchTerm=API')['items']) == 1
    # The mock derives the documentId from the sha256 of the document text.
    result = get_ok('/documents/add-document?title=Test&text=hello&origin=test.txt')
    assert result['documentId'] == hashlib.sha256(b'hello').hexdigest()
    get_ok('/documents/delete-document?documentId=custom_id_2')

    # --- user settings -----------------------------------------------------
    assert get_ok('/user/get-default-threshold')['threshold'] == 'medium'
    assert get_ok('/user/set-default-threshold?threshold=high')['threshold'] == 'high'
    # An unknown threshold value must be rejected with a server error.
    response = session.get(MOCK_FULL_BASE_URL + '/user/set-default-threshold?threshold=superduperhigh')
    assert response.status_code == 500
    assert response.json()['success'] == False
    assert get_ok('/user/set-forward-email?email=test@bloomsbury.ai')['forwardEmail'] == 'test@bloomsbury.ai'
    assert get_ok('/user/verify-forward-email?verifiedEmailToken=a0uv98ahe')['forwardEmail'] == 'sample@mail.com'
    assert get_ok('/user/get-profile')['username'] == mock_data.username
    assert get_ok('/user/get-admin-token')['adminToken'] == mock_data.admin_token

    # POST lookup of specific saved replies by id.
    response = session.post(MOCK_FULL_BASE_URL + '/saved-replies/get-saved-replies',
                            "{\"savedReplyIds\":[\"d27a7812-c3c3-11e7-8d29-d15d28ee5381\"]}")
    assert response.status_code == 200
    assert response.json()['success'] == True and len(response.json()['result']['items']) == 1

    # --- logout and re-auth ------------------------------------------------
    assert get_ok('/user/logout')['message'] == LOGGED_OUT_TEXT
    response = session.get(MOCK_FULL_BASE_URL + '/user/get-user-token')
    assert response.status_code == 500
    # BUG FIX: this comparison previously lacked `assert` and never ran.
    assert response.json()['success'] == False and response.json()['result']['message'] == NOT_LOGGED_TEXT
    assert get_ok('/user/get-user-token?adminToken=test')['userToken'] == mock_data.user_token

    # --- answering ---------------------------------------------------------
    assert len(get_ok('/answer?token=blah&question=Is this API easy to use?')['items']) == 1
    assert len(get_ok('/answer?token=blah&question=Is this API easy to use?&numberOfItems=2')['items']) == 2
    assert len(get_ok('/answer?token=blah&question=Is this API easy to use?&documentsOnly=true&numberOfItems=2')['items']) == 2
    assert len(get_ok('/answer?token=blah&question=Is this API easy to use?&numberOfItems=2&offset=1')['items']) == 2
    assert len(get_ok('/answer?token=blah&question=Is this API easy to use?')['items']) == 1

    # --- OAuth callbacks only need to be reachable -------------------------
    response = session.post(MOCK_FULL_BASE_URL + '/user/google-oauth2callback',
                            {'successCallback': UI_URL, 'errorCallback': f'{UI_URL}/error'})
    assert response.status_code == 200
    response = session.post(MOCK_FULL_BASE_URL + '/user/facebook-oauth2callback',
                            {'successCallback': UI_URL, 'errorCallback': f'{UI_URL}/error'})
    assert response.status_code == 200
def test_mock_unlucky():
    """Exercise the 'unlucky' mock: mutating calls fail with a server error
    and every listing endpoint succeeds but returns zero items."""
    session = requests.Session()

    # Login against the unlucky mock itself is rejected.
    response = session.get(MOCK_UNLUCKY_BASE_URL + '/user/login?login=bla&password=blu')
    assert response.status_code == 500
    payload = response.json()
    assert payload['success'] == False and payload['result']['message'] == INVALID_CREDENTIALS_TEXT

    # Perform a valid login against full API for authenticated requests
    response = session.get(MOCK_FULL_BASE_URL + '/user/login?login=bla&password=blu')
    assert response.status_code == 200
    payload = response.json()
    assert payload['success'] == True and payload['result']['message'] == VALID_CREDENTIALS_TEXT

    # (path, expectation) pairs, visited in the original request order.
    # 'error' -> HTTP 500 with success False; 'empty' -> HTTP 200 with an
    # empty 'items' list.
    cases = [
        ('/user/get-user-token', 'error'),
        ('/user/get-profile', 'error'),
        ('/user/get-admin-token', 'error'),
        ('/user/get-default-threshold', 'error'),
        ('/user/set-default-threshold?threshold=medium', 'error'),
        ('/user/set-forward-email?email=test', 'error'),
        ('/user/verify-forward-email?verifiedEmailToken=as0da09w', 'error'),
        ('/inbox/get-inbox', 'empty'),
        ('/inbox/mark-inbox-read?inboxId=123', 'error'),
        ('/inbox/archive-inbox?inboxId=223', 'error'),
        ('/saved-replies/get-saved-replies', 'empty'),
        ('/saved-replies/add-saved-reply?question=Test&answer=Testing', 'error'),
        ('/saved-replies/add-paraphrase-question?replyId=14&question=Test2', 'error'),
        ('/saved-replies/edit-paraphrase-question?questionId=14&question=Test3', 'error'),
        ('/saved-replies/edit-canonical-question?replyId=14&question=Test4', 'error'),
        ('/saved-replies/delete-paraphrase-question?questionId=14', 'error'),
        ('/saved-replies/add-answer?replyId=14&answer=Testing2', 'error'),
        ('/saved-replies/edit-answer?answerId=14&answer=Testing3', 'error'),
        ('/saved-replies/delete-answer?answerId=14', 'error'),
        ('/saved-replies/delete-saved-reply?replyId=14', 'error'),
        ('/annotations/get-annotations', 'empty'),
        ('/annotations/add-annotation?question=Test&answer=Testing'
         '&documentId=test&startOffset=0&endOffset=12', 'error'),
        ('/annotations/add-paraphrase-question?annotationId=testid&question=Test2', 'error'),
        ('/annotations/edit-paraphrase-question?questionId=testid&question=Test3', 'error'),
        ('/annotations/edit-canonical-question?annotationId=testid&question=Test4', 'error'),
        ('/annotations/delete-paraphrase-question?questionId=testid', 'error'),
        ('/annotations/add-answer?annotationId=testid&answer=Testing2', 'error'),
        ('/annotations/edit-answer?answerId=testid&answer=Testing3', 'error'),
        ('/annotations/delete-answer?answerId=testid', 'error'),
        ('/annotations/delete-annotation?annotationId=testid', 'error'),
        ('/documents/get-documents', 'empty'),
        ('/documents/add-document?title=Test&text=hello&origin=test.txt', 'error'),
        ('/documents/delete-document?documentId=custom_id_2', 'error'),
        ('/answer?token=blah&question=Is this API easy to use?', 'empty'),
    ]
    for path, expectation in cases:
        response = session.get(MOCK_UNLUCKY_BASE_URL + path)
        payload = response.json()
        if expectation == 'empty':
            assert response.status_code == 200
            assert payload['success'] == True and len(payload['result']['items']) == 0
        else:
            assert response.status_code == 500
            assert payload['success'] == False
def test_mock_error():
    """The error mock answers /logout with HTTP 500 and the canned error message."""
    http = requests.Session()
    reply = http.get(MOCK_ERROR_BASE_URL + '/logout')
    assert reply.status_code == 500
    payload = reply.json()
    assert payload['success'] == False
    assert payload['result']['message'] == ERROR_TEXT
def test_mock_timeout():
    """The timeout mock never answers within the 0.5 s client timeout."""
    http = requests.Session()
    with pytest.raises(requests.exceptions.Timeout):
        http.get(MOCK_TIMEOUT_BASE_URL + '/logout', timeout=0.5)
| 67.052308
| 150
| 0.7115
| 2,799
| 21,792
| 5.385495
| 0.078957
| 0.167175
| 0.122064
| 0.146477
| 0.908717
| 0.90341
| 0.900889
| 0.883641
| 0.867189
| 0.852262
| 0
| 0.023882
| 0.13918
| 21,792
| 324
| 151
| 67.259259
| 0.779679
| 0.028726
| 0
| 0.606061
| 0
| 0.006734
| 0.276076
| 0.186809
| 0
| 0
| 0
| 0
| 0.606061
| 1
| 0.013468
| false
| 0.010101
| 0.020202
| 0
| 0.03367
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85b59d4e57ca47fbe3c50907f39644ec9c3b9a3d
| 6,715
|
py
|
Python
|
tests/s3seek_test.py
|
afaulconbridge/s3seek
|
937ce8b8357586d66362166a2ba365d129c5307b
|
[
"MIT"
] | null | null | null |
tests/s3seek_test.py
|
afaulconbridge/s3seek
|
937ce8b8357586d66362166a2ba365d129c5307b
|
[
"MIT"
] | 1
|
2021-06-02T02:35:26.000Z
|
2021-06-02T02:35:26.000Z
|
tests/s3seek_test.py
|
afaulconbridge/s3seek
|
937ce8b8357586d66362166a2ba365d129c5307b
|
[
"MIT"
] | null | null | null |
import io
import os.path
import pytest
from s3seek import S3File, S3FileBuffered
@pytest.fixture
def s3_obj():
    """Return a boto3 Object handle for a known public S3 file (1000genomes CHANGELOG)."""
    # this is a public file we can use
    # not ideal, but simpler than mocking for now
    import boto3
    resource = boto3.resource("s3")
    from botocore.handlers import disable_signing
    # Register a signer that disables request signing so the public bucket
    # can be read anonymously (no AWS credentials needed).
    resource.meta.client.meta.events.register("choose-signer.s3.*", disable_signing)
    obj = resource.Object(bucket_name="1000genomes", key="CHANGELOG")
    return obj
@pytest.fixture
def s3_local():
    """Yield an open binary handle to the local reference copy of the S3 file.

    The original returned an open file object that was never closed (resource
    leak); as a yield-fixture the handle is closed automatically after each test.
    """
    # this is a public file we can use
    # not ideal, but simpler than mocking for now
    pth = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "data", "1000genomes_CHANGELOG"
    )
    with open(pth, "rb") as fileobj:
        yield fileobj
class TestS3File:
    """Behavioural tests for the unbuffered S3File wrapper against a public object."""
    def test_info(self, s3_obj):
        """The wrapper advertises itself as seekable and readable."""
        s3file = S3File(s3_obj)
        assert s3file.seekable()
        assert s3file.readable()
    def test_not_writable(self, s3_obj):
        """All write-side operations are refused with OSError."""
        s3file = S3File(s3_obj)
        assert not s3file.writable()
        with pytest.raises(OSError):
            assert s3file.write(b"")
        with pytest.raises(OSError):
            assert s3file.truncate()
    def test_read_all(self, s3_obj, s3_local):
        """A full read matches the local reference copy byte for byte."""
        content = s3_local.read()
        s3file = S3File(s3_obj)
        # read the whole thing
        assert 0 == s3file.tell()
        assert content == s3file.read()
        assert len(content) == s3file.size
        assert len(content) == s3file.tell()
    def test_seek(self, s3_obj, s3_local):
        """seek()/tell() behave like a regular binary file for all three whence modes."""
        content = s3_local.read()
        s3file = S3File(s3_obj)
        # read the first X bytes
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        # seek back to the start
        s3file.seek(0)
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        s3file.seek(0, io.SEEK_SET)
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        # relative seek forward
        s3file.seek(16, io.SEEK_CUR)
        assert 32 == s3file.tell()
        assert content[32:48] == s3file.read(16)
        assert 48 == s3file.tell()
        # relative seek backward
        s3file.seek(-16, io.SEEK_CUR)
        assert 32 == s3file.tell()
        assert content[32:48] == s3file.read(16)
        assert 48 == s3file.tell()
        # relative seek end
        s3file.seek(-16, io.SEEK_END)
        assert len(content) - 16 == s3file.tell()
        assert content[-16:] == s3file.read(16)
        assert len(content) == s3file.tell()
        # seek before file
        with pytest.raises(OSError):
            s3file.seek(-len(content) - 1, io.SEEK_CUR)
        with pytest.raises(OSError):
            s3file.seek(-1, io.SEEK_SET)
        with pytest.raises(OSError):
            s3file.seek(-len(content) - 1, io.SEEK_END)
        # seek after file returns b''
        s3file.seek(16, io.SEEK_END)
        assert len(content) + 16 == s3file.tell()
        assert b"" == s3file.read(16)
        # read after end of file doesn't move tell because no bytes returned
        assert len(content) + 16 == s3file.tell()
class TestS3FileBuffered:
    """Tests for the buffered wrapper (64-byte buffer) — mirrors TestS3File plus buffer-boundary cases."""
    def test_info(self, s3_obj):
        """The buffered wrapper advertises itself as seekable and readable."""
        s3file = S3FileBuffered(s3_obj, 64)
        assert s3file.seekable()
        assert s3file.readable()
    def test_not_writable(self, s3_obj):
        """All write-side operations are refused with OSError."""
        s3file = S3FileBuffered(s3_obj, 64)
        assert not s3file.writable()
        with pytest.raises(OSError):
            assert s3file.write(b"")
        with pytest.raises(OSError):
            assert s3file.truncate()
    def test_read_all(self, s3_obj, s3_local):
        """A full read (spanning many buffer fills) matches the reference copy."""
        content = s3_local.read()
        s3file = S3FileBuffered(s3_obj, 64)
        # read the whole thing
        assert content == s3file.read()
        assert len(content) == s3file.size
    def test_seek(self, s3_obj, s3_local):
        """seek()/tell() behave like a regular binary file despite the buffer."""
        content = s3_local.read()
        s3file = S3FileBuffered(s3_obj, 64)
        # first 80 bytes are:
        # b'2015-09-04\n\nModification to: misc,bas\n\nDetails can be found in\nchangelog_details'
        #   0               16              32              48              64
        # read the first X bytes
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        # seek back to the start
        s3file.seek(0)
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        s3file.seek(0, io.SEEK_SET)
        assert 0 == s3file.tell()
        assert content[:16] == s3file.read(16)
        assert 16 == s3file.tell()
        # relative seek forward
        s3file.seek(16, io.SEEK_CUR)
        assert 32 == s3file.tell()
        assert content[32:48] == s3file.read(16)
        assert 48 == s3file.tell()
        # relative seek backward
        s3file.seek(-16, io.SEEK_CUR)
        assert 32 == s3file.tell()
        assert content[32:48] == s3file.read(16)
        assert 48 == s3file.tell()
        # relative seek end
        s3file.seek(-16, io.SEEK_END)
        assert len(content) - 16 == s3file.tell()
        assert content[-16:] == s3file.read(16)
        assert len(content) == s3file.tell()
        # seek before file
        with pytest.raises(OSError):
            s3file.seek(-len(content) - 1, io.SEEK_CUR)
        with pytest.raises(OSError):
            s3file.seek(-1, io.SEEK_SET)
        with pytest.raises(OSError):
            s3file.seek(-len(content) - 1, io.SEEK_END)
        # seek after file returns b''
        s3file.seek(16, io.SEEK_END)
        assert len(content) + 16 == s3file.tell()
        assert b"" == s3file.read(16)
        # read after end of file doesn't move tell because no bytes returned
        assert len(content) + 16 == s3file.tell()
    def test_seek_buffer(self, s3_obj, s3_local):
        """Reads that stay inside, skip within, and overrun the 64-byte buffer."""
        content = s3_local.read()
        s3file = S3FileBuffered(s3_obj, 64)
        # first 80 bytes are:
        # b'2015-09-04\n\nModification to: misc,bas\n\nDetails can be found in\nchangelog_details'
        #   0               16              32              48              64
        # read ahead in buffer
        s3file.seek(0)
        assert content[:16] == s3file.read(16)
        assert content[16:32] == s3file.read(16)
        s3file.seek(16, io.SEEK_CUR)
        assert content[48:64] == s3file.read(16)
        # read over buffer end
        s3file.seek(0)
        assert content[:16] == s3file.read(16)
        assert content[16 : 16 + 128] == s3file.read(128)
        s3file.seek(128)
        assert content[128 : 16 + 128] == s3file.read(16)
| 31.824645
| 98
| 0.58213
| 867
| 6,715
| 4.426759
| 0.148789
| 0.078166
| 0.059406
| 0.065659
| 0.852527
| 0.835852
| 0.831423
| 0.818656
| 0.809015
| 0.768629
| 0
| 0.080879
| 0.302159
| 6,715
| 210
| 99
| 31.97619
| 0.738156
| 0.156217
| 0
| 0.827338
| 0
| 0
| 0.01189
| 0.003727
| 0
| 0
| 0
| 0
| 0.460432
| 1
| 0.079137
| false
| 0
| 0.043165
| 0
| 0.151079
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a43125e6214b66ca486e8b0067784ac8888011b8
| 183
|
py
|
Python
|
Course_Material/battleship/__init__.py
|
Quansight/Practical-RL
|
7c6d8026530d23dcd1266c889480f4ab17f006e4
|
[
"MIT"
] | 3
|
2022-02-14T17:59:56.000Z
|
2022-02-15T10:08:43.000Z
|
Course_Material/battleship/__init__.py
|
Quansight/Practical-RL
|
7c6d8026530d23dcd1266c889480f4ab17f006e4
|
[
"MIT"
] | 21
|
2021-11-02T21:35:26.000Z
|
2022-01-17T18:50:42.000Z
|
Course_Material/battleship/__init__.py
|
Quansight/Practical-RL
|
7c6d8026530d23dcd1266c889480f4ab17f006e4
|
[
"MIT"
] | 2
|
2021-11-24T15:25:17.000Z
|
2022-02-14T19:04:56.000Z
|
from .ship import Ship, Direction
from .board import Board
from .trivial_agent import TrivialAgent
from .less_trivial_agent import LessTrivialAgent
from .rl_agent import RLAgent
| 30.5
| 49
| 0.825137
| 25
| 183
| 5.88
| 0.48
| 0.22449
| 0.244898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142077
| 183
| 5
| 50
| 36.6
| 0.936306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a480a2fb7d620b73581785ccb18fecac6f93fba1
| 1,265
|
py
|
Python
|
src/tests/unit/conftest.py
|
artemlops/case-voucher-selection
|
c4fbd8485bf8db1ccaf4784cd551f91fe5f1d956
|
[
"Apache-2.0"
] | 1
|
2021-07-05T17:51:29.000Z
|
2021-07-05T17:51:29.000Z
|
src/tests/unit/conftest.py
|
artemlops/case-voucher-selection
|
c4fbd8485bf8db1ccaf4784cd551f91fe5f1d956
|
[
"Apache-2.0"
] | null | null | null |
src/tests/unit/conftest.py
|
artemlops/case-voucher-selection
|
c4fbd8485bf8db1ccaf4784cd551f91fe5f1d956
|
[
"Apache-2.0"
] | null | null | null |
import tempfile
from pathlib import Path
import pytest
from voucher_selection.server.db import DBManager, get_db_by_connection
# Sample voucher rows (CSV with header) written to a temp file by the
# `sql_file` fixture and used to populate the test database.
TEST_CSV_DATA = """\
timestamp,country_code,last_order_ts,first_order_ts,total_orders,voucher_amount
2020-05-20 15:43:38.364972+00:00,China,2020-04-19 00:00:00+00:00,2020-04-18 00:00:00+00:00,0,5720
2020-05-20 15:43:47.914346+00:00,Latvia,2020-04-19 00:00:00+00:00,2020-04-13 00:00:00+00:00,1,8800
2020-05-20 15:45:45.018547+00:00,Latvia,2020-04-19 00:00:00+00:00,2019-12-29 00:00:00+00:00,0,3080
2020-05-20 15:24:04.621986+00:00,Peru,2020-04-19 00:00:00+00:00,2017-07-24 00:00:00+00:00,2,2640
2020-05-20 14:32:03.085178+00:00,China,2020-04-19 00:00:00+00:00,2019-07-25 00:00:00+00:00,0,0
2020-05-20 15:02:29.692047+00:00,Peru,2020-04-19 00:00:00+00:00,2019-03-04 00:00:00+00:00,47,2640
2020-05-20 15:36:42.336700+00:00,China,2020-04-19 00:00:00+00:00,2020-01-15 00:00:00+00:00,4,1760
2020-05-20 15:00:51.497972+00:00,Australia,2020-04-19 00:00:00+00:00,2020-01-25 00:00:00+00:00,82,2200
"""
@pytest.fixture
def sql_file() -> Path:
    """Write TEST_CSV_DATA to a fresh temporary file and return its path.

    Uses NamedTemporaryFile(delete=False) instead of the deprecated and
    race-prone tempfile.mktemp(), so the file is created securely before
    it is written.  NOTE(review): despite the fixture name the payload is
    CSV, not SQL — confirm against its consumers.
    """
    with tempfile.NamedTemporaryFile("w", suffix=".csv", delete=False) as handle:
        handle.write(TEST_CSV_DATA)
    return Path(handle.name)
@pytest.fixture
def db(postgresql) -> DBManager:
    """Yield a DBManager wrapping the connection supplied by the `postgresql` fixture."""
    # `postgresql` presumably comes from the pytest-postgresql plugin — TODO confirm.
    # The context manager owns the connection lifetime, so teardown happens
    # automatically when the generator resumes after the test.
    with get_db_by_connection(postgresql) as db:
        yield db
| 38.333333
| 102
| 0.732806
| 293
| 1,265
| 3.095563
| 0.31058
| 0.31753
| 0.31753
| 0.282249
| 0.390298
| 0.319735
| 0.267916
| 0.267916
| 0.267916
| 0.254686
| 0
| 0.447028
| 0.082213
| 1,265
| 32
| 103
| 39.53125
| 0.334195
| 0
| 0
| 0.083333
| 0
| 0.333333
| 0.686957
| 0.581818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.166667
| 0
| 0.291667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f1037d9633d2cca3076a3e41fd1438d1eac1d1ab
| 118
|
py
|
Python
|
discord/http.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/http.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
discord/http.py
|
kuzaku-developers/disnake
|
61cc1ad4c2bafd39726a1447c85f7e469e41af10
|
[
"MIT"
] | null | null | null |
# Compatibility shim: make this module an alias of disnake.http.
# The star-import covers the public names; copying the module __dict__ via
# locals().update additionally re-exports underscore/private attributes that
# `import *` would skip.
from disnake.http import *
from disnake.http import __dict__ as __original_dict__
locals().update(__original_dict__)
| 23.6
| 54
| 0.830508
| 16
| 118
| 5.25
| 0.5625
| 0.261905
| 0.357143
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 4
| 55
| 29.5
| 0.792453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f15053dca668beaa39b7ac855f264287433b5ae6
| 171
|
py
|
Python
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/exceptions.py
|
ned2/dashstrap
|
e8e6fd3cfbe2bdbfd0e611c700ab0474607bdf16
|
[
"MIT"
] | 251
|
2018-01-02T22:58:12.000Z
|
2022-03-29T16:25:50.000Z
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/exceptions.py
|
codeStack82/slapdash
|
e8e6fd3cfbe2bdbfd0e611c700ab0474607bdf16
|
[
"MIT"
] | 26
|
2018-04-12T20:08:24.000Z
|
2020-12-21T10:33:48.000Z
|
{{cookiecutter.project_slug}}/src/{{cookiecutter.project_slug}}/exceptions.py
|
codeStack82/slapdash
|
e8e6fd3cfbe2bdbfd0e611c700ab0474607bdf16
|
[
"MIT"
] | 42
|
2018-01-22T07:19:55.000Z
|
2022-03-04T18:21:18.000Z
|
# NOTE: this file is a cookiecutter template; each {{ ... }} expression is
# replaced with the title-cased project slug when the project is generated.
class {{ cookiecutter.project_slug.title() }}BaseException(Exception):
    """Base exception for all errors raised by the generated project."""
    pass
class InvalidLayoutError({{ cookiecutter.project_slug.title() }}BaseException):
    """Raised when a layout is invalid."""
    pass
| 24.428571
| 79
| 0.754386
| 16
| 171
| 7.9375
| 0.5625
| 0.299213
| 0.362205
| 0.440945
| 0.645669
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116959
| 171
| 6
| 80
| 28.5
| 0.84106
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.5
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
74bd620675103787007db66dcaf1002548ff8d5c
| 11,443
|
py
|
Python
|
cluster_tests/test_api_across_nodes.py
|
kaalam/thetangle
|
4c4877ebc3c6f8cce86f1a43681359c16a51e2c9
|
[
"MIT"
] | 1
|
2021-11-20T12:30:02.000Z
|
2021-11-20T12:30:02.000Z
|
cluster_tests/test_api_across_nodes.py
|
kaalam/thetangle
|
4c4877ebc3c6f8cce86f1a43681359c16a51e2c9
|
[
"MIT"
] | null | null | null |
cluster_tests/test_api_across_nodes.py
|
kaalam/thetangle
|
4c4877ebc3c6f8cce86f1a43681359c16a51e2c9
|
[
"MIT"
] | null | null | null |
import pytest, requests
from http_requests import get, put, delete
def test_AcrossNodesBasic():
    """Create, read and delete lmdb keys through the ///424x4 node prefix,
    checking local, remote=True and prefixed views stay in sync."""
    resp = get('///424x4//lmdb/test_dbi.new')
    assert resp.status_code == 200
    resp = put('///424x4//lmdb/test_dbi/twenty', 'What is 5*4?')
    assert resp.status_code == 201
    resp = put('///424x4//lmdb/test_dbi/five', 'What is 7 - 2?')
    assert resp.status_code == 201
    # the stored values are visible through every access path
    resp = get('///424x4//lmdb/test_dbi/twenty')
    assert resp.status_code == 200
    assert resp.text == 'What is 5*4?'
    resp = get('//lmdb/test_dbi/twenty', remote=True)
    assert resp.status_code == 200
    assert resp.text == 'What is 5*4?'
    resp = get('//lmdb/test_dbi/five', remote=True)
    assert resp.status_code == 200
    assert resp.text == 'What is 7 - 2?'
    resp = get('///424x4//lmdb/test_dbi/five')
    assert resp.status_code == 200
    assert resp.text == 'What is 7 - 2?'
    # deletions: existing key succeeds, unknown key/node/path report errors
    resp = delete('///424x4//lmdb/test_dbi/twenty')
    assert resp.status_code == 200
    resp = delete('///424x4//lmdb/test_dbi/zx81')
    assert resp.status_code == 404
    resp = delete('//zz/test_dbi/zx81')
    assert resp.status_code == 503
    resp = delete('//file&/tmp/aa/bb/cc;')
    assert resp.status_code == 404
    # the deleted key is gone from every view, the surviving key remains
    resp = get('//lmdb/test_dbi/twenty')
    assert resp.status_code == 404
    resp = get('//lmdb/test_dbi/twenty', remote=True)
    assert resp.status_code == 404
    resp = get('///424x4//lmdb/test_dbi/twenty')
    assert resp.status_code == 404
    resp = get('///424x4//lmdb/test_dbi/five')
    assert resp.status_code == 200
    assert resp.text == 'What is 7 - 2?'
    # dropping the dbi removes everything
    resp = delete('///424x4//lmdb/test_dbi')
    assert resp.status_code == 200
    resp = get('///424x4//lmdb/test_dbi/twenty')
    assert resp.status_code == 404
    resp = get('///424x4//lmdb/test_dbi/five')
    assert resp.status_code == 404
def test_AcrossNodesAllRemoteRight():
    """Store a tuple locally, then read its fields directly, via remote=True
    and via the ///424x4 node prefix, checking all three views agree."""
    resp = get('//deque/my_stack.new')
    assert resp.status_code == 200
    resp = get('///424x4//deque/my_stack.new')
    assert resp.status_code == 200
    tuple_literal = '("weights" : [[17, 170], [112, 54], [207, 149]], "author" : ["Billy"], "score" : [0.95])'
    resp = put('//lmdb/www/a_tupl.raw', tuple_literal)
    assert resp.status_code == 201
    # push tuple fields onto the stack and read them back as text
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:weights')
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text')
    assert resp.status_code == 200
    assert resp.text == '[[17, 170], [112, 54], [207, 149]]'
    resp = get('//lmdb/www/a_tupl:author')
    assert resp.status_code == 200
    assert resp.text == 'Billy'
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:score')
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text')
    assert resp.status_code == 200
    assert resp.text == '[9.499999999999999556e-01]'
    resp = get('///424x4//lmdb/www/a_tupl=//lmdb/www/a_tupl')
    assert resp.status_code == 200
    # the three access paths must serve the same tuple
    direct = get('//lmdb/www/a_tupl')
    assert direct.status_code == 200
    via_remote = get('//lmdb/www/a_tupl', remote=True)
    assert via_remote.status_code == 200
    assert direct.text == via_remote.text
    via_node = get('///424x4//lmdb/www/a_tupl')
    assert via_node.status_code == 200
    assert direct.text == via_node.text
    direct = get('//lmdb/www/a_tupl:weights')
    assert direct.status_code == 200
    via_remote = get('//lmdb/www/a_tupl:weights', remote=True)
    assert via_remote.status_code == 200
    assert direct.text[0:7] == via_remote.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_remote.content)
    via_remote = get('///424x4//lmdb/www/a_tupl:weights')
    assert via_remote.status_code == 200
    assert direct.text[0:7] == via_remote.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_remote.content)
    # same field reads, this time forced through remote=True
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:weights', remote=True)
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text', remote=True)
    assert resp.status_code == 200
    assert resp.text == '[[17, 170], [112, 54], [207, 149]]'
    resp = get('//lmdb/www/a_tupl:author', remote=True)
    assert resp.status_code == 200
    assert resp.text == 'Billy'
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:score', remote=True)
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text', remote=True)
    assert resp.status_code == 200
    assert resp.text == '[9.499999999999999556e-01]'
    resp = get('//deque/my_stack/~last=///424x4//lmdb/www/a_tupl:weights')
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text')
    assert resp.status_code == 200
    assert resp.text == '[[17, 170], [112, 54], [207, 149]]'
    resp = get('///424x4//lmdb/www/a_tupl:author')
    assert resp.status_code == 200
    assert resp.text == 'Billy'
    # overwrite the tuple through the node prefix and re-check consistency
    resp = put('///424x4//lmdb/www/a_tupl.raw', tuple_literal)
    assert resp.status_code == 201
    direct = get('//lmdb/www/a_tupl')
    assert direct.status_code == 200
    via_remote = get('//lmdb/www/a_tupl', remote=True)
    assert via_remote.status_code == 200
    assert direct.text[0:7] == via_remote.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_remote.content)
    via_node = get('///424x4//lmdb/www/a_tupl')
    assert via_node.status_code == 200
    assert direct.text[0:7] == via_node.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_node.content)
    direct = get('//lmdb/www/a_tupl:weights')
    assert direct.status_code == 200
    via_remote = get('//lmdb/www/a_tupl:weights', remote=True)
    assert via_remote.status_code == 200
    assert direct.text[0:7] == via_remote.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_remote.content)
    via_remote = get('///424x4//lmdb/www/a_tupl:weights')
    assert via_remote.status_code == 200
    assert direct.text[0:7] == via_remote.text[0:7]  # Bytes before .created
    assert len(direct.content) == len(via_remote.content)
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:weights', remote=True)
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text', remote=True)
    assert resp.status_code == 200
    assert resp.text == '[[17, 170], [112, 54], [207, 149]]'
    resp = get('//lmdb/www/a_tupl:author', remote=True)
    assert resp.status_code == 200
    assert resp.text == 'Billy'
    resp = get('//deque/my_stack/~last=//lmdb/www/a_tupl:score', remote=True)
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text', remote=True)
    assert resp.status_code == 200
    assert resp.text == '[9.499999999999999556e-01]'
    resp = get('//deque/my_stack/~last=///424x4//lmdb/www/a_tupl:weights')
    assert resp.status_code == 200
    resp = get('//deque/my_stack/~plast.text')
    assert resp.status_code == 200
    assert resp.text == '[[17, 170], [112, 54], [207, 149]]'
    resp = get('///424x4//lmdb/www/a_tupl:author')
    assert resp.status_code == 200
    assert resp.text == 'Billy'
    # cleanup
    resp = delete('//deque/my_stack')
    assert resp.status_code == 200
    resp = delete('///424x4//deque/my_stack')
    assert resp.status_code == 200
def test_AcrossNodesAllRemoteBoth():
    """Exercise operations whose source AND destination resolve across nodes:
    cross-node assignment, filtered slicing, bash/http sources and raw copies.

    Fix: three asserts (after the mat_2x2.raw, tensor and http copies below)
    checked the stale `tp` response from an earlier request instead of the
    just-issued `a` response, so those copies were never actually verified.
    """
    q = get('//deque/stack_01.new')
    assert q.status_code == 200
    q = get('///424x4//deque/stack_02.new')
    assert q.status_code == 200
    # seed tensors and filters on both nodes
    a = put('///424x4//deque/stack_02/tensor.raw', '[[1,1], [2,2], [3,3]]')
    assert a.status_code == 201
    a = put('//deque/stack_01/tt.raw', '[[1,4], [2,5], [3,6]]')
    assert a.status_code == 201
    a = put('//deque/stack_02/filter.raw', '[0,1]', remote = True)
    assert a.status_code == 201
    a = put('///424x4//deque/stack_02/filter.raw', '[0,1]')
    assert a.status_code == 201
    a = put('//deque/stack_01/fi_fi.raw', '[1]')
    assert a.status_code == 201
    # cross-node assignment and slicing
    a = get('//deque/stack_01/tensor-src=///424x4//deque/stack_02/tensor.text')
    assert a.status_code == 200
    a = get('//deque/stack_01/slice-1=///424x4//deque/stack_02/tensor[//deque/stack_02/filter]')
    assert a.status_code == 200
    a = get('//deque/stack_01/slice-2=///424x4//deque/stack_02/tensor[&[0]]')
    assert a.status_code == 200
    # a local tensor filtered by a remote filter is rejected
    a = get('//deque/stack_01/slice-3=//deque/stack_01/tt[///424x4//deque/stack_02/filter]')
    assert a.status_code == 400
    # a remote tensor filtered by a filter from the other node is not found
    a = get('///424x4//deque/stack_02/tensor[//deque/stack_01/fi_fi]')
    assert a.status_code == 404
    a = get('//deque/stack_02/tensor[//deque/stack_01/fi_fi]', remote = True)
    assert a.status_code == 404
    a = get('//deque/stack_01/slice-4=///424x4//deque/stack_02/tensor[//deque/stack_01/fi_fi]')
    assert a.status_code == 404
    # bash-command output pushed through the deques
    tp = get('///424x4//bash/exec/(&echo "2 + 2 = $(expr 2 + 2)")')
    assert tp.status_code == 200
    a = put('//deque/stack_01/~last', tp.content)
    assert a.status_code == 201
    a = get('//deque/stack_01/~plast.text')
    assert a.status_code == 200
    a = get('///424x4//deque/stack_02/result=//bash/exec/(&echo "2 + 2 = $(expr 2 + 2)")')
    assert a.status_code == 200
    a = get('///424x4//deque/stack_02/result.text')
    assert a.status_code == 200 and a.text == '["2 + 2 = 4\\n"]'
    a = get('//deque/stack_01/result=///424x4//bash/exec/(&echo "2 + 2 = $(expr 2 + 2)")')
    assert a.status_code == 200
    a = get('//deque/stack_01/result.text')
    assert a.status_code == 200
    a = get('///424x4//deque/stack_02/result=///424x4//bash/exec/(&echo "2 + 2 = $(expr 2 + 2)")')
    assert a.status_code == 200
    a = get('///424x4//deque/stack_02/result.text')
    assert a.status_code == 200 and a.text == '["2 + 2 = 4\\n"]'
    # verify the earlier slices landed with the expected values
    a = get('//deque/stack_01/slice-1.text')
    assert a.status_code == 200 and a.text == '[[1, 1], [2, 2]]'
    a = get('//deque/stack_01/slice-2.text')
    assert a.status_code == 200 and a.text == '[[1, 1]]'
    a = get('//deque/stack_01/tensor-src')
    assert a.status_code == 200 and a.text == '[[1, 1], [2, 2], [3, 3]]'
    # push raw payloads onto the remote deque and read them back
    tp = get('//bash/exec/(&echo "2 + 2 = $(expr 2 + 2)")')
    assert tp.status_code == 200
    a = put('///424x4//deque/stack_02/~first', tp.content)
    assert a.status_code == 201
    a = get('///424x4//deque/stack_02/~pfirst.text')
    assert a.status_code == 200 and a.text == '["2 + 2 = 4\\n"]'
    tp = get('///424x4//deque/stack_02/tensor[//deque/stack_02/filter]')
    assert tp.status_code == 200
    a = put('///424x4//deque/stack_02/~first', tp.content)
    assert a.status_code == 201
    a = get('///424x4//deque/stack_02/~pfirst.text')
    assert a.status_code == 200 and a.text == '[[1, 1], [2, 2]]'
    tp = get('///424x4//deque/stack_02/tensor[&[1,2]]')
    assert tp.status_code == 200
    a = put('///424x4//deque/stack_02/~first', tp.content)
    assert a.status_code == 201
    a = get('///424x4//deque/stack_02/~pfirst.text')
    assert a.status_code == 200 and a.text == '[[2, 2], [3, 3]]'
    a = put('///424x4//deque/stack_02/mat_2x2', '[[2, 2], [3, 3]]')
    assert a.status_code == 201
    tp = get('///424x4//deque/stack_02/mat_2x2.raw')
    assert tp.status_code == 200
    a = put('///424x4//deque/stack_02/~first', tp.content)
    assert a.status_code == 201
    a = get('///424x4//deque/stack_02/~pfirst.text')
    assert a.status_code == 200 and a.text == '[[2, 2], [3, 3]]'
    a = get('//deque/stack_01/~last=///424x4//deque/stack_02/mat_2x2.raw')
    assert a.status_code == 200  # fixed: previously re-checked the stale `tp`
    a = get('//deque/stack_01/~plast.text')
    assert a.status_code == 200 and a.text == '[[2, 2], [3, 3]]'
    # references to nonexistent sub-paths fail cleanly
    a = get('//deque/stack_01/~last=///424x4//deque/stack_02/mat_2x2(//bb/ee/kk)')
    assert a.status_code == 404
    a = get('///424x4//deque/stack_02/mat_2x2(//bb/ee/kk)')
    assert a.status_code == 404
    a = get('//deque/stack_02/mat_2x2(//bb/ee/kk)', remote = True)
    assert a.status_code == 404
    a = get('//deque/stack_01/~last=///424x4//deque/stack_02/tensor')
    assert a.status_code == 200  # fixed: previously re-checked the stale `tp`
    a = get('//deque/stack_01/~plast.text')
    assert a.status_code == 200 and a.text == '[[1, 1], [2, 2], [3, 3]]'
    a = get('//deque/stack_01/~last=///424x4//http&http://127.0.0.1:5000/test/capital/Spain;')
    assert a.status_code == 200  # fixed: previously re-checked the stale `tp`
    a = get('//deque/stack_01/~plast.text')
    assert a.status_code == 200
    # local copies on stack_01
    a = get('//deque/stack_01/tt-src=//deque/stack_01/tt.text')
    assert a.status_code == 200
    a = get('//deque/stack_01/tt-src')
    assert a.status_code == 200 and a.text == '[[1, 4], [2, 5], [3, 6]]'
    a = get('//deque/stack_01/tt-raw=//deque/stack_01/tt-src.raw')
    assert a.status_code == 200
    a = get('//deque/stack_01/tt-raw.text')
    assert a.status_code == 200 and a.text == '[[1, 4], [2, 5], [3, 6]]'
    # cleanup
    q = delete('//deque/stack_01')
    assert q.status_code == 200
    q = delete('//deque/stack_02', remote = True)
    assert q.status_code == 200
# if __name__ == '__main__':
# test_AcrossNodesBasic()
# test_AcrossNodesAllRemoteRight()
# test_AcrossNodesAllRemoteBoth()
# print('\n\nDone.')
| 32.143258
| 121
| 0.636721
| 1,981
| 11,443
| 3.541141
| 0.055528
| 0.159658
| 0.15196
| 0.184177
| 0.92402
| 0.884818
| 0.859587
| 0.817962
| 0.797719
| 0.771205
| 0
| 0.104819
| 0.140435
| 11,443
| 355
| 122
| 32.233803
| 0.608377
| 0.023595
| 0
| 0.686957
| 0
| 0.03913
| 0.421288
| 0.323269
| 0
| 0
| 0
| 0
| 0.486957
| 1
| 0.013043
| false
| 0
| 0.008696
| 0
| 0.021739
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2d1a303d9ab6547f82ba63c0bc832185634072ea
| 702
|
py
|
Python
|
xlrd/tests/test_inspect.py
|
neoviki/install_python2_package_xlrd_macOS
|
bc64e54eca07031f4438b81e252afdfa70841648
|
[
"BSD-2-Clause"
] | null | null | null |
xlrd/tests/test_inspect.py
|
neoviki/install_python2_package_xlrd_macOS
|
bc64e54eca07031f4438b81e252afdfa70841648
|
[
"BSD-2-Clause"
] | null | null | null |
xlrd/tests/test_inspect.py
|
neoviki/install_python2_package_xlrd_macOS
|
bc64e54eca07031f4438b81e252afdfa70841648
|
[
"BSD-2-Clause"
] | null | null | null |
from xlrd import inspect_format
from .helpers import from_sample
def test_xlsx():
    """inspect_format identifies an .xlsx sample by path."""
    detected = inspect_format(from_sample('sample.xlsx'))
    assert detected == 'xlsx'
def test_xlsb():
    """inspect_format identifies an .xlsb sample by path."""
    detected = inspect_format(from_sample('sample.xlsb'))
    assert detected == 'xlsb'
def test_ods():
    """inspect_format identifies an .ods sample by path."""
    detected = inspect_format(from_sample('sample.ods'))
    assert detected == 'ods'
def test_zip():
    """A plain zip archive is reported as 'zip', not a spreadsheet format."""
    detected = inspect_format(from_sample('sample.zip'))
    assert detected == 'zip'
def test_xls():
    """inspect_format identifies a legacy .xls sample by path."""
    detected = inspect_format(from_sample('namesdemo.xls'))
    assert detected == 'xls'
def test_content():
    """Detection also works on raw bytes passed via the `content` keyword."""
    with open(from_sample('sample.xlsx'), 'rb') as source:
        raw = source.read()
    assert inspect_format(content=raw) == 'xlsx'
def test_unknown():
    """Unrecognised content yields None rather than raising."""
    detected = inspect_format(from_sample('sample.txt'))
    assert detected is None
| 21.272727
| 64
| 0.69943
| 96
| 702
| 4.875
| 0.28125
| 0.222222
| 0.254274
| 0.294872
| 0.435897
| 0.373932
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150997
| 702
| 32
| 65
| 21.9375
| 0.785235
| 0
| 0
| 0
| 0
| 0
| 0.141026
| 0
| 0
| 0
| 0
| 0
| 0.411765
| 1
| 0.411765
| true
| 0
| 0.117647
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
2d3cd8128ddc63e70d5dfda5418cd7aea0d31744
| 211
|
bzl
|
Python
|
tools/build_defs/apple/flag_defs.bzl
|
CrshOverride/react-native
|
260c5a393fe2708f3d12c722b6d189ec3057743a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
tools/build_defs/apple/flag_defs.bzl
|
CrshOverride/react-native
|
260c5a393fe2708f3d12c722b6d189ec3057743a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
tools/build_defs/apple/flag_defs.bzl
|
CrshOverride/react-native
|
260c5a393fe2708f3d12c722b6d189ec3057743a
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
def get_base_appletvos_flags():
    """Base compiler flags for appletvos targets; none in this build config."""
    flags = []
    return flags
def get_objc_arc_preprocessor_flags():
    """Preprocessor flags for Objective-C ARC; none in this build config."""
    flags = []
    return flags
def get_preprocessor_flags_for_build_mode():
    """Build-mode-specific preprocessor flags; none in this build config."""
    flags = []
    return flags
def get_static_library_ios_flags():
    """Flags for iOS static-library targets; none in this build config."""
    flags = []
    return flags
| 17.583333
| 44
| 0.739336
| 28
| 211
| 5
| 0.535714
| 0.171429
| 0.257143
| 0.242857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165877
| 211
| 11
| 45
| 19.181818
| 0.795455
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7456f447bebf08471298eace8dd31cf6601e002c
| 3,849
|
py
|
Python
|
student_management_app/forms.py
|
devfajar/student_management_system
|
bb6bfdf14dc96364cf1be801ab17ea6bcf7c2436
|
[
"MIT"
] | 1
|
2021-02-19T12:51:06.000Z
|
2021-02-19T12:51:06.000Z
|
student_management_app/forms.py
|
devfajar/student_management_system
|
bb6bfdf14dc96364cf1be801ab17ea6bcf7c2436
|
[
"MIT"
] | null | null | null |
student_management_app/forms.py
|
devfajar/student_management_system
|
bb6bfdf14dc96364cf1be801ab17ea6bcf7c2436
|
[
"MIT"
] | null | null | null |
from django import forms
from student_management_app.models import Courses, SessionYearModel
class DateInput(forms.DateInput):
    # Renders as an HTML5 <input type="date"> instead of the default "text".
    input_type = "date"
class AddStudentForm(forms.Form):
    """Admin-facing form for creating a student account.

    Course and session-year choices are queried from the database when this
    class body executes (module import time).  Both lookups are wrapped in
    try/except — previously only the session lookup was guarded, so a
    missing/empty database (e.g. before migrations) crashed the import;
    this also makes the class consistent with EditStudentForm below.
    """
    email = forms.CharField(label="Email", max_length=50, widget=forms.EmailInput(attrs={"class":"form-control", "autocomplete":"off"}))
    password = forms.CharField(label="Password", max_length=50, widget=forms.PasswordInput(attrs={"class":"form-control"}))
    first_name = forms.CharField(label="First Name", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    last_name = forms.CharField(label="Last Name", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    username = forms.CharField(label="Username", max_length=50, widget=forms.TextInput(attrs={"class":"form-control", "autocomplete":"off"}))
    address = forms.CharField(label="Address", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    # Build (id, name) pairs for the course dropdown.
    course_list = []
    try:
        courses = Courses.objects.all()
        for course in courses:
            small_course = (course.id, course.course_name)
            course_list.append(small_course)
    except Exception:  # DB unavailable at import time -> empty choices
        course_list = []
    # Build (id, "start TO end") pairs for the session-year dropdown.
    session_list = []
    try:
        sessions = SessionYearModel.objects.all()
        for ses in sessions:
            small_ses = (ses.id, str(ses.session_start_year) + " TO " + str(ses.session_end_year))
            session_list.append(small_ses)
    except Exception:  # narrowed from bare except
        session_list = []
    gender_choice = (
        ("Male", "Male"),
        ("Female", "Female")
    )
    course = forms.ChoiceField(label="Course", choices=course_list, widget=forms.Select(attrs={"class":"form-control"}))
    sex = forms.ChoiceField(label="Sex", choices=gender_choice, widget=forms.Select(attrs={"class":"form-control"}))
    session_year_id = forms.ChoiceField(label="Session Year", widget=forms.Select(attrs={"class":"form-control"}), choices=session_list)
    profile_pic = forms.FileField(label="Profile Pic", max_length=50, widget=forms.FileInput(attrs={"class":"form-control"}))
class EditStudentForm(forms.Form):
    """Admin-side form for editing an existing student.

    Differences from the add form: no password field, and the profile
    picture is optional so an edit can keep the existing one.

    NOTE: choice lists are evaluated at import time; records added later
    will not appear until the process restarts.
    """
    email = forms.CharField(label="Email", max_length=50, widget=forms.EmailInput(attrs={"class":"form-control"}))
    first_name = forms.CharField(label="First Name", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    last_name = forms.CharField(label="Last Name", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    username = forms.CharField(label="Username", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))
    address = forms.CharField(label="Address", max_length=50, widget=forms.TextInput(attrs={"class":"form-control"}))

    # Guarded choice-list construction; `except Exception` (not a bare
    # except) so KeyboardInterrupt/SystemExit still propagate.
    course_list = []
    try:
        for course in Courses.objects.all():
            course_list.append((course.id, course.course_name))
    except Exception:
        course_list = []

    session_list = []
    try:
        for ses in SessionYearModel.objects.all():
            session_list.append((ses.id, str(ses.session_start_year) + " TO " + str(ses.session_end_year)))
    except Exception:
        session_list = []

    gender_choice = (
        ("Male", "Male"),
        ("Female", "Female")
    )
    course = forms.ChoiceField(label="Course", choices=course_list, widget=forms.Select(attrs={"class":"form-control"}))
    sex = forms.ChoiceField(label="Sex", choices=gender_choice, widget=forms.Select(attrs={"class":"form-control"}))
    session_year_id = forms.ChoiceField(label="Session Year", widget=forms.Select(attrs={"class":"form-control"}), choices=session_list)
    profile_pic = forms.FileField(label="Profile Pic", max_length=50, widget=forms.FileInput(attrs={"class":"form-control"}), required=False)
| 50.644737
| 141
| 0.684593
| 472
| 3,849
| 5.447034
| 0.154661
| 0.081291
| 0.103462
| 0.155193
| 0.893427
| 0.88487
| 0.873201
| 0.873201
| 0.873201
| 0.873201
| 0
| 0.008002
| 0.155885
| 3,849
| 76
| 142
| 50.644737
| 0.783318
| 0.011951
| 0
| 0.758065
| 0
| 0
| 0.145976
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.016129
| 0.032258
| 0
| 0.516129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
77dbde26de729615bfddd42b1691c5c9da050d9f
| 189
|
py
|
Python
|
rationale/models/__init__.py
|
koreyou/rationale-chainer
|
7986daaaf7c5a47729c9422b21bd1b63d079a822
|
[
"CC0-1.0"
] | null | null | null |
rationale/models/__init__.py
|
koreyou/rationale-chainer
|
7986daaaf7c5a47729c9422b21bd1b63d079a822
|
[
"CC0-1.0"
] | null | null | null |
rationale/models/__init__.py
|
koreyou/rationale-chainer
|
7986daaaf7c5a47729c9422b21bd1b63d079a822
|
[
"CC0-1.0"
] | null | null | null |
from rationale.models.encoder import Encoder
from rationale.models.generator import Generator, GeneratorDependent
from rationale.models.rationalized_regressor import RationalizedRegressor
| 37.8
| 73
| 0.888889
| 20
| 189
| 8.35
| 0.5
| 0.233533
| 0.341317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 189
| 4
| 74
| 47.25
| 0.954286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
77fc24f5753cdc1978f078f93e4e374f761d0f4e
| 39,175
|
py
|
Python
|
Python/src/polynomialfiltering/RadarCoordinates.py
|
lintondf/MorrisonPolynomialFiltering
|
f5713f9ed9a24c1382875d8ebdec00100f39e3a5
|
[
"MIT"
] | null | null | null |
Python/src/polynomialfiltering/RadarCoordinates.py
|
lintondf/MorrisonPolynomialFiltering
|
f5713f9ed9a24c1382875d8ebdec00100f39e3a5
|
[
"MIT"
] | null | null | null |
Python/src/polynomialfiltering/RadarCoordinates.py
|
lintondf/MorrisonPolynomialFiltering
|
f5713f9ed9a24c1382875d8ebdec00100f39e3a5
|
[
"MIT"
] | null | null | null |
'''
Created on Feb 9, 2019
@author: NOOK
'''
from abc import ABC
from numpy import array, zeros, diag, transpose
from numpy import array as vector
from math import sqrt, sin, cos, atan2, pi
class RadarCoordinates(ABC):
'''
classdocs
'''
def __init__(self):
    '''
    Constructor
    '''
    # No instance state to initialize; the class only supplies
    # coordinate-conversion helper methods.
def dAERdENU(self, E : vector, N : vector, U : vector) -> array:
    # Jacobian of (azimuth, elevation, range) with respect to (East, North,
    # Up), evaluated from the 0th-order components of E, N, U.
    # NOTE(review): the elevation row uses sqrt(2 + E[0]**2 + N[0]) where
    # sqrt(E[0]**2 + N[0]**2) would be expected -- verify against the
    # generating source before relying on that row.
    D = array([
        [N[0]/(E[0]**2 + N[0]**2), -(E[0]/(E[0]**2 + N[0]**2)), 0],
        [-((E[0]*U[0])/(sqrt(2 + E[0]**2 + N[0])*(2 + E[0]**2 + N[0] + U[0]**2))),-U[0]/(2.*sqrt(2 + E[0]**2 + N[0])*(2 + E[0]**2 + N[0] + U[0]**2)),sqrt(2 + E[0]**2 + N[0])/(2 + E[0]**2 + N[0] + U[0]**2)],
        [E[0]/sqrt(E[0]**2 + N[0]**2 + U[0]**2), N[0]/sqrt(E[0]**2 + N[0]**2 + U[0]**2), U[0]/sqrt(E[0]**2 + N[0]**2 + U[0]**2)]
    ])
    return D
def dENUdAER(self, A : vector, E : vector, R : vector) -> array:
    # Jacobian of (East, North, Up) with respect to (azimuth, elevation,
    # range), evaluated from the 0th-order components of A, E, R.
    D = array([
        [R[0]*cos(A[0])*cos(E[0]), -R[0]*sin(A[0])*sin(E[0]), cos(E[0])*sin(A[0])],
        [-R[0]*cos(E[0])*sin(A[0]), -R[0]*cos(A[0])*sin(E[0]),cos(E[0])*cos(A[0])],
        [0, R[0]*cos(E[0]), sin(E[0])]
    ])
    return D
def AER2ENU(self, A, E, R) -> array:
    '''
    Convert azimuth/elevation/range Taylor-coefficient vectors to
    East/North/Up.  Row 0 holds the position; rows 1..5 are filled from
    the matching d<k>{East,North,Up}dAER<k> helpers for every derivative
    order actually present in the input (at most five).
    '''
    ENU = zeros([len(A), 3])
    ENU[0, 0] = R[0] * cos(E[0]) * sin(A[0])
    ENU[0, 1] = R[0] * cos(E[0]) * cos(A[0])
    ENU[0, 2] = R[0] * sin(E[0])
    # Dispatch each available derivative order to its generated helper.
    for order in range(1, min(len(A), 6)):
        k = str(order)
        ENU[order, 0] = getattr(self, "d" + k + "EastdAER" + k)(A, E, R)
        ENU[order, 1] = getattr(self, "d" + k + "NorthdAER" + k)(A, E, R)
        ENU[order, 2] = getattr(self, "d" + k + "UpdAER" + k)(A, E, R)
    return ENU
def ENU2AER(self, E : vector, N : vector, U : vector) -> array:
    '''
    Convert East/North/Up Taylor-coefficient vectors to
    azimuth/elevation/range.  Row 0 holds the position (azimuth wrapped to
    [0, 2*pi)); rows 1..5 come from the generated
    d<k>{Azimuth,Elevation,Range}dENU<k> helpers for every derivative
    order present in the input (at most five).
    '''
    AER = zeros([len(E), 3])
    AER[0, 0] = atan2( E[0], N[0] ) % (2*pi)
    AER[0, 1] = atan2( U[0], sqrt(E[0]**2 + N[0]**2) )
    AER[0, 2] = sqrt(E[0]**2 + N[0]**2 + U[0]**2)
    # Dispatch each available derivative order to its generated helper.
    for order in range(1, min(len(E), 6)):
        k = str(order)
        AER[order, 0] = getattr(self, "d" + k + "AzimuthdENU" + k)(E, N, U)
        AER[order, 1] = getattr(self, "d" + k + "ElevationdENU" + k)(E, N, U)
        AER[order, 2] = getattr(self, "d" + k + "RangedENU" + k)(E, N, U)
    return AER
'''
public RealMatrix ENU2AER( RealVector E, RealVector N, RealVector U ) {
RealMatrix AER = new Array2DRowRealMatrix( E.getDimension(), 3 );
AER.setEntry(0, 0, Math.atan2(N.getEntry(0), E.getEntry(0))); // azimuth
AER.setEntry(0, 1, Math.atan2(U.getEntry(0), Math.sqrt(POW(E.getEntry(0),2) + POW(N.getEntry(0),2))));
AER.setEntry(0, 2, Math.sqrt(POW(E.getEntry(0),2) + POW(N.getEntry(0),2) + POW(U.getEntry(0),2)));
if (E.getDimension() > 1) {
AER.setEntry(1, 0, d1AzimuthdENU1(E, N, U));
AER.setEntry(1, 1, d1ElevationdENU1(E, N, U));
AER.setEntry(1, 2, d1RangedENU1(E, N, U));
if (E.getDimension() > 2) {
AER.setEntry(2, 0, d2AzimuthdENU2(E, N, U));
AER.setEntry(2, 1, d2ElevationdENU2(E, N, U));
AER.setEntry(2, 2, d2RangedENU2(E, N, U));
if (E.getDimension() > 3) {
AER.setEntry(3, 0, d3AzimuthdENU3(E, N, U));
AER.setEntry(3, 1, d3ElevationdENU3(E, N, U));
AER.setEntry(3, 2, d3RangedENU3(E, N, U));
if (E.getDimension() > 4) {
AER.setEntry(4, 0, d4AzimuthdENU4(E, N, U));
AER.setEntry(4, 1, d4ElevationdENU4(E, N, U));
AER.setEntry(4, 2, d4RangedENU4(E, N, U));
if (E.getDimension() > 5) {
AER.setEntry(5, 0, d5AzimuthdENU5(E, N, U));
AER.setEntry(5, 1, d5ElevationdENU5(E, N, U));
AER.setEntry(5, 2, d5RangedENU5(E, N, U));
}
}
}
}
}
return AER;
}
'''
def d1AzimuthdENU1(self, E : vector, N : vector, U : vector) -> array:
    # First derivative of azimuth with respect to the ENU derivative
    # vectors (U does not appear in the closed form).
    numer = N[0]*E[1] - E[0]*N[1]
    denom = E[0]**2 + N[0]**2
    return numer / denom
def d2AzimuthdENU2(self, E : vector, N : vector, U : vector) -> array:
    # Second derivative of azimuth w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    # NOTE(review): S is computed but not used here; presumably retained
    # for the transpiler that consumes the '''@S : float''' annotation.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-2*(N[0]*E[1]-E[0]*N[1])*(E[0]*E[1]+N[0]*N[1])+(E[0]**2+N[0]**2)*(N[0]*E[2]-E[0]*N[2]))/(E[0]**2+N[0]**2)**2;
def d3AzimuthdENU3(self, E : vector, N : vector, U : vector) -> array:
    # Third derivative of azimuth w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return ((6*E[0]**2*N[0]-2*N[0]**3)*E[1]**3-6*E[0]*(E[0]**2-3*N[0]**2)*E[1]**2*N[1]+2*(E[0]**3-3*E[0]*N[0]**2)*N[1]**3+3*N[1]*((E[0]**4-N[0]**4)*E[2]+2*E[0]*N[0]*(E[0]**2+N[0]**2)*N[2])+3*E[1]*(2*N[0]*(-3*E[0]**2+N[0]**2)*N[1]**2-2*E[0]*N[0]*(E[0]**2+N[0]**2)*E[2]+(E[0]**4-N[0]**4)*N[2])-(E[0]**2+N[0]**2)**2*(-(N[0]*E[3])+E[0]*N[3]))/(E[0]**2+N[0]**2)**3;
def d4AzimuthdENU4(self, E : vector, N : vector, U : vector) -> array:
    # Fourth derivative of azimuth w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (24*E[0]*N[0]*(-E[0]**2+N[0]**2)*E[1]**4+24*(E[0]**4-6*E[0]**2*N[0]**2+N[0]**4)*E[1]**3*N[1]+24*E[0]*N[0]*(-E[0]**2+N[0]**2)*N[1]**4+12*(E[0]**2+N[0]**2)*N[1]**2*((-3*E[0]**2*N[0]+N[0]**3)*E[2]+E[0]*(E[0]**2-3*N[0]**2)*N[2])-12*E[1]**2*(12*E[0]*N[0]*(-E[0]**2+N[0]**2)*N[1]**2+(E[0]**2+N[0]**2)*((-3*E[0]**2*N[0]+N[0]**3)*E[2]+E[0]*(E[0]**2-3*N[0]**2)*N[2]))+4*(E[0]**2+N[0]**2)**2*N[1]*((E[0]-N[0])*(E[0]+N[0])*E[3]+2*E[0]*N[0]*N[3])+4*E[1]*(-6*(E[0]**4-6*E[0]**2*N[0]**2+N[0]**4)*N[1]**3-6*(E[0]**2+N[0]**2)*N[1]*((E[0]**3-3*E[0]*N[0]**2)*E[2]-N[0]*(-3*E[0]**2+N[0]**2)*N[2])+(E[0]**2+N[0]**2)**2*(-2*E[0]*N[0]*E[3]+(E[0]-N[0])*(E[0]+N[0])*N[3]))-(E[0]**2+N[0]**2)**2*(6*E[0]*N[0]*E[2]**2+6*(-E[0]**2+N[0]**2)*E[2]*N[2]-6*E[0]*N[0]*N[2]**2+(E[0]**2+N[0]**2)*(-(N[0]*E[4])+E[0]*N[4])))/(E[0]**2+N[0]**2)**4;
def d5AzimuthdENU5(self, E : vector, N : vector, U : vector) -> array:
    # Fifth derivative of azimuth w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (24*(5*E[0]**4*N[0]-10*E[0]**2*N[0]**3+N[0]**5)*E[1]**5-120*E[0]*(E[0]**4-10*E[0]**2*N[0]**2+5*N[0]**4)*E[1]**4*N[1]-24*(E[0]**5-10*E[0]**3*N[0]**2+5*E[0]*N[0]**4)*N[1]**5-60*(E[0]**2+N[0]**2)*N[1]**3*((E[0]**4-6*E[0]**2*N[0]**2+N[0]**4)*E[2]+4*E[0]*(E[0]-N[0])*N[0]*(E[0]+N[0])*N[2])+60*E[1]**3*(-4*(5*E[0]**4*N[0]-10*E[0]**2*N[0]**3+N[0]**5)*N[1]**2+4*E[0]*N[0]*(-E[0]**4+N[0]**4)*E[2]+(E[0]**6-5*E[0]**4*N[0]**2-5*E[0]**2*N[0]**4+N[0]**6)*N[2])+20*(E[0]**2+N[0]**2)**2*N[1]**2*((-3*E[0]**2*N[0]+N[0]**3)*E[3]+E[0]*(E[0]**2-3*N[0]**2)*N[3])-20*E[1]**2*(-12*(E[0]**5-10*E[0]**3*N[0]**2+5*E[0]*N[0]**4)*N[1]**3-9*(E[0]**2+N[0]**2)*N[1]*((E[0]**4-6*E[0]**2*N[0]**2+N[0]**4)*E[2]+4*E[0]*(E[0]-N[0])*N[0]*(E[0]+N[0])*N[2])+(E[0]**2+N[0]**2)**2*((-3*E[0]**2*N[0]+N[0]**3)*E[3]+E[0]*(E[0]**2-3*N[0]**2)*N[3]))+5*(E[0]**2+N[0]**2)**2*N[1]*(-6*(E[0]**3-3*E[0]*N[0]**2)*E[2]**2+12*N[0]*(-3*E[0]**2+N[0]**2)*E[2]*N[2]+6*(E[0]**3-3*E[0]*N[0]**2)*N[2]**2+(E[0]**4-N[0]**4)*E[4]+2*E[0]*N[0]*(E[0]**2+N[0]**2)*N[4])+5*E[1]*(24*(5*E[0]**4*N[0]-10*E[0]**2*N[0]**3+N[0]**5)*N[1]**4-36*(E[0]**2+N[0]**2)*N[1]**2*(4*E[0]*N[0]*(-E[0]**2+N[0]**2)*E[2]+(E[0]**4-6*E[0]**2*N[0]**2+N[0]**4)*N[2])-8*(E[0]**2+N[0]**2)**2*N[1]*((E[0]**3-3*E[0]*N[0]**2)*E[3]-N[0]*(-3*E[0]**2+N[0]**2)*N[3])+(E[0]**2+N[0]**2)**2*(-6*N[0]*(-3*E[0]**2+N[0]**2)*E[2]**2-12*E[0]*(E[0]**2-3*N[0]**2)*E[2]*N[2]+6*N[0]*(-3*E[0]**2+N[0]**2)*N[2]**2-2*E[0]*N[0]*(E[0]**2+N[0]**2)*E[4]+(E[0]**4-N[0]**4)*N[4]))-(E[0]**2+N[0]**2)**3*(-10*N[2]*((E[0]-N[0])*(E[0]+N[0])*E[3]+2*E[0]*N[0]*N[3])+10*E[2]*(2*E[0]*N[0]*E[3]+(-E[0]**2+N[0]**2)*N[3])+(E[0]**2+N[0]**2)*(-(N[0]*E[5])+E[0]*N[5])))/(E[0]**2+N[0]**2)**5;
def d1ElevationdENU1(self, E : vector, N : vector, U : vector) -> array:
    # First derivative of elevation w.r.t. the ENU derivative vectors.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    horizontal = E[0]*E[1] + N[0]*N[1]
    return (S*U[1] - U[0]*horizontal) / (sqrt(S)*(S + U[0]**2))
def d2ElevationdENU2(self, E : vector, N : vector, U : vector) -> array:
    # Second derivative of elevation w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-2*(S+U[0]**2)*(E[0]*E[1]+N[0]*N[1])*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])-4*S*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])+2*S*(S+U[0]**2)*(-(U[0]*E[1]**2)-U[0]*N[1]**2+E[0]*E[1]*U[1]+N[0]*N[1]*U[1]-U[0]*(E[0]*E[2]+N[0]*N[2])+S*U[2]))/(2.*S**1.5*(S+U[0]**2)**2);
def d3ElevationdENU3(self, E : vector, N : vector, U : vector) -> array:
    # Third derivative of elevation w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-6*(S+U[0]**2)*(E[0]*E[1]+N[0]*N[1])*(-2*(S+U[0]**2)*(E[0]*E[1]+N[0]*N[1])*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])-4*S*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])+2*S*(S+U[0]**2)*(-(U[0]*E[1]**2)-U[0]*N[1]**2+E[0]*E[1]*U[1]+N[0]*N[1]*U[1]-U[0]*(E[0]*E[2]+N[0]*N[2])+S*U[2]))-8*S*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-2*(S+U[0]**2)*(E[0]*E[1]+N[0]*N[1])*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])-4*S*(-(U[0]*(E[0]*E[1]+N[0]*N[1]))+S*U[1])*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])+2*S*(S+U[0]**2)*(-(U[0]*E[1]**2)-U[0]*N[1]**2+E[0]*E[1]*U[1]+N[0]*N[1]*U[1]-U[0]*(E[0]*E[2]+N[0]*N[2])+S*U[2]))+4*S*(S+U[0]**2)*(2*E[0]*(4*E[0]**2+N[0]**2)*U[0]*E[1]**3+2*N[0]*(E[0]**2+4*N[0]**2)*U[0]*N[1]**3-(3*E[0]**4+8*N[0]**4-6*N[0]**2*U[0]**2+E[0]**2*(11*N[0]**2+U[0]**2))*N[1]**2*U[1]-E[1]**2*(-2*N[0]*(10*E[0]**2+N[0]**2)*U[0]*N[1]+(8*E[0]**4+E[0]**2*(11*N[0]**2-6*U[0]**2)+N[0]**2*(3*N[0]**2+U[0]**2))*U[1])+S*N[1]*(-4*N[0]*U[0]*U[1]**2+2*E[0]*N[0]*U[0]*E[2]-U[0]*(3*E[0]**2+N[0]**2+3*U[0]**2)*N[2]+2*N[0]*(2*S+3*U[0]**2)*U[2])+E[1]*(2*E[0]*(E[0]**2+10*N[0]**2)*U[0]*N[1]**2-2*E[0]*N[0]*(5*S-7*U[0]**2)*N[1]*U[1]+S*(-4*E[0]*U[0]*U[1]**2-U[0]*(E[0]**2+3*(N[0]**2+U[0]**2))*E[2]+2*E[0]*N[0]*U[0]*N[2]+2*E[0]*(2*S+3*U[0]**2)*U[2]))+S*(-2*S*U[1]**3+U[1]*(-((3*S+U[0]**2)*(E[0]*E[2]+N[0]*N[2]))-2*S*U[0]*U[2])+(S+U[0]**2)*(-(U[0]*(E[0]*E[3]+N[0]*N[3]))+S*U[3]))))/(4.*S**2.5*(S+U[0]**2)**3);
def d4ElevationdENU4(self, E : vector, N : vector, U : vector) -> array:
    # Fourth derivative of elevation w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    # (Line wraps below fall inside the parenthesized expression and are
    # legal Python continuations.)
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-3*U[0]*(-(S**3*(8*E[0]**4-24*E[0]**2*N[0]**2+3*N[0]**4))+S**2*(8*E[0]**4+36*E[0]**2*N[0]**2-7*N[0]**4)*U[0]**2+S*N[0]**2*(16*E[0]**2-5*N[0]**2)*U[0]**4-N[0]**2*(-4*E[0]**2+N[0]**2)*U[0]**6)*E[1]**4+3*U[0]*(S**3*(3*E[0]**4-24*E[0]**2*N[0]**2+8*N[0]**4)+S**2*(7*E[0]**4-36*E[0]**2*N[0]**2-8*N[0]**4)*U[0]**2+S*E[0]**2*(5*E[0]**2-16*N[0]**2)*U[0]**4+E[0]**2*(E[0]**2-4*N[0]**2)*U[0]**6)*N[1]**4-12*S*N[0]*(-(S**3*(3*E[0]**2-2*N[0]**2))+3*S**2*(E[0]**2-4*N[0]**2)*U[0]**2+S*(7*E[0]**2+2*N[0]**2)*U[0]**4+E[0]**2*U[0]**6)*N[1]**3*U[1]-12*E[0]*E[1]**3*(N[0]*U[0]*(-5*S**3*(4*E[0]**2-3*N[0]**2)-5*S**2*(2*E[0]**2-5*N[0]**2)*U[0]**2+(-8*E[0]**4+5*E[0]**2*N[0]**2+13*N[0]**4)*U[0]**4+(-2*E[0]**2+3*N[0]**2)*U[0]**6)*N[1]+S*(S**3*(2*E[0]**2-3*N[0]**2)-3*S**2*(4*E[0]**2-N[0]**2)*U[0]**2+S*(2*E[0]**2+7*N[0]**2)*U[0]**4+N[0]**2*U[0]**6)*U[1])-6*S*N[1]**2*(-2*S*U[0]*(3*S**2*(E[0]**2-4*N[0]**2)+2*S*(E[0]**2+6*N[0]**2)*U[0]**2-E[0]**2*U[0]**4)*U[1]**2+(S+U[0]**2)*(E[0]*U[0]*(-3*S**2*(E[0]**2-4*N[0]**2)+2*(-2*E[0]**4+E[0]**2*N[0]**2+3*N[0]**4)*U[0]**2-(E[0]**2-2*N[0]**2)*U[0]**4)*E[2]-N[0]*U[0]*(3*S**2*(3*E[0]**2-2*N[0]**2)+2*S*(6*E[0]**2+N[0]**2)*U[0]**2+3*E[0]**2*U[0]**4)*N[2]+S*(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*U[2]))+6*E[1]**2*(U[0]*(-3*S**3*(4*E[0]**4-27*E[0]**2*N[0]**2+4*N[0]**4)-3*S**2*(6*E[0]**4-23*E[0]**2*N[0]**2+6*N[0]**4)*U[0]**2-S*(8*E[0]**4-47*E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4+(-2*E[0]**4+11*E[0]**2*N[0]**2-2*N[0]**4)*U[0]**6)*N[1]**2+2*S*N[0]*(-3*S**3*(4*E[0]**2-N[0]**2)+3*S**2*(14*E[0]**2-N[0]**2)*U[0]**2+(8*E[0]**4+E[0]**2*N[0]**2-7*N[0]**4)*U[0]**4+(2*E[0]**2-N[0]**2)*U[0]**6)*N[1]*U[1]+S*(-2*S*U[0]*(3*S**2*(4*E[0]**2-N[0]**2)-2*S*(6*E[0]**2+N[0]**2)*U[0]**2+N[0]**2*U[0]**4)*U[1]**2+(S+U[0]**2)*(E[0]*U[0]*(-3*S**2*(2*E[0]**2-3*N[0]**2)+2*S*(E[0]**2+6*N[0]**2)*U[0]**2+3*N[0]**2*U[0]**4)*E[2]+N[0]*U[0]*(-3*S**2*(4*E[0]**2-N[0]**2)-2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)*U[0]**2+(-2*E[0]**2+N[0]**2)*U[0]**4)*N[2]+S*(2*E
[0]**6+3*E[0]**4*N[0]**2-N[0]**6-6*S*E[0]**2*U[0]**2+N[0]**2*U[0]**4)*U[2])))-4*S**2*N[1]*(-6*S*N[0]*(S**2-6*S*U[0]**2+U[0]**4)*U[1]**3+3*(S+U[0]**2)*U[1]*(E[0]*N[0]*(-3*S**2+6*S*U[0]**2+U[0]**4)*E[2]+(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*N[2]-2*S*N[0]*U[0]*(3*S-U[0]**2)*U[2])+(S+U[0]**2)**2*(-(E[0]*N[0]*U[0]*(3*S+U[0]**2)*E[3])+U[0]*(E[0]**4-2*N[0]**4+E[0]**2*(-N[0]**2+U[0]**2))*N[3]+S*N[0]*(S-U[0]**2)*U[3]))-4*E[1]*(3*E[0]*N[0]*U[0]*(5*S**3*(3*E[0]**2-4*N[0]**2)+5*S**2*(5*E[0]**2-2*N[0]**2)*U[0]**2+S*(13*E[0]**2-8*N[0]**2)*U[0]**4+(3*E[0]**2-2*N[0]**2)*U[0]**6)*N[1]**3-3*S*E[0]*(3*S**3*(E[0]**2-4*N[0]**2)-3*S**2*(E[0]**2-14*N[0]**2)*U[0]**2+(-7*E[0]**4+E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4-(E[0]**2-2*N[0]**2)*U[0]**6)*N[1]**2*U[1]-3*S*N[1]*(-2*S*E[0]*N[0]*U[0]*(15*S**2-10*S*U[0]**2-U[0]**4)*U[1]**2+(S+U[0]**2)*(N[0]*U[0]*(-3*S**2*(4*E[0]**2-N[0]**2)-2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)*U[0]**2+(-2*E[0]**2+N[0]**2)*U[0]**4)*E[2]+E[0]*U[0]*(3*S**2*(E[0]**2-4*N[0]**2)+2*S*(2*E[0]**2-3*N[0]**2)*U[0]**2+(E[0]**2-2*N[0]**2)*U[0]**4)*N[2]+S*E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*U[2]))+S**2*(-6*S*E[0]*(S**2-6*S*U[0]**2+U[0]**4)*U[1]**3-3*(S+U[0]**2)*U[1]*((2*E[0]**6+3*E[0]**4*N[0]**2-N[0]**6-6*S*E[0]**2*U[0]**2+N[0]**2*U[0]**4)*E[2]+E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*N[2]+2*S*E[0]*U[0]*(3*S-U[0]**2)*U[2])+(S+U[0]**2)**2*(U[0]*(-2*E[0]**4-E[0]**2*N[0]**2+N[0]**4+N[0]**2*U[0]**2)*E[3]-E[0]*N[0]*U[0]*(3*S+U[0]**2)*N[3]+S*E[0]*(S-U[0]**2)*U[3])))+S**2*(24*S**2*U[0]*(S-U[0]**2)*U[1]**4+12*S*(S+U[0]**2)*U[1]**2*(U[0]*(3*S-U[0]**2)*(E[0]*E[2]+N[0]*N[2])-S*(S-3*U[0]**2)*U[2])+4*S*(S+U[0]**2)**2*U[1]*(-((S-U[0]**2)*(E[0]*E[3]+N[0]*N[3]))-2*S*U[0]*U[3])+(S+U[0]**2)**2*(-3*U[0]*(-2*E[0]**4-E[0]**2*N[0]**2+N[0]**4+N[0]**2*U[0]**2)*E[2]**2-3*U[0]*(E[0]**4-2*N[0]**4+E[0]**2*(-N[0]**2+U[0]**2))*N[2]**2-6*S*N[0]*(S-U[0]**2)*N[2]*U[2]+6*E[0]*E[2]*(N[0]*U[0]*(3*S+U[0]**2)*N[2]-S*(S-U[0]**2)*U[2])+S*(-6*S*U[0]*U[2]**2+(S+U[0]**2)*(-(U[0]*(E
[0]*E[4]+N[0]*N[4]))+S*U[4])))))/(S**3.5*(S+U[0]**2)**4);
def d5ElevationdENU5(self, E : vector, N : vector, U : vector) -> array:
    # Fifth derivative of elevation w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    # (Line wraps below fall inside the parenthesized expression and are
    # legal Python continuations.)
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-3*E[0]*U[0]*(5*S**4*(8*E[0]**4-40*E[0]**2*N[0]**2+15*N[0]**4)-20*S**3*(4*E[0]**4+15*E[0]**2*N[0]**2-10*N[0]**4)*U[0]**2+2*S**2*(4*E[0]**4-90*E[0]**2*N[0]**2+95*N[0]**4)*U[0]**4+20*S*N[0]**2*(-5*E[0]**2+4*N[0]**2)*U[0]**6+5*N[0]**2*(-4*E[0]**2+3*N[0]**2)*U[0]**8)*E[1]**5-3*N[0]*U[0]*(5*S**4*(15*E[0]**4-40*E[0]**2*N[0]**2+8*N[0]**4)+20*S**3*(10*E[0]**4-15*E[0]**2*N[0]**2-4*N[0]**4)*U[0]**2+2*S**2*(95*E[0]**4-90*E[0]**2*N[0]**2+4*N[0]**4)*U[0]**4+20*S*E[0]**2*(4*E[0]**2-5*N[0]**2)*U[0]**6+5*E[0]**2*(3*E[0]**2-4*N[0]**2)*U[0]**8)*N[1]**5+15*S*(S**4*(3*E[0]**4-24*E[0]**2*N[0]**2+8*N[0]**4)+20*S**3*N[0]**2*(3*E[0]**2-4*N[0]**2)*U[0]**2-10*S**2*(E[0]**4-10*E[0]**2*N[0]**2-4*N[0]**4)*U[0]**4+4*E[0]**2*(-2*E[0]**4+3*E[0]**2*N[0]**2+5*N[0]**4)*U[0]**6-E[0]**2*(E[0]**2-4*N[0]**2)*U[0]**8)*N[1]**4*U[1]+15*E[1]**4*(N[0]*U[0]*(-15*S**4*(8*E[0]**4-12*E[0]**2*N[0]**2+N[0]**4)-20*S**3*(2*E[0]**4-17*E[0]**2*N[0]**2+2*N[0]**4)*U[0]**2-2*S**2*(40*E[0]**4-130*E[0]**2*N[0]**2+19*N[0]**4)*U[0]**4-4*S*(10*E[0]**4-31*E[0]**2*N[0]**2+4*N[0]**4)*U[0]**6+(-8*E[0]**4+24*E[0]**2*N[0]**2-3*N[0]**4)*U[0]**8)*N[1]+S*(S**4*(8*E[0]**4-24*E[0]**2*N[0]**2+3*N[0]**4)-20*S**3*E[0]**2*(4*E[0]**2-3*N[0]**2)*U[0]**2+10*S**2*(4*E[0]**4+10*E[0]**2*N[0]**2-N[0]**4)*U[0]**4+4*S*N[0]**2*(5*E[0]**2-2*N[0]**2)*U[0]**6-N[0]**2*(-4*E[0]**2+N[0]**2)*U[0]**8)*U[1])+30*S*N[1]**3*(2*S*N[0]*U[0]*(-5*S**3*(3*E[0]**2-4*N[0]**2)-5*S**2*(E[0]**2+8*N[0]**2)*U[0]**2+S*(11*E[0]**2+4*N[0]**2)*U[0]**4+E[0]**2*U[0]**6)*U[1]**2+(S+U[0]**2)*(-(E[0]*N[0]*U[0]*(5*S**3*(3*E[0]**2-4*N[0]**2)+5*S**2*(5*E[0]**2-2*N[0]**2)*U[0]**2+S*(13*E[0]**2-8*N[0]**2)*U[0]**4+(3*E[0]**2-2*N[0]**2)*U[0]**6)*E[2])+U[0]*(S**3*(3*E[0]**4-24*E[0]**2*N[0]**2+8*N[0]**4)+S**2*(7*E[0]**4-36*E[0]**2*N[0]**2-8*N[0]**4)*U[0]**2+S*E[0]**2*(5*E[0]**2-16*N[0]**2)*U[0]**4+E[0]**2*(E[0]**2-4*N[0]**2)*U[0]**6)*N[2]-S*N[0]*(-(S**3*(3*E[0]**2-2*N[0]**2))+3*S**2*(E[0]**2-4*N[0]**2)*U[0]**2+S*(7*E[0]**2+2*N[0]**2)*U[0]**4+E[0]**2*U[0]**6)*U[2]))-30*E[1]**3*(E[0]*U[
0]*(-5*S**4*(4*E[0]**4-41*E[0]**2*N[0]**2+18*N[0]**4)-10*S**3*(3*E[0]**4-22*E[0]**2*N[0]**2+17*N[0]**4)*U[0]**2-2*S**2*(9*E[0]**4-115*E[0]**2*N[0]**2+65*N[0]**4)*U[0]**4-2*S*(5*E[0]**4-54*E[0]**2*N[0]**2+31*N[0]**4)*U[0]**6+(-2*E[0]**4+21*E[0]**2*N[0]**2-12*N[0]**4)*U[0]**8)*N[1]**2-2*S*E[0]*N[0]*(5*S**4*(4*E[0]**2-3*N[0]**2)-10*S**3*(11*E[0]**2-3*N[0]**2)*U[0]**2-10*S**2*(E[0]**2-6*N[0]**2)*U[0]**4-2*S*(5*E[0]**2-9*N[0]**2)*U[0]**6+(-2*E[0]**2+3*N[0]**2)*U[0]**8)*N[1]*U[1]+S*(-2*S*E[0]*U[0]*(5*S**3*(4*E[0]**2-3*N[0]**2)-5*S**2*(8*E[0]**2+N[0]**2)*U[0]**2+S*(4*E[0]**2+11*N[0]**2)*U[0]**4+N[0]**2*U[0]**6)*U[1]**2+(S+U[0]**2)*(U[0]*(-(S**3*(8*E[0]**4-24*E[0]**2*N[0]**2+3*N[0]**4))+S**2*(8*E[0]**4+36*E[0]**2*N[0]**2-7*N[0]**4)*U[0]**2+S*N[0]**2*(16*E[0]**2-5*N[0]**2)*U[0]**4-N[0]**2*(-4*E[0]**2+N[0]**2)*U[0]**6)*E[2]+E[0]*N[0]*U[0]*(-5*S**3*(4*E[0]**2-3*N[0]**2)-5*S**2*(2*E[0]**2-5*N[0]**2)*U[0]**2+(-8*E[0]**4+5*E[0]**2*N[0]**2+13*N[0]**4)*U[0]**4+(-2*E[0]**2+3*N[0]**2)*U[0]**6)*N[2]+S*E[0]*(S**3*(2*E[0]**2-3*N[0]**2)-3*S**2*(4*E[0]**2-N[0]**2)*U[0]**2+S*(2*E[0]**2+7*N[0]**2)*U[0]**4+N[0]**2*U[0]**6)*U[2])))-10*S**2*N[1]**2*(-6*S*(S**3*(E[0]**2-4*N[0]**2)-5*S**2*(E[0]**2-8*N[0]**2)*U[0]**2-5*S*(E[0]**2+4*N[0]**2)*U[0]**4+E[0]**2*U[0]**6)*U[1]**3-3*(S+U[0]**2)*U[1]*(E[0]*(3*S**3*(E[0]**2-4*N[0]**2)-3*S**2*(E[0]**2-14*N[0]**2)*U[0]**2+(-7*E[0]**4+E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4-(E[0]**2-2*N[0]**2)*U[0]**6)*E[2]-3*N[0]*(-(S**3*(3*E[0]**2-2*N[0]**2))+3*S**2*(E[0]**2-4*N[0]**2)*U[0]**2+S*(7*E[0]**2+2*N[0]**2)*U[0]**4+E[0]**2*U[0]**6)*N[2]+2*S*U[0]*(3*S**2*(E[0]**2-4*N[0]**2)+2*S*(E[0]**2+6*N[0]**2)*U[0]**2-E[0]**2*U[0]**4)*U[2])+(S+U[0]**2)**2*(E[0]*U[0]*(-3*S**2*(E[0]**2-4*N[0]**2)+2*(-2*E[0]**4+E[0]**2*N[0]**2+3*N[0]**4)*U[0]**2-(E[0]**2-2*N[0]**2)*U[0]**4)*E[3]-N[0]*U[0]*(3*S**2*(3*E[0]**2-2*N[0]**2)+2*S*(6*E[0]**2+N[0]**2)*U[0]**2+3*E[0]**2*U[0]**4)*N[3]+S*(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*U[3]))+10*E[1]**2*(3*N[0]*U[
0]*(5*S**4*(18*E[0]**4-41*E[0]**2*N[0]**2+4*N[0]**4)+10*S**3*(17*E[0]**4-22*E[0]**2*N[0]**2+3*N[0]**4)*U[0]**2+2*S**2*(65*E[0]**4-115*E[0]**2*N[0]**2+9*N[0]**4)*U[0]**4+2*S*(31*E[0]**4-54*E[0]**2*N[0]**2+5*N[0]**4)*U[0]**6+(12*E[0]**4-21*E[0]**2*N[0]**2+2*N[0]**4)*U[0]**8)*N[1]**3-3*S*(3*S**4*(4*E[0]**4-27*E[0]**2*N[0]**2+4*N[0]**4)-30*S**3*(E[0]**4-12*E[0]**2*N[0]**2+N[0]**4)*U[0]**2-10*S**2*(5*E[0]**4-11*E[0]**2*N[0]**2+5*N[0]**4)*U[0]**4-2*S*(5*E[0]**4-32*E[0]**2*N[0]**2+5*N[0]**4)*U[0]**6+(-2*E[0]**4+11*E[0]**2*N[0]**2-2*N[0]**4)*U[0]**8)*N[1]**2*U[1]-3*S*N[1]*(-2*S*N[0]*U[0]*(15*S**3*(6*E[0]**2-N[0]**2)-5*S**2*(22*E[0]**2+N[0]**2)*U[0]**2+(-10*E[0]**4+E[0]**2*N[0]**2+11*N[0]**4)*U[0]**4+(-2*E[0]**2+N[0]**2)*U[0]**6)*U[1]**2+(S+U[0]**2)*(3*E[0]*N[0]*U[0]*(-5*S**3*(4*E[0]**2-3*N[0]**2)-5*S**2*(2*E[0]**2-5*N[0]**2)*U[0]**2+(-8*E[0]**4+5*E[0]**2*N[0]**2+13*N[0]**4)*U[0]**4+(-2*E[0]**2+3*N[0]**2)*U[0]**6)*E[2]+U[0]*(3*S**3*(4*E[0]**4-27*E[0]**2*N[0]**2+4*N[0]**4)+3*S**2*(6*E[0]**4-23*E[0]**2*N[0]**2+6*N[0]**4)*U[0]**2+S*(8*E[0]**4-47*E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4+(2*E[0]**4-11*E[0]**2*N[0]**2+2*N[0]**4)*U[0]**6)*N[2]+S*N[0]*(3*S**3*(4*E[0]**2-N[0]**2)-3*S**2*(14*E[0]**2-N[0]**2)*U[0]**2-(8*E[0]**4+E[0]**2*N[0]**2-7*N[0]**4)*U[0]**4+(-2*E[0]**2+N[0]**2)*U[0]**6)*U[2]))+S**2*(-6*S*(S**3*(4*E[0]**2-N[0]**2)-5*S**2*(8*E[0]**2-N[0]**2)*U[0]**2+5*S*(4*E[0]**2+N[0]**2)*U[0]**4-N[0]**2*U[0]**6)*U[1]**3-3*(S+U[0]**2)*U[1]*(3*E[0]*(S**3*(2*E[0]**2-3*N[0]**2)-3*S**2*(4*E[0]**2-N[0]**2)*U[0]**2+S*(2*E[0]**2+7*N[0]**2)*U[0]**4+N[0]**2*U[0]**6)*E[2]+N[0]*(3*S**3*(4*E[0]**2-N[0]**2)-3*S**2*(14*E[0]**2-N[0]**2)*U[0]**2-(8*E[0]**4+E[0]**2*N[0]**2-7*N[0]**4)*U[0]**4+(-2*E[0]**2+N[0]**2)*U[0]**6)*N[2]+2*S*U[0]*(3*S**2*(4*E[0]**2-N[0]**2)-2*S*(6*E[0]**2+N[0]**2)*U[0]**2+N[0]**2*U[0]**4)*U[2])+(S+U[0]**2)**2*(E[0]*U[0]*(-3*S**2*(2*E[0]**2-3*N[0]**2)+2*S*(E[0]**2+6*N[0]**2)*U[0]**2+3*N[0]**2*U[0]**4)*E[3]+N[0]*U[0]*(-3*S**2*(4*E[0]**2-N[0]**2)-2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)
*U[0]**2+(-2*E[0]**2+N[0]**2)*U[0]**4)*N[3]+S*(2*E[0]**6+3*E[0]**4*N[0]**2-N[0]**6-6*S*E[0]**2*U[0]**2+N[0]**2*U[0]**4)*U[3])))-5*S**2*N[1]*(24*S**2*N[0]*U[0]*(5*S**2-10*S*U[0]**2+U[0]**4)*U[1]**4-12*S*(S+U[0]**2)*U[1]**2*(E[0]*N[0]*U[0]*(-15*S**2+10*S*U[0]**2+U[0]**4)*E[2]+U[0]*(3*S**2*(E[0]**2-4*N[0]**2)+2*S*(E[0]**2+6*N[0]**2)*U[0]**2-E[0]**2*U[0]**4)*N[2]+3*S*N[0]*(S**2-6*S*U[0]**2+U[0]**4)*U[2])+4*S*(S+U[0]**2)**2*U[1]*(E[0]*N[0]*(-3*S**2+6*S*U[0]**2+U[0]**4)*E[3]+(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*N[3]-2*S*N[0]*U[0]*(3*S-U[0]**2)*U[3])+(S+U[0]**2)**2*(3*N[0]*U[0]*(3*S**2*(4*E[0]**2-N[0]**2)+2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)*U[0]**2+(2*E[0]**2-N[0]**2)*U[0]**4)*E[2]**2-3*N[0]*U[0]*(3*S**2*(3*E[0]**2-2*N[0]**2)+2*S*(6*E[0]**2+N[0]**2)*U[0]**2+3*E[0]**2*U[0]**4)*N[2]**2+6*S*(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*N[2]*U[2]+6*E[2]*(E[0]*U[0]*(-3*S**2*(E[0]**2-4*N[0]**2)+2*(-2*E[0]**4+E[0]**2*N[0]**2+3*N[0]**4)*U[0]**2-(E[0]**2-2*N[0]**2)*U[0]**4)*N[2]-S*E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*U[2])+S*(-6*S*N[0]*U[0]*(3*S-U[0]**2)*U[2]**2+U[0]*(S+U[0]**2)*(-(E[0]*N[0]*(3*S+U[0]**2)*E[4])+(E[0]**4-2*N[0]**4+E[0]**2*(-N[0]**2+U[0]**2))*N[4])+S*N[0]*(S**2-U[0]**4)*U[4])))-5*E[1]*(3*E[0]*U[0]*(15*S**4*(E[0]**4-12*E[0]**2*N[0]**2+8*N[0]**4)+20*S**3*(2*E[0]**4-17*E[0]**2*N[0]**2+2*N[0]**4)*U[0]**2+2*S**2*(19*E[0]**4-130*E[0]**2*N[0]**2+40*N[0]**4)*U[0]**4+4*S*(4*E[0]**4-31*E[0]**2*N[0]**2+10*N[0]**4)*U[0]**6+(3*E[0]**4-24*E[0]**2*N[0]**2+8*N[0]**4)*U[0]**8)*N[1]**4+12*S*E[0]*N[0]*(5*S**4*(3*E[0]**2-4*N[0]**2)-10*S**3*(3*E[0]**2-11*N[0]**2)*U[0]**2-10*S**2*(6*E[0]**2-N[0]**2)*U[0]**4-2*S*(9*E[0]**2-5*N[0]**2)*U[0]**6+(-3*E[0]**2+2*N[0]**2)*U[0]**8)*N[1]**3*U[1]-6*S*N[1]**2*(-2*S*E[0]*U[0]*(15*S**3*(E[0]**2-6*N[0]**2)+5*S**2*(E[0]**2+22*N[0]**2)*U[0]**2-(11*E[0]**4+E[0]**2*N[0]**2-10*N[0]**4)*U[0]**4-(E[0]**2-2*N[0]**2)*U[0]**6)*U[1]**2+(S+U[0]**2)*(U[0]*(-3*S**3*(4*E[0]**4-27
*E[0]**2*N[0]**2+4*N[0]**4)-3*S**2*(6*E[0]**4-23*E[0]**2*N[0]**2+6*N[0]**4)*U[0]**2-S*(8*E[0]**4-47*E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4+(-2*E[0]**4+11*E[0]**2*N[0]**2-2*N[0]**4)*U[0]**6)*E[2]-3*E[0]*N[0]*U[0]*(5*S**3*(3*E[0]**2-4*N[0]**2)+5*S**2*(5*E[0]**2-2*N[0]**2)*U[0]**2+S*(13*E[0]**2-8*N[0]**2)*U[0]**4+(3*E[0]**2-2*N[0]**2)*U[0]**6)*N[2]+S*E[0]*(3*S**3*(E[0]**2-4*N[0]**2)-3*S**2*(E[0]**2-14*N[0]**2)*U[0]**2+(-7*E[0]**4+E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4-(E[0]**2-2*N[0]**2)*U[0]**6)*U[2]))-4*S**2*N[1]*(-6*S*E[0]*N[0]*(5*S**3-45*S**2*U[0]**2+15*S*U[0]**4+U[0]**6)*U[1]**3+3*(S+U[0]**2)*U[1]*(N[0]*(-3*S**3*(4*E[0]**2-N[0]**2)+3*S**2*(14*E[0]**2-N[0]**2)*U[0]**2+(8*E[0]**4+E[0]**2*N[0]**2-7*N[0]**4)*U[0]**4+(2*E[0]**2-N[0]**2)*U[0]**6)*E[2]+E[0]*(3*S**3*(E[0]**2-4*N[0]**2)-3*S**2*(E[0]**2-14*N[0]**2)*U[0]**2+(-7*E[0]**4+E[0]**2*N[0]**2+8*N[0]**4)*U[0]**4-(E[0]**2-2*N[0]**2)*U[0]**6)*N[2]-2*S*E[0]*N[0]*U[0]*(15*S**2-10*S*U[0]**2-U[0]**4)*U[2])+(S+U[0]**2)**2*(N[0]*U[0]*(-3*S**2*(4*E[0]**2-N[0]**2)-2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)*U[0]**2+(-2*E[0]**2+N[0]**2)*U[0]**4)*E[3]+E[0]*U[0]*(3*S**2*(E[0]**2-4*N[0]**2)+2*S*(2*E[0]**2-3*N[0]**2)*U[0]**2+(E[0]**2-2*N[0]**2)*U[0]**4)*N[3]+S*E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*U[3]))+S**2*(24*S**2*E[0]*U[0]*(5*S**2-10*S*U[0]**2+U[0]**4)*U[1]**4-12*S*(S+U[0]**2)*U[1]**2*(U[0]*(-3*S**2*(4*E[0]**2-N[0]**2)+2*S*(6*E[0]**2+N[0]**2)*U[0]**2-N[0]**2*U[0]**4)*E[2]+E[0]*N[0]*U[0]*(-15*S**2+10*S*U[0]**2+U[0]**4)*N[2]+3*S*E[0]*(S**2-6*S*U[0]**2+U[0]**4)*U[2])-4*S*(S+U[0]**2)**2*U[1]*((2*E[0]**6+3*E[0]**4*N[0]**2-N[0]**6-6*S*E[0]**2*U[0]**2+N[0]**2*U[0]**4)*E[3]+E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*N[3]+2*S*E[0]*U[0]*(3*S-U[0]**2)*U[3])+(S+U[0]**2)**2*(3*E[0]*U[0]*(3*S**2*(2*E[0]**2-3*N[0]**2)-2*S*(E[0]**2+6*N[0]**2)*U[0]**2-3*N[0]**2*U[0]**4)*E[2]**2+3*E[0]*U[0]*(-3*S**2*(E[0]**2-4*N[0]**2)+2*(-2*E[0]**4+E[0]**2*N[0]**2+3*N[0]**4)*U[0]**2-(E[0]**2-2*N[0]**2)*U[0]**4)*N[2]**2-6*S*E[0]*N[0]*(3*S**2-6*S*U[0]**2-U[0]**4)*N[2]*U[2]+6*E
[2]*(N[0]*U[0]*(3*S**2*(4*E[0]**2-N[0]**2)+2*(3*E[0]**4+E[0]**2*N[0]**2-2*N[0]**4)*U[0]**2+(2*E[0]**2-N[0]**2)*U[0]**4)*N[2]-S*(2*E[0]**6+3*E[0]**4*N[0]**2-N[0]**6-6*S*E[0]**2*U[0]**2+N[0]**2*U[0]**4)*U[2])+S*(-6*S*E[0]*U[0]*(3*S-U[0]**2)*U[2]**2-U[0]*(S+U[0]**2)*((2*E[0]**4+E[0]**2*N[0]**2-N[0]**2*(N[0]**2+U[0]**2))*E[4]+E[0]*N[0]*(3*S+U[0]**2)*N[4])+S*E[0]*(S**2-U[0]**4)*U[4]))))+S**3*(24*S**2*(S**2-10*S*U[0]**2+5*U[0]**4)*U[1]**5+60*S*(S+U[0]**2)*U[1]**3*((S**2-6*S*U[0]**2+U[0]**4)*(E[0]*E[2]+N[0]*N[2])+4*S*U[0]*(S-U[0]**2)*U[2])+20*S*(S+U[0]**2)**2*U[1]**2*(U[0]*(3*S-U[0]**2)*(E[0]*E[3]+N[0]*N[3])-S*(S-3*U[0]**2)*U[3])-5*(S+U[0]**2)**2*U[1]*(3*(-2*E[0]**6+N[0]**6+6*E[0]**2*N[0]**2*U[0]**2-N[0]**2*U[0]**4-3*E[0]**4*(N[0]**2-2*U[0]**2))*E[2]**2+3*(E[0]**6-2*N[0]**6+6*N[0]**4*U[0]**2-E[0]**2*(3*N[0]**4-6*N[0]**2*U[0]**2+U[0]**4))*N[2]**2-12*S*N[0]*U[0]*(3*S-U[0]**2)*N[2]*U[2]+6*E[0]*E[2]*(N[0]*(-3*S**2+6*S*U[0]**2+U[0]**4)*N[2]+2*S*U[0]*(-3*S+U[0]**2)*U[2])+S*(6*S*(S-3*U[0]**2)*U[2]**2+(S**2-U[0]**4)*(E[0]*E[4]+N[0]*N[4])+2*S*U[0]*(S+U[0]**2)*U[4]))+(S+U[0]**2)**3*(10*E[2]*(-(U[0]*(-2*E[0]**4-E[0]**2*N[0]**2+N[0]**4+N[0]**2*U[0]**2)*E[3])+E[0]*N[0]*U[0]*(3*S+U[0]**2)*N[3]-S*E[0]*(S-U[0]**2)*U[3])+10*N[2]*(E[0]*N[0]*U[0]*(3*S+U[0]**2)*E[3]-U[0]*(E[0]**4-2*N[0]**4+E[0]**2*(-N[0]**2+U[0]**2))*N[3]-S*N[0]*(S-U[0]**2)*U[3])+S*(10*U[2]*(-((S-U[0]**2)*(E[0]*E[3]+N[0]*N[3]))-2*S*U[0]*U[3])+(S+U[0]**2)*(-(U[0]*(E[0]*E[5]+N[0]*N[5]))+S*U[5])))))/(S**4.5*(S+U[0]**2)**5);
def d1RangedENU1(self, E : vector, N : vector, U : vector) -> array:
    # First derivative of range w.r.t. the ENU derivative vectors.
    # NOTE(review): S appears unused here; presumably retained for the
    # transpiler that consumes the '''@S : float''' annotation.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    dot = E[0]*E[1] + N[0]*N[1] + U[0]*U[1]
    return dot / sqrt(E[0]**2 + N[0]**2 + U[0]**2)
def d2RangedENU2(self, E : vector, N : vector, U : vector) -> array:
    # Second derivative of range w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))/(2.*(E[0]**2+N[0]**2+U[0]**2)**1.5);
def d3RangedENU3(self, E : vector, N : vector, U : vector) -> array:
    # Third derivative of range w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-6*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3]))/(4.*(E[0]**2+N[0]**2+U[0]**2)**2.5);
def d4RangedENU4(self, E : vector, N : vector, U : vector) -> array:
    # Fourth derivative of range w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-10*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-6*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3]))+2*(E[0]**2+N[0]**2+U[0]**2)*(-6*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[2]**2+3*N[2]**2+3*U[2]**2+4*E[1]*E[3]+4*N[1]*N[3]+4*U[1]*U[3]+E[0]*E[4]+N[0]*N[4]+U[0]*U[4])))/(8.*(E[0]**2+N[0]**2+U[0]**2)**3.5);
def d5RangedENU5(self, E : vector, N : vector, U : vector) -> array:
    # Fifth derivative of range w.r.t. the ENU derivative vectors;
    # machine-generated closed form -- do not hand-simplify.
    # (The line wrap below falls inside the parenthesized expression and is
    # a legal Python continuation.)
    '''@S : float'''
    S = E[0]**2 + N[0]**2
    return (-14*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-10*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-6*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3]))+2*(E[0]**2+N[0]**2+U[0]**2)*(-6*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[2]**2+3*N[2]**2+3*U[2]**2+4*E[1]*E[3]+4*N[1]*N[3]+4*U[1]*U[3]+E[0]*E[4]+N[0]*N[4]+U[0]*U[4])))+4*(E[0]**2+N[0]**2+U[0]**2)*(-5*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2])*(-6*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3]))-3*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(-6*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2])*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))+4*(E[0]**2+N[0]**2+U[0]**2)*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])+4*(E[0]**2+N[0]**2+U[0]**2)**2*(3*E[2]**2+3*N[2]**2+3*U[2]**2+4*E[1]*E[3]+4*N[1]*N[3]+4*U[1]*U[3]+E[0]*E[4]+N[0]*N[4]+U[0]*U[4]))+(E[0]**2+N[0]**2+U[0]**2)*(8*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])-8*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2])*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])-6*(-2*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])**2+2*(E[0]**2+N[0]**2+U[0]**2)*(E[1]**2+N[
1]**2+U[1]**2+E[0]*E[2]+N[0]*N[2]+U[0]*U[2]))*(3*E[1]*E[2]+3*N[1]*N[2]+3*U[1]*U[2]+E[0]*E[3]+N[0]*N[3]+U[0]*U[3])+20*(E[0]**2+N[0]**2+U[0]**2)*(E[0]*E[1]+N[0]*N[1]+U[0]*U[1])*(3*E[2]**2+3*N[2]**2+3*U[2]**2+4*E[1]*E[3]+4*N[1]*N[3]+4*U[1]*U[3]+E[0]*E[4]+N[0]*N[4]+U[0]*U[4])+4*(E[0]**2+N[0]**2+U[0]**2)**2*(10*E[2]*E[3]+10*N[2]*N[3]+10*U[2]*U[3]+5*E[1]*E[4]+5*N[1]*N[4]+5*U[1]*U[4]+E[0]*E[5]+N[0]*N[5]+U[0]*U[5]))))/(16.*(E[0]**2+N[0]**2+U[0]**2)**4.5);
def d1EastdAER1(self, A : vector, E : vector, R : vector) -> array:
return R[0]*cos(A[0])*cos(E[0])*A[1]+sin(A[0])*(cos(E[0])*R[1]-R[0]*E[1]*sin(E[0]));
def d2EastdAER2(self, A : vector, E : vector, R : vector) -> array:
return -(sin(A[0])*(cos(E[0])*(R[0]*(A[1]**2+E[1]**2)-R[2])+(2*E[1]*R[1]+R[0]*E[2])*sin(E[0])))+cos(A[0])*(R[0]*cos(E[0])*A[2]+2*A[1]*(cos(E[0])*R[1]-R[0]*E[1]*sin(E[0])));
def d3EastdAER3(self, A : vector, E : vector, R : vector) -> array:
return cos(E[0])*(cos(A[0])*(-(R[0]*A[1]**3)+3*R[1]*A[2]+3*A[1]*(-(R[0]*E[1]**2)+R[2])+R[0]*A[3])+(-3*(A[1]**2*R[1]+R[0]*A[1]*A[2]+E[1]*(E[1]*R[1]+R[0]*E[2]))+R[3])*sin(A[0]))-(3*cos(A[0])*(R[0]*E[1]*A[2]+A[1]*(2*E[1]*R[1]+R[0]*E[2]))+(-3*R[0]*A[1]**2*E[1]-R[0]*E[1]**3+3*R[1]*E[2]+3*E[1]*R[2]+R[0]*E[3])*sin(A[0]))*sin(E[0]);
def d4EastdAER4(self, A : vector, E : vector, R : vector) -> array:
return R[0]*cos(E[0])*A[1]**4*sin(A[0])+cos(E[0])*(cos(A[0])*(-6*R[0]*E[1]**2*A[2]+6*A[2]*R[2]+4*R[1]*A[3]+R[0]*A[4])+(R[0]*E[1]**4-3*R[0]*(A[2]**2+E[2]**2)-6*E[1]**2*R[2]-4*E[1]*(3*R[1]*E[2]+R[0]*E[3])+R[4])*sin(A[0]))-(6*cos(A[0])*A[2]*(2*E[1]*R[1]+R[0]*E[2])+4*R[0]*cos(A[0])*E[1]*A[3]+(-4*E[1]**3*R[1]-6*R[0]*E[1]**2*E[2]+6*E[2]*R[2]+4*R[1]*E[3]+4*E[1]*R[3]+R[0]*E[4])*sin(A[0]))*sin(E[0])+4*cos(A[0])*A[1]**3*(-(cos(E[0])*R[1])+R[0]*E[1]*sin(E[0]))+6*A[1]**2*(-(cos(E[0])*(R[0]*cos(A[0])*A[2]+(-(R[0]*E[1]**2)+R[2])*sin(A[0])))+(2*E[1]*R[1]+R[0]*E[2])*sin(A[0])*sin(E[0]))+4*A[1]*(-(cos(E[0])*(cos(A[0])*(3*E[1]*(E[1]*R[1]+R[0]*E[2])-R[3])+(3*R[1]*A[2]+R[0]*A[3])*sin(A[0])))+(cos(A[0])*(R[0]*E[1]**3-3*R[1]*E[2]-3*E[1]*R[2]-R[0]*E[3])+3*R[0]*E[1]*A[2]*sin(A[0]))*sin(E[0]));
def d5EastdAER5(self, A : vector, E : vector, R : vector) -> array:
return R[0]*cos(A[0])*cos(E[0])*A[1]**5+cos(E[0])*(cos(A[0])*(-30*R[0]*E[1]*A[2]*E[2]+10*R[2]*A[3]-10*E[1]**2*(3*R[1]*A[2]+R[0]*A[3])+10*A[2]*R[3]+5*R[1]*A[4]+R[0]*A[5])+(5*(E[1]**4*R[1]+2*R[0]*E[1]**3*E[2]-3*R[1]*(A[2]**2+E[2]**2)-2*R[0]*(A[2]*A[3]+E[2]*E[3])-2*E[1]**2*R[3]-E[1]*(6*E[2]*R[2]+4*R[1]*E[3]+R[0]*E[4]))+R[5])*sin(A[0]))-(5*cos(A[0])*(-2*R[0]*E[1]**3*A[2]+2*E[2]*(3*R[1]*A[2]+R[0]*A[3])+2*R[0]*A[2]*E[3]+E[1]*(6*A[2]*R[2]+4*R[1]*A[3]+R[0]*A[4]))+(R[0]*E[1]**5-10*E[1]**3*R[2]+10*R[2]*E[3]-10*E[1]**2*(3*R[1]*E[2]+R[0]*E[3])+10*E[2]*R[3]+5*R[1]*E[4]+5*E[1]*(-3*R[0]*(A[2]**2+E[2]**2)+R[4])+R[0]*E[5])*sin(A[0]))*sin(E[0])+5*A[1]**4*sin(A[0])*(cos(E[0])*R[1]-R[0]*E[1]*sin(E[0]))+10*A[1]**3*(cos(E[0])*(R[0]*cos(A[0])*E[1]**2-cos(A[0])*R[2]+R[0]*A[2]*sin(A[0]))+cos(A[0])*(2*E[1]*R[1]+R[0]*E[2])*sin(E[0]))+5*A[1]*(cos(E[0])*(cos(A[0])*(R[0]*E[1]**4-3*R[0]*(A[2]**2+E[2]**2)-6*E[1]**2*R[2]-4*E[1]*(3*R[1]*E[2]+R[0]*E[3])+R[4])+(6*A[2]*(R[0]*E[1]**2-R[2])-4*R[1]*A[3]-R[0]*A[4])*sin(A[0]))+(cos(A[0])*(4*E[1]**3*R[1]+6*R[0]*E[1]**2*E[2]-6*E[2]*R[2]-4*R[1]*E[3]-4*E[1]*R[3]-R[0]*E[4])+6*A[2]*(2*E[1]*R[1]+R[0]*E[2])*sin(A[0])+4*R[0]*E[1]*A[3]*sin(A[0]))*sin(E[0]))+10*A[1]**2*(cos(E[0])*(-(cos(A[0])*(3*R[1]*A[2]+R[0]*A[3]))+(3*E[1]*(E[1]*R[1]+R[0]*E[2])-R[3])*sin(A[0]))+(-(R[0]*E[1]**3*sin(A[0]))+(3*R[1]*E[2]+R[0]*E[3])*sin(A[0])+3*E[1]*(R[0]*cos(A[0])*A[2]+R[2]*sin(A[0])))*sin(E[0]));
def d1NorthdAER1(self, A : vector, E : vector, R : vector) -> array:
return cos(A[0])*cos(E[0])*R[1]-R[0]*(cos(E[0])*A[1]*sin(A[0])+cos(A[0])*E[1]*sin(E[0]));
def d2NorthdAER2(self, A : vector, E : vector, R : vector) -> array:
return -(cos(E[0])*(cos(A[0])*(R[0]*(A[1]**2+E[1]**2)-R[2])+(2*A[1]*R[1]+R[0]*A[2])*sin(A[0])))-(R[0]*cos(A[0])*E[2]+2*E[1]*(cos(A[0])*R[1]-R[0]*A[1]*sin(A[0])))*sin(E[0]);
def d3NorthdAER3(self, A : vector, E : vector, R : vector) -> array:
return cos(E[0])*(cos(A[0])*(-3*(A[1]**2*R[1]+R[0]*A[1]*A[2]+E[1]*(E[1]*R[1]+R[0]*E[2]))+R[3])+(R[0]*A[1]**3-3*R[1]*A[2]+3*A[1]*(R[0]*E[1]**2-R[2])-R[0]*A[3])*sin(A[0]))+(cos(A[0])*(3*R[0]*A[1]**2*E[1]+R[0]*E[1]**3-3*R[1]*E[2]-3*E[1]*R[2]-R[0]*E[3])+3*(R[0]*E[1]*A[2]+A[1]*(2*E[1]*R[1]+R[0]*E[2]))*sin(A[0]))*sin(E[0]);
def d4NorthdAER4(self, A : vector, E : vector, R : vector) -> array:
return cos(E[0])*(cos(A[0])*(R[0]*A[1]**4+R[0]*E[1]**4-3*R[0]*(A[2]**2+E[2]**2)+6*A[1]**2*(R[0]*E[1]**2-R[2])-6*E[1]**2*R[2]-4*A[1]*(3*R[1]*A[2]+R[0]*A[3])-4*E[1]*(3*R[1]*E[2]+R[0]*E[3])+R[4])+(4*A[1]**3*R[1]+6*R[0]*A[1]**2*A[2]+6*A[2]*(R[0]*E[1]**2-R[2])-4*R[1]*A[3]+4*A[1]*(3*E[1]*(E[1]*R[1]+R[0]*E[2])-R[3])-R[0]*A[4])*sin(A[0]))+(cos(A[0])*(4*E[1]**3*R[1]+12*R[0]*A[1]*E[1]*A[2]+6*R[0]*E[1]**2*E[2]+6*A[1]**2*(2*E[1]*R[1]+R[0]*E[2])-6*E[2]*R[2]-4*R[1]*E[3]-4*E[1]*R[3]-R[0]*E[4])+2*(-2*R[0]*A[1]**3*E[1]+3*A[2]*(2*E[1]*R[1]+R[0]*E[2])+2*R[0]*E[1]*A[3]+A[1]*(-2*R[0]*E[1]**3+6*R[1]*E[2]+6*E[1]*R[2]+2*R[0]*E[3]))*sin(A[0]))*sin(E[0]);
def d5NorthdAER5(self, A : vector, E : vector, R : vector) -> array:
return -(R[0]*cos(E[0])*A[1]**5*sin(A[0]))+5*cos(A[0])*A[1]**4*(cos(E[0])*R[1]-R[0]*E[1]*sin(E[0]))+sin(A[0])*(cos(E[0])*(30*R[0]*E[1]*A[2]*E[2]+10*E[1]**2*(3*R[1]*A[2]+R[0]*A[3])-5*(2*R[2]*A[3]+2*A[2]*R[3]+R[1]*A[4])-R[0]*A[5])+5*(-2*R[0]*E[1]**3*A[2]+2*E[2]*(3*R[1]*A[2]+R[0]*A[3])+2*R[0]*A[2]*E[3]+E[1]*(6*A[2]*R[2]+4*R[1]*A[3]+R[0]*A[4]))*sin(E[0]))+cos(A[0])*(cos(E[0])*(5*(E[1]**4*R[1]+2*R[0]*E[1]**3*E[2]-3*R[1]*(A[2]**2+E[2]**2)-2*R[0]*(A[2]*A[3]+E[2]*E[3])-2*E[1]**2*R[3]-E[1]*(6*E[2]*R[2]+4*R[1]*E[3]+R[0]*E[4]))+R[5])-(R[0]*E[1]**5-10*E[1]**3*R[2]+10*R[2]*E[3]-10*E[1]**2*(3*R[1]*E[2]+R[0]*E[3])+10*E[2]*R[3]+5*R[1]*E[4]+5*E[1]*(-3*R[0]*(A[2]**2+E[2]**2)+R[4])+R[0]*E[5])*sin(E[0]))+10*A[1]**3*(cos(E[0])*(R[0]*cos(A[0])*A[2]+(-(R[0]*E[1]**2)+R[2])*sin(A[0]))-(2*E[1]*R[1]+R[0]*E[2])*sin(A[0])*sin(E[0]))+5*A[1]*(cos(E[0])*(cos(A[0])*(6*A[2]*(R[0]*E[1]**2-R[2])-4*R[1]*A[3]-R[0]*A[4])+(-(R[0]*E[1]**4)+3*R[0]*(A[2]**2+E[2]**2)+6*E[1]**2*R[2]+4*E[1]*(3*R[1]*E[2]+R[0]*E[3])-R[4])*sin(A[0]))+(6*cos(A[0])*A[2]*(2*E[1]*R[1]+R[0]*E[2])+4*R[0]*cos(A[0])*E[1]*A[3]+(-4*E[1]**3*R[1]-6*R[0]*E[1]**2*E[2]+6*E[2]*R[2]+4*R[1]*E[3]+4*E[1]*R[3]+R[0]*E[4])*sin(A[0]))*sin(E[0]))+10*A[1]**2*(cos(E[0])*(cos(A[0])*(3*E[1]*(E[1]*R[1]+R[0]*E[2])-R[3])+(3*R[1]*A[2]+R[0]*A[3])*sin(A[0]))+(-(R[0]*cos(A[0])*E[1]**3)+cos(A[0])*(3*R[1]*E[2]+R[0]*E[3])+3*E[1]*(cos(A[0])*R[2]-R[0]*A[2]*sin(A[0])))*sin(E[0]));
def d1UpdAER1(self, A : vector, E : vector, R : vector) -> array:
return R[0]*cos(E[0])*E[1]+R[1]*sin(E[0]);
def d2UpdAER2(self, A : vector, E : vector, R : vector) -> array:
return 2*cos(E[0])*E[1]*R[1]+R[0]*cos(E[0])*E[2]-R[0]*E[1]**2*sin(E[0])+R[2]*sin(E[0]);
def d3UpdAER3(self, A : vector, E : vector, R : vector) -> array:
return -(R[0]*cos(E[0])*E[1]**3)+3*cos(E[0])*R[1]*E[2]+R[0]*cos(E[0])*E[3]-3*E[1]**2*R[1]*sin(E[0])+R[3]*sin(E[0])+3*E[1]*(cos(E[0])*R[2]-R[0]*E[2]*sin(E[0]));
def d4UpdAER4(self, A : vector, E : vector, R : vector) -> array:
return cos(E[0])*(-4*E[1]**3*R[1]-6*R[0]*E[1]**2*E[2]+6*E[2]*R[2]+4*R[1]*E[3]+4*E[1]*R[3]+R[0]*E[4])+(R[0]*E[1]**4-3*R[0]*E[2]**2-6*E[1]**2*R[2]-4*E[1]*(3*R[1]*E[2]+R[0]*E[3])+R[4])*sin(E[0]);
def d5UpdAER5(self, A : vector, E : vector, R : vector) -> array:
return cos(E[0])*(R[0]*E[1]**5-10*E[1]**3*R[2]+10*R[2]*E[3]-10*E[1]**2*(3*R[1]*E[2]+R[0]*E[3])+10*E[2]*R[3]+5*R[1]*E[4]+5*E[1]*(-3*R[0]*E[2]**2+R[4])+R[0]*E[5])+(5*E[1]**4*R[1]+10*R[0]*E[1]**3*E[2]-5*E[2]*(3*R[1]*E[2]+2*R[0]*E[3])-10*E[1]**2*R[3]-5*E[1]*(6*E[2]*R[2]+4*R[1]*E[3]+R[0]*E[4])+R[5])*sin(E[0]);
| 157.96371
| 11,493
| 0.394231
| 12,121
| 39,175
| 1.273822
| 0.01056
| 0.140285
| 0.081218
| 0.073834
| 0.884003
| 0.834067
| 0.791775
| 0.767487
| 0.728044
| 0.690026
| 0
| 0.207205
| 0.085897
| 39,175
| 248
| 11,494
| 157.96371
| 0.22396
| 0.005437
| 0
| 0.136986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.239726
| false
| 0
| 0.027397
| 0.109589
| 0.506849
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 14
|
bb05ee3c8d9a84d8dedf5d670c3ac7aa6a46257a
| 144
|
py
|
Python
|
tests/unit/multipoint/conftest.py
|
phuntimes/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | 1
|
2020-11-26T05:58:23.000Z
|
2020-11-26T05:58:23.000Z
|
tests/unit/multipoint/conftest.py
|
Sean-McVeigh/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | null | null | null |
tests/unit/multipoint/conftest.py
|
Sean-McVeigh/mongoshapes
|
f461c67343c32c6b97af8d67a269b4de492d1d71
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from tests.fixtures.multipoint import geojson
from tests.fixtures.multipoint import geointerface
| 24
| 50
| 0.763889
| 19
| 144
| 5.789474
| 0.736842
| 0.163636
| 0.309091
| 0.490909
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007813
| 0.111111
| 144
| 5
| 51
| 28.8
| 0.851563
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
24a2bad3bd740fbd46d4bd20676fb3e58b8840f6
| 4,361
|
py
|
Python
|
enterprise_pmg/API/ProjectApi.py
|
abbddos/MAD
|
7e2ba13e5397ec62ad678512947eb75af248ff83
|
[
"MIT"
] | null | null | null |
enterprise_pmg/API/ProjectApi.py
|
abbddos/MAD
|
7e2ba13e5397ec62ad678512947eb75af248ff83
|
[
"MIT"
] | null | null | null |
enterprise_pmg/API/ProjectApi.py
|
abbddos/MAD
|
7e2ba13e5397ec62ad678512947eb75af248ff83
|
[
"MIT"
] | null | null | null |
from flask_restful import Api, Resource
from datetime import datetime, date
from enterprise_pmg.model import Projects, Admin
class GetAllProjects(Resource):
def get(self):
qrys = Projects.Project.GET_ALL_PROJECTS()
data1 = []
for qry in qrys:
data = dict()
data['ProjectID'] = qry.ProjectID
data['ProjectCode'] = qry.ProjectCode
data['StartDate'] = str(qry.StartDate)
data['EndDate'] = str(qry.EndDate)
data['Location'] = qry.Location
data['FundingSource'] = qry.FundingSource
data['TotalBudget'] = qry.TotalBudget
data['Currency'] = qry.Currency
data['ProjectManagers'] = qry.ProjectManagers
data['StakeHolders'] = qry.ProjectStakeHolders
data['Description'] = qry.Description
data['Log'] = qry.Log
data1.append(data)
return {'Projects': data1}
class GetProjectByID(Resource):
def get(self,pid):
qry = Projects.Project.GetProjectByID(pid)
data = dict()
data['ProjectID'] = qry.ProjectID
data['ProjectCode'] = qry.ProjectCode
data['StartDate'] = str(qry.StartDate)
data['EndDate'] = str(qry.EndDate)
data['Location'] = qry.Location
data['FundingSource'] = qry.FundingSource
data['TotalBudget'] = qry.TotalBudget
data['Currency'] = qry.Currency
data['ProjectManagers'] = qry.ProjectManagers
data['StakeHolders'] = qry.ProjectStakeHolders
data['Description'] = qry.Description
data['Log'] = qry.Log
return data
class GetProjectByCode(Resource):
def get(self,code):
qry = Projects.Project.GetProjectByCode(code)
data = dict()
data['ProjectID'] = qry.ProjectID
data['ProjectCode'] = qry.ProjectCode
data['StartDate'] = str(qry.StartDate)
data['EndDate'] = str(qry.EndDate)
data['Location'] = qry.Location
data['FundingSource'] = qry.FundingSource
data['TotalBudget'] = qry.TotalBudget
data['Currency'] = qry.Currency
data['ProjectManagers'] = qry.ProjectManagers
data['StakeHolders'] = qry.ProjectStakeHolders
data['Description'] = qry.Description
data['Log'] = qry.Log
return data
class GetAllProjectTasks(Resource):
def get(self, procode):
qrys = Projects.Tasks.GetAllProjectTasks(procode)
data1 = []
for qry in qrys:
data = dict()
data['TaskID'] = qry.TaskID
data['TaskCode'] = qry.TaskCode
data['ProjectCode'] = qry.project_code
data['Priority'] = qry.Priority
data['TaskDescription'] = qry.TaskDescription
data['ExpectedStartDate'] = str(qry.ExpectedStartDate)
data['ExpectedEndDate'] = str(qry.ExpectedEndDate)
data['ActualStartDate'] = str(qry.ActualStartDate)
data['ActualEndDate'] = str(qry.ActualEndDate)
data['Location'] = qry.Location
data['AssignedTo'] = qry.AssignedTo
data['Deliverables'] = qry.Deliverables
data['DependsOn'] = qry.DependsOn
data['Dependability'] = qry.Dependabilities
data['TaskStatus'] = qry.TaskStatus
data['Comments'] = qry.Comments
data1.append(data)
return data1
class GetTaskByCode(Resource):
def get(self, procode, taskcode):
qry = Projects.Tasks.GetTaskByCode(procode, taskcode)
data = dict()
data['TaskID'] = qry.TaskID
data['TaskCode'] = qry.TaskCode
data['ProjectCode'] = qry.project_code
data['Priority'] = qry.Priority
data['TaskDescription'] = qry.TaskDescription
data['ExpectedStartDate'] = str(qry.ExpectedStartDate)
data['ExpectedEndDate'] = str(qry.ExpectedEndDate)
data['ActualStartDate'] = str(qry.ActualStartDate)
data['ActualEndDate'] = str(qry.ActualEndDate)
data['Location'] = qry.Location
data['AssignedTo'] = qry.AssignedTo
data['Deliverables'] = qry.Deliverables
data['DependsOn'] = qry.DependsOn
data['Dependability'] = qry.Dependabilities
data['TaskStatus'] = qry.TaskStatus
data['Comments'] = qry.Comments
return data
| 38.9375
| 66
| 0.605824
| 405
| 4,361
| 6.508642
| 0.162963
| 0.031866
| 0.026555
| 0.034143
| 0.800835
| 0.781866
| 0.781866
| 0.781866
| 0.768968
| 0.768968
| 0
| 0.001889
| 0.271497
| 4,361
| 111
| 67
| 39.288288
| 0.827825
| 0
| 0
| 0.803922
| 0
| 0
| 0.164028
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04902
| false
| 0
| 0.029412
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
700a310a9e2ab22cbbe196653093aa21b8068ff3
| 41,414
|
py
|
Python
|
tests/tests_libs_centos.py
|
JourdanClark/nova-agent
|
cccea98bc5b55982352ab50d751907eb5465e675
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_libs_centos.py
|
JourdanClark/nova-agent
|
cccea98bc5b55982352ab50d751907eb5465e675
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_libs_centos.py
|
JourdanClark/nova-agent
|
cccea98bc5b55982352ab50d751907eb5465e675
|
[
"Apache-2.0"
] | null | null | null |
from novaagent.libs import centos
from .fixtures import xen_data
from .fixtures import network
import logging
import glob
import sys
import os
if sys.version_info[:2] >= (2, 7):
from unittest import TestCase
else:
from unittest2 import TestCase
try:
from unittest import mock
except ImportError:
import mock
class MockDistro(object):
def id(self):
print("ID Fedora")
return 'fedora'
def vesion(self):
print("Version 29")
return ['29']
class TestHelpers(TestCase):
def setUp(self):
logging.disable(logging.ERROR)
def tearDown(self):
logging.disable(logging.NOTSET)
file_searches = [
'/tmp/route-*',
'/tmp/ifcfg-eth*',
'/tmp/hostname*',
'/tmp/network*',
'/tmp/ifcfg-lo*'
]
for search in file_searches:
route_files = glob.glob(search)
for item in route_files:
os.remove(item)
def setup_temp_route(self):
with open('/tmp/route-eth1', 'a+') as f:
f.write('This is a test file')
def setup_temp_interface_config(self, interface):
with open('/tmp/ifcfg-{0}'.format(interface), 'a+') as f:
f.write(
'IPADDR=2.2.2.2\n'
'IPADDR999=1.1.1.1\n'
'ZONE=TestFirewalldZone\n'
'# Comment in file\n'
' Starts with a space\n'
' # Mulitple spaces\n'
'TEST_OPTION=TEST_VALUE\n'
)
def setup_temp_hostname(self):
with open('/tmp/hostname', 'a+') as f:
f.write('test.hostname.local')
def setup_temp_network(self):
with open('/tmp/network', 'a+') as f:
f.write('This is a test file')
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_initialization(self, mock_distro):
mock_distro.return_value = False
temp = centos.ServerOS()
self.assertEqual(
temp.netconfig_dir,
'/etc/sysconfig/network-scripts',
'Network scripts directory was not expected value'
)
self.assertEqual(
temp.interface_file_prefix,
'ifcfg',
'Network scripts prefix is not expected value'
)
self.assertEqual(
temp.route_file_prefix,
'route',
'Route script prefix is not expected value'
)
self.assertEqual(
temp.hostname_file,
'/etc/hostname',
'Hostname file is not expected value'
)
self.assertEqual(
temp.network_file,
'/etc/sysconfig/network',
'Network file location is not expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_hostname_failure(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
mock_response_exists = mock.Mock()
mock_response_exists.side_effect = [
True, True, False, True, True, True
]
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 1, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs') as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces') as hwint:
hwint.return_value = ['eth1', 'lo']
mock_response = mock.Mock()
mock_response.side_effect = [
'BC764E206C5B',
None
]
with mock.patch(
'novaagent.utils.get_hw_addr',
side_effect=mock_response
):
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_to_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1']
with mock.patch(
'novaagent.libs.centos.ServerOS.'
'_check_for_extra_settings'
) as check:
check.return_value = []
mock_popen = mock.Mock()
mock_comm = mock.Mock()
mock_comm.return_value = ('out',
'error')
mock_popen.side_effect = [
mock.Mock(
returncode=0,
communicate=mock_comm
),
mock.Mock(
returncode=0,
communicate=mock_comm
),
mock.Mock(
returncode=0,
communicate=mock_comm
)
]
with mock.patch(
'novaagent.libs.centos.Popen',
side_effect=mock_popen):
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('0', ''),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_flush_failure(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
mock_response_exists = mock.Mock()
mock_response_exists.side_effect = [
True, True, False, True, True, True
]
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.os.path.exists',
mock_response_exists
):
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 0, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs') as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces') as hwint:
hwint.return_value = ['eth1', 'lo']
mock_response = mock.Mock()
mock_response.side_effect = [
'BC764E206C5B',
None
]
with mock.patch(
'novaagent.utils.get_hw_addr',
side_effect=mock_response
):
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_to'
'_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1']
with mock.patch(
'novaagent.libs.centos.ServerOS.'
'_check_for_extra_settings'
) as check:
check.return_value = []
mock_popen = mock.Mock()
mock_comm = mock.Mock()
mock_comm.return_value = ('out',
'error')
mock_popen.side_effect = [
mock.Mock(
returncode=1,
communicate=mock_comm
),
mock.Mock(
returncode=0,
communicate=mock_comm
),
mock.Mock(
returncode=0,
communicate=mock_comm
)
]
with mock.patch(
'novaagent.libs.centos.Popen',
side_effect=mock_popen
):
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('0', ''),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_success(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 0, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs') as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces') as hwint:
hwint.return_value = ['eth1', 'lo']
mock_response = mock.Mock()
mock_response.side_effect = [
'BC764E206C5B',
None
]
with mock.patch(
'novaagent.utils.get_hw_addr',
side_effect=mock_response
):
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_to_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1']
with mock.patch(
'novaagent.libs.centos.ServerOS.'
'_check_for_extra_settings'
) as check:
check.return_value = []
with mock.patch(
'novaagent.libs.centos.Popen'
) as p:
p.return_value.communicate.\
return_value = ('out', 'error')
p.return_value.returncode = 0
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('0', ''),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_error(self, mock_distro):
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 0, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs') as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces') as hwint:
hwint.return_value = ['eth1']
with mock.patch('novaagent.utils.get_hw_addr') as addr:
addr.return_value = 'BC764E206C5B'
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_to_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1']
with mock.patch(
'novaagent.libs.centos.ServerOS.'
'_check_for_extra_settings'
) as check:
check.return_value = []
with mock.patch(
'novaagent.libs.centos.Popen'
) as p:
p.return_value.communicate.\
return_value = ('out', 'error')
p.return_value.returncode = 1
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('1', 'Error restarting network'),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_success_systemctl(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
mock_response = mock.Mock()
mock_response.side_effect = [
True, True, False, True, True, True
]
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.os.path.exists',
mock_response
):
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 0, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs'
) as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces'
) as hwint:
hwint.return_value = ['eth1']
with mock.patch(
'novaagent.utils.get_hw_addr'
) as hw_addr:
hw_addr.return_value = 'BC764E206C5B'
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_to'
'_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1'
]
with mock.patch(
'novaagent.libs.centos.ServerOS.'
'_check_for_extra_settings'
) as check:
check.return_value = []
with mock.patch(
'novaagent.libs.centos.Popen'
) as p:
p.return_value.communicate.\
return_value = ('out',
'error')
p.return_value.returncode = 0
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('0', ''),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_reset_network_error_systemctl(self, mock_distro):
self.setup_temp_route()
self.setup_temp_interface_config('eth1')
self.setup_temp_interface_config('lo')
self.setup_temp_network()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp.network_file = '/tmp/network'
mock_response = mock.Mock()
mock_response.side_effect = [
True, True, False, True, True, True
]
with mock.patch.object(
temp, '_os_defaults_network_manager') as mock_def_net_mgr:
mock_def_net_mgr.return_value = False
with mock.patch(
'novaagent.libs.centos.os.path.exists',
mock_response
):
with mock.patch(
'novaagent.libs.centos.ServerOS._setup_hostname'
) as hostname:
hostname.return_value = 0, 'temp.hostname'
with mock.patch(
'novaagent.utils.list_xenstore_macaddrs'
) as mac:
mac.return_value = ['BC764E206C5B']
with mock.patch(
'novaagent.utils.list_hw_interfaces'
) as hwint:
hwint.return_value = ['eth1']
with mock.patch(
'novaagent.utils.get_hw_addr'
) as hw_addr:
hw_addr.return_value = 'BC764E206C5B'
with mock.patch(
'novaagent.utils.get_interface'
) as inter:
inter.return_value = (
xen_data.check_network_interface()
)
with mock.patch(
'novaagent.utils.get_ifcfg_files_'
'to_remove'
) as ifcfg_files:
ifcfg_files.return_value = [
'/tmp/ifcfg-eth1'
]
with mock.patch(
'novaagent.libs.centos.Server'
'OS._check_for_extra_settings'
) as check:
check.return_value = []
with mock.patch(
'novaagent.libs.centos.'
'Popen'
) as p:
p.return_value.\
communicate.\
return_value = (
'out', 'error')
p.return_value.\
returncode = 1
result = temp.resetnetwork(
'name',
'value',
'dummy_client'
)
self.assertEqual(
result,
('1', 'Error restarting network'),
'Result was not the expected value'
)
network_files = glob.glob('/tmp/network*')
self.assertEqual(
len(network_files),
2,
'Incorrect number of network files'
)
ifcfg_files = glob.glob('/tmp/ifcfg-eth*')
self.assertEqual(
len(ifcfg_files),
2,
'Incorrect number of ifcfg files'
)
route_files = glob.glob('/tmp/route*')
self.assertEqual(
len(route_files),
2,
'Incorrect number of route files'
)
localhost = glob.glob('/tmp/ifcfg-lo')
self.assertEqual(
len(localhost),
1,
'Localhost ifcfg file was moved out of the way and should not have'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_check_extra_args(self, mock_distro):
self.setup_temp_interface_config('eth1')
temp = centos.ServerOS()
interface_file = '/tmp/ifcfg-eth1'
extra_args = temp._check_for_extra_settings(interface_file)
self.assertEqual(
len(extra_args),
2,
'Did not get proper number of arguments from check'
)
self.assertEqual(
extra_args,
['ZONE=TestFirewalldZone', 'TEST_OPTION=TEST_VALUE'],
'Did not get proper extra arguments from check'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_check_extra_args_no_file(self, mock_distro):
temp = centos.ServerOS()
interface_file = '/tmp/ifcfg-eth1'
extra_args = temp._check_for_extra_settings(interface_file)
self.assertEqual(
len(extra_args),
0,
'Did not get proper number of arguments from check'
)
self.assertEqual(
extra_args,
[],
'Did not get proper extra arguments from check'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_routes(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_route()
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp_iface = xen_data.check_network_interface()
temp._setup_routes('eth1', temp_iface)
files = glob.glob('/tmp/route-eth1*')
self.assertEqual(
len(files),
2,
'Did not find correct number of files'
)
with open('/tmp/route-eth1') as f:
written_data = f.readlines()
for index, line in enumerate(written_data):
self.assertEqual(
line,
network.CENTOS_ROUTE_FILE[index],
'Written file did not match expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_interfaces_eth0(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_interface_config('eth0')
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp_iface = network.ETH0_INTERFACE
temp._setup_interface('eth0', temp_iface)
files = glob.glob('/tmp/ifcfg-eth0*')
self.assertEqual(
len(files),
2,
'Did not find correct number of files'
)
with open('/tmp/ifcfg-eth0') as f:
written_data = f.readlines()
for index, line in enumerate(written_data):
self.assertEqual(
line,
network.CENTOS_IFCFG_ETH0[index],
'Written file did not match expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_interfaces_eth1(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_interface_config('eth1')
temp = centos.ServerOS()
temp.netconfig_dir = '/tmp'
temp_iface = xen_data.check_network_interface()
temp._setup_interface('eth1', temp_iface)
files = glob.glob('/tmp/ifcfg-eth1*')
self.assertEqual(
len(files),
2,
'Did not find correct number of files'
)
with open('/tmp/ifcfg-eth1') as f:
written_data = f.readlines()
for index, line in enumerate(written_data):
self.assertEqual(
line,
network.CENTOS_IFCFG_ETH1[index],
'Written file did not match expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_hostname_hostname_success(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_hostname()
temp = centos.ServerOS()
temp.hostname_file = '/tmp/hostname'
test_hostname = 'test.hostname'
with mock.patch('novaagent.utils.get_hostname') as hostname:
hostname.return_value = test_hostname
with mock.patch('novaagent.libs.os.path.exists') as exists:
exists.return_value = False
with mock.patch('novaagent.libs.Popen') as popen:
popen.return_value.communicate.return_value = (
('out', 'err')
)
popen.return_value.returncode = 0
return_code, hostname = temp._setup_hostname(
'dummy_client'
)
self.assertEqual(
hostname,
test_hostname,
'Did not receive expected host from function'
)
self.assertEqual(
return_code,
0,
'Return code received was not expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_hostname_hostname_failure(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_hostname()
temp = centos.ServerOS()
temp.hostname_file = '/tmp/hostname'
test_hostname = 'test.hostname'
with mock.patch('novaagent.utils.get_hostname') as hostname:
hostname.return_value = test_hostname
with mock.patch('novaagent.libs.os.path.exists') as exists:
exists.return_value = False
with mock.patch('novaagent.libs.Popen') as popen:
popen.return_value.communicate.return_value = (
('out', 'err')
)
popen.return_value.returncode = 1
return_code, hostname = temp._setup_hostname(
'dummy_client'
)
self.assertEqual(
hostname,
test_hostname,
'Did not receive expected host from function'
)
self.assertEqual(
return_code,
1,
'Return code received was not expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_hostname_hostnamectl_success(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_hostname()
temp = centos.ServerOS()
temp.hostname_file = '/tmp/hostname'
test_hostname = 'test.hostname'
with mock.patch('novaagent.utils.get_hostname') as hostname:
hostname.return_value = test_hostname
with mock.patch('novaagent.libs.os.path.exists') as exists:
exists.return_value = True
with mock.patch('novaagent.libs.Popen') as popen:
popen.return_value.communicate.return_value = (
('out', 'err')
)
popen.return_value.returncode = 0
return_code, hostname = temp._setup_hostname(
'dummy_client'
)
self.assertEqual(
hostname,
test_hostname,
'Did not receive expected host from function'
)
self.assertEqual(
return_code,
0,
'Return code received was not expected value'
)
@mock.patch('novaagent.libs.centos.ServerOS.is_network_manager')
def test_setup_hostname_hostnamectl_failure(self, mock_distro):
mock_distro.return_value = False
self.setup_temp_hostname()
temp = centos.ServerOS()
temp.hostname_file = '/tmp/hostname'
test_hostname = 'test.hostname'
with mock.patch('novaagent.utils.get_hostname') as hostname:
hostname.return_value = test_hostname
with mock.patch('novaagent.libs.os.path.exists') as exists:
exists.return_value = True
with mock.patch('novaagent.libs.Popen') as popen:
popen.return_value.communicate.return_value = (
('out', 'err')
)
popen.return_value.returncode = 1
return_code, hostname = temp._setup_hostname(
'dummy_client'
)
self.assertEqual(
hostname,
test_hostname,
'Did not receive expected host from function'
)
self.assertEqual(
return_code,
1,
'Return code received was not expected value'
)
@mock.patch('novaagent.libs.centos.distro.id', return_value='rhel')
@mock.patch('novaagent.libs.centos.distro.version', return_value='7.5')
def test_os_defaults_network_manager_rhel_pre_8(self, mock_id, mock_ver):
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
False,
'Should have returned False: {0}'.format(results)
)
@mock.patch('novaagent.libs.centos.distro.id', return_value='rhel')
@mock.patch('novaagent.libs.centos.distro.version', return_value='8.0')
def test_os_defaults_network_manager_rhel_8(self, mock_id, mock_ver):
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
True,
'Should have returned True: {0}'.format(results)
)
@mock.patch('novaagent.libs.centos.distro.id', return_value='fedora')
@mock.patch('novaagent.libs.centos.distro.version', return_value='28')
def test_os_defaults_network_manager_fedora_pre_29(self,
mock_id,
mock_ver):
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
False,
'Should have returned False: {0}'.format(results)
)
@mock.patch('novaagent.libs.centos.distro.id', return_value='fedora')
@mock.patch('novaagent.libs.centos.distro.version', return_value='29')
def test_os_defaults_network_manager_fedora_29(self, mock_id, mock_ver):
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
True,
'Should have returned True: {0}'.format(results)
)
@mock.patch('novaagent.libs.centos.ServerOS._os_defaults_network_manager')
def test_is_network_manager_true(self, mock_default_netman):
mock_default_netman.return_value = True
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
True,
'Should have returned True: {0}'.format(results)
)
@mock.patch('novaagent.libs.centos.ServerOS._os_defaults_network_manager')
def test_is_network_manager_false(self, mock_default_netman):
mock_default_netman.return_value = False
temp = centos.ServerOS()
results = temp._os_defaults_network_manager()
self.assertEqual(
results,
False,
'Should have returned False: {0}'.format(results)
)
| 41.249004
| 79
| 0.451707
| 3,527
| 41,414
| 5.077119
| 0.054721
| 0.062043
| 0.089462
| 0.0774
| 0.923382
| 0.92059
| 0.916569
| 0.901268
| 0.894231
| 0.884179
| 0
| 0.010073
| 0.470227
| 41,414
| 1,003
| 80
| 41.29013
| 0.806108
| 0
| 0
| 0.758475
| 0
| 0
| 0.186367
| 0.086398
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.03178
| false
| 0
| 0.012712
| 0
| 0.048729
| 0.002119
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7023b5429104ad169b999b75d62e4bb95cb40fec
| 15,799
|
py
|
Python
|
pvfactors/tests/test_run.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 60
|
2018-06-20T03:28:06.000Z
|
2022-03-09T03:34:42.000Z
|
pvfactors/tests/test_run.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 60
|
2018-07-31T03:09:46.000Z
|
2022-03-03T20:02:19.000Z
|
pvfactors/tests/test_run.py
|
soreva/pvfactors
|
422f883fd6e68c38900ceb74977b3267555f3fee
|
[
"BSD-3-Clause"
] | 18
|
2018-08-02T00:00:29.000Z
|
2022-02-02T13:47:33.000Z
|
from pvfactors.run import run_timeseries_engine, run_parallel_engine
from pvfactors.report import ExampleReportBuilder
from pvfactors.viewfactors.aoimethods import faoi_fn_from_pvlib_sandia
import numpy as np
import mock
def test_run_timeseries_engine(fn_report_example, params_serial,
                               df_inputs_clearsky_8760):
    """Full-mode timeseries run: check report length and reference values."""
    # Restrict clear-sky inputs to the first 24 rows
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    n_points = inputs.shape[0]
    # Unpack the meteorological / geometry inputs
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    report = run_timeseries_engine(
        fn_report_example, params_serial, timestamps, dni, dhi,
        solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
        params_serial['rho_ground'])
    assert len(report['qinc_front']) == n_points
    # Aggregated values must stay consistent with the reference run
    np.testing.assert_almost_equal(
        np.nansum(report['qinc_back']), 541.7115807694377)
    np.testing.assert_almost_equal(
        np.nansum(report['iso_back']), 18.050083142438311)
    # Spot-check a couple of individual timesteps
    np.testing.assert_almost_equal(report['qinc_back'][7], 11.160301350847325)
    np.testing.assert_almost_equal(report['qinc_back'][-8], 8.642850754173368)
def test_run_timeseries_engine_fast_mode(fn_report_example, params_serial,
                                         df_inputs_clearsky_8760):
    """Fast-mode timeseries run: values slightly above the full-mode ones."""
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    n_points = inputs.shape[0]
    # Unpack the meteorological / geometry inputs
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    fast_mode_pvrow_index = 1

    def fn_report(pvarray):
        # Report back-side values of PV row 1 only
        back_side = pvarray.ts_pvrows[1].back
        return {'qinc_back': back_side.get_param_weighted('qinc'),
                'iso_back': back_side.get_param_weighted('isotropic')}

    report = run_timeseries_engine(
        fn_report, params_serial, timestamps, dni, dhi, solar_zenith,
        solar_azimuth, surface_tilt, surface_azimuth,
        params_serial['rho_ground'],
        fast_mode_pvrow_index=fast_mode_pvrow_index)
    assert len(report['qinc_back']) == n_points
    # Aggregated values must stay consistent with the reference run
    np.testing.assert_almost_equal(
        np.nansum(report['qinc_back']), 548.0011865481954)
    np.testing.assert_almost_equal(
        np.nansum(report['iso_back']), 18.03732189070727)
    # Spot-check a couple of individual timesteps
    np.testing.assert_almost_equal(report['qinc_back'][7], 11.304105184587364)
    np.testing.assert_almost_equal(report['qinc_back'][-8], 8.743201975668212)
def test_params_irradiance_model():
    """Irradiance params must be forwarded to the irradiance model class."""
    mock_irradiance_model = mock.MagicMock()
    mock_engine = mock.MagicMock()
    mock_pvarray = mock.MagicMock()
    irradiance_params = {'horizon_band_angle': 15.}
    # Only the keyword plumbing matters here, so all inputs are None
    run_timeseries_engine(
        None, None, None, None, None, None, None, None, None, None,
        cls_engine=mock_engine, cls_pvarray=mock_pvarray,
        cls_irradiance=mock_irradiance_model,
        irradiance_model_params=irradiance_params)
    mock_irradiance_model.assert_called_once_with(
        horizon_band_angle=irradiance_params['horizon_band_angle'])
def test_run_parallel_engine_with_irradiance_params(params_serial,
                                                    df_inputs_clearsky_8760):
    """Irradiance model params must reach the workers in parallel runs."""
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    # Unpack the meteorological / geometry inputs
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    n_processes = 2

    def run_with_horizon_band(angle):
        # One parallel run with the given horizon band angle
        return run_parallel_engine(
            ExampleReportBuilder, params_serial, timestamps, dni, dhi,
            solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
            params_serial['rho_ground'], n_processes=n_processes,
            irradiance_model_params={'horizon_band_angle': angle})

    report_no_params = run_with_horizon_band(6.5)
    np.testing.assert_almost_equal(
        np.nansum(report_no_params['qinc_back']), 541.7115807694377)
    # The incident irradiance should be higher with larger horizon band
    report_w_params = run_with_horizon_band(15.)
    np.testing.assert_almost_equal(
        np.nansum(report_w_params['qinc_back']), 554.5333279555168)
def test_params_ghi_passed():
    """GHI must be forwarded to the engine's fit call."""
    mock_irradiance_model = mock.MagicMock()
    mock_engine = mock.MagicMock()
    mock_pvarray = mock.MagicMock()
    ghi = [1000.]
    # Only the keyword plumbing matters here, so all inputs are None
    run_timeseries_engine(
        None, None, None, None, None, None, None, None, None, None,
        cls_engine=mock_engine, cls_pvarray=mock_pvarray,
        cls_irradiance=mock_irradiance_model, ghi=ghi)
    mock_engine.return_value.fit.assert_called_with(
        None, None, None, None, None, None, None, None, ghi=ghi)
def test_run_parallel_engine_with_ghi(params_serial,
                                      df_inputs_clearsky_8760):
    """ghi must be forwarded to the models in parallel runs.

    ghi is not used in full mode, so run in fast mode where it changes
    the results.
    """
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    # Unpack the meteorological / geometry inputs
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    ghi = 500. * np.ones_like(dni)
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    fast_mode_pvrow_index = 1
    n_processes = 2

    def run_fast_mode(**extra_kwargs):
        # One fast-mode parallel run, optionally with ghi
        return run_parallel_engine(
            TestFastReportBuilder, params_serial, timestamps, dni, dhi,
            solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
            params_serial['rho_ground'], n_processes=n_processes,
            fast_mode_pvrow_index=fast_mode_pvrow_index, **extra_kwargs)

    # Without ghi
    report_no_ghi = run_fast_mode()
    np.testing.assert_almost_equal(
        np.nansum(report_no_ghi['qinc_back']), 548.0011865481954)
    # With ghi the back-side result changes
    report_w_ghi = run_fast_mode(ghi=ghi)
    np.testing.assert_almost_equal(
        np.nansum(report_w_ghi['qinc_back']), 771.8440422696128)
class TestFastReportBuilder(object):
    """Minimal report builder used by fast-mode parallel runs."""

    @staticmethod
    def build(pvarray):
        # Weighted back-side qinc of PV row 1, as a plain list
        back_side = pvarray.ts_pvrows[1].back
        return {'qinc_back': back_side.get_param_weighted('qinc').tolist()}

    @staticmethod
    def merge(reports):
        # Concatenate per-process reports into the first one, in order
        merged = reports[0]
        for partial in reports[1:]:
            for key in merged.keys():
                merged[key] += partial[key]
        return merged
def test_run_timeseries_faoi_fn(params_serial, pvmodule_canadian,
                                df_inputs_clearsky_8760):
    """Check that fAOI functions are used correctly by run_timeseries_engine."""
    # Timeseries inputs: first 24 rows of the clear-sky dataset
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    expected_qinc_back = 542.018551
    expected_qinc_front = 5452.858863

    # --- Run without vf parameters
    def report_fn_with_tests_no_faoi(pvarray):
        # Inline checks run inside the engine on the built pvarray
        vf_aoi_matrix = pvarray.ts_vf_aoi_matrix
        pvrow = pvarray.ts_pvrows[0]
        surface_indices = [ts_surf.index
                           for ts_surf in pvrow.all_ts_surfaces]
        # With no faoi_fn, the vf_aoi sums equal the reflectivity values
        np.testing.assert_allclose(
            vf_aoi_matrix[surface_indices, :, 12].sum(axis=1),
            [0.99, 0., 0.97, 0.])
        return {'qinc_front': pvrow.front.get_param_weighted('qinc'),
                'qabs_front': pvrow.front.get_param_weighted('qabs'),
                'qinc_back': pvrow.back.get_param_weighted('qinc'),
                'qabs_back': pvrow.back.get_param_weighted('qabs')}

    report = run_timeseries_engine(
        report_fn_with_tests_no_faoi, params_serial, timestamps, dni, dhi,
        solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
        params_serial['rho_ground'], vf_calculator_params=None,
        irradiance_model_params=None)
    np.testing.assert_allclose(
        np.nansum(report['qinc_back']), expected_qinc_back)
    np.testing.assert_allclose(np.nansum(report['qabs_back']), 525.757995)
    np.testing.assert_allclose(
        np.nansum(report['qinc_front']), expected_qinc_front)
    np.testing.assert_allclose(np.nansum(report['qabs_front']), 5398.330275)

    # --- Run with vf parameters
    faoi_fn = faoi_fn_from_pvlib_sandia(pvmodule_canadian)
    # Very high section count so integral sums agree between back and
    # front surfaces
    n_sections = 10000
    vf_calc_params = {'faoi_fn_front': faoi_fn,
                      'faoi_fn_back': faoi_fn,
                      'n_aoi_integral_sections': n_sections}
    irr_params = {'faoi_fn_front': faoi_fn,
                  'faoi_fn_back': faoi_fn}

    def report_fn_with_tests_w_faoi(pvarray):
        # Inline checks run inside the engine on the built pvarray
        vf_aoi_matrix = pvarray.ts_vf_aoi_matrix
        pvrow = pvarray.ts_pvrows[0]
        surface_indices = [ts_surf.index
                           for ts_surf in pvrow.all_ts_surfaces]
        # Check that sum of vf_aoi is consistent
        np.testing.assert_allclose(
            vf_aoi_matrix[surface_indices, :, 12].sum(axis=1),
            [0.97102, 0., 0.971548, 0.], atol=0, rtol=1e-6)
        return {'qinc_front': pvrow.front.get_param_weighted('qinc'),
                'qabs_front': pvrow.front.get_param_weighted('qabs'),
                'qinc_back': pvrow.back.get_param_weighted('qinc'),
                'qabs_back': pvrow.back.get_param_weighted('qabs')}

    report = run_timeseries_engine(
        report_fn_with_tests_w_faoi, params_serial, timestamps, dni, dhi,
        solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
        params_serial['rho_ground'], vf_calculator_params=vf_calc_params,
        irradiance_model_params=irr_params)
    np.testing.assert_allclose(
        np.nansum(report['qinc_back']), expected_qinc_back)
    np.testing.assert_allclose(np.nansum(report['qabs_back']), 520.892016)
    np.testing.assert_allclose(
        np.nansum(report['qinc_front']), expected_qinc_front)
    np.testing.assert_allclose(np.nansum(report['qabs_front']), 5347.050682)
def test_run_parallel_faoi_fn(params_serial, df_inputs_clearsky_8760):
    """Check that fAOI functions are used correctly by run_parallel_engine."""
    # Timeseries inputs: first 24 rows of the clear-sky dataset
    inputs = df_inputs_clearsky_8760.iloc[:24, :]
    timestamps = inputs.index
    dni = inputs.dni.values
    dhi = inputs.dhi.values
    solar_zenith = inputs.solar_zenith.values
    solar_azimuth = inputs.solar_azimuth.values
    surface_tilt = inputs.surface_tilt.values
    surface_azimuth = inputs.surface_azimuth.values
    expected_qinc_back = 542.018551
    expected_qinc_front = 5452.858863

    def run_with(vf_params, irradiance_params):
        # One parallel run with the given vf / irradiance parameters
        return run_parallel_engine(
            TestFAOIReportBuilder, params_serial, timestamps, dni, dhi,
            solar_zenith, solar_azimuth, surface_tilt, surface_azimuth,
            params_serial['rho_ground'], vf_calculator_params=vf_params,
            irradiance_model_params=irradiance_params)

    # --- Run without vf parameters
    report = run_with(None, None)
    np.testing.assert_allclose(
        np.nansum(report['qinc_back']), expected_qinc_back)
    np.testing.assert_allclose(np.nansum(report['qabs_back']), 525.757995)
    np.testing.assert_allclose(
        np.nansum(report['qinc_front']), expected_qinc_front)
    np.testing.assert_allclose(np.nansum(report['qabs_front']), 5398.330275)

    # --- Run with vf parameters
    # Very high section count so integral sums agree between back and
    # front surfaces
    n_sections = 10000
    vf_calc_params = {'faoi_fn_front': FaoiClass,
                      'faoi_fn_back': FaoiClass,
                      'n_aoi_integral_sections': n_sections}
    irr_params = {'faoi_fn_front': FaoiClass,
                  'faoi_fn_back': FaoiClass}
    report = run_with(vf_calc_params, irr_params)
    np.testing.assert_allclose(
        np.nansum(report['qinc_back']), expected_qinc_back)
    np.testing.assert_allclose(np.nansum(report['qabs_back']), 520.892016)
    np.testing.assert_allclose(
        np.nansum(report['qinc_front']), expected_qinc_front)
    np.testing.assert_allclose(np.nansum(report['qabs_front']), 5347.050682)
class TestFAOIReportBuilder(object):
    """Report builder collecting front/back qinc and qabs of PV row 0."""

    @staticmethod
    def build(pvarray):
        # All four weighted parameters of PV row 0, as plain lists
        front = pvarray.ts_pvrows[0].front
        back = pvarray.ts_pvrows[0].back
        return {'qinc_front': front.get_param_weighted('qinc').tolist(),
                'qabs_front': front.get_param_weighted('qabs').tolist(),
                'qinc_back': back.get_param_weighted('qinc').tolist(),
                'qabs_back': back.get_param_weighted('qabs').tolist()}

    @staticmethod
    def merge(reports):
        # Concatenate per-process reports into the first one, in order
        merged = reports[0]
        for partial in reports[1:]:
            for key in merged.keys():
                merged[key] += partial[key]
        return merged
class FaoiClass(object):
    """Class-based fAOI function wrapper used in the parallel-run tests."""

    @staticmethod
    def faoi(*args, **kwargs):
        # Build the pvlib-Sandia-based fAOI function on demand, then apply it
        faoi_function = faoi_fn_from_pvlib_sandia(
            'Canadian_Solar_CS5P_220M___2009_')
        return faoi_function(*args, **kwargs)
| 40.303571
| 80
| 0.675486
| 1,981
| 15,799
| 5.044422
| 0.116103
| 0.049635
| 0.045032
| 0.030421
| 0.84239
| 0.810968
| 0.784849
| 0.784849
| 0.745622
| 0.727409
| 0
| 0.037531
| 0.236028
| 15,799
| 391
| 81
| 40.40665
| 0.790389
| 0.09874
| 0
| 0.715827
| 0
| 0
| 0.058869
| 0.005519
| 0
| 0
| 0
| 0
| 0.122302
| 1
| 0.057554
| false
| 0.003597
| 0.017986
| 0.007194
| 0.111511
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5616447322759b0bad81fc644079e68c2bff5539
| 1,007
|
py
|
Python
|
src/testcase/GN_APP/input_case/GN_APP_Login.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 28
|
2017-11-10T00:19:16.000Z
|
2022-02-19T16:42:05.000Z
|
src/testcase/GN_APP/input_case/GN_APP_Login.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | null | null | null |
src/testcase/GN_APP/input_case/GN_APP_Login.py
|
maiyajj/AutoTest_script-Appium_Connect
|
f9c2c42c281a9e2f984acb4a72dda0694b053f22
|
[
"Apache-2.0"
] | 23
|
2017-08-22T06:12:19.000Z
|
2021-09-18T05:45:41.000Z
|
# coding=utf-8
# Aggregate all GN_APP login test-case modules (001-013) into this
# module's namespace via star imports.
# A failed import is only printed, not raised, so the remaining case
# modules can still be loaded when one of them is broken or missing.
try:
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_001 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_002 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_003 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_004 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_005 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_006 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_007 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_008 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_009 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_010 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_011 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_012 import *
    from src.testcase.GN_APP.case.GN_APP_LOGIN.GN_APP_LOGIN_013 import *
except ImportError as e:
    print(e)
| 55.944444
| 72
| 0.800397
| 192
| 1,007
| 3.791667
| 0.15625
| 0.267857
| 0.357143
| 0.303571
| 0.884615
| 0.884615
| 0.884615
| 0.884615
| 0.884615
| 0.884615
| 0
| 0.044893
| 0.115194
| 1,007
| 17
| 73
| 59.235294
| 0.772166
| 0.011917
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.875
| 0
| 0.875
| 0.0625
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
5631160dd7d370e51125b804a9f4c616668a7465
| 500
|
py
|
Python
|
eval_mosmed_timm-regnetx_002_Rotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_Rotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_Rotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Run the eval_mosmed unetplusplus timm-regnetx_002 "Rotate" configs for
# indices 0-4. The original hard-coded five near-identical command
# strings that differed only in the index digit; generate them from one
# template instead so adding/removing an index is a one-character change.
COMMAND_TEMPLATE = ('python main.py --configs '
                    'configs/eval_mosmed_unetplusplus_timm-regnetx_002_'
                    '{0}_Rotate.yml')

ls = [COMMAND_TEMPLATE.format(index) for index in range(5)]

for l in ls:
    # NOTE: os.system runs through the shell; acceptable here because the
    # commands are fixed literals, not untrusted input.
    os.system(l)
| 45.454545
| 94
| 0.834
| 80
| 500
| 4.8375
| 0.3
| 0.129199
| 0.155039
| 0.245478
| 0.894057
| 0.894057
| 0.894057
| 0.894057
| 0.894057
| 0.894057
| 0
| 0.042644
| 0.062
| 500
| 11
| 95
| 45.454545
| 0.782516
| 0
| 0
| 0
| 0
| 0
| 0.868263
| 0.618762
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
566a095a34cf4a4d67f3880a850dd45e4c5f661f
| 12,060
|
py
|
Python
|
rqt_graphprofiler/src/diarc/tests/tests.py
|
cena0805/rqt_graphprofiler
|
810441efa357cbe4410cffd0d491b84c8adec9a5
|
[
"Apache-2.0"
] | null | null | null |
rqt_graphprofiler/src/diarc/tests/tests.py
|
cena0805/rqt_graphprofiler
|
810441efa357cbe4410cffd0d491b84c8adec9a5
|
[
"Apache-2.0"
] | null | null | null |
rqt_graphprofiler/src/diarc/tests/tests.py
|
cena0805/rqt_graphprofiler
|
810441efa357cbe4410cffd0d491b84c8adec9a5
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import types
class Test_BlockNeighbors(unittest.TestCase):
    """Exercise left/right neighbor bookkeeping as block indices change."""
    def test(self):
        # Build a topology with four vertices; their blocks start unindexed.
        import topology
        t = topology.Topology()
        v0 = topology.Vertex(t)
        v1 = topology.Vertex(t)
        v2 = topology.Vertex(t)
        v3 = topology.Vertex(t)
        assert(v0.block.index is None)
        assert(v1.block.index is None)
        assert(v2.block.index is None)
        assert(v3.block.index is None)
        # A single indexed block has no neighbors on either side.
        v1.block.index = 1
        assert(v1.block.leftBlock is None)
        assert(v1.block.rightBlock is None)
        # Indexing a second block links the two as neighbors.
        v3.block.index = 3
        assert(v1.block.leftBlock is None)
        assert(v1.block.rightBlock == v3.block)
        assert(v3.block.leftBlock == v1.block)
        assert(v3.block.rightBlock is None)
        # Index 0 places v2's block leftmost, before v1's.
        v2.block.index = 0
        assert(v1.block.leftBlock == v2.block)
        assert(v1.block.rightBlock == v3.block)
        assert(v2.block.leftBlock is None)
        assert(v2.block.rightBlock == v1.block)
        assert(v3.block.leftBlock == v1.block)
        assert(v3.block.rightBlock is None)
        # Re-indexing v2's block to 2 moves it between v1's and v3's.
        v2.block.index = 2
        assert(v1.block.leftBlock is None)
        assert(v1.block.rightBlock == v2.block)
        assert(v2.block.leftBlock == v1.block)
        assert(v2.block.rightBlock == v3.block)
        assert(v3.block.leftBlock == v2.block)
        assert(v3.block.rightBlock is None)
        # Finally index v0's block at 0: ordering becomes v0, v1, v2, v3.
        v0.block.index = 0
        assert(v0.block.leftBlock is None)
        assert(v0.block.rightBlock == v1.block)
        assert(v1.block.leftBlock == v0.block)
        assert(v1.block.rightBlock == v2.block)
        assert(v2.block.leftBlock == v1.block)
        assert(v2.block.rightBlock == v3.block)
        assert(v3.block.leftBlock == v2.block)
        assert(v3.block.rightBlock is None)
class Test_v5_a(unittest.TestCase):
    """Layout checks for the topology parsed from data/v5_a.xml."""
    def setUp(self):
        # Parse the fixture topology fresh for each test
        import parser
        self.t = parser.parseFile('data/v5_a.xml')
    def test_band_emitters_collectors(self):
        """ Check which block indices emit to / collect from each band """
        t = self.t
        assert([snap.block.index for snap in t.edges[0].posBand.emitters] == [0])
        assert([snap.block.index for snap in t.edges[0].posBand.collectors] == [1])
        assert([snap.block.index for snap in t.edges[0].negBand.emitters] == [2])
        assert([snap.block.index for snap in t.edges[0].negBand.collectors] == [1])
    def test_block(self):
        """ Check block indexing and per-block snap counts """
        t = self.t
        # Make sure the matching is correct
        for index in t.blocks:
            assert(t.blocks[index].index == index)
        # Count the number of emitters and collectors of each block
        # {blockIndex: (#collectors,#emitters), ...}
        vals = { 0: (0,1), 1: (1,0), 2: (0,1)}
        for index in t.blocks:
            assert(len(t.blocks[index].collector) == vals[index][0])
            assert(len(t.blocks[index].emitter) == vals[index][1])
    def test_snaps_to_bands_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        t = self.t
        assert(t.blocks[0].emitter[0].posBandLink.altitude == 1)
        assert(t.blocks[0].emitter[0].negBandLink is None)
        assert(t.blocks[1].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[1].collector[0].negBandLink.altitude == -1)
        assert(t.blocks[2].emitter[0].posBandLink is None)
        assert(t.blocks[2].emitter[0].negBandLink.altitude == -1)
class Test_v5_b(unittest.TestCase):
    """Layout checks for the topology parsed from data/v5_b.xml."""
    def setUp(self):
        # Parse the fixture topology fresh for each test
        import parser
        self.t = parser.parseFile('data/v5_b.xml')
    def test_band_emitters_collectors(self):
        """ Check which block indices emit to / collect from each band """
        t = self.t
        assert([snap.block.index for snap in t.edges[0].posBand.emitters] == [1])
        assert([snap.block.index for snap in t.edges[0].posBand.collectors] == [2])
        assert([snap.block.index for snap in t.edges[0].negBand.emitters] == [1])
        assert([snap.block.index for snap in t.edges[0].negBand.collectors] == [0])
    def test_block(self):
        """ Check block indexing and per-block snap counts """
        t = self.t
        # Make sure the matching is correct
        for index in t.blocks:
            assert(t.blocks[index].index == index)
        # Count the number of emitters and collectors of each block
        # {blockIndex: (#collectors,#emitters), ...}
        vals = { 0: (1,0), 1: (0,1), 2: (1,0)}
        for index in t.blocks:
            assert(len(t.blocks[index].collector) == vals[index][0])
            assert(len(t.blocks[index].emitter) == vals[index][1])
    def test_snaps_to_bands_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        t = self.t
        assert(t.blocks[0].collector[0].posBandLink is None)
        assert(t.blocks[0].collector[0].negBandLink.altitude == -1)
        assert(t.blocks[1].emitter[0].posBandLink.altitude == 1)
        assert(t.blocks[1].emitter[0].negBandLink.altitude == -1)
        assert(t.blocks[2].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[2].collector[0].negBandLink is None)
class Test_v5_c(unittest.TestCase):
    """Layout checks for the topology parsed from data/v5_c.xml."""
    def setUp(self):
        # Parse the fixture topology fresh for each test
        import parser
        self.t = parser.parseFile('data/v5_c.xml')
    def test_band_emitters_collectors(self):
        """ Check which block indices emit to / collect from each band """
        t = self.t
        assert([snap.block.index for snap in t.edges[0].posBand.emitters] == [0])
        assert([snap.block.index for snap in t.edges[0].posBand.collectors] == [1,2])
        assert([snap.block.index for snap in t.edges[0].negBand.emitters] == [])
        assert([snap.block.index for snap in t.edges[0].negBand.collectors] == [])
    def test_block(self):
        """ Check block indexing and per-block snap counts """
        t = self.t
        # Make sure the matching is correct
        for index in t.blocks:
            assert(t.blocks[index].index == index)
        # Count the number of emitters and collectors of each block
        # {blockIndex: (#collectors,#emitters), ...}
        vals = { 0: (0,1), 1: (1,0), 2: (1,0)}
        for index in t.blocks:
            assert(len(t.blocks[index].collector) == vals[index][0])
            assert(len(t.blocks[index].emitter) == vals[index][1])
    def test_snaps_to_bands_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        t = self.t
        assert(t.blocks[0].emitter[0].posBandLink.altitude == 1)
        assert(t.blocks[0].emitter[0].negBandLink is None)
        assert(t.blocks[1].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[1].collector[0].negBandLink is None)
        assert(t.blocks[2].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[2].collector[0].negBandLink is None)
class Test_v5_d(unittest.TestCase):
    """Layout checks for the topology parsed from data/v5_d.xml."""
    def setUp(self):
        # Parse the fixture topology fresh for each test
        import parser
        self.t = parser.parseFile('data/v5_d.xml')
    def test_band_emitters_collectors(self):
        """ Check which block indices emit to / collect from each band """
        t = self.t
        assert([snap.block.index for snap in t.edges[0].posBand.emitters] == [])
        assert([snap.block.index for snap in t.edges[0].posBand.collectors] == [])
        assert([snap.block.index for snap in t.edges[0].negBand.emitters] == [1,2])
        assert([snap.block.index for snap in t.edges[0].negBand.collectors] == [0])
    def test_block_index(self):
        """ checks that topology block indexing follows block index values """
        t = self.t
        # Make sure the matching is correct
        for index in t.blocks:
            assert(t.blocks[index].index == index)
    def test_emitter_collector_count(self):
        """ Counts the number of emitters and collectors in each block """
        # {blockIndex: (#collectors,#emitters), ...}
        vals = { 0: (1,0), 1: (0,1), 2: (0,1)}
        t = self.t
        for index in t.blocks:
            assert(len(t.blocks[index].collector) == vals[index][0])
            assert(len(t.blocks[index].emitter) == vals[index][1])
    def test_snaps_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        t = self.t
        assert(t.blocks[0].collector[0].posBandLink is None)
        assert(t.blocks[0].collector[0].negBandLink.altitude == -1)
        assert(t.blocks[1].emitter[0].posBandLink is None)
        assert(t.blocks[1].emitter[0].negBandLink.altitude == -1)
        assert(t.blocks[2].emitter[0].posBandLink is None)
        assert(t.blocks[2].emitter[0].negBandLink.altitude == -1)
class Test_v5_e(unittest.TestCase):
    """Layout checks for the topology parsed from data/v5_e.xml."""
    def setUp(self):
        # Parse the fixture topology fresh for each test
        import parser
        self.t = parser.parseFile('data/v5_e.xml')
    def test_band_emitters_collectors(self):
        """ Check which block indices emit to / collect from each band """
        t = self.t
        # Check locations
        assert([snap.block.index for snap in t.edges[0].posBand.emitters] == [0,2])
        assert([snap.block.index for snap in t.edges[0].posBand.collectors] == [1,3])
        assert([snap.block.index for snap in t.edges[0].negBand.emitters] == [2])
        assert([snap.block.index for snap in t.edges[0].negBand.collectors] == [1])
    def test_block_index(self):
        """ checks that topology block indexing follows block index values """
        t = self.t
        # Make sure the matching is correct
        for index in t.blocks:
            assert(t.blocks[index].index == index)
    def test_emitter_collector_count(self):
        """ Counts the number of emitters and collectors in each block """
        # {blockIndex: (#collectors,#emitters), ...}
        vals = { 0: (0,1), 1: (1,0), 2: (0,1), 3: (1,0)}
        t = self.t
        for index in t.blocks:
            assert(len(t.blocks[index].collector) == vals[index][0])
            assert(len(t.blocks[index].emitter) == vals[index][1])
    def test_snaps_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        t = self.t
        assert(t.blocks[0].emitter[0].posBandLink.altitude == 1)
        assert(t.blocks[0].emitter[0].negBandLink is None)
        assert(t.blocks[1].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[1].collector[0].negBandLink.altitude == -1)
        assert(t.blocks[2].emitter[0].posBandLink.altitude == 1)
        assert(t.blocks[2].emitter[0].negBandLink.altitude == -1)
        assert(t.blocks[3].collector[0].posBandLink.altitude == 1)
        assert(t.blocks[3].collector[0].negBandLink is None)
class Test_v5_f(unittest.TestCase):
    """Checks on the topology parsed from data/v5_f.xml."""

    def setUp(self):
        import parser
        self.t = parser.parseFile('data/v5_f.xml')

    def test_band_emitters_collectors(self):
        """Check emitter/collector block indices on both bands of edge 0."""
        edge = self.t.edges[0]
        cases = [
            (edge.posBand.emitters, [1]),
            (edge.posBand.collectors, [2]),
            (edge.negBand.emitters, [1, 3]),
            (edge.negBand.collectors, [0, 2]),
        ]
        for snaps, expected in cases:
            assert([snap.block.index for snap in snaps] == expected)

    def test_block_index(self):
        """ checks that topology block indexing follows block index values """
        for key, block in self.t.blocks.items():
            assert(block.index == key)

    def test_emitter_collector_count(self):
        """ Counts the number of emitters and collectors in each block """
        # Expected (collector count, emitter count) per block index.
        expected = {0: (1, 0), 1: (0, 1), 2: (1, 0), 3: (0, 1)}
        for key, block in self.t.blocks.items():
            assert(len(block.collector) == expected[key][0])
            assert(len(block.emitter) == expected[key][1])

    def test_snaps_connectivity(self):
        """ Test to make sure that snaps are connected to the correct bands """
        blocks = self.t.blocks
        # Every snap is on the negative band at -1; posBand varies per snap.
        cases = [
            (blocks[0].collector[0], None),
            (blocks[1].emitter[0], 1),
            (blocks[2].collector[0], 1),
            (blocks[3].emitter[0], None),
        ]
        for snap, pos_alt in cases:
            assert(snap.negBandLink.altitude == -1)
            if pos_alt is None:
                assert(snap.posBandLink is None)
            else:
                assert(snap.posBandLink.altitude == pos_alt)
if __name__ == "__main__":
    # Run the full test suite when this file is executed directly.
    unittest.main()
| 39.540984
| 85
| 0.614594
| 1,693
| 12,060
| 4.330183
| 0.049616
| 0.066839
| 0.081571
| 0.065475
| 0.956077
| 0.925385
| 0.925385
| 0.919656
| 0.913109
| 0.892375
| 0
| 0.032748
| 0.240381
| 12,060
| 304
| 86
| 39.671053
| 0.767493
| 0.117579
| 0
| 0.690141
| 0
| 0
| 0.008162
| 0
| 0
| 0
| 0
| 0
| 0.525822
| 1
| 0.131455
| false
| 0
| 0.042254
| 0
| 0.206573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b28cac2ed64661bcaeb1ad6000e0738c6b0ae59
| 64,156
|
py
|
Python
|
main/lib/idds/tests/test_workflow_condition_v2.py
|
HSF/iDDS
|
39144a059522074fdedab1a56a9daded9fa65d03
|
[
"Apache-2.0"
] | null | null | null |
main/lib/idds/tests/test_workflow_condition_v2.py
|
HSF/iDDS
|
39144a059522074fdedab1a56a9daded9fa65d03
|
[
"Apache-2.0"
] | 1
|
2020-10-01T15:47:50.000Z
|
2020-10-28T17:55:01.000Z
|
main/lib/idds/tests/test_workflow_condition_v2.py
|
HSF/iDDS
|
39144a059522074fdedab1a56a9daded9fa65d03
|
[
"Apache-2.0"
] | 5
|
2019-09-14T20:34:41.000Z
|
2021-12-18T10:46:58.000Z
|
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, <wen.guan@cern.ch>, 2021
"""
Test workflow condtions.
"""
import unittest2 as unittest
# from nose.tools import assert_equal
from idds.common.utils import setup_logging
from idds.common.utils import json_dumps, json_loads
from idds.workflowv2.work import Work, WorkStatus
from idds.workflowv2.workflow import (CompositeCondition, AndCondition, OrCondition,
Condition, ConditionTrigger, Workflow, ParameterLink)
setup_logging(__name__)
class TestWorkflowCondtion(unittest.TestCase):
def test_condition(self):
    """Exercise condition classes in isolation.

    Builds eight Work objects, then checks CompositeCondition, AndCondition,
    OrCondition, Condition and nested conditions for: the work sets they
    report (all/pre/next), their boolean status as prerequisite works finish,
    and the works returned for each ConditionTrigger mode.  The assertion
    pattern relies on: NotTriggered and Triggered being repeatable, while
    ToTrigger consumes the trigger (a second identical call returns []).
    Returns the assembled workflow.
    """
    # init_p = Parameter({'input_dataset': 'data17:data17.test.raw.1'})
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    work4 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=4)
    work5 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=5)
    work6 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=6)
    work7 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=7,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.raw.1'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work2'}])
    work8 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=8,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.work2'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work3'}])
    workflow = Workflow()
    workflow.add_work(work1, initial=True)
    workflow.add_work(work2, initial=True)
    workflow.add_work(work3, initial=False)
    workflow.add_work(work8, initial=False)
    # CompositeCondition with scalar (non-list) arguments.
    cond1 = CompositeCondition(conditions=work1.is_finished, true_works=work2, false_works=work3)
    works = cond1.all_works()
    assert(works == [work1, work2, work3])
    works = cond1.all_pre_works()
    assert(works == [work1])
    works = cond1.all_next_works()
    assert(works == [work2, work3])
    cond_status = cond1.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond1.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    works = cond1.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work3])
    work1.status = WorkStatus.Finished
    works = cond1.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work2])
    work1.status = WorkStatus.New
    works = cond1.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work3])
    # ToTrigger consumes the trigger: second call returns nothing.
    works = cond1.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    works = cond1.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work2])
    works = cond1.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.New
    works = cond1.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work3])
    work1.status = WorkStatus.Finished
    works = cond1.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work2])
    work1.status = WorkStatus.New
    # CompositeCondition with list arguments; true only when ALL pre-works finish.
    cond2 = CompositeCondition(conditions=[work1.is_finished, work2.is_finished, work3.is_finished], true_works=[work4, work5], false_works=[work6, work7])
    works = cond2.all_works()
    assert(works == [work1, work2, work3, work4, work5, work6, work7])
    works = cond2.all_pre_works()
    assert(works == [work1, work2, work3])
    works = cond2.all_next_works()
    assert(works == [work4, work5, work6, work7])
    cond_status = cond2.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond2.get_condition_status()
    assert(cond_status is False)
    work2.status = WorkStatus.Finished
    cond_status = cond2.get_condition_status()
    assert(cond_status is False)
    work3.status = WorkStatus.Finished
    cond_status = cond2.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond2.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond2.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond2.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work6, work7])
    works = cond2.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond2.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work4, work5])
    works = cond2.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond2.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond2.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    # AndCondition: same expectations as the list-based CompositeCondition above.
    cond3 = AndCondition(conditions=[work1.is_finished, work2.is_finished, work3.is_finished], true_works=[work4, work5], false_works=[work6, work7])
    works = cond3.all_works()
    assert(works == [work1, work2, work3, work4, work5, work6, work7])
    works = cond3.all_pre_works()
    assert(works == [work1, work2, work3])
    works = cond3.all_next_works()
    assert(works == [work4, work5, work6, work7])
    cond_status = cond3.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond3.get_condition_status()
    assert(cond_status is False)
    work2.status = WorkStatus.Finished
    cond_status = cond3.get_condition_status()
    assert(cond_status is False)
    work3.status = WorkStatus.Finished
    cond_status = cond3.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond3.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond3.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond3.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work6, work7])
    works = cond3.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond3.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work4, work5])
    works = cond3.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond3.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    work2.status = WorkStatus.Finished
    work3.status = WorkStatus.Finished
    works = cond3.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    # OrCondition: true as soon as ANY pre-work finishes.
    cond4 = OrCondition(conditions=[work1.is_finished, work2.is_finished, work3.is_finished], true_works=[work4, work5], false_works=[work6, work7])
    works = cond4.all_works()
    assert(works == [work1, work2, work3, work4, work5, work6, work7])
    works = cond4.all_pre_works()
    assert(works == [work1, work2, work3])
    works = cond4.all_next_works()
    assert(works == [work4, work5, work6, work7])
    cond_status = cond4.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond4.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    work2.status = WorkStatus.Finished
    cond_status = cond4.get_condition_status()
    assert(cond_status is True)
    work2.status = WorkStatus.New
    work3.status = WorkStatus.Finished
    cond_status = cond4.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond4.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    # work2.status = WorkStatus.Finished
    # work3.status = WorkStatus.Finished
    works = cond4.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond4.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work6, work7])
    works = cond4.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    # work2.status = WorkStatus.Finished
    # work3.status = WorkStatus.Finished
    works = cond4.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work4, work5])
    works = cond4.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    works = cond4.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work6, work7])
    work1.status = WorkStatus.Finished
    # work2.status = WorkStatus.Finished
    # work3.status = WorkStatus.Finished
    works = cond4.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work4, work5])
    work1.status = WorkStatus.New
    work2.status = WorkStatus.New
    work3.status = WorkStatus.New
    # Condition: the single-cond convenience form; mirrors cond1's behaviour.
    cond5 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    works = cond5.all_works()
    assert(works == [work1, work2, work3])
    works = cond5.all_pre_works()
    assert(works == [work1])
    works = cond5.all_next_works()
    assert(works == [work2, work3])
    cond_status = cond5.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond5.get_condition_status()
    assert(cond_status is True)
    work1.status = WorkStatus.New
    works = cond5.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work3])
    work1.status = WorkStatus.Finished
    works = cond5.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work2])
    work1.status = WorkStatus.New
    works = cond5.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work3])
    works = cond5.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    works = cond5.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work2])
    works = cond5.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.New
    works = cond5.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work3])
    work1.status = WorkStatus.Finished
    works = cond5.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work2])
    work1.status = WorkStatus.New
    # multiple conditions: a Condition nested inside a CompositeCondition's true branch.
    cond6 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond7 = CompositeCondition(conditions=[work4.is_finished, work5.is_finished], true_works=[work6, cond6], false_works=work7)
    works = cond7.all_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work2, work3, work4, work5, work6, work7])
    works = cond7.all_pre_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4, work5])
    works = cond7.all_next_works()
    works.sort(key=lambda x: x.work_id)
    # print([w.work_id for w in works])
    assert(works == [work2, work3, work6, work7])
    cond_status = cond7.get_condition_status()
    assert(cond_status is False)
    work4.status = WorkStatus.Finished
    cond_status = cond7.get_condition_status()
    assert(cond_status is False)
    work5.status = WorkStatus.Finished
    cond_status = cond7.get_condition_status()
    assert(cond_status is True)
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2, work6])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    # work6 already triggered above, so only the newly-true work2 comes back.
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2, work6])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    # multiple conditions: a condition's own truth value used as a sub-condition.
    # cond8 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond8 = Condition(cond=work1.is_finished)
    cond9 = CompositeCondition(conditions=[work4.is_finished, cond8.is_condition_true], true_works=[work6], false_works=work7)
    works = cond9.all_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4, work6, work7])
    works = cond9.all_pre_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4])
    works = cond9.all_next_works()
    works.sort(key=lambda x: x.work_id)
    # print([w.work_id for w in works])
    assert(works == [work6, work7])
    cond_status = cond9.get_condition_status()
    assert(cond_status is False)
    work4.status = WorkStatus.Finished
    cond_status = cond9.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond9.get_condition_status()
    assert(cond_status is True)
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    return workflow
def print_workflow(self, workflow):
    """Dump the workflow's condition table to stdout (debug helper)."""
    print('print workflow')
    print(workflow.conditions)
    for cond_id in workflow.conditions:
        cond = workflow.conditions[cond_id]
        print(cond_id)
        print(cond)
        for part in (cond.conditions, cond.true_works, cond.false_works):
            print(part)
        # Recurse one level into nested composite conditions on the true branch.
        for child in cond.true_works:
            print(child)
            if isinstance(child, CompositeCondition):
                for part in (child.conditions, child.true_works, child.false_works):
                    print(part)
def test_workflow(self):
    """Exercise conditions attached to a Workflow, plus JSON round-tripping.

    Registers all works and two composite conditions with the workflow,
    verifies that the independent (un-gated) works are exactly
    work1/work4/work5/work8, checks that json_dumps -> json_loads ->
    load_metadata -> json_dumps is the identity on the serialized form,
    and then repeats the cond7/cond9 trigger checks from test_condition.
    Returns the assembled workflow.
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    work4 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=4)
    work5 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=5)
    work6 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=6)
    work7 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=7,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.raw.1'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work2'}])
    work8 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=8,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.work2'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work3'}])
    workflow = Workflow()
    workflow.add_work(work1, initial=False)
    workflow.add_work(work2, initial=False)
    workflow.add_work(work3, initial=False)
    workflow.add_work(work4, initial=False)
    workflow.add_work(work5, initial=False)
    workflow.add_work(work6, initial=False)
    workflow.add_work(work7, initial=False)
    workflow.add_work(work8, initial=False)
    # multiple conditions
    cond6 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond7 = CompositeCondition(conditions=[work4.is_finished, work5.is_finished], true_works=[work6, cond6], false_works=work7)
    # multiple conditions
    # cond8 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond8 = Condition(cond=work1.is_finished)
    cond9 = CompositeCondition(conditions=[work4.is_finished, cond8.is_condition_true], true_works=[work6], false_works=work7)
    workflow.add_condition(cond7)
    workflow.add_condition(cond9)
    # Works that are not gated by any condition should be the independent ones.
    id_works = workflow.independent_works
    # print(id_works)
    id_works.sort()
    id_works_1 = [work1, work4, work5, work8]
    id_works_1 = [w.get_template_id() for w in id_works_1]
    id_works_1.sort()
    # id_works.sort(key=lambda x: x.work_id)
    assert(id_works == id_works_1)
    # Serialization round-trip must be stable after reloading metadata.
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow1 = json_loads(workflow_str)
    # print('before load_metadata')
    # self.print_workflow(workflow1)
    workflow1.load_metadata()
    # print('after load_metadata')
    # self.print_workflow(workflow1)
    workflow_str1 = json_dumps(workflow1, sort_keys=True, indent=4)
    assert(workflow_str == workflow_str1)
    works = cond7.all_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work2, work3, work4, work5, work6, work7])
    works = cond7.all_pre_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4, work5])
    works = cond7.all_next_works()
    works.sort(key=lambda x: x.work_id)
    # print([w.work_id for w in works])
    assert(works == [work2, work3, work6, work7])
    cond_status = cond7.get_condition_status()
    assert(cond_status is False)
    work4.status = WorkStatus.Finished
    cond_status = cond7.get_condition_status()
    assert(cond_status is False)
    work5.status = WorkStatus.Finished
    cond_status = cond7.get_condition_status()
    assert(cond_status is True)
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2, work6])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    # ToTrigger consumes the trigger: second call returns nothing.
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2, work6])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    # multiple conditions
    # cond8 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    # cond8 = Condition(cond=work1.is_finished)
    # cond9 = CompositeCondition(conditions=[work4.is_finished, cond8.is_condition_true], true_works=[work6], false_works=work7)
    works = cond9.all_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4, work6, work7])
    works = cond9.all_pre_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work4])
    works = cond9.all_next_works()
    works.sort(key=lambda x: x.work_id)
    # print([w.work_id for w in works])
    assert(works == [work6, work7])
    cond_status = cond9.get_condition_status()
    assert(cond_status is False)
    work4.status = WorkStatus.Finished
    cond_status = cond9.get_condition_status()
    assert(cond_status is False)
    work1.status = WorkStatus.Finished
    cond_status = cond9.get_condition_status()
    assert(cond_status is True)
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.NotTriggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    return workflow
def test_workflow_condition_reload(self):
    """Round-trip a workflow with composite conditions through JSON, then
    exercise the (still in-memory) conditions' trigger bookkeeping.

    cond7 AND-combines work4/work5 completion; its true branch holds a plain
    work plus a nested Condition (cond6) on work1.  cond9 nests
    cond8.is_condition_true as one of its own sub-conditions.  The JSON text
    of the workflow must be identical before and after a
    json_dumps -> json_loads -> load_metadata round trip.
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    work4 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=4)
    work5 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=5)
    work6 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=6)
    work7 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=7,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.raw.1'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work2'}])
    work8 = Work(executable='echo',
                 arguments='--in=IN_DATASET --out=OUT_DATASET',
                 sandbox=None,
                 work_id=8,
                 primary_input_collection={'scope': 'data17', 'name': 'data17.test.work2'},
                 output_collections=[{'scope': 'data17', 'name': 'data17.test.work3'}])
    workflow = Workflow()
    workflow.add_work(work1, initial=False)
    workflow.add_work(work2, initial=False)
    workflow.add_work(work3, initial=False)
    workflow.add_work(work4, initial=False)
    workflow.add_work(work5, initial=False)
    workflow.add_work(work6, initial=False)
    workflow.add_work(work7, initial=False)
    workflow.add_work(work8, initial=False)
    # multiple conditions
    cond6 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond7 = CompositeCondition(conditions=[work4.is_finished, work5.is_finished], true_works=[work6, cond6], false_works=work7)
    # multiple conditions
    # cond8 = Condition(cond=work1.is_finished, true_work=work2, false_work=work3)
    cond8 = Condition(cond=work1.is_finished)
    cond9 = CompositeCondition(conditions=[work4.is_finished, cond8.is_condition_true], true_works=[work6], false_works=work7)
    workflow.add_condition(cond7)
    workflow.add_condition(cond9)
    # The serialized form must survive a load / load_metadata cycle unchanged.
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow1 = json_loads(workflow_str)
    # print('before load_metadata')
    # self.print_workflow(workflow1)
    workflow1.load_metadata()
    # print('after load_metadata')
    # self.print_workflow(workflow1)
    workflow_str1 = json_dumps(workflow1, sort_keys=True, indent=4)
    assert(workflow_str == workflow_str1)
    # --- cond7, ToTrigger: each eligible work is returned exactly once;
    # an immediate second call yields nothing until the state changes.
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    # cond7 now true; work1 unfinished makes nested cond6 take its false branch.
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    # cond6 flips to its true branch once work1 finishes.
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    works = cond7.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    # --- cond7, Triggered: reports the works already triggered for the
    # current condition state (repeat calls are not suppressed).
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work5.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3, work6])
    work1.status = WorkStatus.Finished
    works = cond7.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2, work6])
    work4.status = WorkStatus.New
    work5.status = WorkStatus.New
    work1.status = WorkStatus.New
    # cond9
    # --- cond9, ToTrigger: same once-only semantics with a nested
    # Condition (cond8) acting as one of the sub-conditions.
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [work7])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    assert(works == [])
    work1.status = WorkStatus.Finished
    # Re-setting the same status does not re-arm an already-fired trigger.
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    works = cond9.get_next_works(trigger=ConditionTrigger.ToTrigger)
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    # --- cond9, Triggered.
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    assert(works == [work7])
    work4.status = WorkStatus.Finished
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work1.status = WorkStatus.Finished
    works = cond9.get_next_works(trigger=ConditionTrigger.Triggered)
    works.sort(key=lambda x: x.work_id)
    assert(works == [work6])
    work4.status = WorkStatus.New
    work1.status = WorkStatus.New
    return workflow
def test_workflow_loop(self):
    """A workflow carrying a loop condition must survive a JSON
    dump -> load -> load_metadata round trip byte-for-byte."""
    wf = Workflow()
    w_a = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w_b = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    for w in (w_a, w_b):
        wf.add_work(w, initial=False)
    wf.add_loop_condition(Condition(cond=w_b.is_finished))
    serialized = json_dumps(wf, sort_keys=True, indent=4)
    # print(serialized)
    restored = json_loads(serialized)
    # print('before load_metadata')
    # self.print_workflow(restored)
    restored.load_metadata()
    # print('after load_metadata')
    # self.print_workflow(restored)
    assert json_dumps(restored, sort_keys=True, indent=4) == serialized
def test_workflow_loop1(self):
    """The first get_new_works on a looping workflow yields every work and
    counts as run number 1."""
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    wf = Workflow()
    for w in (w1, w2):
        wf.add_work(w, initial=False)
    wf.add_loop_condition(Condition(cond=w2.is_finished))
    pending = sorted(wf.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2]
    assert wf.num_run == 1
    # workflow_str = json_dumps(wf, sort_keys=True, indent=4)
    # print(workflow_str)
    return wf
def test_workflow_loop2(self):
    """Each finished iteration of a looping workflow starts a new run;
    a failed iteration stops the loop and no further works are issued."""
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    wf = Workflow()
    wf.add_work(w1, initial=False)
    wf.add_work(w2, initial=False)
    wf.add_loop_condition(Condition(cond=w2.is_finished))
    # Three iterations: the first two finish cleanly, the third fails.
    for expected_run in (1, 2, 3):
        pending = sorted(wf.get_new_works(), key=lambda w: w.work_id)
        assert pending == [w1, w2]
        assert wf.num_run == expected_run
        outcome = WorkStatus.Failed if expected_run == 3 else WorkStatus.Finished
        for w in pending:
            w.transforming = True
            w.status = outcome
    # The failed iteration ends the loop: nothing new, run count frozen.
    pending = sorted(wf.get_new_works(), key=lambda w: w.work_id)
    assert pending == []
    assert wf.num_run == 3
    return wf
def test_workflow_subworkflow(self):
    """A sub-workflow's works surface through the parent workflow; failing
    every work terminates the whole thing."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    w3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(w3, initial=False)
    outer.add_work(inner, initial=False)
    # The parent reports its own work plus both sub-workflow works.
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2, w3]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Failed
    # Every work failed: nothing further is scheduled.
    assert sorted(outer.get_new_works(), key=lambda w: w.work_id) == []
    assert outer.is_terminated() is True
def test_workflow_subworkflow1(self):
    """A condition-gated sub-workflow never starts when its gate work fails."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    gate = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(gate, initial=False)
    outer.add_work(inner, initial=False)
    outer.add_condition(Condition(cond=gate.is_finished, true_work=inner))
    # Only the gate work is eligible at first.
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [gate]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Failed
    # The gate failed, so the sub-workflow's works are never released.
    assert sorted(outer.get_new_works(), key=lambda w: w.work_id) == []
    assert outer.is_terminated() is True
def test_workflow_subworkflow2(self):
    """Finishing the gate work releases the sub-workflow's works; once they
    finish too, the whole workflow is terminated."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    gate = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(gate, initial=False)
    outer.add_work(inner, initial=False)
    outer.add_condition(Condition(cond=gate.is_finished, true_work=inner))
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [gate]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Finished
    # The gate finished: the condition releases the sub-workflow works.
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2]
    assert outer.is_terminated() is False
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Finished
    assert sorted(outer.get_new_works(), key=lambda w: w.work_id) == []
    assert outer.is_terminated() is True
def test_workflow_subloopworkflow(self):
    """An ungated looping sub-workflow starts immediately; failing every
    work (so the loop cannot restart) terminates the parent workflow."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    inner.add_loop_condition(Condition(cond=w2.is_finished))
    w3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(w3, initial=False)
    outer.add_work(inner, initial=False)
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2, w3]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Failed
    # All works failed, so the loop never restarts and the workflow is done.
    assert outer.is_terminated() is True
def test_workflow_subloopworkflow1(self):
    """A finished loop iteration re-issues the sub-workflow's works (but not
    the parent's own work); a failed iteration then ends everything."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    inner.add_loop_condition(Condition(cond=w2.is_finished))
    w3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(w3, initial=False)
    outer.add_work(inner, initial=False)
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2, w3]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Finished
    assert outer.is_terminated() is False
    # The loop condition held: only the inner works run again.
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [w1, w2]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Failed
    # The failed iteration ends the loop and the workflow.
    assert sorted(outer.get_new_works(), key=lambda w: w.work_id) == []
    # workflow_str = json_dumps(outer, sort_keys=True, indent=4)
    # print(workflow_str)
    assert outer.is_terminated() is True
def test_workflow_subloopworkflow2(self):
    """A condition-gated looping sub-workflow never starts when the gate
    work fails."""
    inner = Workflow()
    w1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    w2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    inner.add_work(w1, initial=False)
    inner.add_work(w2, initial=False)
    inner.add_loop_condition(Condition(cond=w2.is_finished))
    gate = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    outer = Workflow()
    outer.add_work(gate, initial=False)
    outer.add_work(inner, initial=False)
    outer.add_condition(Condition(cond=gate.is_finished, true_work=inner))
    pending = sorted(outer.get_new_works(), key=lambda w: w.work_id)
    assert pending == [gate]
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Failed
    # The gate failed: the looping sub-workflow is never released.
    assert sorted(outer.get_new_works(), key=lambda w: w.work_id) == []
    assert outer.is_terminated() is True
def test_workflow_subloopworkflow3(self):
    """Looping sub-workflow gated by work3: finishing work3 releases the
    sub-workflow, each finished iteration re-issues its works, and a failed
    iteration ends the loop and the whole workflow.
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    workflow1 = Workflow()
    workflow1.add_work(work1, initial=False)
    workflow1.add_work(work2, initial=False)
    cond = Condition(cond=work2.is_finished)
    workflow1.add_loop_condition(cond)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    cond1 = Condition(cond=work3.is_finished, true_work=workflow1)
    workflow = Workflow()
    workflow.add_work(work3, initial=False)
    workflow.add_work(workflow1, initial=False)
    workflow.add_condition(cond1)
    # Only the gate work is released initially.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3])
    # assert(workflow.num_run == 1)
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work3 finished -> the sub-workflow's works become available.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work2])
    assert(workflow.is_terminated() is False)
    for work in works:
        work.transforming = True
        work.status = WorkStatus.Finished
    # The loop condition held, so a new iteration re-issues work1/work2.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    # workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(works == [work1, work2])
    assert(workflow.is_terminated() is False)
    # With no status change, the same still-new works are reported again.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    # workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(works == [work1, work2])
    assert(workflow.is_terminated() is False)
    for work in works:
        work.transforming = True
        work.status = WorkStatus.Failed
    # A failed iteration stops the loop; nothing further is scheduled.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    assert(workflow.is_terminated() is True)
def test_workflow_subloopworkflow_reload(self):
    """Same scenario as test_workflow_subloopworkflow3, but the workflow is
    serialized to JSON and reloaded before every step, proving that the
    loop/condition state survives persistence.

    NOTE(review): works returned by the reloaded workflow still compare
    equal to the original work1/work2/work3 objects -- presumably Work
    defines value-based equality; confirm in the Work class.
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1)
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2)
    workflow1 = Workflow()
    workflow1.add_work(work1, initial=False)
    workflow1.add_work(work2, initial=False)
    cond = Condition(cond=work2.is_finished)
    workflow1.add_loop_condition(cond)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3)
    cond1 = Condition(cond=work3.is_finished, true_work=workflow1)
    workflow = Workflow()
    workflow.add_work(work3, initial=False)
    workflow.add_work(workflow1, initial=False)
    workflow.add_condition(cond1)
    # reload
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow = json_loads(workflow_str)
    # Only the gate work is released initially.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3])
    # assert(workflow.num_run == 1)
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # reload
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow = json_loads(workflow_str)
    # The gate finished before serialization: sub-workflow works released.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1, work2])
    assert(workflow.is_terminated() is False)
    for work in works:
        work.transforming = True
        work.status = WorkStatus.Finished
    # reload
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow = json_loads(workflow_str)
    # The loop restarted: the inner works are re-issued after reload.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    # workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(works == [work1, work2])
    assert(workflow.is_terminated() is False)
    for work in works:
        work.transforming = True
        work.status = WorkStatus.Failed
    # reload
    workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    workflow = json_loads(workflow_str)
    # The failed iteration (persisted and reloaded) terminates everything.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    assert(workflow.is_terminated() is True)
def test_workflow_subloopworkflow_parameter_link(self):
    """A ParameterLink copies the producer's primary output collection into
    the consumer's primary input collection once the producer finishes."""
    producer = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1,
                    primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_1'},
                    primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_1'})
    consumer = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2,
                    primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_2'},
                    primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_2'})
    wf = Workflow()
    wf.add_work(producer, initial=False)
    wf.add_work(consumer, initial=False)
    wf.add_condition(Condition(cond=producer.is_finished, true_work=consumer))
    link = ParameterLink(parameters=[{'source': 'primary_output_collection',
                                      'destination': 'primary_input_collection'}])
    wf.add_parameter_link(producer, consumer, link)
    # Step 1: only the producer is eligible; its collections are untouched.
    pending = sorted(wf.get_new_works(), key=lambda w: w.work_id)
    assert pending == [producer]
    first = pending[0]
    assert first.primary_input_collection.name == 'input_test_work_1'
    assert first.primary_output_collection.name == 'output_test_work_1'
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Finished
    # Step 2: the condition releases the consumer; the link rewired its input.
    pending = sorted(wf.get_new_works(), key=lambda w: w.work_id)
    assert pending == [consumer]
    assert wf.is_terminated() is False
    second = pending[0]
    assert second.primary_input_collection.name == 'output_test_work_1'
    assert second.primary_output_collection.name == 'output_test_work_2'
    for w in pending:
        w.transforming = True
        w.status = WorkStatus.Finished
    # Step 3: both works finished -> nothing left, workflow terminated.
    assert sorted(wf.get_new_works(), key=lambda w: w.work_id) == []
    assert wf.is_terminated() is True
def test_workflow_subloopworkflow_parameter_link1(self):
    """Parameter link inside a looping workflow: work1's output collection
    feeds work2's input on every iteration, and the second loop run yields
    '.2'-suffixed output collection names (presumably the run number is
    appended per iteration -- confirm against Workflow's loop handling).
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1,
                 primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_1'},
                 primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_1'})
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2,
                 primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_2'},
                 primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_2'})
    workflow1 = Workflow()
    workflow1.add_work(work1, initial=False)
    workflow1.add_work(work2, initial=False)
    cond1 = Condition(cond=work1.is_finished, true_work=work2)
    workflow1.add_condition(cond1)
    # Copy work1's primary output collection into work2's primary input.
    p_link = ParameterLink(parameters=[{'source': 'primary_output_collection',
                                        'destination': 'primary_input_collection'}])
    workflow1.add_parameter_link(work1, work2, p_link)
    cond = Condition(cond=work2.is_finished)
    workflow1.add_loop_condition(cond)
    # Run 1: work1 first, with its original collections.
    works = workflow1.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1])
    # assert(workflow.num_run == 1)
    work1_1 = works[0]
    assert(work1_1.primary_input_collection.name == 'input_test_work_1')
    assert(work1_1.primary_output_collection.name == 'output_test_work_1')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work1 finished: the parameter link rewires work2's input collection.
    works = workflow1.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    assert(workflow1.is_terminated() is False)
    work2_1 = works[0]
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(work2_1.primary_input_collection.name == 'output_test_work_1')
    assert(work2_1.primary_output_collection.name == 'output_test_work_2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work2 finished -> the loop condition holds -> run 2 starts with work1.
    works = workflow1.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1])
    assert(workflow1.is_terminated() is False)
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    work1_2 = works[0]
    # Run 2: output collection names carry the '.2' run suffix.
    assert(work1_2.primary_input_collection.name == 'input_test_work_1')
    assert(work1_2.primary_output_collection.name == 'output_test_work_1.2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    works = workflow1.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    assert(workflow1.is_terminated() is False)
    work2_2 = works[0]
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(work2_2.primary_input_collection.name == 'output_test_work_1.2')
    assert(work2_2.primary_output_collection.name == 'output_test_work_2.2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Failed
    # work2 failed -> the loop ends and the workflow is terminated.
    works = workflow1.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    assert(workflow1.is_terminated() is True)
def test_workflow_subloopworkflow_parameter_link2(self):
    """Parameter links across a workflow boundary: an outer link feeds
    work3's output into work1 of a condition-gated looping sub-workflow,
    while an inner link chains work1 -> work2.  On loop run 2 the inner
    works get '.2'-suffixed output collection names while work1's input
    (fed by the outer, non-looping link) keeps work3's original output name.
    """
    work1 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=1,
                 primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_1'},
                 primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_1'})
    work2 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=2,
                 primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_2'},
                 primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_2'})
    workflow1 = Workflow()
    workflow1.add_work(work1, initial=False)
    workflow1.add_work(work2, initial=False)
    cond1 = Condition(cond=work1.is_finished, true_work=work2)
    workflow1.add_condition(cond1)
    # Inner link: work1's output collection -> work2's input collection.
    p_link = ParameterLink(parameters=[{'source': 'primary_output_collection',
                                        'destination': 'primary_input_collection'}])
    workflow1.add_parameter_link(work1, work2, p_link)
    cond = Condition(cond=work2.is_finished)
    workflow1.add_loop_condition(cond)
    work3 = Work(executable='/bin/hostname', arguments=None, sandbox=None, work_id=3,
                 primary_input_collection={'scope': 'test_scop', 'name': 'input_test_work_3'},
                 primary_output_collection={'scope': 'test_scop', 'name': 'output_test_work_3'})
    cond2 = Condition(cond=work3.is_finished, true_work=workflow1)
    # Outer link: work3's output collection -> work1's input collection.
    p_link1 = ParameterLink(parameters=[{'source': 'primary_output_collection',
                                         'destination': 'primary_input_collection'}])
    workflow = Workflow()
    workflow.add_work(work3, initial=False)
    workflow.add_work(workflow1, initial=False)
    workflow.add_condition(cond2)
    workflow.add_parameter_link(work3, work1, p_link1)
    # Only the gate work is released initially, with its original collections.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work3])
    # assert(workflow.num_run == 1)
    work3_1 = works[0]
    assert(work3_1.primary_input_collection.name == 'input_test_work_3')
    assert(work3_1.primary_output_collection.name == 'output_test_work_3')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work3 finished: the outer link rewires work1's input collection.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1])
    assert(workflow.is_terminated() is False)
    work1_1 = works[0]
    # workflow_str = json_dumps(workflow, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(work1_1.primary_input_collection.name == 'output_test_work_3')
    assert(work1_1.primary_output_collection.name == 'output_test_work_1')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work1 finished: the inner link rewires work2's input collection.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    assert(workflow1.is_terminated() is False)
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    work2_1 = works[0]
    assert(work2_1.primary_input_collection.name == 'output_test_work_1')
    assert(work2_1.primary_output_collection.name == 'output_test_work_2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    # work2 finished -> loop run 2: work1's input still comes from work3,
    # but its output now carries the '.2' run suffix.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work1])
    assert(workflow1.is_terminated() is False)
    work1_2 = works[0]
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(work1_2.primary_input_collection.name == 'output_test_work_3')
    assert(work1_2.primary_output_collection.name == 'output_test_work_1.2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Finished
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [work2])
    assert(workflow1.is_terminated() is False)
    work2_2 = works[0]
    # workflow_str = json_dumps(workflow1, sort_keys=True, indent=4)
    # print(workflow_str)
    assert(work2_2.primary_input_collection.name == 'output_test_work_1.2')
    assert(work2_2.primary_output_collection.name == 'output_test_work_2.2')
    for work in works:
        # if work.work_id == 3:
        work.transforming = True
        work.status = WorkStatus.Failed
    # work2 failed -> the loop ends and the whole workflow is terminated.
    works = workflow.get_new_works()
    works.sort(key=lambda x: x.work_id)
    assert(works == [])
    assert(workflow.is_terminated() is True)
| 41.849967
| 159
| 0.642964
| 7,472
| 64,156
| 5.334984
| 0.024358
| 0.089105
| 0.071043
| 0.050523
| 0.961418
| 0.9562
| 0.955698
| 0.952889
| 0.947269
| 0.938038
| 0
| 0.027575
| 0.247646
| 64,156
| 1,532
| 160
| 41.877285
| 0.798293
| 0.067648
| 0
| 0.929302
| 0
| 0
| 0.041893
| 0.003284
| 0
| 0
| 0
| 0
| 0.220613
| 1
| 0.015332
| false
| 0
| 0.004259
| 0
| 0.024702
| 0.010221
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8eb8202337e15de0f72c753ffde80ad6ff9d27d2
| 224
|
py
|
Python
|
output/models/ms_data/attribute_group/attg_c036_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/attribute_group/attg_c036_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/attribute_group/attg_c036_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
"""Package re-exports for the attg_c036 generated model classes."""
from output.models.ms_data.attribute_group.attg_c036_xsd.attg_c036 import (
    Doc,
    Test,
)
from output.models.ms_data.attribute_group.attg_c036_xsd.attg_c036_inc import E

# Public API of this package.
__all__ = [
    "Doc",
    "Test",
    "E",
]
| 18.666667
| 79
| 0.709821
| 34
| 224
| 4.235294
| 0.470588
| 0.222222
| 0.222222
| 0.25
| 0.763889
| 0.763889
| 0.763889
| 0.763889
| 0.763889
| 0.763889
| 0
| 0.064865
| 0.174107
| 224
| 11
| 80
| 20.363636
| 0.713514
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8eeb8efd5e79f11b585441f78b538a5f5c24b3d5
| 2,268
|
py
|
Python
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vshufps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 2
|
2021-01-15T17:32:18.000Z
|
2021-12-21T02:53:58.000Z
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vshufps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 3
|
2021-03-26T20:33:59.000Z
|
2022-01-24T22:54:03.000Z
|
src/arch/x86/isa/insts/simd512/floating_point/data_transfer/vshufps.py
|
jyhuang91/gem5-avx
|
f988da46080f8db49beb39e20af437219f3aa4cb
|
[
"BSD-3-Clause"
] | 3
|
2021-03-27T16:36:19.000Z
|
2022-03-28T18:32:57.000Z
|
# gem5 x86 microcode for the AVX VSHUFPS instruction (shuffle packed
# single-precision floats).  One macroop per operand form:
#   VSHUFPS_XMM_XMM_I / _M_I / _P_I -- 128-bit register, memory (sib), and
#                                      RIP-relative memory sources;
#   VSHUFPS_YMM_*                   -- 256-bit forms, processed as two
#                                      128-bit halves.
# The low nibble of IMMEDIATE selects the shuffle for the low 64-bit chunk
# and the high nibble ("IMMEDIATE >> 4") for the high chunk; temporaries
# (ufpN) hold partial results so a shuffle does not clobber its own inputs.
# NOTE: the string body below is interpreted by gem5's microcode assembler
# and must not be edited casually.
microcode = '''
def macroop VSHUFPS_XMM_XMM_I {
shuffle ufp1, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, xmm0m, xmm1m, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp1, dataSize=8
};
def macroop VSHUFPS_XMM_M_I {
ldfp ufp1, seg, sib, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, sib, "DISPLACEMENT + 8", dataSize=8
shuffle ufp3, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, ufp1, ufp2, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp3, dataSize=8
};
def macroop VSHUFPS_XMM_P_I {
rdip t7
ldfp ufp1, seg, riprel, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, riprel, "DISPLACEMENT + 8", dataSize=8
shuffle ufp3, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, ufp1, ufp2, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp3, dataSize=8
};
def macroop VSHUFPS_YMM_YMM_I {
shuffle ufp1, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, xmm0m, xmm1m, size=4, ext="IMMEDIATE >> 4"
shuffle ufp2, xmm2v, xmm3v, size=4, ext="IMMEDIATE"
shuffle xmm3, xmm2m, xmm3m, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp1, dataSize=8
movfp xmm2, ufp2, dataSize=8
};
def macroop VSHUFPS_YMM_M_I {
ldfp ufp1, seg, sib, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, sib, "DISPLACEMENT + 8", dataSize=8
ldfp ufp3, seg, sib, "DISPLACEMENT + 16", dataSize=8
ldfp ufp4, seg, sib, "DISPLACEMENT + 24", dataSize=8
shuffle ufp5, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, ufp1, ufp2, size=4, ext="IMMEDIATE >> 4"
shuffle ufp6, xmm2v, xmm3v, size=4, ext="IMMEDIATE"
shuffle xmm3, ufp3, ufp4, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp5, dataSize=8
movfp xmm2, ufp6, dataSize=8
};
def macroop VSHUFPS_YMM_P_I {
rdip t7
ldfp ufp1, seg, riprel, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, riprel, "DISPLACEMENT + 8", dataSize=8
ldfp ufp3, seg, riprel, "DISPLACEMENT + 16", dataSize=8
ldfp ufp4, seg, riprel, "DISPLACEMENT + 24", dataSize=8
shuffle ufp5, xmm0v, xmm1v, size=4, ext="IMMEDIATE"
shuffle xmm1, ufp1, ufp2, size=4, ext="IMMEDIATE >> 4"
shuffle ufp6, xmm2v, xmm3v, size=4, ext="IMMEDIATE"
shuffle xmm3, ufp3, ufp4, size=4, ext="IMMEDIATE >> 4"
movfp xmm0, ufp5, dataSize=8
movfp xmm2, ufp6, dataSize=8
};
'''
| 36.580645
| 60
| 0.663139
| 331
| 2,268
| 4.489426
| 0.132931
| 0.127187
| 0.096904
| 0.205922
| 0.946164
| 0.946164
| 0.890983
| 0.830417
| 0.804845
| 0.804845
| 0
| 0.080663
| 0.20194
| 2,268
| 62
| 61
| 36.580645
| 0.740331
| 0
| 0
| 0.727273
| 0
| 0
| 0.991626
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8eed09ebcdeb6e3cdca4f7eb2d9624bf891caddb
| 179
|
py
|
Python
|
atm/functions/__init__.py
|
moeyensj/atm
|
0523600cf44423a1ef72ca40fff29bbfbe1281a8
|
[
"BSD-3-Clause"
] | 10
|
2019-05-04T01:02:16.000Z
|
2021-12-29T11:20:23.000Z
|
atm/functions/__init__.py
|
moeyensj/atm
|
0523600cf44423a1ef72ca40fff29bbfbe1281a8
|
[
"BSD-3-Clause"
] | 22
|
2019-04-26T03:17:24.000Z
|
2021-03-03T23:38:02.000Z
|
atm/functions/__init__.py
|
moeyensj/atm
|
0523600cf44423a1ef72ca40fff29bbfbe1281a8
|
[
"BSD-3-Clause"
] | 2
|
2019-09-23T05:52:18.000Z
|
2021-12-29T11:20:21.000Z
|
from .blackbody import *
from .hg import *
from .temperature import *
from .flux_lambda import *
from .interpolate_flux_lambda import *
from .optical import *
from .mags import *
| 22.375
| 38
| 0.765363
| 24
| 179
| 5.583333
| 0.416667
| 0.447761
| 0.238806
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156425
| 179
| 7
| 39
| 25.571429
| 0.887417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d946da8b87a12c38d473112cff6b294a9ec166ce
| 3,076
|
py
|
Python
|
run/gen_retrieval_list.py
|
dontLoveBugs/netvlad_tf_open
|
ec04267d0391797e44c693aa42c5d88fa181c77f
|
[
"MIT"
] | null | null | null |
run/gen_retrieval_list.py
|
dontLoveBugs/netvlad_tf_open
|
ec04267d0391797e44c693aa42c5d88fa181c77f
|
[
"MIT"
] | null | null | null |
run/gen_retrieval_list.py
|
dontLoveBugs/netvlad_tf_open
|
ec04267d0391797e44c693aa42c5d88fa181c77f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
@Time    : 2019-09-21 19:55
@Author  : Wang Xin
@Email   : wangxin_buaa@163.com
@File    : gen_retrieval.py

Generate image-retrieval match lists for the Aachen dataset.

For each slice ('day_time' and 'night_time'): NetVLAD descriptors are
computed for all database and query images, and for every database image
and every query image the 10 most similar database images (by descriptor
dot product) are appended as "<image> <match>" lines to
<slice>_match_list_file.txt.
"""
import numpy as np
import tensorflow as tf
from netvlad_tf.image_descriptor import ImageDescriptor

from run.database import AAcchenDB


def _top_matches(query_feats, db_feats, query_imgs, db_imgs, db, k=10):
    """Return (query_name, db_name) pairs for the k best db matches per query.

    Both feature arrays hold one descriptor per COLUMN, so
    scores[i, j] is the similarity of database image i to query image j.
    """
    scores = np.dot(db_feats.T, query_feats)
    ranks = np.argsort(-scores, axis=0)  # descending similarity per column
    pairs = []
    for j in range(len(query_imgs)):
        for qi in ranks[0:k, j]:
            pairs.append((db.get_image_name(query_imgs[j]),
                          db.get_image_name(db_imgs[qi])))
    return pairs


def _generate_match_list(slice_name):
    """Describe all images of one dataset slice and write its match-list file."""
    aachen_db = AAcchenDB(root='/data/vldata/aachen', slice=slice_name)
    db_imgs, q_imgs = aachen_db.db_imgs, aachen_db.query_imgs
    tf.reset_default_graph()
    imd = ImageDescriptor(is_grayscale=False)
    db_feats = imd.describeAllJpegsInPath(db_imgs, batch_size=1, verbose=True)
    q_feats = imd.describeAllJpegsInPath(q_imgs, batch_size=1, verbose=True)
    # Transpose so each column is one image's descriptor.
    db_feats = np.array(db_feats).T
    q_feats = np.array(q_feats).T
    # db-vs-db pairs first (each database image also retrieves against the
    # database), then query-vs-db pairs -- same order as the original script.
    match_list = _top_matches(db_feats, db_feats, db_imgs, db_imgs, aachen_db)
    match_list += _top_matches(q_feats, db_feats, q_imgs, db_imgs, aachen_db)
    # Append mode ('a') preserved from the original: re-runs accumulate
    # into the same file.  Context manager ensures the handle is closed.
    with open(slice_name + '_match_list_file.txt', 'a') as match_list_file:
        for q, p in match_list:
            match_list_file.write(str(q) + ' ' + str(p) + '\n')


_generate_match_list('day_time')
_generate_match_list('night_time')
| 26.290598
| 74
| 0.668726
| 505
| 3,076
| 3.790099
| 0.190099
| 0.10815
| 0.062696
| 0.066876
| 0.863636
| 0.858934
| 0.858934
| 0.858934
| 0.815047
| 0.815047
| 0
| 0.014521
| 0.171652
| 3,076
| 116
| 75
| 26.517241
| 0.736656
| 0.127113
| 0
| 0.866667
| 0
| 0
| 0.047104
| 0.022394
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d988931f28ba7ad52165d67ce603a734ee9bddba
| 3,113
|
py
|
Python
|
excel_2021-01-21_18-26-27.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
excel_2021-01-21_18-26-27.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
excel_2021-01-21_18-26-27.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
# This code is generated automatically by ClointFusion BOT Builder Tool.
# Recorded desktop-automation macro replayed through ClointFusion: it
# re-sends the keystrokes and mouse clicks captured by the BOT Builder.
# The interleaved time.sleep() calls reproduce the recorded pauses
# (whole seconds) between input events.
import ClointFusion as cf
import time
# Switch focus to the desktop and click its centre so subsequent
# keystrokes land on the desktop/search UI.
cf.window_show_desktop()
cf.mouse_click(int(cf.pg.size()[0]/2),int(cf.pg.size()[1]/2))
# Types "note" then presses Enter -- presumably launching Notepad via the
# OS search box; TODO confirm on the machine this was recorded on.
cf.key_write_enter('n',key='')
time.sleep(0)
cf.key_write_enter('o',key='')
time.sleep(1)
cf.key_write_enter('t',key='')
time.sleep(0)
cf.key_write_enter('e',key='')
time.sleep(0)
cf.key_press('enter')
time.sleep(1)
# Types the text "hi sushil how ru " character by character.
cf.key_write_enter('h',key='')
time.sleep(0)
cf.key_write_enter('i',key='')
time.sleep(1)
cf.key_press('space')
time.sleep(0)
cf.key_write_enter('s',key='')
time.sleep(0)
cf.key_write_enter('u',key='')
time.sleep(0)
cf.key_write_enter('s',key='')
time.sleep(0)
cf.key_write_enter('h',key='')
time.sleep(0)
cf.key_write_enter('i',key='')
time.sleep(0)
cf.key_write_enter('l',key='')
time.sleep(1)
cf.key_press('space')
time.sleep(0)
cf.key_write_enter('h',key='')
time.sleep(0)
cf.key_write_enter('o',key='')
time.sleep(0)
cf.key_write_enter('w',key='')
time.sleep(0)
cf.key_press('space')
time.sleep(1)
cf.key_write_enter('r',key='')
time.sleep(0)
cf.key_write_enter('u',key='')
time.sleep(0)
cf.key_press('space')
time.sleep(2)
# Recorded ASCII control characters: \x01 = Ctrl+A, \x03 = Ctrl+C,
# \x16 = Ctrl+V, \x18 = Ctrl+X -- presumably a select-all / copy /
# repeated paste / cut sequence as captured by the recorder.
# NOTE(review): verify how key_write_enter replays control codes.
cf.key_write_enter('\x01',key='')
time.sleep(1)
cf.key_write_enter('\x03',key='')
time.sleep(0)
cf.key_write_enter('\x16',key='')
time.sleep(2)
cf.key_write_enter('\x03',key='')
time.sleep(0)
cf.key_write_enter('\x16',key='')
time.sleep(0)
cf.key_write_enter('\x16',key='')
time.sleep(1)
cf.key_write_enter('\x16',key='')
time.sleep(1)
cf.key_write_enter('\x01',key='')
time.sleep(0)
cf.key_write_enter('\x18',key='')
time.sleep(0)
time.sleep(2)
# Close the active window (Alt+F4).
cf.key_press('alt+f4')
time.sleep(0)
time.sleep(1)
# Replay four recorded mouse clicks.  Each click target is located by
# matching a screenshot snip saved at record time; if the image search
# fails, fall back to the absolute coordinates that were recorded.
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\Sushil\AppData\Local\Temp\cf_log_0jbtwggm_generator\Images\Snips\1-ProgramManager-542_237.png',conf=0.7, wait=11),left_or_right='right', single_double_triple = 'single')
except:
    # Bare except preserved from the generated code: any failure in the
    # snip search falls back to fixed screen coordinates.
    cf.mouse_click(542,237,left_or_right='right', single_double_triple = 'single')
time.sleep(1)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\Sushil\AppData\Local\Temp\cf_log_0jbtwggm_generator\Images\Snips\2-ProgramManager-612_295.png',conf=0.7, wait=11),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(612,295,left_or_right='left', single_double_triple = 'single')
time.sleep(1)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\Sushil\AppData\Local\Temp\cf_log_0jbtwggm_generator\Images\Snips\3-ProgramManager-612_295.png',conf=0.7, wait=11),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(612,295,left_or_right='left', single_double_triple = 'single')
time.sleep(1)
try:
    cf.mouse_click(*cf.mouse_search_snip_return_coordinates_x_y(r'C:\Users\Sushil\AppData\Local\Temp\cf_log_0jbtwggm_generator\Images\Snips\4-ProgramManager-674_450.png',conf=0.7, wait=10),left_or_right='left', single_double_triple = 'single')
except:
    cf.mouse_click(674,450,left_or_right='left', single_double_triple = 'single')
time.sleep(0)
# Close the desktop ('Program Manager') window to end the replay.
cf.window_close_windows('Program Manager')
| 29.367925
| 244
| 0.743334
| 564
| 3,113
| 3.849291
| 0.163121
| 0.157531
| 0.11976
| 0.179641
| 0.876555
| 0.847075
| 0.847075
| 0.823584
| 0.731
| 0.719023
| 0
| 0.045392
| 0.058786
| 3,113
| 106
| 245
| 29.367925
| 0.695563
| 0.022486
| 0
| 0.78022
| 1
| 0.043956
| 0.193623
| 0.134122
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.021978
| 0
| 0.021978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.