hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acff91a2f7da7eb04626db59fe8cc4795bfd3ba8
| 2,135
|
bzl
|
Python
|
main/targets.bzl
|
FFFF01/v2ray-core
|
18f6c420ad319b7c67514575bdafde2f23ea7e79
|
[
"MIT"
] | 1
|
2021-09-26T14:20:35.000Z
|
2021-09-26T14:20:35.000Z
|
main/targets.bzl
|
lokifist/v2ray-core
|
4f7384c020994efc94977a865d0ff70e63dbc385
|
[
"MIT"
] | 51
|
2020-07-31T05:12:22.000Z
|
2022-03-22T15:02:21.000Z
|
main/targets.bzl
|
lokifist/v2ray-core
|
4f7384c020994efc94977a865d0ff70e63dbc385
|
[
"MIT"
] | 2
|
2019-11-21T08:48:40.000Z
|
2020-07-05T06:53:15.000Z
|
load("//infra/bazel:build.bzl", "foreign_go_binary")
load("//infra/bazel:gpg.bzl", "gpg_sign")
def gen_targets(matrix):
    """Declares v2ray build and GPG-signature targets for each (os, arch) pair.

    For every pair in `matrix`, a `foreign_go_binary` target plus a matching
    `gpg_sign` target is created. Extra variants are added for:
      * windows: a "nowindow" binary linked with -H windowsgui (no console)
      * mips/mipsle: a soft-float build
      * arm: one build per supported ARM ISA revision (armv7/armv6/armv5)
    """
    pkg = "v2ray.com/core/main"
    output = "v2ray"
    for (os, arch) in matrix:
        bin_name = "v2ray_" + os + "_" + arch
        foreign_go_binary(
            name = bin_name,
            pkg = pkg,
            output = output,
            os = os,
            arch = arch,
        )
        gpg_sign(
            name = bin_name + "_sig",
            base = ":" + bin_name,
        )
        if os in ["windows"]:
            # GUI-subsystem build that does not open a console window.
            bin_name = "v2ray_" + os + "_" + arch + "_nowindow"
            foreign_go_binary(
                name = bin_name,
                pkg = pkg,
                output = "w" + output,
                os = os,
                arch = arch,
                ld = "-H windowsgui",
            )
            gpg_sign(
                name = bin_name + "_sig",
                base = ":" + bin_name,
            )
        if arch in ["mips", "mipsle"]:
            # Soft-float variant for FPU-less MIPS devices.
            bin_name = "v2ray_" + os + "_" + arch + "_softfloat"
            foreign_go_binary(
                name = bin_name,
                pkg = pkg,
                output = output + "_softfloat",
                os = os,
                arch = arch,
                mips = "softfloat",
            )
            gpg_sign(
                name = bin_name + "_sig",
                base = ":" + bin_name,
            )
        if arch in ["arm"]:
            # One build per ARM revision; previously three copy-pasted
            # stanzas, now a single loop producing identical targets.
            for arm_version in ["7", "6", "5"]:
                bin_name = "v2ray_" + os + "_" + arch + "_armv" + arm_version
                foreign_go_binary(
                    name = bin_name,
                    pkg = pkg,
                    output = output + "_armv" + arm_version,
                    os = os,
                    arch = arch,
                    arm = arm_version,
                )
                gpg_sign(
                    name = bin_name + "_sig",
                    base = ":" + bin_name,
                )
| 21.565657
| 58
| 0.457143
|
acff91b63c8dac30f24bb1aa42a823a297ce65d8
| 1,151
|
py
|
Python
|
docmaster/interpreter/listener.py
|
iuricuneo/docmaster
|
a82470c19d2a0c2e5d9cb5960500f9c7e46b5105
|
[
"MIT"
] | null | null | null |
docmaster/interpreter/listener.py
|
iuricuneo/docmaster
|
a82470c19d2a0c2e5d9cb5960500f9c7e46b5105
|
[
"MIT"
] | 1
|
2019-04-22T11:35:08.000Z
|
2019-04-22T11:35:08.000Z
|
docmaster/interpreter/listener.py
|
iuricuneo/docmaster
|
a82470c19d2a0c2e5d9cb5960500f9c7e46b5105
|
[
"MIT"
] | null | null | null |
"""
Listener module.
Listener is a class responsible for receiving a full user command and
interpreting it however required..
It is a function and has a behaviour similar to the one of the speaker. To call,
use:
from listener import Listener
request = Listener.listen(command)
Options so far are:
v - verbose
f - force
---
Commands (in lib/interpretstrategies):
save
show
update
remove
"""
from typing import List
import interpretstrategies as strat
class Listener:
    """
    Class responsible for interpreting user commands.

    `listen` maps the command word (extracted by the interpreting strategy)
    to a concrete strategy class and returns the request that strategy
    builds from the full command.
    """

    @staticmethod
    def listen(command: List[str]):  # (...) -> Request:
        """To be called by the user interface, it will receive a command and
        handle it, returning a request to it.

        Raises:
            KeyError: if the interpreted command word matches no known
                strategy (the interpreter is expected to yield 'error'
                for unknown commands instead).
        """
        strategy = strat.InterpretingStrategy()
        pure_command = strategy.get_command(command)
        # Renamed from `map` so the builtin of that name is not shadowed.
        strategies = {
            'save': strat.CreateEntryStrategy,
            'show': strat.ReadEntryStrategy,
            'update': strat.UpdateEntryStrategy,
            'remove': strat.DeleteEntryStrategy,
            'error': strat.ErrorStrategy,
        }
        return strategies[pure_command]().get_request(command)
| 23.979167
| 83
| 0.679409
|
acff91d35f7f54fa2b094347945ee640d936d018
| 948
|
py
|
Python
|
pychron/hardware/lakeshore/model331.py
|
ASUPychron/pychron
|
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
|
[
"Apache-2.0"
] | 31
|
2016-03-07T02:38:17.000Z
|
2022-02-14T18:23:43.000Z
|
pychron/hardware/lakeshore/model331.py
|
ASUPychron/pychron
|
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
|
[
"Apache-2.0"
] | 1,626
|
2015-01-07T04:52:35.000Z
|
2022-03-25T19:15:59.000Z
|
pychron/hardware/lakeshore/model331.py
|
UIllinoisHALPychron/pychron
|
f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc
|
[
"Apache-2.0"
] | 26
|
2015-05-23T00:10:06.000Z
|
2022-03-07T16:51:57.000Z
|
# ===============================================================================
# Copyright 2019 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from pychron.hardware.lakeshore.base_controller import BaseLakeShoreController
class Model331TemperatureController(BaseLakeShoreController):
    """Lake Shore Model 331 temperature controller.

    All behaviour is inherited from BaseLakeShoreController; this model
    needs no overrides of its own.
    """
    pass
# ============= EOF =============================================
| 39.5
| 81
| 0.600211
|
acff92b1a390b5c858d19776dd0fad799c367c83
| 7,218
|
py
|
Python
|
conans/test/generators/cmake_find_package_test.py
|
datalogics-kam/conan
|
7bf230cd5f8ef68eb804908777ebaad75e951b16
|
[
"MIT"
] | null | null | null |
conans/test/generators/cmake_find_package_test.py
|
datalogics-kam/conan
|
7bf230cd5f8ef68eb804908777ebaad75e951b16
|
[
"MIT"
] | null | null | null |
conans/test/generators/cmake_find_package_test.py
|
datalogics-kam/conan
|
7bf230cd5f8ef68eb804908777ebaad75e951b16
|
[
"MIT"
] | null | null | null |
import unittest
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.test.utils.tools import TestClient
from nose.plugins.attrib import attr
@attr('slow')
class CMakeFindPathGeneratorTest(unittest.TestCase):
    """Integration tests for the ``cmake_find_package`` generator: the
    autogenerated ``Find<pkg>.cmake`` modules must expose libraries, flags,
    targets and transitive dependencies correctly."""

    def cmake_find_package_system_libs_test(self):
        # A library declared in package_info() but not packaged must be
        # treated as a system library and still be linked and propagated
        # together with the declared compile/link flags.
        conanfile = """from conans import ConanFile, tools
class Test(ConanFile):
    name = "Test"
    version = "0.1"
    def package_info(self):
        self.cpp_info.libs.append("fake_lib")
        self.cpp_info.cflags.append("a_flag")
        self.cpp_info.cppflags.append("a_cxx_flag")
        self.cpp_info.sharedlinkflags.append("shared_link_flag")
"""
        client = TestClient()
        client.save({"conanfile.py": conanfile})
        client.run("export . user/channel")
        # Consumer only configures CMake; no real compilation is needed.
        conanfile = """from conans import ConanFile, tools, CMake
class Consumer(ConanFile):
    name = "consumer"
    version = "0.1"
    requires = "Test/0.1@user/channel"
    generators = "cmake_find_package"
    exports_sources = "CMakeLists.txt"
    settings = "os", "arch", "compiler"
    def build(self):
        cmake = CMake(self)
        cmake.configure()
"""
        cmakelists = """
project(consumer)
cmake_minimum_required(VERSION 3.1)
find_package(Test)
message("Libraries to Link: ${Test_LIBS}")
get_target_property(tmp Test::Test INTERFACE_LINK_LIBRARIES)
message("Target libs: ${tmp}")
get_target_property(tmp Test::Test INTERFACE_COMPILE_OPTIONS)
message("Compile options: ${tmp}")
"""
        client.save({"conanfile.py": conanfile, "CMakeLists.txt": cmakelists})
        client.run("create . user/channel --build missing")
        self.assertIn("Library fake_lib not found in package, might be system one", client.out)
        self.assertIn("Libraries to Link: fake_lib", client.out)
        self.assertIn("Target libs: fake_lib;shared_link_flag", client.out)
        self.assertIn("Compile options: a_cxx_flag;a_flag", client.out)

    def cmake_lock_target_redefinition_test(self):
        # If the consumer already defined a target with the same name, the
        # generated Find module must skip it instead of failing on a
        # duplicate add_library().
        client = TestClient()
        files = cpp_hello_conan_files(name="Hello0",
                                      settings='"os", "compiler", "arch", "build_type"')
        client.save(files)
        client.run("create . user/channel -s build_type=Release")
        # Consume the previous Hello0 with auto generated FindHello0.cmake
        # The module path will point to the "install" folder automatically (CMake helper)
        files = cpp_hello_conan_files(name="Hello1", deps=["Hello0/0.1@user/channel"],
                                      settings='"os", "compiler", "arch", "build_type"')
        files["conanfile.py"] = files["conanfile.py"].replace(
            'generators = "cmake", "gcc"',
            'generators = "cmake_find_package"')
        files["CMakeLists.txt"] = """
set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_CXX_ABI_COMPILED 1)
project(MyHello CXX)
cmake_minimum_required(VERSION 2.8)
# Create a target first to check if it is not redefined
add_library(CONAN_LIB::Hello0_helloHello0)
find_package(Hello0 REQUIRED)
"""
        client.save(files, clean_first=True)
        client.run("create . user/channel -s build_type=Release", ignore_error=True)
        self.assertIn("Skipping already existing target: CONAN_LIB::Hello0_helloHello0", client.out)

    def cmake_find_package_test(self):
        """First package without custom find_package"""
        client = TestClient()
        files = cpp_hello_conan_files(name="Hello0",
                                      settings='"os", "compiler", "arch", "build_type"')
        client.save(files)
        client.run("create . user/channel -s build_type=Release")
        # Consume the previous Hello0 with auto generated FindHello0.cmake
        # The module path will point to the "install" folder automatically (CMake helper)
        files = cpp_hello_conan_files(name="Hello1", deps=["Hello0/0.1@user/channel"],
                                      settings='"os", "compiler", "arch", "build_type"')
        files["conanfile.py"] = files["conanfile.py"].replace(
            'generators = "cmake", "gcc"',
            'generators = "cmake_find_package"')
        files["CMakeLists.txt"] = """
set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_CXX_ABI_COMPILED 1)
project(MyHello CXX)
cmake_minimum_required(VERSION 2.8)
find_package(Hello0 REQUIRED)
add_library(helloHello1 hello.cpp)
target_link_libraries(helloHello1 PUBLIC Hello0::Hello0)
if(Hello0_LIBRARIES)
    MESSAGE("Hello0_LIBRARIES set")
endif()
add_executable(say_hello main.cpp)
target_link_libraries(say_hello helloHello1)
"""
        client.save(files, clean_first=True)
        client.run("create . user/channel -s build_type=Release")
        self.assertIn("Conan: Using autogenerated FindHello0.cmake", client.out)
        self.assertIn("Hello0_LIBRARIES set", client.out)
        self.assertNotIn("Skipping already existing target", client.out)
        # Now link with old cmake
        # Old-style (pre-target) consumption: the module must also declare
        # the classic Hello0_FOUND / _INCLUDE_DIRS / _LIBRARIES variables.
        files["CMakeLists.txt"] = """
set(CMAKE_VERSION "2.8")
set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_CXX_ABI_COMPILED 1)
project(MyHello CXX)
cmake_minimum_required(VERSION 2.8)
message(${CMAKE_BINARY_DIR})
set(CMAKE_MODULE_PATH ${CMAKE_BINARY_DIR} ${CMAKE_MODULE_PATH})
find_package(Hello0 REQUIRED)
add_library(helloHello1 hello.cpp)
if(NOT DEFINED Hello0_FOUND)
    message(FATAL_ERROR "Hello0_FOUND not declared")
endif()
if(NOT DEFINED Hello0_INCLUDE_DIRS)
    message(FATAL_ERROR "Hello0_INCLUDE_DIRS not declared")
endif()
if(NOT DEFINED Hello0_INCLUDES)
    message(FATAL_ERROR "Hello0_INCLUDES not declared")
endif()
if(NOT DEFINED Hello0_LIBRARIES)
    message(FATAL_ERROR "Hello0_LIBRARIES not declared")
endif()
include_directories(${Hello0_INCLUDE_DIRS})
target_link_libraries(helloHello1 PUBLIC ${Hello0_LIBS})
add_executable(say_hello main.cpp)
target_link_libraries(say_hello helloHello1)
"""
        client.save(files, clean_first=True)
        client.run("create . user/channel -s build_type=Release")
        self.assertIn("Conan: Using autogenerated FindHello0.cmake", client.out)
        # Now a transitive consumer, but the consumer only find_package the first level Hello1
        files = cpp_hello_conan_files(name="Hello2", deps=["Hello1/0.1@user/channel"],
                                      settings='"os", "compiler", "arch", "build_type"')
        files["CMakeLists.txt"] = """
set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_CXX_ABI_COMPILED 1)
project(MyHello CXX)
cmake_minimum_required(VERSION 2.8)
set(CMAKE_MODULE_PATH ${CMAKE_BINARY_DIR} ${CMAKE_MODULE_PATH})
find_package(Hello1 REQUIRED) # We don't need to find Hello0, it is transitive
add_library(helloHello2 hello.cpp)
target_link_libraries(helloHello2 PUBLIC Hello1::Hello1)
add_executable(say_hello main.cpp)
target_link_libraries(say_hello helloHello2)
"""
        files["conanfile.py"] = files["conanfile.py"].replace(
            'generators = "cmake", "gcc"',
            'generators = "cmake_find_package"')
        client.save(files, clean_first=True)
        client.run("create . user/channel -s build_type=Release")
        self.assertIn("Conan: Using autogenerated FindHello0.cmake", client.out)
        self.assertIn("Conan: Using autogenerated FindHello1.cmake", client.out)
| 38.59893
| 100
| 0.694652
|
acff93c03cc44cbfc33fcc5f5503075ac49a146c
| 946
|
py
|
Python
|
pype/plugins/maya/create/create_camera.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
pype/plugins/maya/create/create_camera.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
pype/plugins/maya/create/create_camera.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
import avalon.maya
from pype.hosts.maya import lib
class CreateCamera(avalon.maya.Creator):
    """Single baked camera"""

    name = "cameraMain"
    label = "Camera"
    family = "camera"
    icon = "video-camera"
    defaults = ['Main']

    def __init__(self, *args, **kwargs):
        super(CreateCamera, self).__init__(*args, **kwargs)
        # Pull the basic animation data (start / end / handles / steps)
        # straight into the instance data.
        self.data.update(lib.collect_animation_data())
        # Bake to world space by default, when this is False it will also
        # include the parent hierarchy in the baked results
        self.data['bakeToWorldSpace'] = True
class CreateCameraRig(avalon.maya.Creator):
    """Complex hierarchy with camera."""

    # Creator metadata: defaults provides the suggested subset name(s).
    name = "camerarigMain"
    label = "Camera Rig"
    family = "camerarig"
    icon = "video-camera"
    defaults = ['Main']
| 27.028571
| 73
| 0.639535
|
acff960e14d22271bf067c32cbf7f25faa035c85
| 78
|
py
|
Python
|
tritam_automatic/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
tritam_automatic/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
tritam_automatic/__init__.py
|
kenysmile/test_facebook
|
844a3ddd53abd319c0115de86909118a37106c67
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from . import models
from . import config_auto_share
| 15.6
| 31
| 0.679487
|
acff96e70f35c6255f933f7382d135ad0e57f35a
| 3,174
|
py
|
Python
|
mmpose/datasets/datasets/bottom_up/bottom_up_crowdpose.py
|
nightfuryyy/mmpose
|
910d9e31dd9d46e3329be1b7567e6309d70ab64c
|
[
"Apache-2.0"
] | 1,775
|
2020-07-10T01:20:01.000Z
|
2022-03-31T16:31:50.000Z
|
mmpose/datasets/datasets/bottom_up/bottom_up_crowdpose.py
|
KHB1698/mmpose
|
93c3a742c540dfb4ca515ad545cef705a07d90b4
|
[
"Apache-2.0"
] | 1,021
|
2020-07-11T11:40:24.000Z
|
2022-03-31T14:32:26.000Z
|
mmpose/datasets/datasets/bottom_up/bottom_up_crowdpose.py
|
KHB1698/mmpose
|
93c3a742c540dfb4ca515ad545cef705a07d90b4
|
[
"Apache-2.0"
] | 477
|
2020-07-11T11:27:51.000Z
|
2022-03-31T09:42:25.000Z
|
# Copyright (c) OpenMMLab. All rights reserved.
import warnings
import json_tricks as json
from mmcv import Config
from xtcocotools.cocoeval import COCOeval
from mmpose.datasets.builder import DATASETS
from .bottom_up_coco import BottomUpCocoDataset
@DATASETS.register_module()
class BottomUpCrowdPoseDataset(BottomUpCocoDataset):
    """CrowdPose dataset for bottom-up pose estimation.

    The dataset loads raw features and apply specified transforms
    to return a dict containing the image tensors and other information.

    CrowdPose keypoint indexes::

        0: 'left_shoulder',
        1: 'right_shoulder',
        2: 'left_elbow',
        3: 'right_elbow',
        4: 'left_wrist',
        5: 'right_wrist',
        6: 'left_hip',
        7: 'right_hip',
        8: 'left_knee',
        9: 'right_knee',
        10: 'left_ankle',
        11: 'right_ankle',
        12: 'top_head',
        13: 'neck'

    Args:
        ann_file (str): Path to the annotation file.
        img_prefix (str): Path to a directory where images are held.
            Default: None.
        data_cfg (dict): config
        pipeline (list[dict | callable]): A sequence of data transforms.
        dataset_info (DatasetInfo): A class containing all dataset info.
        test_mode (bool): Store True when building test or
            validation dataset. Default: False.
    """

    def __init__(self,
                 ann_file,
                 img_prefix,
                 data_cfg,
                 pipeline,
                 dataset_info=None,
                 test_mode=False):
        if dataset_info is None:
            # Backward compatibility: load the bundled crowdpose config
            # when the caller does not pass dataset_info explicitly.
            warnings.warn(
                'dataset_info is missing. '
                'Check https://github.com/open-mmlab/mmpose/pull/663 '
                'for details.', DeprecationWarning)
            cfg = Config.fromfile('configs/_base_/datasets/crowdpose.py')
            dataset_info = cfg._cfg_dict['dataset_info']

        # NOTE: super() is anchored at BottomUpCocoDataset on purpose, so
        # BottomUpCocoDataset.__init__ itself is skipped and the next class
        # in the MRO is initialized directly.
        super(BottomUpCocoDataset, self).__init__(
            ann_file,
            img_prefix,
            data_cfg,
            pipeline,
            dataset_info=dataset_info,
            test_mode=test_mode)

        self.ann_info['use_different_joint_weights'] = False
        print(f'=> num_images: {self.num_images}')

    def _do_python_keypoint_eval(self, res_file):
        """Keypoint evaluation using COCOAPI.

        Args:
            res_file (str): Path to a JSON results file.

        Returns:
            list[tuple]: (stat_name, value) pairs; all zeros when the
            result file contains no detections.
        """
        stats_names = [
            'AP', 'AP .5', 'AP .75', 'AR', 'AR .5', 'AR .75', 'AP(E)', 'AP(M)',
            'AP(H)'
        ]

        with open(res_file, 'r') as file:
            res_json = json.load(file)
            if not res_json:
                # Empty result set: report zero for every statistic.
                info_str = list(zip(stats_names, [
                    0,
                ] * len(stats_names)))
                return info_str

        coco_det = self.coco.loadRes(res_file)
        # 'keypoints_crowd' enables CrowdPose-specific metrics (AP(E/M/H));
        # use_area=False because CrowdPose annotations lack area.
        coco_eval = COCOeval(
            self.coco,
            coco_det,
            'keypoints_crowd',
            self.sigmas,
            use_area=False)
        coco_eval.params.useSegm = None
        coco_eval.evaluate()
        coco_eval.accumulate()
        coco_eval.summarize()

        info_str = list(zip(stats_names, coco_eval.stats))

        return info_str
| 30.228571
| 79
| 0.574984
|
acff97a2269f42379fd0477e3dab40f54b601016
| 229
|
py
|
Python
|
mindhome_alpha/erpnext/projects/doctype/timesheet/timesheet_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
mindhome_alpha/erpnext/projects/doctype/timesheet/timesheet_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | null | null | null |
mindhome_alpha/erpnext/projects/doctype/timesheet/timesheet_dashboard.py
|
Mindhome/field_service
|
3aea428815147903eb9af1d0c1b4b9fc7faed057
|
[
"MIT"
] | 1
|
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
from __future__ import unicode_literals
from frappe import _
def get_data():
    """Dashboard configuration for the Timesheet doctype.

    Links documents that reference a timesheet through their
    ``time_sheet`` field.
    """
    references = {
        'label': _('References'),
        'items': ['Sales Invoice', 'Salary Slip'],
    }
    return {
        'fieldname': 'time_sheet',
        'transactions': [references],
    }
| 17.615385
| 45
| 0.628821
|
acff9a86b3a2bce7095d5a1e33d8573a1a841809
| 1,070
|
py
|
Python
|
server/realms/migrations/0007_auto_20220329_0639.py
|
synthetic-intelligence/zentral
|
774104cea90b7f3d6f2aac655859c1b1f034f8dd
|
[
"Apache-2.0"
] | null | null | null |
server/realms/migrations/0007_auto_20220329_0639.py
|
synthetic-intelligence/zentral
|
774104cea90b7f3d6f2aac655859c1b1f034f8dd
|
[
"Apache-2.0"
] | null | null | null |
server/realms/migrations/0007_auto_20220329_0639.py
|
synthetic-intelligence/zentral
|
774104cea90b7f3d6f2aac655859c1b1f034f8dd
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.2.12 on 2022-03-29 06:39
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: converts five realm fields to
    # models.JSONField (Django 3.1+ native JSON support).

    dependencies = [
        ('realms', '0006_realmgroupmapping'),
    ]

    operations = [
        migrations.AlterField(
            model_name='realm',
            name='config',
            field=models.JSONField(default=dict, editable=False),
        ),
        migrations.AlterField(
            model_name='realmauthenticationsession',
            name='backend_state',
            field=models.JSONField(null=True),
        ),
        migrations.AlterField(
            model_name='realmauthenticationsession',
            name='callback_kwargs',
            field=models.JSONField(default=dict),
        ),
        migrations.AlterField(
            model_name='realmuser',
            name='claims',
            field=models.JSONField(default=dict),
        ),
        migrations.AlterField(
            model_name='realmuser',
            name='password_hash',
            field=models.JSONField(null=True),
        ),
    ]
| 27.435897
| 65
| 0.571963
|
acff9a8d3056e0d939626ea632a21a9b10178cc5
| 14
|
py
|
Python
|
requirements.py
|
nikhil-rupanawar/aws-practice
|
e3ef96b5394daee0706463fada05521f371e1b27
|
[
"MIT"
] | null | null | null |
requirements.py
|
nikhil-rupanawar/aws-practice
|
e3ef96b5394daee0706463fada05521f371e1b27
|
[
"MIT"
] | null | null | null |
requirements.py
|
nikhil-rupanawar/aws-practice
|
e3ef96b5394daee0706463fada05521f371e1b27
|
[
"MIT"
] | null | null | null |
awscli
boto3
| 4.666667
| 6
| 0.785714
|
acff9aba90bca82ceeddf81fdb83ba65d554c812
| 10,459
|
py
|
Python
|
users/views.py
|
ananthan-123/react-django
|
5305649f1d39e1f5b62eb17a4f40f67fe7cfd14b
|
[
"MIT"
] | null | null | null |
users/views.py
|
ananthan-123/react-django
|
5305649f1d39e1f5b62eb17a4f40f67fe7cfd14b
|
[
"MIT"
] | null | null | null |
users/views.py
|
ananthan-123/react-django
|
5305649f1d39e1f5b62eb17a4f40f67fe7cfd14b
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from rest_framework import viewsets, generics, permissions
from rest_framework.response import Response
from knox.models import AuthToken
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import password_validators_help_texts
import urllib.request
from .serializers import DoctorSerializer, HealthOfficialSerializer, PatientListSerializer, PatientSerializer, \
RegisterSerializer, RegisterUserInfoSerializer, UserInfoSerializer, UserPatientSerializer, UserSerializer, \
LoginSerializer, AdminUserSerializer,PatientNewSerializer
from .models import Doctor, HealthOfficial, Patient, PatientList, UserInfo, Admin_Users
# Create your views here.
class PatientCreateViewSet(generics.ListCreateAPIView):
    """List all patients or create a new one."""
    queryset = Patient.objects.all()
    serializer_class = PatientSerializer


class PatientListCreateViewSet(generics.ListCreateAPIView):
    """List or create PatientList entries (doctor/patient associations)."""
    queryset = PatientList.objects.all()
    serializer_class = PatientListSerializer


# admin create view set
class AdminCreateViewSet(generics.ListCreateAPIView):
    """List all admin users or create a new one."""
    queryset = Admin_Users.objects.all()
    serializer_class = AdminUserSerializer
# Admin Update view set
class AdminUpdateViewSet(generics.UpdateAPIView):
# permission_classes = (permissions.IsAuthenticated,)
# queryset = Patient.objects.all()
serializer_class = AdminUserSerializer
def get_object(self):
# print("---------------------------------------------------------------")
# print(self.request.data)
return Admin_Users.objects.get(user_info=self.request.data.get('user_info'))
class UserInfoUpdateView(generics.UpdateAPIView):
permission_classes = (permissions.IsAuthenticated,)
serializer_class = RegisterUserInfoSerializer
def get_object(self):
return UserInfo.objects.get(user=self.request.user)
class PatientUpdateViewSet(generics.UpdateAPIView):
    """Update a Patient, looked up by the ``user_info`` value sent in the
    request body (not by URL pk)."""
    # permission_classes = (permissions.IsAuthenticated,)
    # queryset = Patient.objects.all()
    serializer_class = PatientSerializer

    def get_object(self):
        # NOTE(review): lookup key comes from client-supplied data and
        # authentication is disabled above -- confirm intended.
        return Patient.objects.get(user_info=self.request.data.get('user_info'))


class PatientViewSet(generics.UpdateAPIView):
    """Duplicate of PatientUpdateViewSet under another name.

    NOTE(review): this class is immediately shadowed by the ModelViewSet
    of the same name below -- only the second definition is importable.
    Consider deleting one of them or renaming.
    """
    # permission_classes = (permissions.IsAuthenticated,)
    # queryset = Patient.objects.all()
    serializer_class = PatientSerializer

    def get_object(self):
        return Patient.objects.get(user_info=self.request.data.get('user_info'))


class PatientViewSet(viewsets.ModelViewSet):
    """Full CRUD for Patient (this definition shadows the one above)."""
    serializer_class = PatientNewSerializer
    queryset = Patient.objects.all()
class DoctorListCreateViewSet(generics.ListCreateAPIView):
    """List all doctors or create a new one."""
    queryset = Doctor.objects.all()
    serializer_class = DoctorSerializer


# HealthOfficial
class HealthOfficialListCreateViewSet(generics.ListCreateAPIView):
    """List all health officials or create a new one."""
    queryset = HealthOfficial.objects.all()
    serializer_class = HealthOfficialSerializer


class RegisterUserInfoListViewSet(generics.ListCreateAPIView):
    """List UserInfo rows, or create one and return its serialized form."""
    queryset = UserInfo.objects.all()
    serializer_class = RegisterUserInfoSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user_info = serializer.save()
        return Response({
            # saves user_info and its data
            "user_info": UserInfoSerializer(user_info, context=self.get_serializer_context()).data,
        })
# Register API
class RegisterAPI(generics.GenericAPIView):
queryset = User.objects.all()
serializer_class = RegisterSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.save()
return Response({
# saves user and its data
"user": UserSerializer(user, context=self.get_serializer_context()).data,
# creates token for that particular user
"token": AuthToken.objects.create(user)[1],
"passwordValidators": password_validators_help_texts(password_validators=None)
})
# Login API
class LoginAPI(generics.GenericAPIView):
serializer_class = LoginSerializer
permission_classes = ()
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data
return Response({
# saves user and its data
"user": UserSerializer(user, context=self.get_serializer_context()).data,
# creates token for that particular user
"token": AuthToken.objects.create(user)[1]
})
class UserByIdAPI(generics.ListAPIView):
    """Get a specific User from the ``id`` URL kwarg."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
    lookup_url_kwarg = "id"

    def get_queryset(self):
        id = self.kwargs.get(self.lookup_url_kwarg)
        user = User.objects.filter(id=id)
        return user


class UserInfoByUserIdAPI(generics.ListAPIView):
    """Get a specific user's info from the user_id
    """
    queryset = UserInfo.objects.all()
    serializer_class = UserInfoSerializer
    lookup_url_kwarg = "user_id"

    def get_queryset(self):
        user_id = self.kwargs.get(self.lookup_url_kwarg)
        user_info = UserInfo.objects.filter(user_id=user_id)
        return user_info


class UserInfoByIdAPI(generics.ListAPIView):
    """Get a specific UserInfo from its own ``id`` (not the user_id)."""
    queryset = UserInfo.objects.all()
    serializer_class = UserInfoSerializer
    lookup_url_kwarg = "id"

    def get_queryset(self):
        id = self.kwargs.get(self.lookup_url_kwarg)
        user_info = UserInfo.objects.filter(id=id)
        return user_info
class PatientByUserInfoIdAPI(generics.ListAPIView):
    """Get a specific patient from the user_info_id
    """
    queryset = Patient.objects.all()
    serializer_class = PatientSerializer
    lookup_url_kwarg = "user_info_id"

    def get_queryset(self):
        user_info_id = self.kwargs.get(self.lookup_url_kwarg)
        patient = Patient.objects.filter(user_info_id=user_info_id)
        return patient


class PatientByIdAPI(generics.ListAPIView):
    """Get a specific Patient from its own ``id`` URL kwarg."""
    queryset = Patient.objects.all()
    serializer_class = PatientSerializer
    lookup_url_kwarg = "id"

    def get_queryset(self):
        id = self.kwargs.get(self.lookup_url_kwarg)
        patient = Patient.objects.filter(id=id)
        return patient


class DoctorByUserInfoIdAPI(generics.ListAPIView):
    """Get a specific doctor from the user_info_id
    """
    queryset = Doctor.objects.all()
    serializer_class = DoctorSerializer
    lookup_url_kwarg = "user_info_id"

    def get_queryset(self):
        user_info_id = self.kwargs.get(self.lookup_url_kwarg)
        doctor = Doctor.objects.filter(user_info_id=user_info_id)
        return doctor
# HealthOfficial
class HealthOfficialByUserInfoIdAPI(generics.ListAPIView):
    """Get a specific health official from the user_info_id
    """
    queryset = HealthOfficial.objects.all()
    serializer_class = HealthOfficialSerializer
    lookup_url_kwarg = "user_info_id"

    def get_queryset(self):
        user_info_id = self.kwargs.get(self.lookup_url_kwarg)
        healthofficial = HealthOfficial.objects.filter(user_info_id=user_info_id)
        return healthofficial


# Admin
class AdminByUserInfoIdAPI(generics.ListAPIView):
    """Get a specific admin user from the user_info_id."""
    queryset = Admin_Users.objects.all()
    serializer_class = AdminUserSerializer
    lookup_url_kwarg = "user_info_id"

    def get_queryset(self):
        user_info_id = self.kwargs.get(self.lookup_url_kwarg)
        admin = Admin_Users.objects.filter(user_info_id=user_info_id)
        return admin


class DoctorByIdAPI(generics.ListAPIView):
    """Get a specific Doctor from its own ``id`` URL kwarg."""
    queryset = Doctor.objects.all()
    serializer_class = DoctorSerializer
    lookup_url_kwarg = "id"

    def get_queryset(self):
        id = self.kwargs.get(self.lookup_url_kwarg)
        doctor = Doctor.objects.filter(id=id)
        return doctor
# HealthOfficial
class HealthOfficialByIdAPI(generics.ListAPIView):
    """Get a specific HealthOfficial from its own ``id`` URL kwarg."""
    queryset = HealthOfficial.objects.all()
    serializer_class = HealthOfficialSerializer
    lookup_url_kwarg = "id"

    def get_queryset(self):
        id = self.kwargs.get(self.lookup_url_kwarg)
        healthofficial = HealthOfficial.objects.filter(id=id)
        return healthofficial


class PatientListByDoctorIdAPI(generics.ListAPIView):
    """Get the patient list for a specific doctor
    """
    queryset = PatientList.objects.all()
    serializer_class = PatientListSerializer
    lookup_url_kwarg = "doctor_id"

    def get_queryset(self):
        doctor_id = self.kwargs.get(self.lookup_url_kwarg)
        patient_list = PatientList.objects.filter(doctor_id=doctor_id)
        return patient_list


class PatientListByPatientIdAPI(generics.ListAPIView):
    """Get the patient-list entries for a specific patient."""
    queryset = PatientList.objects.all()
    serializer_class = PatientListSerializer
    lookup_url_kwarg = "patient_id"

    def get_queryset(self):
        patient_id = self.kwargs.get(self.lookup_url_kwarg)
        patient_list = PatientList.objects.filter(patient_id=patient_id)
        return patient_list
# class UserAPI(viewsets.ModelViewSet):
# queryset = User.objects.all()
# serializer_class = UserSerializer
# class PatientUpdateView(generics.UpdateAPIView):
# permission_classes = (permissions.IsAuthenticated,)
# serializer_class = UserPatientSerializer
# def get_object(self):
# return Patient.objects.get(user=self.request.user)
| 31.598187
| 112
| 0.702075
|
acff9b7a16eba580e9359b6a656c6f82b9b447a6
| 830
|
py
|
Python
|
Programas_Capitulo_05/Cap05_pagina_125.py
|
rojassergio/Aprendiendo-a-programar-en-Python-con-mi-computador
|
ee6fe3a7182033633a45c40e2bb8b912652bab9d
|
[
"MIT"
] | 17
|
2016-04-15T16:44:00.000Z
|
2022-01-27T23:47:30.000Z
|
Programas_Capitulo_05/Cap05_pagina_125.py
|
rojassergio/Aprendiendo-a-programar-en-Python-con-mi-computador
|
ee6fe3a7182033633a45c40e2bb8b912652bab9d
|
[
"MIT"
] | null | null | null |
Programas_Capitulo_05/Cap05_pagina_125.py
|
rojassergio/Aprendiendo-a-programar-en-Python-con-mi-computador
|
ee6fe3a7182033633a45c40e2bb8b912652bab9d
|
[
"MIT"
] | 10
|
2016-04-28T08:07:08.000Z
|
2020-04-12T23:05:05.000Z
|
'''
@author: Sergio Rojas
@contact: rr.sergio@gmail.com
--------------------------
Content released under the
Atribucion-NoComercial-CompartirIgual 3.0 Venezuela (CC BY-NC-SA 3.0 VE)
http://creativecommons.org/licenses/by-nc-sa/3.0/ve/

Created on April 21, 2016
'''
# Demonstrates pairing two parallel lists, first by index, then with zip().
#In [1]:
x = [12.3, 4.82, 2.3, 6.7]; y = [45.6, 23.12, 7.21, 1.2]
#In [2]:
elementos = range(len(x))
#In [3]:
# NOTE(review): in Python 3 this prints "range(0, 4)", not a list --
# the examples appear to target Python 2.
print(elementos)
#In [4]:
print(len(x) == len(y) == len(elementos))
#In [5]:
milista = []
#In [6]:
# Pair x[i] with y[i] by index.
for i in elementos:
    milista.append([ x[i],y[i] ])
    print('x = {0:4.2f} ; y = {1:4.2f}'.format(x[i],y[i]))
#In [7]:
print(milista)
#In [8]:
# NOTE(review): in Python 3 this prints a zip object, not the pairs.
print(zip(x,y))
#In [9]:
# Same pairing, now letting zip() do the index bookkeeping.
milista = []
for i in zip(x,y):
    milista.append(i)
    print('x = {0:4.2f} ; y = {1:4.2f}'.format(i[0],i[1]))
#In [11]:
print(milista)
#In [12]:
| 16.6
| 73
| 0.531325
|
acff9b9319750393f36034a5ae0ad672ab989b1e
| 358
|
py
|
Python
|
tools/unpacker/upx/bugs/unpacking-succeeded/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2017-12-14T14:25:17.000Z
|
2019-03-09T03:29:12.000Z
|
tools/unpacker/upx/bugs/unpacking-succeeded/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 10
|
2019-06-14T09:12:55.000Z
|
2021-10-01T12:15:43.000Z
|
tools/unpacker/upx/bugs/unpacking-succeeded/test.py
|
xbabka01/retdec-regression-tests
|
1ac40cca5165740364e6f7fb72b20820eac9bc7c
|
[
"MIT"
] | 8
|
2019-05-10T14:59:48.000Z
|
2022-03-07T16:34:23.000Z
|
from regression_tests import *
class Test(Test):
    """Regression test: unpacking each UPX sample in inputs/ must succeed
    and the unpacked binary must not expose an import table."""
    # Framework configuration: run the unpacker over every file in inputs/
    # and additionally run fileinfo on each result.
    settings=TestSettings(
        tool='unpacker',
        input=files_in_dir('inputs'),
        run_fileinfo=True
    )

    def test_unpacker_succeeded_no_imports(self):
        """Both tools succeed; fileinfo output contains no importTable."""
        assert self.unpacker.succeeded
        assert self.fileinfo.succeeded
        assert "importTable" not in self.fileinfo.output
| 25.571429
| 56
| 0.684358
|
acff9d727222b665d468abb2a30637673d54ce4a
| 21,847
|
py
|
Python
|
app/views.py
|
JanMrlth/fkkev
|
407e4d8c809b4043da737315c3a9ebcfde31a61b
|
[
"Apache-2.0"
] | null | null | null |
app/views.py
|
JanMrlth/fkkev
|
407e4d8c809b4043da737315c3a9ebcfde31a61b
|
[
"Apache-2.0"
] | null | null | null |
app/views.py
|
JanMrlth/fkkev
|
407e4d8c809b4043da737315c3a9ebcfde31a61b
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
import base64
import logging
import random
import string
import urllib
from Crypto.Cipher import AES
from functools import wraps
from logging import Formatter, FileHandler
from flask import render_template, request, redirect, url_for, flash, g
from flask_login import login_user, login_required, current_user, logout_user
from flask_mail import Message
from schwifty import IBAN
from sqlalchemy import exc
from app import app, db, mail, bcrypt, login_manager
from app.forms import *
from config import BLOCK_SIZE, PADDING, secret_key, ADMINS
# AES helpers: pad() extends plaintext to a BLOCK_SIZE multiple with the
# PADDING character; Encode/DecodeAES wrap encrypt/decrypt with base64.
# (Rewritten from lambda assignments to ``def`` per PEP 8 / E731.)
def pad(s):
    """Right-pad *s* with PADDING up to the next BLOCK_SIZE boundary."""
    return s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING

def EncodeAES(c, s):
    """Encrypt *s* with cipher *c*; return the base64-encoded ciphertext."""
    return base64.b64encode(c.encrypt(pad(s)))

def DecodeAES(c, e):
    """Base64-decode *e*, decrypt with cipher *c*, strip the padding."""
    return c.decrypt(base64.b64decode(e)).rstrip(PADDING)

# NOTE(review): AES.new() with only a key defaults to ECB mode, which leaks
# plaintext patterns -- consider an authenticated mode (e.g. GCM/EAX).
cipher = AES.new(secret_key)
# ----------------------------------------------------------------------------#
# Controllers.
# ----------------------------------------------------------------------------#
from app.models import User, Bankdetails, Confirmation, Forgotpassword
# Placeholder list of membership-type labels (only an empty entry so far).
memberType = ['']
# HTML line break used when assembling the e-mail bodies below.
endl = '<br>'
# Alphanumeric alphabet for generated passwords and confirmation tokens.
chars = (string.ascii_letters + string.digits)
# Custom Functions
def sendAcceptancemail(user_id, selected=True):
    """Notify a user of acceptance (selected=True) or rejection.

    Not implemented yet -- currently just redirects to the index page;
    the call sites below keep their invocations commented out.
    """
    # TODO: Yet to do
    return redirect(url_for('index'))
def is_image_url(url):
    """Return True if *url* ends in a known image extension and answers HTTP 200.

    Non-image extensions are rejected without any network round-trip.
    """
    # Case-insensitive extension check (the original matched lowercase only).
    if not url.lower().endswith(('.jpg', '.png', '.jpeg')):
        return False
    # Bug fix: the original called ``urllib2.urlopen`` but this module only
    # imports ``urllib`` (Python 2 API), so every call raised NameError.
    # NOTE(review): on Python 3 this would need urllib.request.urlopen --
    # TODO confirm the target interpreter.
    return urllib.urlopen(url).getcode() == 200
def is_admin(g):
    """Return True when *g* is a logged-in user with the admin flag set.

    Flashes an error and returns False for a non-admin user; returns
    False silently when no user object is given.
    """
    if g is None:
        return False
    if not g.admin:
        flash('Please Login with a Admin ID!', 'error')
        return False
    return True
def is_verified(g):
    """Return True unless *g* is an authenticated account that is neither
    confirmed nor an admin; in that case flash an error and return False."""
    needs_check = bool(g) and g.is_authenticated
    if needs_check and not (g.confirmed or g.admin):
        flash('Please verify your account from your emailed Link!', 'error')
        return False
    return True
@login_manager.user_loader
def get_user(ident):
    """Flask-Login user loader: fetch a User by its (stringified) primary key."""
    return User.query.get(int(ident))
@app.route('/', methods=['GET'])
def index():
    """Landing page: show the login form, or jump straight to the profile
    when the visitor already holds an authenticated session."""
    user = current_user
    # ``authenticated`` is the persistent DB flag set on login/logout below.
    if user.is_authenticated and user.authenticated:
        return redirect(url_for('profile'))
    form = LoginForm()
    return render_template('forms/login.html', form=form)
@app.route('/profile',methods=['GET'])
@login_required
def profile():
    """Show the logged-in user's profile page.

    Bug fix: ``@login_required`` must sit *below* ``@app.route`` -- the
    original order registered the unwrapped view, so the guard never ran.
    """
    if not is_verified(current_user):
        return redirect(url_for('logout'))
    if current_user.is_authenticated:
        return render_template('pages/show-user.html',user=current_user)
    else:
        flash('Please Login again!','warning')
        return redirect(url_for('index'))
@app.route('/editprofile',methods=['GET','POST'])
@login_required
def update_profile():
    """Show and process the profile edit form for the logged-in user.

    Bug fix: ``@login_required`` moved below ``@app.route`` so the guard is
    actually applied to the registered view function.
    """
    if current_user.is_authenticated and is_verified(current_user):
        form = UpdateProfile()
        if form.validate_on_submit():
            user = current_user
            # Only overwrite a field when the form actually supplied a value.
            user.firstname = form.firstname.data if form.firstname.data else user.firstname
            user.lastname = form.lastname.data if form.lastname.data else user.lastname
            user.bday = form.bday.data if form.bday.data else user.bday
            user.road = form.road.data if form.road.data else user.road
            user.postcode = form.postcode.data if form.postcode.data else user.postcode
            user.town = form.town.data if form.town.data else user.town
            user.company = form.company.data if form.company.data else user.company
            if form.phone.data:
                user.phone = form.phone.data
            if form.mobile.data:
                user.mobile = form.mobile.data
            if form.image_url.data:
                if is_image_url(form.image_url.data):
                    user.image_url = form.image_url.data
                else:
                    flash('Invalid Image URL','error')
                    return redirect(url_for('update_profile'))
            if form.password.data and current_user.is_authenticated:
                user.password = bcrypt.generate_password_hash(form.password.data)
                flash('Password Updated','success')
            db.session.add(current_user)
            db.session.commit()
            flash('Updated Profile Successfully','success')
        else:
            # NOTE(review): this branch also fires on a plain GET (the form
            # cannot validate), flashing a warning on first load -- confirm.
            flash('Incorrect or Invalid Details','warning')
        return render_template('forms/edit-user.html', user=current_user,form=form)
    else:
        return redirect(url_for('index'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user by e-mail and password.

    Fix: removed a stray debug ``print('In')`` left in the success path.
    """
    form = LoginForm()
    if form.validate_on_submit():
        userData = User.query.filter_by(email=form.email.data).first()
        if userData is None:
            flash('Invalid Email Provided', 'error')
            return redirect(url_for('index'))
        if bcrypt.check_password_hash(userData.password,form.password.data):
            # Persist the "has a live session" flag before logging in.
            userData.authenticated = True
            db.session.add(userData)
            db.session.commit()
            login_user(userData, remember=True)
            return redirect(url_for('profile'))
        else:
            flash('Password Incorrect','error')
            return redirect(url_for('index'))
    return render_template('forms/login.html',form=form)
@app.route('/logout', methods=["GET"])
@login_required
def logout():
    """Logout the current user."""
    user = current_user
    # Clear the persistent "authenticated" DB flag before ending the session.
    user.authenticated = False
    db.session.add(user)
    db.session.commit()
    logout_user()
    flash('Logged out successfully', 'success')
    return redirect(url_for('logout_check'))
@app.route('/checkLogout', methods=['GET'])
def logout_check():
    """Safety net after logout: drop any lingering session, then go home."""
    if current_user.is_authenticated:
        logout_user()
    return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Handle new-member registration.

    On a valid POST: generate a random initial password, build the User and
    (AES-encrypted) Bankdetails rows plus a SEPA reference, e-mail the
    credentials together with double-opt-in confirm/delete links, and commit
    everything in one transaction.
    """
    form = RegisterForm()
    # Only member types 1/2 and person types 0/1/2 are accepted.
    if form.validate_on_submit() and (int(form.membertype.data) in [1,2]) and (int(form.persontype.data) in [0,1,2]):
        # Random initial password: the bcrypt hash is stored, the clear
        # text is mailed to the user below.
        # NOTE(review): uses ``random``, not ``secrets`` -- not
        # cryptographically strong; confirm whether that matters here.
        passwordReal = ''.join((random.choice(chars)) for x in range(app.config['PWS_SIZE']))
        passwordTemp = bcrypt.generate_password_hash(passwordReal)
        form.membertype.data = int(form.membertype.data)
        form.persontype.data = int(form.persontype.data)
        image_url=None
        if form.image_url and is_image_url(form.image_url.data):
            image_url = form.image_url.data
        # persontype 1 registers with a company name, the others without.
        if (form.persontype.data == 1):
            userObj = User(email=form.email.data, password=passwordTemp, membertype=form.membertype.data,
                           persontype=form.persontype.data, fee=form.fee.data,
                           company=form.company.data, firstname=form.firstname.data, lastname=form.lastname.data,
                           bday=form.bday.data, town=form.town.data,road=form.road.data ,postcode=form.postcode.data,
                           phone=form.phone.data, mobile=form.mobile.data,image_url=image_url)
        else:
            userObj = User(email=form.email.data, password=passwordTemp, membertype=form.membertype.data,
                           persontype=form.persontype.data, fee=form.fee.data,
                           firstname=form.firstname.data, lastname=form.lastname.data,
                           bday=form.bday.data, town=form.town.data,road=form.road.data, postcode=form.postcode.data,
                           phone=form.phone.data, mobile=form.mobile.data,image_url=image_url)
        userObj.town = (userObj.town).encode('utf-8')
        bankObj = Bankdetails()
        bankObj.account_holder = (form.firstname.data + " " + form.lastname.data).title()
        # IBAN is validated via the schwifty library after stripping spaces.
        iban = form.iban.data.replace(" ", "")
        ibanobj = IBAN(iban)
        bic = form.bic.data
        bankObj.blz = ibanobj.bank_code
        bankObj.account_no = ibanobj.account_code
        digits = len(iban)
        # IBANs are between 16 and 34 characters once spaces are stripped.
        if not (digits >= 16 and digits <= 34):
            flash('Invalid IBAN Number', 'error')
            return redirect(url_for('register'))
        # Masked display forms: first 6 / last 4 of the IBAN, first 2 /
        # last 2 of the BIC; everything in between becomes 'X'.
        iban_visible = iban[:6] + 'X' * (digits - 10) + iban[-4:]
        digits = len(bic)
        bic_visible = bic[:2] + 'X' * (digits - 4) + bic[-2:]
        bankObj.iban_visible = iban_visible
        bankObj.bic_visible = bic_visible
        # The full IBAN/BIC are stored AES-encrypted (module-level cipher).
        bankObj.iban = EncodeAES(cipher, iban)
        bankObj.bic = EncodeAES(cipher, bic)
        # NOTE(review): deriving the SEPA reference from the current row
        # count is race-prone under concurrent registrations -- confirm.
        rows = User.query.count()
        bankObj.sepa_ref = 'FraKeKueV'
        if not form.membertype.data:
            bankObj.sepa_ref += 'OrdM'
        else:
            bankObj.sepa_ref += 'FoeM'
        bankObj.sepa_ref += iban_visible[:6]
        bankObj.sepa_ref += str((5 - len(str(rows))) * '0') + str(rows)
        userObj.bankdetails.append(bankObj)
        # Sending Email
        msg = Message('Anmeldung Frankfurter Kelterei Kultur e.V.', sender=ADMINS[0], recipients=[userObj.email])
        body = 'Hallo ' + userObj.firstname + endl
        body += 'Login Details:' + endl + 'Email:' + userObj.email + endl + 'Password: ' + passwordReal + endl*3
        body += ('Wir freuen uber dein Interesse an der Frankfurter Kelterei Kultur! Du hast folgende Daten fur die Anmeldungubermittelt. Aus Grunden des Datenschutzes, musst du diese Daten ein zweites Mal aktiv bestatigen (double opt-in):') + endl
        body += ('Mitgliedsart: ' + str(userObj.membertype)) + endl
        if userObj.company:
            body += 'Firma:' + userObj.company + endl
        body += 'Name: ' + (userObj.firstname + ' ' + userObj.lastname).title() + endl
        # body += 'Addresse: ' + userObj.town.decode("windows-1252").encode('utf-8') + endl + 'Zipcode: ' + str(userObj.postcode) + endl
        body += 'Alter: ' + userObj.bday.strftime("%Y-%m-%d") + endl * 3
        body += 'Kontodaten' + endl * 4 + '================='
        body += 'Kontoinhaber :' + bankObj.account_holder + endl
        body += 'IBAN :' + bankObj.iban_visible + endl
        body += 'BIC :' + bankObj.bic_visible + endl
        body += 'Monatsbeitrag:' + str(userObj.fee) + 'Euros' + endl
        body += 'Please confirm the correctness of the data by clicking on the following link:' + endl
        # Generate a 50-character confirmation token, retrying on collision.
        confirmationSequence = ''.join((random.choice(chars)) for x in range(50))
        while Confirmation.query.filter_by(confirmation_code=confirmationSequence).count() > 0:
            confirmationSequence = ''.join((random.choice(chars)) for x in range(50))
        body += app.config['BASE_URL'] + 'verifyaccount/' + confirmationSequence + endl*3
        body += 'Löschen der Anmeldung ' + endl
        body += app.config['BASE_URL'] + 'deleteaccount/' + confirmationSequence + endl*3
        body += 'Beste Grüße'
        msg.html = body.encode('utf-8')
        confirmobj = Confirmation(confirmation_code=confirmationSequence)
        try:
            # All three rows succeed or fail together; the mail is only
            # sent once the commit has gone through.
            db.session.add(userObj)
            db.session.add(bankObj)
            db.session.add(confirmobj)
            userObj.confirmation.append(confirmobj)
            db.session.commit()
            mail.send(msg)
            flash('Registered Id Successfully! Please verify using link sent to your email', 'success')
            return redirect(url_for('index'))
        except exc.IntegrityError as e:
            db.session().rollback()
            flash('SQLAlchemy Integrity Error!','error')
            return redirect(url_for('register'))
    return render_template('forms/register.html',form=form)
@app.route('/verifyaccount/<confirmation_sequence>', methods=['GET'])
def confirm_account(confirmation_sequence):
    """Double-opt-in step: the user clicked the e-mailed confirmation link.

    Marks the account as confirmed, mails the member's details (with
    accept/reject links) to the admin, and consumes the confirmation row.
    """
    confirmObj = Confirmation.query.filter_by(confirmation_code=confirmation_sequence).first()
    if confirmObj is None:
        # Invalid Code
        flash('Incorrect Validation Code', 'warning')
        return redirect(url_for('index'))
    userobj = User.query.filter_by(id=confirmObj.user_id).first()
    userobj.confirmed = True
    db.session.add(confirmObj)
    # Sending Email to the Admin
    msg = Message('Mitgliedsanmeldung von Website', sender=ADMINS[0], recipients=[ADMINS[0]])
    body = 'Folgende Mitgliedsdaten wurden in unserem Anmeldformular eingegeben und per E-Mail bestatigt: '
    # NOTE(review): userObj re-queries the same user as userobj above.
    userObj = User.query.filter_by(id=confirmObj.user_id).first()
    bankObj = userObj.bankdetails.first()
    body += 'Mitgliedsart: ' + str(userObj.membertype) + endl
    if userObj.company:
        body += 'Firma:' + userObj.company + endl
    body += 'Name: ' + (userObj.firstname + ' ' + userObj.lastname).title() + endl
    body += 'Addresse: ' + userObj.town + endl + 'Zipcode: ' + str(userObj.postcode) + endl
    body += 'Alter: ' + userObj.bday.strftime("%Y-%m-%d") + endl * 3
    body += 'Kontodaten' + endl * 4 + '================='
    body += 'Kontoinhaber :' + bankObj.account_holder + endl
    body += 'IBAN :' + bankObj.iban_visible + endl
    body += 'BIC :' + bankObj.bic_visible + endl
    body += 'Monatsbeitrag :' + str(userObj.fee) + 'Euros' + endl
    # Admin action links reuse the same confirmation token.
    body += 'Mitglied in Verein aufnehmen:' + endl
    body += app.config['BASE_URL'] + 'acceptuser/' + confirmation_sequence + endl
    body += 'Antrag ablehnen::' + endl
    body += app.config['BASE_URL'] + 'rejectuser/' + confirmation_sequence
    msg.html = body.encode('utf-8')
    mail.send(msg)
    # NOTE(review): the confirmation row is deleted here, yet the accept/
    # reject views below look it up again by the same code -- confirm.
    db.session.delete(confirmObj)
    db.session.commit()
    flash('User Validated Successfully!', 'success')
    return redirect(url_for('index')) # Will Change this to profile Page
@app.route('/deleteaccount/<deletion_sequence>', methods=['GET'])
def delete_account(deletion_sequence):
    """Delete a pending registration identified by its confirmation code.

    Bug fixes: the original compared the *query object* (never None) with
    ``==``, so invalid codes fell through and crashed below, and it passed
    the ``bankdetails`` relationship collection to ``session.delete``.
    """
    confirmObj = Confirmation.query.filter_by(confirmation_code=deletion_sequence).first()
    if confirmObj is None:
        # Invalid Code
        flash('Incorrect Validation Code', 'warning')
        return redirect(url_for('index'))
    userobj = User.query.filter_by(id=confirmObj.user_id).first()
    # Delete each bank-detail row individually, not the collection itself.
    for bankobj in userobj.bankdetails:
        db.session.delete(bankobj)
    db.session.delete(confirmObj)
    db.session.delete(userobj)
    db.session.commit()
    flash('User Deleted Successfully', 'warning')
    return redirect(url_for('index'))
@app.route('/acceptuser/<confirmation_code>', methods=['GET'])
@login_required
def accept_request(confirmation_code):
    """Admin action: accept a confirmed registration into the organisation.

    Bug fix: ``@login_required`` moved below ``@app.route`` (the original
    order registered the unguarded view). The second, identical admin
    re-check was removed -- ``is_admin`` already covers it.
    """
    if not is_admin(current_user):
        flash('Admin Access required', 'warning')
        return redirect(url_for('logout'))
    confirmObj = Confirmation.query.filter_by(confirmation_code=confirmation_code).first()
    if confirmObj is None:
        flash('Wrong Acceptance Code', 'error')
        return redirect(url_for('index'))  # Or any Other
    userobj = User.query.filter_by(id=confirmObj.user_id).first()
    userobj.confirmed = True
    userobj.status = 1
    # sendAcceptancemail(confirmObj.user_id.id)
    db.session.add(userobj)
    db.session.delete(confirmObj)
    db.session.commit()
    flash('User Accepted to the Organisation', 'success')
    return redirect(url_for('index'))
@app.route('/rejectuser/<confirmation_code>', methods=['GET'])
@login_required
def reject_user(confirmation_code):
    """Admin action: reject a pending registration and delete the user.

    Bug fixes: decorator order (``@login_required`` must be inside
    ``@app.route``), and deletion of the actual User row -- the original
    passed ``confirmObj.user_id`` (the FK value) to ``session.delete``
    while the fetched ``userobj`` went unused.
    """
    if not is_admin(current_user):
        flash('Admin Access required', 'warning')
        return redirect(url_for('logout'))
    confirmObj = Confirmation.query.filter_by(confirmation_code=confirmation_code).first()
    if confirmObj is None:
        flash('Wrong Acceptance Code', 'error')
        return redirect(url_for('index'))  # Or any Other
    userobj = User.query.filter_by(id=confirmObj.user_id).first()
    # sendAcceptancemail(confirmObj.user_id.id, False)
    db.session.delete(userobj)
    db.session.delete(confirmObj)
    db.session.commit()
    flash('User Rejected from the Organisation', 'warning')
    return redirect(url_for('index'))
@app.route('/forgot', methods=['GET', 'POST'])
def forgot():
    """Send a password-reset link to a registered e-mail address.

    Bug fixes: ``.first()`` was missing (so the query object was never
    None and unknown addresses fell through), the User *object* was passed
    where the integer FK is expected elsewhere in this module, and the new
    token row was never committed.
    """
    form = ForgotForm()
    if form.validate_on_submit():
        email = form.email.data
        userObj = User.query.filter_by(email=email).first()
        if userObj is None:
            flash('Wrong Email Id Provided! Please signup first.', 'error')
            return redirect(url_for('index'))
        reset_token = ''.join((random.choice(chars)) for x in range(50))
        forgotobj = Forgotpassword(forgot_code=reset_token, user_id=userObj.id)
        db.session.add(forgotobj)
        # Persist the token before mailing the link that references it.
        db.session.commit()
        msg = Message('Password Reset Email.', sender=ADMINS[0], recipients=[email])
        msg.body = 'Click the Below Link to Reset Password ' + endl + app.config[
            'BASE_URL'] + '/resetpassword/' + reset_token
        mail.send(msg)
        flash('Password Reset Email sent to your Email Id!', 'success')
        return redirect(url_for('index'))
    return render_template('forms/forgot.html',form=form)
@app.route('/resetpassword/<reset_token>',methods=['GET','POST'])
def reset_pass(reset_token):
    """Validate a reset token and set the user's new password.

    Bug fixes: the lookups were missing ``.first()``, and the None checks
    tested the module-level ``forgot`` *function* instead of the query
    result, so invalid tokens were never rejected. The token row is now
    deleted after use (the original re-added it, allowing reuse).
    """
    form = ResetForm()
    if request.method == 'GET':
        forgotObj = Forgotpassword.query.filter_by(forgot_code=reset_token).first()
        if forgotObj is None:
            flash('Invalid Reset Token!','error')
            return redirect(url_for('index'))
        return render_template('forms/final_reset.html',reset_token=reset_token,form=form)
    elif form.validate_on_submit():
        forgotObj = Forgotpassword.query.filter_by(forgot_code=reset_token).first()
        if forgotObj is None:
            flash('Invalid Reset Token!', 'error')
            return redirect(url_for('index'))
        userobj = User.query.filter_by(id=forgotObj.user_id).first()
        userobj.password = bcrypt.generate_password_hash(form.password.data)
        db.session.add(userobj)
        db.session.delete(forgotObj)
        db.session.commit()
        flash('Password Updated Successfully','success')
        return redirect(url_for('index'))
@app.route('/bankprofile',methods=['GET'])
@login_required
def bank_profile():
    """Show the logged-in user's bank-details page.

    Bug fix: ``@login_required`` moved below ``@app.route`` so the guard
    is applied to the registered view.
    """
    if not is_verified(current_user):
        flash('Please verify your account from your emailed Link!', 'error')
        return redirect(url_for('index', next=request.url))
    if current_user.is_authenticated and current_user.authenticated:
        return render_template('pages/show-bank.html',user=current_user)
    return redirect(url_for('index'))
@app.route('/editbank',methods=['GET','POST'])
@login_required
def edit_bank_profile():
    """Show and process the bank-details edit form.

    Bug fixes: decorator order (``@login_required`` must be inside
    ``@app.route``), and a duplicate ``iban = form.iban.data`` assignment
    removed -- only the spaces-stripped value is used.
    """
    if not is_verified(current_user):
        flash('Please verify your account from your emailed Link!', 'error')
        return redirect(url_for('index', next=request.url))
    if not current_user.is_authenticated:
        return redirect(url_for('index'))
    form = EditBank()
    if form.validate_on_submit():
        user = current_user
        user.bankdetails[0].account_no = form.account_no.data
        user.bankdetails[0].account_holder = form.account_holder.data
        iban = form.iban.data.replace(" ", "")
        bic = form.bic.data
        # Masked display forms: first 6 / last 4 of the IBAN, first 2 /
        # last 2 of the BIC; everything in between becomes 'X'.
        digits = len(iban)
        iban_visible = iban[:6] + 'X' * (digits - 10) + iban[-4:]
        digits = len(bic)
        bic_visible = bic[:2] + 'X' * (digits - 4) + bic[-2:]
        user.bankdetails[0].iban_visible = iban_visible
        user.bankdetails[0].iban = EncodeAES(cipher,iban)
        user.bankdetails[0].bic_visible = bic_visible
        user.bankdetails[0].bic = EncodeAES(cipher,bic)
        db.session.add(user)
        db.session.commit()
        flash('Bank Details update successfully','success')
    return render_template('forms/edit-bank.html',form=form,user=current_user)
@app.route('/memberslist')
@login_required
def admin_list():
    """Admin landing page listing all registered users.

    Bug fix: ``@login_required`` moved below ``@app.route`` so the guard
    is applied to the registered view.
    """
    if not is_admin(current_user):
        flash('Admin Access required', 'warning')
        return redirect(url_for('logout'))
    user = current_user
    if user.is_authenticated:
        #2nd Verification after decorator
        user_all = User.query.all()
        return render_template('pages/admin-landing.html',user_all=user_all,user=current_user)
    else:
        flash('Re Login as Admin!','error')
        return redirect(url_for('profile'))
@app.route('/getmemberprofile/<user_id>',methods=['GET'])
@login_required
def get_member_profile(user_id):
    """Admin view of a single member's profile.

    Bug fix: decorator order; the "invalid id" flash also gained an
    explicit 'error' category for consistency with the rest of the module.
    """
    if not is_admin(current_user):
        flash('Admin Access required', 'warning')
        return redirect(url_for('logout'))
    userobj = User.query.filter_by(id=user_id).first()
    if userobj is None:
        flash('Member Profile ID Invalid', 'error')
        return redirect(url_for('admin_list'))
    return render_template('pages/show-user.html',user=userobj,next="/memberslist",current_user=current_user)
@app.route('/makeadmin/<user_id>')
@login_required
def make_admin(user_id):
    """Grant admin rights to the user identified by *user_id*.

    Bug fixes: decorator order, and the missing ``.first()`` -- the
    original set ``.admin`` on the *query object*, never on a User row,
    and its None check could never trigger.
    """
    if not is_admin(current_user):
        flash('Admin Access required', 'warning')
        return redirect(url_for('logout'))
    user = User.query.filter_by(id=user_id).first()
    if user is not None:
        user.admin = True
        db.session.add(user)
        db.session.commit()
        flash('Admin Account Added For Profile Id'+str(user.id),'success')
        return redirect(url_for('admin_list'))
    flash('User Id Invalid','error')
    return redirect(url_for('admin_list'))
# Error handlers.
@app.errorhandler(500)
def internal_error(error):
    """Render the custom 500 page for unhandled server errors."""
    # db_session.rollback()
    return render_template('errors/500.html'), 500
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page for unknown URLs."""
    return render_template('errors/404.html'), 404
# @login_required
# @app.route('/test')
# def test():
# return render_template('pages/admin-landing.html',user=current_user)
# Outside debug mode, mirror INFO-level application log messages (with
# timestamp, level, and source location) into error.log.
if not app.debug:
    file_handler = FileHandler('error.log')
    file_handler.setFormatter(
        Formatter(
            '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
    )
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('errors')
| 42.094412
| 248
| 0.648876
|
acff9eb0c0f1ae13e2c35f14aed744524982b3fc
| 595
|
py
|
Python
|
examples/inspect_vbr/inspect_vbr.py
|
williballenthin/python-ntfs
|
080275e7e78285c68f5a0b5ca895b5123d754acc
|
[
"Apache-2.0"
] | 58
|
2015-03-05T18:22:46.000Z
|
2022-02-18T02:26:34.000Z
|
examples/inspect_vbr/inspect_vbr.py
|
williballenthin/python-ntfs
|
080275e7e78285c68f5a0b5ca895b5123d754acc
|
[
"Apache-2.0"
] | 5
|
2015-01-30T14:13:26.000Z
|
2018-05-09T14:05:34.000Z
|
examples/inspect_vbr/inspect_vbr.py
|
williballenthin/python-ntfs
|
080275e7e78285c68f5a0b5ca895b5123d754acc
|
[
"Apache-2.0"
] | 23
|
2015-03-10T17:24:16.000Z
|
2022-01-24T15:01:34.000Z
|
"""
Dump the NTFS VBR for a volume.
"""
import logging
from ntfs.volume import FlatVolume
from ntfs.BinaryParser import Mmap
from ntfs.filesystem import NTFSVBR
# Module-level logger (not referenced by main() below; kept for debugging).
g_logger = logging.getLogger("ntfs.examples.inspect_vbr")
def main(image_filename, volume_offset):
    """Memory-map *image_filename*, parse the NTFS VBR of the volume that
    starts at byte offset *volume_offset*, and print its string dump."""
    logging.basicConfig(level=logging.DEBUG)
    # Quiet the chatty MFT parser; everything else stays at DEBUG.
    logging.getLogger("ntfs.mft").setLevel(logging.INFO)
    with Mmap(image_filename) as buf:
        v = FlatVolume(buf, volume_offset)
        vbr = NTFSVBR(v)
        print(vbr.get_all_string())
# CLI entry point: argv[1] is the image file, argv[2] the volume offset.
if __name__ == '__main__':
    import sys
    main(sys.argv[1], int(sys.argv[2]))
| 21.25
| 57
| 0.710924
|
acff9ebbc4247727c1a7d062f80be69506e78f82
| 3,760
|
py
|
Python
|
examples/app.py
|
topless/invenio-rest
|
8c05be26b48b2a7e90cb05298c9234e5dd7f4af1
|
[
"MIT"
] | null | null | null |
examples/app.py
|
topless/invenio-rest
|
8c05be26b48b2a7e90cb05298c9234e5dd7f4af1
|
[
"MIT"
] | null | null | null |
examples/app.py
|
topless/invenio-rest
|
8c05be26b48b2a7e90cb05298c9234e5dd7f4af1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Minimal Flask application example.
SPHINX-START
First install Invenio-REST, setup the application and load
fixture data by running:
.. code-block:: console
$ pip install -e .[all]
$ cd examples
$ ./app-setup.sh
$ ./app-fixtures.sh
Next, start the development server:
.. code-block:: console
$ export FLASK_APP=app.py FLASK_DEBUG=1
$ flask run
and use cURL to explore the simplistic REST API:
.. code-block:: console
$ curl -v -XGET http://0.0.0.0:5000/records/
$ curl -v -XGET http://0.0.0.0:5000/records/ \\
-H Accept:application/xml
The example app demonstrates:
* Use of ``Accept`` headers to change the serialization from JSON to XML via
the :class:`invenio_rest.views.ContentNegotiatedMethodView`.
* CORS headers (``Access-Control-Allow-Origin`` and
``Access-Control-Expose-Headers``).
To reset the example application run:
.. code-block:: console
$ ./app-teardown.sh
SPHINX-END
"""
from __future__ import absolute_import, print_function
import xmltodict
from flask import Blueprint, Flask, jsonify, make_response
from invenio_rest import ContentNegotiatedMethodView, InvenioREST, csrf
def json_v1_search(search_result):
    """Serialize records."""
    json_body = jsonify(search_result)
    return make_response(json_body)
def xml_v1_search(search_result):
    """Serialize records as text."""
    xml_body = xmltodict.unparse(search_result)
    return make_response(xml_body)
class RecordsListResource(ContentNegotiatedMethodView):
    """Example REST resource.

    Serves GET/POST on /records, picking the JSON or XML serializer
    according to the request's Accept header (JSON by default).
    """

    def __init__(self, **kwargs):
        """Init."""
        super(RecordsListResource, self).__init__(
            # Per-method map from MIME type to serializer function.
            method_serializers={
                'GET': {
                    'application/json': json_v1_search,
                    'application/xml': xml_v1_search,
                },
                'POST': {
                    'application/json': json_v1_search,
                    'application/xml': xml_v1_search,
                },
            },
            # Fallbacks used when the client sends no Accept preference.
            default_method_media_type={
                'GET': 'application/json',
                'POST': 'application/json',
            },
            default_media_type='application/json',
            **kwargs)

    def get(self, **kwargs):
        """Implement the GET /records."""
        return {"title": "Test"}

    def post(self, **kwargs):
        """Implement the POST /records; payload is serialized like GET."""
        return {"message": "OK"}
def csrf_exempt_view():
    """View registered below with an explicit ``csrf.exempt`` wrapper."""
    return jsonify({"message": "OK with no CSRF"})
def csrf_exempt_blueprint_view():
    """View on a blueprint that is CSRF-exempted as a whole (see below)."""
    return jsonify({"message": "OK with no CSRF"})
# Create Flask application
app = Flask(__name__)
# Enable both CORS headers and CSRF protection for the example.
app.config.update({
    'REST_ENABLE_CORS': True,
    'REST_ENABLE_CSRF': True,
})
InvenioREST(app)

# Main blueprint: content-negotiated /records resource plus two POST
# endpoints demonstrating per-view CSRF exemption.
blueprint = Blueprint(
    'mymodule',
    __name__,
    url_prefix='/records',
    template_folder='templates',
    static_folder='static',
)

# Second blueprint, exempted from CSRF as a whole further below.
blueprint_exempt_csrf = Blueprint(
    'mymodule_exempt_csrf',
    __name__,
    url_prefix='/exempt_csrf',
    template_folder='templates',
    static_folder='static',
)

records_view = RecordsListResource.as_view('records')
blueprint.add_url_rule('/', view_func=records_view)
# Exempted per-view via the csrf.exempt wrapper.
blueprint.add_url_rule(
    '/nocsrf', view_func=csrf.exempt(csrf_exempt_view), methods=['POST'])
# TODO Check this shouldn't exempt it
blueprint.add_url_rule(
    '/nocsrf-2', view_func=csrf_exempt_view, methods=['POST'])
blueprint_exempt_csrf.add_url_rule(
    '/ping', view_func=csrf_exempt_blueprint_view, methods=['POST'])
# Whole-blueprint exemption.
csrf.exempt(blueprint_exempt_csrf)

app.register_blueprint(blueprint)
app.register_blueprint(blueprint_exempt_csrf)
| 24.736842
| 76
| 0.668085
|
acff9f1249fc6c2941fbe203a4d45876fe95466a
| 6,482
|
py
|
Python
|
src/dusqlite/find.py
|
coecms/dusqlite
|
d7f30b4cb9bb3ed40def467fde13dddc03a334db
|
[
"Apache-2.0"
] | null | null | null |
src/dusqlite/find.py
|
coecms/dusqlite
|
d7f30b4cb9bb3ed40def467fde13dddc03a334db
|
[
"Apache-2.0"
] | null | null | null |
src/dusqlite/find.py
|
coecms/dusqlite
|
d7f30b4cb9bb3ed40def467fde13dddc03a334db
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2019 ARC Centre of Excellence for Climate Extremes
# author: Scott Wales <scott.wales@unimelb.edu.au>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from . import model
from .handler import get_path_id
from . import __version__
import sqlalchemy as sa
import pandas
import pwd
import grp
import stat
from datetime import datetime
def find_children(root_ids):
    """
    Get all child ids of the paths identified by root_ids

    Args:
        root_ids: list of model.path.c.id

    Returns:
        sa.select of model.paths.c.id
    """
    q = (
        sa.select([
            model.paths.c.id,
        ])
        .select_from(
            # Join against the transitive-closure table so descendants at
            # any depth are matched, not just direct children.
            model.paths
            .join(
                model.paths_closure,
                model.paths.c.id == model.paths_closure.c.id)
        )
        .where(model.paths_closure.c.root.in_(root_ids))
        # A path can sit under several of the given roots; report it once.
        .distinct()
    )

    return q
def find_roots():
    """
    Find all root paths (those without a parent row)

    Returns:
        sa.select of (model.paths.c.id labelled 'id', the full path string)
    """
    q = (
        sa.select([
            model.paths_fullpath.c.path_id.label('id'),
            model.paths_fullpath.c.path,
        ])
        .select_from(
            # Outer join so the parent_id column can be inspected below.
            model.paths_fullpath
            .join(
                model.paths,
                model.paths.c.id == model.paths_fullpath.c.path_id,
                isouter=True,
            )
        )
        .where(
            # Roots are exactly the rows with no parent.
            model.paths.c.parent_id == None
        )
    )

    return q
def to_ncdu(findq, connection):
    """
    Format the output of 'find' so it can be read by ``ncdu -e``

    Args:
        findq: a select produced by :func:`find` (reduced to ids here)
        connection: live database connection used to execute the queries

    Returns:
        A json-serializable list in ncdu's export format:
        [majorver, minorver, metadata-dict, nested file tree].
    """
    findq = findq.with_only_columns([model.paths.c.id])

    paths_parents = sa.alias(model.paths_parents)

    # Get the metadata needed by ncdu
    # Found results, plus all of their parent paths
    q = (sa.select([
            model.paths.c.id,
            model.paths.c.size.label('asize'),
            model.paths.c.inode.label('ino'),
            model.basenames.c.name,
            model.paths.c.mode,
            model.paths.c.uid,
            model.paths.c.gid,
            sa.cast(model.paths.c.mtime, sa.Integer).label('mtime'),
        ])
        .select_from(model.paths
            .join(model.paths_parents,
                model.paths_parents.c.parent_id == model.paths.c.id)
            .join(model.basenames,
                model.paths.c.basename_id == model.basenames.c.id)
            )
        .where(model.paths_parents.c.path_id.in_(findq))
        .distinct()
        )

    # Node table keyed by path id; ncdu represents a directory as a list
    # whose first element is its own metadata dict, a file as a bare dict.
    tree = {None: [{"name": "."}]}
    for r in connection.execute(q):
        d = dict(r)
        # ncdu wants both apparent and disk size; only one is stored.
        d['dsize'] = d['asize']
        i = d.pop('id')
        if stat.S_ISDIR(d['mode']):
            tree[i] = [d]
        else:
            tree[i] = d

    # Get the tree edges
    q = (sa.select([
            paths_parents.c.parent_id.label('id'),
            model.paths.c.parent_id,
        ])
        .select_from(paths_parents
            .join(model.paths,
                paths_parents.c.parent_id == model.paths.c.id,
                isouter=True,
                ))
        .where(paths_parents.c.path_id.in_(findq))
        .distinct()
        )

    # Construct the tree relationships
    for r in connection.execute(q):
        tree[r.parent_id].append(tree[r.id])

    # Return the data ready to be converted to json
    return [1, 1, {"progname": "dusqlite", "progver": __version__,
        "timestamp": datetime.utcnow().timestamp()}, tree[None]]
def find(connection, path=None, older_than=None, user=None, group=None, exclude=None, size=None):
    """
    Find files in the database

    Args:
        connection: Database connection from :func:`~dusqlite.db.connect`
        path: Parent path to search under
        user (str): Find files owned by this user name
        group (str): Find files owned by this group name
        older_than (datetime.timedelta-like): Find files with modify time at
            least this far away from the present
        size (int): Find files larger than this size (in bytes); a negative
            value selects files *smaller* than ``abs(size)``
        exclude (list of str): Exclude paths containing these strings as components

    Returns a :obj:`sqlalchemy.sql.select` of filesystem URLs matching the constraint

    The select has access to tables :obj:`~dusqlite.model.paths_fullpath` and
    :obj:`~dusqlite.model.paths` for further querying
    """
    j = (model.paths_fullpath
            .join(model.paths, model.paths.c.id == model.paths_fullpath.c.path_id))

    q = (sa.sql.select([
            model.paths_fullpath.c.path,
        ])
        .select_from(j)
        )

    # Each filter below is optional and only added when its argument is given.
    if path is not None:
        # Restrict to descendants of the given path via the parents table.
        path_id = get_path_id(path, connection)
        j = j.join(model.paths_parents, model.paths.c.id ==
                model.paths_parents.c.path_id)
        q = q.select_from(j).where(model.paths_parents.c.parent_id == path_id)

    if older_than is not None:
        # Compare against a UTC epoch timestamp computed once, up front.
        ts = (pandas.Timestamp.now(tz='UTC') - older_than)
        ts = ts.timestamp()
        q = q.where(model.paths.c.mtime < ts)

    if user is not None:
        # Resolve user names to numeric uids for the comparison.
        q = q.where(model.paths.c.uid.in_(
            [pwd.getpwnam(u).pw_uid for u in user]))

    if group is not None:
        # Resolve group names to numeric gids for the comparison.
        q = q.where(model.paths.c.gid.in_(
            [grp.getgrnam(g).gr_gid for g in group]))

    if exclude is not None:
        # Subquery: every path that has an excluded basename somewhere in
        # its ancestry; those paths are then filtered out.
        excl_q = (sa.select([model.paths_parents.c.path_id])
                .select_from(model.paths_parents
                    .join(model.paths, model.paths.c.id == model.paths_parents.c.parent_id)
                    .join(model.basenames, model.basenames.c.id == model.paths.c.basename_id)
                    )
                .where(model.basenames.c.name.in_(exclude)))
        q = q.where(~model.paths.c.id.in_(excl_q))

    if size is not None:
        # Positive size: at least this many bytes; negative: fewer than -size.
        if size > 0:
            q = q.where(model.paths.c.size >= size)
        else:
            q = q.where(model.paths.c.size < -size)

    return q
| 30.431925
| 104
| 0.582999
|
acffa0960198a9f4b56a5c3620cb7d466750fccd
| 1,780
|
py
|
Python
|
xclib/tests/21_ejabberd_test.py
|
scurrvy2020/xmpp-cloud-auth
|
919e98ead79fe3c65716a485917d52b6d2c97358
|
[
"MIT"
] | 62
|
2017-04-20T13:20:49.000Z
|
2022-01-18T20:52:51.000Z
|
xclib/tests/21_ejabberd_test.py
|
scurrvy2020/xmpp-cloud-auth
|
919e98ead79fe3c65716a485917d52b6d2c97358
|
[
"MIT"
] | 85
|
2017-03-19T17:52:28.000Z
|
2022-01-07T17:54:00.000Z
|
xclib/tests/21_ejabberd_test.py
|
scurrvy2020/xmpp-cloud-auth
|
919e98ead79fe3c65716a485917d52b6d2c97358
|
[
"MIT"
] | 22
|
2017-05-04T08:53:32.000Z
|
2022-01-06T07:07:19.000Z
|
# Checks that ejabberd_io works as designed
import sys
import io
import unittest
from xclib.ejabberd_io import ejabberd_io
from xclib.tests.iostub import iostub
class TestEjabberd(unittest.TestCase, iostub):
    """Exercises ejabberd_io's length-prefixed request/response framing."""

    def test_input(self):
        """Two well-formed frames parse into colon-split tuples, then EOF."""
        # Each frame is a 2-byte big-endian length followed by the payload
        # (0x000d = 13 bytes, 0x0011 = 17 bytes).
        self.stub_stdin(b'\000\015isuser:login:' +
                        b'\000\021auth:log:dom:pass', ioclass=io.BytesIO)
        tester = iter(ejabberd_io.read_request(sys.stdin, sys.stdout))
        output = next(tester)
        self.assertEqual(output, ('isuser', 'login', ''))
        output = next(tester)
        self.assertEqual(output, ('auth', 'log', 'dom', 'pass'))
        self.assertRaises(StopIteration, next, tester)

    def test_input_fake_eof(self):
        """A zero-length frame ends the iteration cleanly."""
        self.stub_stdin(b'\000\000', ioclass=io.BytesIO)
        tester = iter(ejabberd_io.read_request(sys.stdin, sys.stdout))
        self.assertRaises(StopIteration, next, tester)

    def test_input_short(self):
        """A frame announcing more bytes than available ends the iteration."""
        self.stub_stdin(b'\001\000', ioclass=io.BytesIO)
        tester = iter(ejabberd_io.read_request(sys.stdin, sys.stdout))
        self.assertRaises(StopIteration, next, tester)

    def test_input_negative(self):
        """An oversized/invalid length (0xffff) also ends the iteration."""
        self.stub_stdin(b'\377\377', ioclass=io.BytesIO)
        tester = iter(ejabberd_io.read_request(sys.stdin, sys.stdout))
        self.assertRaises(StopIteration, next, tester)

    def test_output_false(self):
        """A False result is encoded as length 2, value 0x0000."""
        self.stub_stdout(ioclass=io.BytesIO)
        ejabberd_io.write_response(False, sys.stdout)
        self.assertEqual(sys.stdout.getvalue(), b'\000\002\000\000')

    # Cannot be merged, as getvalue() returns the aggregate value
    def test_output_true(self):
        """A True result is encoded as length 2, value 0x0001."""
        self.stub_stdout(ioclass=io.BytesIO)
        ejabberd_io.write_response(True, sys.stdout)
        self.assertEqual(sys.stdout.getvalue(), b'\000\002\000\001')
| 39.555556
| 70
| 0.688764
|
acffa281e03e8947e11cd40b36de07ec4e8a852d
| 3,086
|
py
|
Python
|
app/__init__.py
|
calcutec/netbard
|
2c30979ad3ca1cee2f81c521942e8bffea6f81b2
|
[
"BSD-3-Clause"
] | null | null | null |
app/__init__.py
|
calcutec/netbard
|
2c30979ad3ca1cee2f81c521942e8bffea6f81b2
|
[
"BSD-3-Clause"
] | 6
|
2015-05-26T14:03:03.000Z
|
2015-05-26T20:22:18.000Z
|
app/__init__.py
|
calcutec/netbard
|
2c30979ad3ca1cee2f81c521942e8bffea6f81b2
|
[
"BSD-3-Clause"
] | null | null | null |
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.mail import Mail
from config import ADMINS, MAIL_SERVER, MAIL_PORT, MAIL_USERNAME, \
MAIL_PASSWORD, SQLALCHEMY_DATABASE_URI
from .momentjs import momentjs
from flask.json import JSONEncoder
from flask_wtf.csrf import CsrfProtect
# Application factory-less setup: module-level Flask app plus extensions.
app = Flask(__name__)
app.config.from_object('config')
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
db = SQLAlchemy(app)
# Flask-Login: unauthenticated users are redirected to the 'login' view.
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
lm.login_message = 'Please log in to access this page.'
mail = Mail(app)
# Enable CSRF protection on all POST forms.
CsrfProtect(app)
# OAuth client credentials are pulled from the environment; a missing
# variable raises KeyError at import time (fail fast on misconfiguration).
app.config['OAUTH_CREDENTIALS'] = {
    'facebook': {
        'id': os.environ['FACEBOOK_AUTH'],
        'secret': os.environ['FACEBOOK_AUTH_SECRET']
    },
    'google': {
        'id': os.environ['GOOGLE_AUTH'],
        'secret': os.environ['GOOGLE_AUTH_SECRET']
    }
}
class CustomJSONEncoder(JSONEncoder):
    """JSON encoder that understands speaklater's lazy translation strings.

    Needed so that flashed messages built from lazy-translated text can be
    serialized by Flask's JSON machinery.
    """
    def default(self, obj):
        from speaklater import is_lazy_string
        if not is_lazy_string(obj):
            return super(CustomJSONEncoder, self).default(obj)
        try:
            return unicode(obj)  # Python 2: force evaluation to unicode
        except NameError:
            return str(obj)  # Python 3: no `unicode` builtin
app.json_encoder = CustomJSONEncoder
# Email error reports in production when an SMTP server is configured.
if not app.debug and MAIL_SERVER != '':
    import logging
    from logging.handlers import SMTPHandler
    credentials = None
    if MAIL_USERNAME or MAIL_PASSWORD:
        credentials = (MAIL_USERNAME, MAIL_PASSWORD)
    mail_handler = SMTPHandler((MAIL_SERVER, MAIL_PORT),
                               'no-reply@' + MAIL_SERVER, ADMINS,
                               'burtonblog failure', credentials)
    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)
# Rotating file log in production, except on Heroku (read-only filesystem).
if not app.debug and os.environ.get('HEROKU') is None:
    import logging
    from logging.handlers import RotatingFileHandler
    file_handler = RotatingFileHandler('tmp/burtonblog.log', 'a',
                                       1 * 1024 * 1024, 10)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
    app.logger.addHandler(file_handler)
    app.logger.setLevel(logging.INFO)
    app.logger.info('burtonblog startup')
# On Heroku, log to stdout so the platform's log drain captures output.
if os.environ.get('HEROKU') is not None:
    import logging
    stream_handler = logging.StreamHandler()
    app.logger.addHandler(stream_handler)
    app.logger.setLevel(logging.INFO)
    app.logger.info('burtonblog startup')
app.jinja_env.globals['momentjs'] = momentjs
# S3 upload configuration; AWS keys come from the environment (KeyError if unset).
app.config["S3_LOCATION"] = 'https://s3.amazonaws.com/netbardus/'
app.config["S3_UPLOAD_DIRECTORY"] = 'user_imgs'
app.config["S3_BUCKET"] = 'netbardus'
app.config["AWS_ACCESS_KEY_ID"] = os.environ['AWS_ACCESS_KEY_ID']
app.config["AWS_SECRET_ACCESS_KEY"] = os.environ['AWS_SECRET_ACCESS_KEY']
# Imported last: views/models need `app` and `db` to exist (circular import dance).
from app import views, models
| 34.674157
| 79
| 0.697343
|
acffa3ec1e85d42054bfceb341965390b8612316
| 2,542
|
py
|
Python
|
scripts/artifacts/Cast.py
|
Krypterry/ALEAPP
|
970b2e62742603336fe791b7c02e12e1f1e77375
|
[
"MIT"
] | 187
|
2020-02-22T23:35:32.000Z
|
2022-03-31T13:46:24.000Z
|
scripts/artifacts/Cast.py
|
Krypterry/ALEAPP
|
970b2e62742603336fe791b7c02e12e1f1e77375
|
[
"MIT"
] | 65
|
2020-02-25T18:22:47.000Z
|
2022-03-27T21:41:21.000Z
|
scripts/artifacts/Cast.py
|
Krypterry/ALEAPP
|
970b2e62742603336fe791b7c02e12e1f1e77375
|
[
"MIT"
] | 47
|
2020-02-24T22:33:35.000Z
|
2022-03-11T05:19:42.000Z
|
import sqlite3
import textwrap
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
def get_Cast(files_found, report_folder, seeker, wrap_text):
    """Parse the Google Cast DeviceInfo SQLite db and emit HTML/TSV/timeline reports."""
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    # NOTE: "cloud_devcie_id" is spelled exactly as in the source database schema.
    cursor.execute('''
    SELECT
    case last_published_timestamp_millis
        when 0 then ''
        else datetime(last_published_timestamp_millis/1000, 'unixepoch')
    end as "Last Published Timestamp",
    device_id,
    capabilities,
    device_version,
    friendly_name,
    model_name,
    receiver_metrics_id,
    service_instance_name,
    service_address,
    service_port,
    supported_criteria,
    rcn_enabled_status,
    hotspot_bssid,
    cloud_devcie_id,
    case last_discovered_timestamp_millis
        when 0 then ''
        else datetime(last_discovered_timestamp_millis/1000, 'unixepoch')
    end as "Last Discovered Timestamp",
    case last_discovered_by_ble_timestamp_millis
        when 0 then ''
        else datetime(last_discovered_by_ble_timestamp_millis/1000, 'unixepoch')
    end as "Last Discovered By BLE Timestamp"
    from DeviceInfo
    ''')
    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Cast')
        report.start_artifact_report(report_folder, 'Cast')
        report.add_script()
        data_headers = ('Last Published Timestamp','Device ID (SSDP UDN)','Capabilities','Device Version','Device Friendly Name','Device Model Name','Receiver Metrics ID','Service Instance Name','Device IP Address','Device Port','Supported Criteria','RCN Enabled Status','Hotspot BSSID','Cloud Device ID','Last Discovered Timestamp','Last Discovered By BLE Timestamp')
        # Each row already has exactly the 16 selected columns, in header order.
        data_list = [tuple(row) for row in all_rows]
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        tsv(report_folder, data_headers, data_list, 'Cast')
        timeline(report_folder, 'Cast', data_list, data_headers)
    else:
        logfunc('No Cast data available')
    db.close()
    return
| 38.515152
| 369
| 0.669945
|
acffa528ff20c536db0bf2968883bb293989366e
| 1,114
|
py
|
Python
|
cutout/transformations.py
|
joaopaulomcc/cutout
|
16b0c201c7074f05159483e09eb7df37a20369ce
|
[
"MIT"
] | null | null | null |
cutout/transformations.py
|
joaopaulomcc/cutout
|
16b0c201c7074f05159483e09eb7df37a20369ce
|
[
"MIT"
] | null | null | null |
cutout/transformations.py
|
joaopaulomcc/cutout
|
16b0c201c7074f05159483e09eb7df37a20369ce
|
[
"MIT"
] | null | null | null |
import numpy as np
import cairo
def rotation_matrix(rotation_angle):
    """Return the 3x3 homogeneous rotation matrix for *rotation_angle* (radians)."""
    cos_a = np.cos(rotation_angle)
    sin_a = np.sin(rotation_angle)
    return np.array([
        [cos_a, -sin_a, 0],
        [sin_a, cos_a, 0],
        [0, 0, 1],
    ])
def translation_matrix(x_t, y_t):
    """Return the 3x3 homogeneous matrix translating by (x_t, y_t)."""
    matrix = np.eye(3)
    matrix[0, 2] = x_t
    matrix[1, 2] = y_t
    return matrix
def scale_matrix(scale_x, scale_y):
    """Return the 3x3 homogeneous matrix scaling x by scale_x and y by scale_y."""
    return np.diag([scale_x, scale_y, 1])
def cairo_matrix(matrix):
    """Convert a 3x3 homogeneous transform to a cairo.Matrix.

    cairo.Matrix takes (xx, yx, xy, yy, x0, y0) — i.e. columns of the
    upper 2x3 block of the row-major input matrix.
    """
    (xx, xy, x0) = matrix[0][0], matrix[0][1], matrix[0][2]
    (yx, yy, y0) = matrix[1][0], matrix[1][1], matrix[1][2]
    return cairo.Matrix(xx, yx, xy, yy, x0, y0)
def rotate_around_point(rotation_angle, pivot_point):
    """Cairo matrix rotating by *rotation_angle* (radians) about *pivot_point* (x, y)."""
    px, py = pivot_point[0], pivot_point[1]
    # Translate pivot to origin, rotate, translate back.
    to_origin = translation_matrix(-px, -py)
    from_origin = translation_matrix(px, py)
    return cairo_matrix(from_origin @ rotation_matrix(rotation_angle) @ to_origin)
| 18.881356
| 73
| 0.545781
|
acffa55cd0a9252885cc2f861dd8b5c1fee5d12a
| 12,488
|
py
|
Python
|
tests/framework/session/conftest.py
|
hfwittmann/kedro
|
b0d4fcd8f19b49a7916d78fd09daeb6209a7b6c6
|
[
"Apache-2.0"
] | 1
|
2021-11-25T12:33:13.000Z
|
2021-11-25T12:33:13.000Z
|
tests/framework/session/conftest.py
|
hfwittmann/kedro
|
b0d4fcd8f19b49a7916d78fd09daeb6209a7b6c6
|
[
"Apache-2.0"
] | null | null | null |
tests/framework/session/conftest.py
|
hfwittmann/kedro
|
b0d4fcd8f19b49a7916d78fd09daeb6209a7b6c6
|
[
"Apache-2.0"
] | null | null | null |
import logging
from logging.handlers import QueueHandler, QueueListener
from multiprocessing import Queue
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional
import pandas as pd
import pytest
import toml
import yaml
from dynaconf.validator import Validator
from kedro import __version__ as kedro_version
from kedro.config import ConfigLoader
from kedro.framework.hooks import hook_impl
from kedro.framework.hooks.manager import get_hook_manager
from kedro.framework.project import _ProjectPipelines, _ProjectSettings
from kedro.framework.session import KedroSession
from kedro.io import DataCatalog
from kedro.pipeline import Pipeline
from kedro.pipeline.node import Node, node
from kedro.versioning import Journal
# Module-level logger that LogsListener/LoggingHooks write through.
logger = logging.getLogger(__name__)
# Package name handed to KedroSession.create by the fixtures below.
MOCK_PACKAGE_NAME = "fake_package"
@pytest.fixture
def mock_package_name() -> str:
    # Name of the fake Kedro package used throughout these session tests.
    return MOCK_PACKAGE_NAME
@pytest.fixture
def local_logging_config() -> Dict[str, Any]:
    """Minimal dictConfig-style logging configuration for the test project."""
    console_handler = {
        "class": "logging.StreamHandler",
        "level": "INFO",
        "formatter": "simple",
        "stream": "ext://sys.stdout",
    }
    return {
        "version": 1,
        "formatters": {
            "simple": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"}
        },
        "root": {"level": "INFO", "handlers": ["console"]},
        "loggers": {"kedro": {"level": "INFO", "handlers": ["console"]}},
        "handlers": {"console": console_handler},
    }
def _write_yaml(filepath: Path, config: Dict):
    """Serialize *config* as YAML into *filepath*, creating parent dirs."""
    filepath.parent.mkdir(parents=True, exist_ok=True)
    filepath.write_text(yaml.dump(config))
def _write_toml(filepath: Path, config: Dict):
    """Serialize *config* as TOML into *filepath*, creating parent dirs."""
    filepath.parent.mkdir(parents=True, exist_ok=True)
    filepath.write_text(toml.dumps(config))
def _assert_hook_call_record_has_expected_parameters(
call_record: logging.LogRecord, expected_parameters: List[str]
):
"""Assert the given call record has all expected parameters."""
for param in expected_parameters:
assert hasattr(call_record, param)
@pytest.fixture
def local_config(tmp_path):
    """Catalog config declaring two versioned CSV datasets under *tmp_path*."""
    def _csv_entry(filename, save_args=None):
        # Entry key order mirrors the original literal: type, filepath,
        # (save_args,) versioned.
        entry = {
            "type": "pandas.CSVDataSet",
            "filepath": str(tmp_path / filename),
        }
        if save_args is not None:
            entry["save_args"] = save_args
        entry["versioned"] = True
        return entry
    return {
        "cars": _csv_entry("cars.csv", save_args={"index": False}),
        "boats": _csv_entry("boats.csv"),
    }
@pytest.fixture(autouse=True)
def clear_hook_manager():
    """After each test, unregister every plugin from the global hook manager."""
    yield
    manager = get_hook_manager()
    for plugin in manager.get_plugins():
        manager.unregister(plugin)
@pytest.fixture(autouse=True)
def config_dir(tmp_path, local_config, local_logging_config):
    """Materialize a minimal Kedro project config tree under *tmp_path*."""
    base_dir = tmp_path / "conf" / "base"
    local_dir = tmp_path / "conf" / "local"
    _write_yaml(base_dir / "catalog.yml", local_config)
    _write_yaml(local_dir / "credentials.yml", {"dev_s3": "foo"})
    _write_yaml(local_dir / "logging.yml", local_logging_config)
    kedro_metadata = {
        "project_version": kedro_version,
        "project_name": "test hooks",
        "package_name": "test_hooks",
    }
    _write_toml(tmp_path / "pyproject.toml", {"tool": {"kedro": kedro_metadata}})
def identity_node(x: str):
    """Return *x* unchanged — trivial node body for test pipelines."""
    return x
def assert_exceptions_equal(e1: Exception, e2: Exception):
    """Assert *e1* is an instance of *e2*'s type and both stringify identically."""
    types_match = isinstance(e1, type(e2))
    messages_match = str(e1) == str(e2)
    assert types_match and messages_match
@pytest.fixture
def dummy_dataframe() -> pd.DataFrame:
    # Tiny two-row frame saved/loaded through the catalog in hook tests.
    return pd.DataFrame({"test": [1, 2]})
@pytest.fixture
def mock_pipeline() -> Pipeline:
    # Two identity nodes: cars -> planes and boats -> ships, tagged "pipeline".
    return Pipeline(
        [
            node(identity_node, "cars", "planes", name="node1"),
            node(identity_node, "boats", "ships", name="node2"),
        ],
        tags="pipeline",
    )
class LogRecorder(logging.Handler):  # pylint: disable=abstract-method
    """Accumulates every record it is handed, for later inspection in tests."""
    def __init__(self):
        super().__init__()
        # Records in order of receipt; never emitted anywhere.
        self.log_records = []
    def handle(self, record):
        """Store *record* instead of formatting or emitting it."""
        self.log_records.append(record)
class LogsListener(QueueListener):
    """Capture records from the module logger via a process-safe queue."""
    def __init__(self):
        # Records travel through this queue from logger to recorder.
        log_queue = Queue()
        # Route the module logger's output into the queue.
        self.log_handler = QueueHandler(log_queue)
        logger.addHandler(self.log_handler)
        # The recorder receives everything the listener drains off the queue.
        self.log_recorder = LogRecorder()
        super().__init__(log_queue, self.log_recorder)
    @property
    def logs(self):
        """All records captured so far."""
        return self.log_recorder.log_records
@pytest.fixture
def logs_listener():
    """Run a LogsListener for the duration of a test, then detach and stop it."""
    log_listener = LogsListener()
    log_listener.start()
    yield log_listener
    logger.removeHandler(log_listener.log_handler)
    log_listener.stop()
class LoggingHooks:
    """A set of test hooks that only log information when invoked"""
    # Each hook forwards its arguments into the log record via `extra=`, so
    # tests can assert on them with
    # _assert_hook_call_record_has_expected_parameters.
    @hook_impl
    def after_catalog_created(
        self,
        catalog: DataCatalog,
        conf_catalog: Dict[str, Any],
        conf_creds: Dict[str, Any],
        feed_dict: Dict[str, Any],
        save_version: str,
        load_versions: Dict[str, str],
        run_id: str,
    ):
        logger.info(
            "Catalog created",
            extra={
                "catalog": catalog,
                "conf_catalog": conf_catalog,
                "conf_creds": conf_creds,
                "feed_dict": feed_dict,
                "save_version": save_version,
                "load_versions": load_versions,
                "run_id": run_id,
            },
        )
    # Node lifecycle hooks.
    @hook_impl
    def before_node_run(
        self,
        node: Node,
        catalog: DataCatalog,
        inputs: Dict[str, Any],
        is_async: str,
        run_id: str,
    ) -> None:
        logger.info(
            "About to run node",
            extra={
                "node": node,
                "catalog": catalog,
                "inputs": inputs,
                "is_async": is_async,
                "run_id": run_id,
            },
        )
    @hook_impl
    def after_node_run(
        self,
        node: Node,
        catalog: DataCatalog,
        inputs: Dict[str, Any],
        outputs: Dict[str, Any],
        is_async: str,
        run_id: str,
    ) -> None:
        logger.info(
            "Ran node",
            extra={
                "node": node,
                "catalog": catalog,
                "inputs": inputs,
                "outputs": outputs,
                "is_async": is_async,
                "run_id": run_id,
            },
        )
    @hook_impl
    def on_node_error(
        self,
        error: Exception,
        node: Node,
        catalog: DataCatalog,
        inputs: Dict[str, Any],
        is_async: bool,
        run_id: str,
    ):
        logger.info(
            "Node error",
            extra={
                "error": error,
                "node": node,
                "catalog": catalog,
                "inputs": inputs,
                "is_async": is_async,
                "run_id": run_id,
            },
        )
    # Pipeline lifecycle hooks.
    @hook_impl
    def before_pipeline_run(
        self, run_params: Dict[str, Any], pipeline: Pipeline, catalog: DataCatalog
    ) -> None:
        logger.info(
            "About to run pipeline",
            extra={"pipeline": pipeline, "run_params": run_params, "catalog": catalog},
        )
    @hook_impl
    def after_pipeline_run(
        self,
        run_params: Dict[str, Any],
        run_result: Dict[str, Any],
        pipeline: Pipeline,
        catalog: DataCatalog,
    ) -> None:
        logger.info(
            "Ran pipeline",
            extra={
                "pipeline": pipeline,
                "run_params": run_params,
                "run_result": run_result,
                "catalog": catalog,
            },
        )
    @hook_impl
    def on_pipeline_error(
        self,
        error: Exception,
        run_params: Dict[str, Any],
        pipeline: Pipeline,
        catalog: DataCatalog,
    ) -> None:
        logger.info(
            "Pipeline error",
            extra={
                "error": error,
                "run_params": run_params,
                "pipeline": pipeline,
                "catalog": catalog,
            },
        )
    # Dataset I/O hooks.
    @hook_impl
    def before_dataset_loaded(self, dataset_name: str) -> None:
        logger.info("Before dataset loaded", extra={"dataset_name": dataset_name})
    @hook_impl
    def after_dataset_loaded(self, dataset_name: str, data: Any) -> None:
        logger.info(
            "After dataset loaded", extra={"dataset_name": dataset_name, "data": data}
        )
    @hook_impl
    def before_dataset_saved(self, dataset_name: str, data: Any) -> None:
        logger.info(
            "Before dataset saved", extra={"dataset_name": dataset_name, "data": data}
        )
    @hook_impl
    def after_dataset_saved(self, dataset_name: str, data: Any) -> None:
        logger.info(
            "After dataset saved", extra={"dataset_name": dataset_name, "data": data}
        )
    # Registration hooks: these also return real objects used by the session.
    @hook_impl
    def register_config_loader(
        self, conf_paths: Iterable[str], env: str, extra_params: Dict[str, Any]
    ) -> ConfigLoader:
        logger.info(
            "Registering config loader",
            extra={"conf_paths": conf_paths, "env": env, "extra_params": extra_params},
        )
        return ConfigLoader(conf_paths)
    @hook_impl
    def register_catalog(
        self,
        catalog: Optional[Dict[str, Dict[str, Any]]],
        credentials: Dict[str, Dict[str, Any]],
        load_versions: Dict[str, str],
        save_version: str,
        journal: Journal,
    ) -> DataCatalog:
        logger.info(
            "Registering catalog",
            extra={
                "catalog": catalog,
                "credentials": credentials,
                "load_versions": load_versions,
                "save_version": save_version,
                "journal": journal,
            },
        )
        return DataCatalog.from_config(
            catalog, credentials, load_versions, save_version, journal
        )
@pytest.fixture
def project_hooks():
    """A set of project hook implementations that log to stdout whenever it is invoked."""
    return LoggingHooks()
@pytest.fixture(autouse=True)
def mock_logging(mocker):
    """Patch out dictConfig so session setup cannot reconfigure global logging."""
    # Disable logging.config.dictConfig in KedroSession._setup_logging as
    # it changes logging.config and affects other unit tests
    return mocker.patch("logging.config.dictConfig")
@pytest.fixture(autouse=True)
def mock_pipelines(mocker, mock_pipeline):
    """Patch the project pipeline registry to expose the mock pipeline."""
    def _registry_callable():
        # Fresh dict per call, mirroring how a real register_pipelines behaves.
        return {
            "__default__": mock_pipeline,
            "pipe": mock_pipeline,
        }
    mocker.patch.object(
        _ProjectPipelines,
        "_get_pipelines_registry_callable",
        return_value=_registry_callable,
    )
    return _registry_callable()
def _mock_imported_settings_paths(mocker, mock_settings):
for path in [
"kedro.framework.context.context.settings",
"kedro.framework.session.session.settings",
"kedro.framework.project.settings",
]:
mocker.patch(path, mock_settings)
return mock_settings
@pytest.fixture
def mock_settings(mocker, project_hooks):
    # Project settings whose HOOKS default contains only the logging hooks.
    class MockSettings(_ProjectSettings):
        _HOOKS = Validator("HOOKS", default=(project_hooks,))
    return _mock_imported_settings_paths(mocker, MockSettings())
@pytest.fixture
def mock_session(
    mock_settings, mock_package_name, tmp_path
):  # pylint: disable=unused-argument
    # mock_settings is requested only for its patching side effect.
    return KedroSession.create(
        mock_package_name, tmp_path, extra_params={"params:key": "value"}
    )
@pytest.fixture(autouse=True)
def mock_validate_settings(mocker):
    """Skip settings.py validation — the test project has no settings.py."""
    # KedroSession eagerly validates that a project's settings.py is correct by
    # importing it. settings.py does not actually exists as part of this test suite
    # since we are testing session in isolation, so the validation is patched.
    mocker.patch("kedro.framework.session.session.validate_settings")
| 28.253394
| 95
| 0.598975
|
acffa640c0078a14f385af77c9d2fd625a04d6b7
| 808
|
py
|
Python
|
cogdl/wrappers/model_wrapper/graph_classification/graph_embedding_mw.py
|
THUDM/cogdl
|
37359d559ae4f9f2c0c34d851abaa0a0950d120a
|
[
"MIT"
] | 1,072
|
2019-08-02T05:46:21.000Z
|
2022-03-31T07:51:53.000Z
|
cogdl/wrappers/model_wrapper/graph_classification/graph_embedding_mw.py
|
THUDM/cogdl
|
37359d559ae4f9f2c0c34d851abaa0a0950d120a
|
[
"MIT"
] | 96
|
2019-08-05T17:27:22.000Z
|
2022-03-03T08:36:57.000Z
|
cogdl/wrappers/model_wrapper/graph_classification/graph_embedding_mw.py
|
THUDM/cogdl
|
37359d559ae4f9f2c0c34d851abaa0a0950d120a
|
[
"MIT"
] | 299
|
2019-08-08T07:33:10.000Z
|
2022-03-31T09:30:07.000Z
|
from torch.utils.data import DataLoader
from cogdl.data import MultiGraphDataset
from .. import register_model_wrapper, EmbeddingModelWrapper
from cogdl.wrappers.tools.wrapper_utils import evaluate_graph_embeddings_using_svm
@register_model_wrapper("graph_embedding_mw")
class GraphEmbeddingModelWrapper(EmbeddingModelWrapper):
def __init__(self, model):
super(GraphEmbeddingModelWrapper, self).__init__()
self.model = model
def train_step(self, batch):
if isinstance(batch, DataLoader) or isinstance(batch, MultiGraphDataset):
graphs = [x for x in batch]
else:
graphs = batch
emb = self.model(graphs)
return emb
def test_step(self, batch):
x, y = batch
return evaluate_graph_embeddings_using_svm(x, y)
| 32.32
| 82
| 0.720297
|
acffa690ea391e8e18bb5d00cad3888d9c2ae7b6
| 213
|
py
|
Python
|
Python/Django/new1/fees/urls.py
|
Vikash-8090-Yadav/AllProgramming_Basic
|
76721256edcb91520d1b5132aa59ac37eebdf7c3
|
[
"MIT"
] | 2
|
2022-01-04T12:04:51.000Z
|
2022-01-04T18:52:26.000Z
|
Python/Django/new1/fees/urls.py
|
Vikash-8090-Yadav/AllProgramming_Basic
|
76721256edcb91520d1b5132aa59ac37eebdf7c3
|
[
"MIT"
] | null | null | null |
Python/Django/new1/fees/urls.py
|
Vikash-8090-Yadav/AllProgramming_Basic
|
76721256edcb91520d1b5132aa59ac37eebdf7c3
|
[
"MIT"
] | null | null | null |
from django.urls import path , include
from .import views
# URL routes for the fees app.
urlpatterns = [
    path('dg/',views.dgfess),
    # NOTE(review): 'html/' routes to views.cssfess and 'css/' routes to
    # views.htmlfess — looks swapped; confirm against the view functions.
    path('html/',views.cssfess),
    path('css/',views.htmlfess),
    path('python/',views.pythonfess),
]
| 17.75
| 38
| 0.652582
|
acffa70343d73eef22c57e9d19dd8c6713ca4b76
| 847
|
py
|
Python
|
src/app.py
|
Data-is-life/apt-get-home
|
77a212c19a90f201c70759fd9e99493657247ae7
|
[
"Unlicense"
] | null | null | null |
src/app.py
|
Data-is-life/apt-get-home
|
77a212c19a90f201c70759fd9e99493657247ae7
|
[
"Unlicense"
] | null | null | null |
src/app.py
|
Data-is-life/apt-get-home
|
77a212c19a90f201c70759fd9e99493657247ae7
|
[
"Unlicense"
] | null | null | null |
import random
from header_list import user_agent_list
from proxies_list import proxies_list_
from initial_scrapper_function import *
from parser_functions import *
from list_df_functions import *
from search_url_gen import *
from get_search_url import *
from get_results import *
ua = user_agent_list
proxies = proxies_list_
''' This puts all the files together and gets the results'''
customer_url = input("Paste the Redfin URL of the Home: ")
url = customer_url
print(f'The Redfin link you entered for the home: {url}')
proxy = random.sample(proxies, 1)[0]
header = random.sample(ua, 1)[0]
soup = session_creator(ua, url, proxy)
customer_df = info_from_property(soup)
print(customer_df.T)
c_url = gen_zip_url(customer_df)
header = random.sample(ua, 1)[0]
soup_ = session_creator(ua, c_url, proxy)
print(get_results(soup_, customer_df))
| 24.911765
| 60
| 0.780401
|
acffa76b433e6c21250029e3d2e4f3f7fb4a40df
| 5,397
|
py
|
Python
|
vaccine_availability.py
|
MannyP31/VaccineAvailabilityNotifier
|
ec223d5b49a1ebffc5e141f84b219c0cec07fee0
|
[
"MIT"
] | 19
|
2021-05-05T05:12:59.000Z
|
2021-06-03T12:50:53.000Z
|
vaccine_availability.py
|
MannyP31/VaccineAvailabilityNotifier
|
ec223d5b49a1ebffc5e141f84b219c0cec07fee0
|
[
"MIT"
] | null | null | null |
vaccine_availability.py
|
MannyP31/VaccineAvailabilityNotifier
|
ec223d5b49a1ebffc5e141f84b219c0cec07fee0
|
[
"MIT"
] | 8
|
2021-05-05T05:55:28.000Z
|
2021-05-10T17:13:32.000Z
|
"""
python vaccine_availability.py
"""
# standard imports
import requests
import datetime
import json
# import pandas as pd
import smtplib
def logger(line):
    """Append *line* plus a newline to the local log.txt file."""
    with open('log.txt', 'a+') as log_file:
        log_file.write(line + "\n")
"""
To get the state code
for state_code in range(1,40):
# print("State code: ", state_code)
logger("State code: "+ str(state_code))
response = requests.get(
"https://cdn-api.co-vin.in/api/v2/admin/location/districts/{}".format(state_code))
json_data = json.loads(response.text)
for i in json_data["districts"]:
# print(i["district_id"],'\t', i["district_name"])
logger(str(i["district_id"])+'\t'+str(i["district_name"]))
# print("\n")
logger("\n")
"""
# Default CoWIN district id (overridden in __main__), search window and age cutoff.
DIST_ID = 446
numdays = 20
age = 19
# Print available centre description (y/n)?
print_flag = 'y'
# Pre-compute the next `numdays` dates in the DD-MM-YYYY format CoWIN expects.
base = datetime.datetime.today()
date_list = [base + datetime.timedelta(days=x) for x in range(numdays)]
date_str = [x.strftime("%d-%m-%Y") for x in date_list]
def getSlots(DIST_ID=446, numdays=20, age=19):
    """Query the CoWIN calendar API for each pre-computed date and collect
    open vaccination slots for the given district and age.

    Returns (flag_available, Available_Slots): whether any slot matched, and
    a list of dicts describing each matching centre/session.
    NOTE(review): the `numdays` parameter is unused — the loop iterates the
    module-level `date_str` built from the module-level `numdays`.
    """
    flag_available = False
    Available_Slots = []
    for INP_DATE in date_str:
        URL = "https://cdn-api.co-vin.in/api/v2/appointment/sessions/public/calendarByDistrict?district_id={}&date={}".format(
            DIST_ID, INP_DATE)
        response = requests.get(URL)
        if response.ok:
            resp_json = response.json()
            # print(json.dumps(resp_json, indent = 2))
            if resp_json["centers"]:
                # print("Available on: {}".format(INP_DATE))
                logger("Checking on: {}".format(INP_DATE))
                # print_flag gates the per-centre detail logging below.
                if(print_flag == 'y' or print_flag == 'Y'):
                    for center in resp_json["centers"]:
                        for session in center["sessions"]:
                            # Keep only sessions with capacity and a low enough age limit.
                            if not int(session["available_capacity"]) == 0:
                                if session["min_age_limit"] <= age:
                                    flag_available = True
                                    dict_to_add = {"Date": INP_DATE,
                                                   "Name": center["name"],
                                                   "Block Name": center["block_name"],
                                                   "Fee Type": center["fee_type"],
                                                   "Available Capacity": session["available_capacity"],
                                                   "Vaccine": session["vaccine"]}
                                    # print(dict_to_add)
                                    Available_Slots.append(dict_to_add)
                                    # print(Available_Slots)
                                    logger("\t" + str(center["name"]))
                                    logger("\t" + str(center["block_name"]))
                                    logger("\t Price: " +
                                           str(center["fee_type"]))
                                    logger("\t Available Capacity: " +
                                           str(session["available_capacity"]))
                                    """
                                    print("\t", center["name"])
                                    print("\t", center["block_name"])
                                    print("\t Price: ", center["fee_type"])
                                    print("\t Available Capacity: ",
                                          session["available_capacity"])
                                    """
                                    if(session["vaccine"] != ''):
                                        logger("\t Vaccine: " +
                                               str(session["vaccine"]))
                # print("\n\n")
                logger("\n\n")
    return flag_available, Available_Slots
"""
if flag_available == False:
logger("No available slots on {}".format(INP_DATE))
return flag_available, Available_Slots
else:
# print("No available slots on {}".format(INP_DATE))
logger("No available slots on {}".format(INP_DATE))
return flag_available, Available_Slots
"""
def send_mail(body, receiver_email='swaymsdennings@gmail.com', subject='VACCINE AVAILABILITY NOTIFICATION'):
    """Send *body* as a plain-text email via Gmail SMTP over STARTTLS.

    Args:
        body: Message text placed after the Subject header.
        receiver_email: Destination address.
        subject: Subject line.
    """
    import os  # local import so the module import block is untouched
    server = smtplib.SMTP('smtp.gmail.com', 587)
    try:
        server.ehlo()
        server.starttls()  # upgrade to TLS before sending credentials
        server.ehlo()
        # SECURITY: credentials were hard-coded in source. Prefer environment
        # variables; fall back to the original values for backward compatibility.
        sender_email = os.environ.get('VACCINE_SENDER_EMAIL',
                                      'pythonhai.000@gmail.com')
        sender_password = os.environ.get('VACCINE_SENDER_PASSWORD',
                                         'machinelearning@#$000')
        server.login(sender_email, sender_password)
        msg = f"Subject: {subject}\n\n{body}"
        server.sendmail(
            sender_email,
            receiver_email,
            msg
        )
        # print('Email has been sent !')
    finally:
        # Always close the SMTP connection, even if login/send fails
        # (the original leaked the connection on error).
        server.quit()
def convert_to_str(Available_Slots):
    """Render a list of slot dicts as 'key : value' lines, dicts separated
    by a blank line (two trailing newlines per dict)."""
    chunks = []
    for slot in Available_Slots:
        for key, value in slot.items():
            chunks.append(f"{key} : {value}\n")
        chunks.append("\n\n")
    # join once instead of repeated string concatenation
    return "".join(chunks)
if __name__ == '__main__':
    # District 255 overrides the module-level default of 446.
    flag_available, Available_Slots = getSlots(DIST_ID=255, numdays=20, age=21)
    msg = "No available slots found"
    body = convert_to_str(Available_Slots) if len(Available_Slots) > 0 else msg
    # NOTE(review): recipients are hard-coded and an email is sent on every
    # run, even when no slots were found.
    MAILS = ['swaymsdennings@gmail.com', 'njrfarhandasilva10@gmail.com']
    for mail in MAILS:
        send_mail(body, receiver_email=mail,
                  subject='VACCINE AVAILABILITY NOTIFICATION')
| 35.98
| 126
| 0.494163
|
acffa7d473d2149d9e33516b92903a7a1642cb62
| 17,699
|
py
|
Python
|
lib/weighted_resblock.py
|
googleinterns/loop-project
|
28acb1c815e0a65f51e809d278eea08ffb06483e
|
[
"Apache-2.0"
] | 3
|
2020-05-29T00:34:34.000Z
|
2020-12-14T21:50:12.000Z
|
lib/weighted_resblock.py
|
googleinterns/loop-project
|
28acb1c815e0a65f51e809d278eea08ffb06483e
|
[
"Apache-2.0"
] | 2
|
2020-07-21T00:57:15.000Z
|
2020-09-04T22:09:33.000Z
|
lib/weighted_resblock.py
|
googleinterns/loop-project
|
28acb1c815e0a65f51e809d278eea08ffb06483e
|
[
"Apache-2.0"
] | null | null | null |
""" Weighted ResBlock for compositional networks.
"""
import tensorflow as tf
from tensorflow.python.keras import regularizers
from lib import weighted_layers_v2 as wl
class ResBlockTemplate():
"""Custom residual block template class.
Stores the templates of weighted expansion, depthwise convolution and
projection layers of the custom weighted residual block.
Arguments:
expansion_template: None or a tuple of two expansion layer
(1x1 WeightedConv2D) templates tensors (T_k, T_b), where T_k stores weights
of template kernels, T_b represents template biases. T_k should have shape
(N, kernel_size, kernel_size, input_channel, filters), T_b should be of
shape (N, filters), where `N` is the number of templates, `kernel_size` is
an integer size of the convolution window, `input_channel` is the number of
channels in the input tensor and `filters` is the number of filters in the
convolution.
depthwise_template: None or a tuple of two tensors (T_k, T_b), where T_k
stores weights of template kernels, T_b represents template biases of the
weighted depthwise convolution layer. T_k should have shape
(N, kernel_size, kernel_size, input_channel, depth_multiplier), T_b should
be of size (N, depth_multiplier*input_channel), where `N` is the number of
templates, `kernel_size` is an integer size of the convolution window,
`input_channel` is the number of channels in the input tensor and
`depth_multiplier` is the number of output channels for each layer input
channel.
projection_template: None or a tuple of two projection layer
(1x1 WeightedConv2D) templates tensors (T_k, T_b), where T_k stores weights
of template kernels, T_b represents template biases. T_k should have shape
(N, kernel_size, kernel_size, input_channel, filters), T_b should be of
shape (N, filters), where `N` is the number of templates, `kernel_size` is
an integer size of the convolution window, `input_channel` is the number of
channels in the input tensor and `filters` is the number of filters in the
convolution.
bn_1_template: None or a tuple of four templates tensors (gamma, beta) for
the first batch normalization layer. All templates tensors should be of
size (N, n_channels), where N is the number of templates, n_channels is
the number of channels of the layer inputs.
bn_2_template: None or a tuple of four templates tensors (gamma, beta) for
the second batch normalization layer. All templates tensors should be of
size (N, n_channels), where N is the number of templates, n_channels is
the number of channels of the layer inputs.
bn_3_template: None or a tuple of four templates tensors (gamma, beta) for
the third batch normalization layer. All templates tensors should be of
size (N, n_channels), where N is the number of templates, n_channels is
the number of channels of the layer inputs.
"""
  def __init__(self, expansion_template=None, depthwise_template=None,
               projection_template=None, bn_1_template=None,
               bn_2_template=None, bn_3_template=None):
    # Validate tensor ranks of the three conv templates up front; note the
    # batch-norm templates are stored without dimension checks.
    self._check_dims(expansion_template, depthwise_template,
                     projection_template)
    self.expansion_template = expansion_template
    self.depthwise_template = depthwise_template
    self.projection_template = projection_template
    self.bn_1_template = bn_1_template
    self.bn_2_template = bn_2_template
    self.bn_3_template = bn_3_template
def get_expansion_template(self):
"""returns the expansion template."""
if self.expansion_template is None:
return "he_normal", "zeros"
kernel_init = tf.constant_initializer(self.expansion_template[0])
bias_init = tf.constant_initializer(self.expansion_template[1])
return kernel_init, bias_init
def get_bn1_template(self):
"""returns the first batch normalization template."""
if self.bn_1_template is None:
return "zeros", "ones"
beta_init = tf.constant_initializer(self.bn_1_template[0])
gamma_init = tf.constant_initializer(self.bn_1_template[1])
return gamma_init, beta_init
def get_bn2_template(self):
"""returns the first batch normalization template."""
if self.bn_2_template is None:
return "zeros", "ones"
beta_init = tf.constant_initializer(self.bn_2_template[0])
gamma_init = tf.constant_initializer(self.bn_2_template[1])
return gamma_init, beta_init
def get_bn3_template(self):
"""returns the first batch normalization template."""
if self.bn_3_template is None:
return "zeros", "ones"
beta_init = tf.constant_initializer(self.bn_3_template[0])
gamma_init = tf.constant_initializer(self.bn_3_template[1])
return gamma_init, beta_init
def get_depthwise_template(self):
"""returns the depthwise convolution template."""
if self.depthwise_template is None:
return "he_normal", "zeros"
kernel_init = tf.constant_initializer(self.depthwise_template[0])
bias_init = tf.constant_initializer(self.depthwise_template[1])
return kernel_init, bias_init
def get_projection_template(self):
"""returns the projection template."""
if self.projection_template is None:
return "he_normal", "zeros"
kernel_init = tf.constant_initializer(self.projection_template[0])
bias_init = tf.constant_initializer(self.projection_template[1])
return kernel_init, bias_init
  def set_expansion_template(self, expansion_template):
    """Sets the expansion (kernel, bias) template after rank validation."""
    self._check_dims(expansion_template, None, None)
    self.expansion_template = expansion_template
  def set_depthwise_template(self, depthwise_template):
    """Sets the depthwise (kernel, bias) template after rank validation."""
    self._check_dims(None, depthwise_template, None)
    self.depthwise_template = depthwise_template
  def set_projection_template(self, projection_template):
    """Sets the projection (kernel, bias) template after rank validation."""
    self._check_dims(None, None, projection_template)
    self.projection_template = projection_template
def _check_dims(self, expansion, depthwise, projection):
"""Checks if the templates are tensors of correct dimensionality."""
if expansion is not None:
if (not (isinstance(expansion[0], tf.Tensor) or
isinstance(expansion[0], tf.Variable)) or
expansion[0].get_shape().ndims != 5):
raise ValueError(
"Expansion kernel template should be 5-dimensional tensor.")
if (not (isinstance(expansion[1], tf.Tensor) or
isinstance(expansion[1], tf.Variable)) or
expansion[1].get_shape().ndims != 2):
raise ValueError(
"Expansion bias template should be 2-dimensional tensor.")
if depthwise is not None:
if (not (isinstance(depthwise[0], tf.Tensor) or
isinstance(depthwise[0], tf.Variable)) or
depthwise[0].get_shape().ndims != 5):
raise ValueError(
"Depthwise kernel template should be 5-dimensional tensor.")
if (not (isinstance(depthwise[1], tf.Tensor) or
isinstance(depthwise[1], tf.Variable)) or
depthwise[1].get_shape().ndims != 2):
raise ValueError(
"Depthwise bias template should be 2-dimensional tensor.")
if projection is not None:
if (not (isinstance(projection[0], tf.Tensor) or
isinstance(projection[0], tf.Variable)) or
projection[0].get_shape().ndims != 5):
raise ValueError(
"Projection kernel template should be 5-dimensional tensor.")
if (not (isinstance(projection[1], tf.Tensor) or
isinstance(projection[1], tf.Variable)) or
projection[1].get_shape().ndims != 2):
raise ValueError(
"Projection bias template should be 2-dimensional tensor.")
class WeightedResBlockSeparateBN(tf.keras.layers.Layer):
  """Weighted ResBlock with separate batch normalization.

  Wraps a WeightedResBlock and runs a plain (no-center, no-scale) batch
  normalization immediately before each of the block's weighted batch
  normalizations.

  Arguments:
    resblock: WeightedResBlock object.
  """
  def __init__(self, resblock, **kwargs):
    super(WeightedResBlockSeparateBN, self).__init__(**kwargs)
    if not isinstance(resblock, WeightedResBlock):
      raise TypeError("resblock should be a WeightedResBlock class instance.")
    self.resblock = resblock
    # One plain BN per convolution stage of the wrapped block.
    self.bn1, self.bn2, self.bn3 = (
        tf.keras.layers.BatchNormalization(center=False, scale=False)
        for _ in range(3))
  def build(self, input_shape):
    super(WeightedResBlockSeparateBN, self).build(input_shape)
    self.resblock.build(input_shape)
  def call(self, inputs, training=None):
    """Applies the wrapped block with regular BN between its operations."""
    features, mix_weights = inputs
    stages = ((self.resblock.conv1, self.bn1, self.resblock.bn1),
              (self.resblock.conv2, self.bn2, self.resblock.bn2),
              (self.resblock.conv3, self.bn3, self.resblock.bn3))
    out = features
    for stage_idx, (conv, plain_bn, weighted_bn) in enumerate(stages):
      out = conv([out, mix_weights])
      out = plain_bn(out, training)
      out = weighted_bn([out, mix_weights], training)
      # No activation after the final (projection) stage; the residual
      # add below happens on the un-activated output, as in the
      # wrapped block's own call().
      if stage_idx < 2:
        out = self.resblock.activation(out)
    return out + features
class WeightedResBlock(tf.keras.Model):
  """ A ResBlock module class with expansion, depthwise convolution and
  projection that uses weighted convolutions.
  In this ResBlock, standard 2D convolutions are replaced by 1x1 weighted
  convolution that expands the input tensor along the channel dimension,
  weighted depthwise convolution and weighted 1x1 convolution that projects the
  tensor back to the original number of channels.
  Args:
    kernel_size: size of the depthwise convolution kernel.
    expansion_factor: expansion factor of the first 1x1 convolution.
      e.g., if the input tensor has N channels, then the first 1x1
      convolution layer will expand it to expansion_factor*N channels.
    activation: activation name or function. Supported function
      names are 'relu', 'relu6', 'lrelu', 'swish'.
    num_templates: number of templates used by each weighted layer.
    template: a ResBlockTemplate object.
    kernel_reg: kernel regularizer parameter.
  """
  def __init__(self, kernel_size=3, expansion_factor=6, activation="relu",
               num_templates=10, template=None, kernel_reg=1e-5, **kwargs):
    super(WeightedResBlock, self).__init__(**kwargs)
    if expansion_factor < 1:
      raise ValueError("The expansion factor value should be "
                       "greater than or equal to one.")
    self.expansion_factor = expansion_factor
    self.activation = self.map_activation_fn(activation)
    self.kernel_size = kernel_size
    self.template = ResBlockTemplate() if template is None else template
    self.num_templates = num_templates
    self.kernel_reg = kernel_reg
  def build(self, input_shape):
    # input_shape is a pair of shapes: (feature map, mixture weights);
    # the feature map's last dimension is the channel count.
    input_channel = input_shape[0][-1]
    self.expanded_channel = input_channel * self.expansion_factor
    # 1x1 expansion convolution: input_channel -> expanded_channel.
    kernel_init, bias_init = self.template.get_expansion_template()
    self.conv1 = wl.WeightedConv2D(
        filters=self.expanded_channel, kernel_size=1, strides=(1, 1),
        padding="same", num_templates=self.num_templates,
        kernel_initializer=kernel_init,
        kernel_regularizer=regularizers.l2(self.kernel_reg),
        bias_initializer=bias_init)
    self.conv1.build(input_shape)
    beta, gamma = self.template.get_bn1_template()
    self.bn1 = wl.WeightedBatchNormalizationSeparate(
        num_templates=self.num_templates, gamma_initializer=gamma,
        beta_initializer=beta)
    # Depthwise convolution operating on the expanded tensor.
    depthwise_kernel_init, bias_init = self.template.get_depthwise_template()
    self.conv2 = wl.WeightedDepthwiseConv2D(
        kernel_size=self.kernel_size, strides=(1, 1), padding="same",
        num_templates=self.num_templates,
        depthwise_initializer=depthwise_kernel_init,
        bias_initializer=bias_init)
    # Shape of the expanded tensor; also reused below for conv3, whose
    # input has the same (expanded) channel count.
    cov2_in_shape = ((input_shape[0][0], input_shape[0][1], input_shape[0][2],
                      self.expanded_channel), (self.num_templates,))
    self.conv2.build(cov2_in_shape)
    beta, gamma = self.template.get_bn2_template()
    self.bn2 = wl.WeightedBatchNormalizationSeparate(
        num_templates=self.num_templates, gamma_initializer=gamma,
        beta_initializer=beta)
    # 1x1 projection back to the original channel count.
    kernel_init, bias_init = self.template.get_projection_template()
    self.conv3 = wl.WeightedConv2D(
        filters=input_channel, kernel_size=1, strides=(1, 1), padding="same",
        num_templates=self.num_templates,
        kernel_initializer=kernel_init,
        kernel_regularizer=regularizers.l2(self.kernel_reg),
        bias_initializer=bias_init)
    self.conv3.build(cov2_in_shape)
    beta, gamma = self.template.get_bn3_template()
    self.bn3 = wl.WeightedBatchNormalizationSeparate(
        num_templates=self.num_templates, gamma_initializer=gamma,
        beta_initializer=beta)
    self.built = True
  def call(self, inputs, training=None):
    """Runs expand -> depthwise -> project with a residual connection.

    Args:
      inputs: pair (layer_input, mix_weights).
      training: forwarded to the weighted batch normalization layers.
    """
    layer_input, mix_weights = inputs
    x = self.conv1([layer_input, mix_weights])
    x = self.bn1([x, mix_weights], training)
    x = self.activation(x)
    x = self.conv2([x, mix_weights])
    x = self.bn2([x, mix_weights], training)
    x = self.activation(x)
    x = self.conv3([x, mix_weights])
    x = self.bn3([x, mix_weights], training)
    # Residual connection; note: no activation after the projection stage.
    x += layer_input
    return x
  def map_activation_fn(self, activation):
    """Maps activation function name to function.

    Callables are returned unchanged; otherwise the name is looked up in
    the supported set ('relu', 'relu6', 'lrelu', 'swish').
    """
    if callable(activation):
      return activation
    switcher = {"relu": tf.nn.relu,
                "relu6": tf.nn.relu6,
                "lrelu": tf.nn.leaky_relu,
                "swish": tf.nn.swish}
    res = switcher.get(activation)
    if not res:
      raise Exception("Given activation function is not supported.")
    return res
  def _get_input_channel(self, input_shape):
    # NOTE(review): not referenced anywhere in this chunk — verify callers
    # before removing.
    if input_shape.dims[-1].value is None:
      raise ValueError("The channel dimension of the inputs "
                       "should be defined. Found `None`.")
    return int(input_shape[-1])
class MixtureWeight(tf.keras.layers.Layer):
  """Trainable mixture-weight vector, emitted as a softmax distribution.

  Arguments:
    num_templates: integer number of templates in weighted block.
    initializer: mixture weights initializer (see `keras.initializers`).
    regularizer: mixture weights regularizer (see `keras.regularizers`).
    constraint: constraint function applied to mixture weight vector
      (see `keras.constraints`)
    dtype: type of variable.
  """
  def __init__(self, num_templates=10, initializer="glorot_uniform",
               regularizer=None, constraint=None, dtype=tf.float32,
               **kwargs):
    super(MixtureWeight, self).__init__(**kwargs)
    self.num_templates = num_templates
    self.initializer = initializer
    self.regularizer = regularizer
    self.constraint = constraint
    self.tensor_type = dtype
  def build(self, input_shape):
    # A single shared row of logits, normalized on every forward pass.
    self.mixture_weights = self.add_weight(
        name="mixture_weight", shape=(1, self.num_templates),
        initializer=self.initializer, regularizer=self.regularizer,
        constraint=self.constraint, trainable=True, dtype=self.tensor_type)
    self.built = True
  def call(self, inputs):
    # The output depends only on the layer's own weights; `inputs` is
    # intentionally unused.
    return tf.nn.softmax(self.mixture_weights, axis=1)
  def get_config(self):
    base_config = super(MixtureWeight, self).get_config()
    base_config.update({"num_templates": self.num_templates})
    return base_config
class WeightedMultitaskResBlock(tf.keras.layers.Layer):
  """ Weighted ResBlock with separate batch normalization for each task/domain.
  Arguments:
    resblock: WeightedResBlock object.
    domain_list: list of domain names.
  """
  def __init__(self, resblock, domain_list, **kwargs):
    super(WeightedMultitaskResBlock, self).__init__(**kwargs)
    if not isinstance(resblock, WeightedResBlock):
      raise TypeError("resblock should be a WeightedResBlock class instance.")
    self.resblock = resblock
    self.domain_list = domain_list
    self.num_domains = len(self.domain_list)
    # One plain (no-center, no-scale) BN per stage per domain; the
    # weighted BN layers inside `resblock` stay shared across domains.
    self.bn1 = []
    self.bn2 = []
    self.bn3 = []
    for ds in domain_list:
      self.bn1.append(tf.keras.layers.BatchNormalization(
          center=False, scale=False, name="bn1_%s" % ds))
      self.bn2.append(tf.keras.layers.BatchNormalization(
          center=False, scale=False, name="bn2_%s" % ds))
      self.bn3.append(tf.keras.layers.BatchNormalization(
          center=False, scale=False, name="bn3_%s" % ds))
  def build(self, input_shape):
    super(WeightedMultitaskResBlock, self).build(input_shape)
    # Builds the shared resblock from the first domain's shapes —
    # presumably all domains share the same input shape; verify callers.
    self.resblock.build([input_shape[0][0], input_shape[1][0]])
  def call(self, inputs, training=None):
    """Calls task-specific batch normalization between resblock operations.

    Args:
      inputs: pair (layer_input, mix_weights), each indexed per domain.
      training: forwarded to all batch normalization layers.
    Returns:
      A list with one output tensor per domain.
    """
    layer_input, mix_weights = inputs
    outputs = []
    for idx in range(self.num_domains):
      x = self.resblock.conv1([layer_input[idx], mix_weights[idx]])
      x = self.bn1[idx](x, training)
      x = self.resblock.bn1([x, mix_weights[idx]], training)
      x = self.resblock.activation(x)
      x = self.resblock.conv2([x, mix_weights[idx]])
      x = self.bn2[idx](x, training)
      x = self.resblock.bn2([x, mix_weights[idx]], training)
      x = self.resblock.activation(x)
      x = self.resblock.conv3([x, mix_weights[idx]])
      x = self.bn3[idx](x, training)
      x = self.resblock.bn3([x, mix_weights[idx]], training)
      # Residual add with the domain's own input (no final activation).
      x += layer_input[idx]
      outputs.append(x)
    return outputs
  def get_config(self):
    config = super(WeightedMultitaskResBlock, self).get_config()
    config.update({"num_templates": self.resblock.num_templates})
    config.update({"num_domains": self.num_domains})
    return config
| 40.781106
| 79
| 0.708232
|
acffa8737c55b7c8073b847769fe492512dc5cd1
| 118,407
|
py
|
Python
|
stubs.min/System/Windows/Controls/__init___parts/UserControl.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/System/Windows/Controls/__init___parts/UserControl.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/Windows/Controls/__init___parts/UserControl.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
class UserControl(ContentControl,IResource,IAnimatable,IInputElement,IFrameworkInputElement,ISupportInitialize,IHaveResources,IQueryAmbient,IAddChild):
"""
Provides a simple way to create a control.
UserControl()
"""
def AddChild(self,*args):
"""
AddChild(self: ContentControl,value: object)
Adds a specified object as the child of a
System.Windows.Controls.ContentControl.
value: The object to add.
AddChild(self: Window_16$17,value: object)AddChild(self: Label_17$18,value: object)AddChild(self: Button_19$20,value: object)AddChild(self: CheckBox_20$21,value: object)
"""
pass
def AddLogicalChild(self,*args):
"""
AddLogicalChild(self: FrameworkElement,child: object)
Adds the provided object to the logical tree of this element.
child: Child element to be added.
AddLogicalChild(self: Window_16$17,child: object)AddLogicalChild(self: Label_17$18,child: object)AddLogicalChild(self: TextBox_18$19,child: object)AddLogicalChild(self: Button_19$20,child: object)AddLogicalChild(self: CheckBox_20$21,child: object)AddLogicalChild(self: ComboBox_21$22,child: object)AddLogicalChild(self: Separator_22$23,child: object)
"""
pass
def AddText(self,*args):
"""
AddText(self: ContentControl,text: str)
Adds a specified text string to a System.Windows.Controls.ContentControl.
text: The string to add.
AddText(self: Window_16$17,text: str)AddText(self: Label_17$18,text: str)AddText(self: Button_19$20,text: str)AddText(self: CheckBox_20$21,text: str)
"""
pass
def AddVisualChild(self,*args):
"""
AddVisualChild(self: Visual,child: Visual)
Defines the parent-child relationship between two visuals.
child: The child visual object to add to parent visual.
AddVisualChild(self: Window_16$17,child: Window_16$17)AddVisualChild(self: Label_17$18,child: Label_17$18)AddVisualChild(self: TextBox_18$19,child: TextBox_18$19)AddVisualChild(self: Button_19$20,child: Button_19$20)AddVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)AddVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)AddVisualChild(self: Separator_22$23,child: Separator_22$23)
"""
pass
def ArrangeCore(self,*args):
"""
ArrangeCore(self: FrameworkElement,finalRect: Rect)
Implements System.Windows.UIElement.ArrangeCore(System.Windows.Rect) (defined
as virtual in System.Windows.UIElement) and seals the implementation.
finalRect: The final area within the parent that this element should use to arrange itself
and its children.
ArrangeCore(self: Window_16$17,finalRect: Rect)ArrangeCore(self: Label_17$18,finalRect: Rect)ArrangeCore(self: TextBox_18$19,finalRect: Rect)ArrangeCore(self: Button_19$20,finalRect: Rect)ArrangeCore(self: CheckBox_20$21,finalRect: Rect)ArrangeCore(self: ComboBox_21$22,finalRect: Rect)ArrangeCore(self: Separator_22$23,finalRect: Rect)
"""
pass
def ArrangeOverride(self,*args):
"""
ArrangeOverride(self: Control,arrangeBounds: Size) -> Size
Called to arrange and size the content of a System.Windows.Controls.Control
object.
arrangeBounds: The computed size that is used to arrange the content.
Returns: The size of the control.
ArrangeOverride(self: Window_16$17,arrangeBounds: Size) -> Size
ArrangeOverride(self: Label_17$18,arrangeBounds: Size) -> Size
ArrangeOverride(self: TextBox_18$19,arrangeBounds: Size) -> Size
ArrangeOverride(self: Button_19$20,arrangeBounds: Size) -> Size
ArrangeOverride(self: CheckBox_20$21,arrangeBounds: Size) -> Size
ArrangeOverride(self: ComboBox_21$22,arrangeBounds: Size) -> Size
ArrangeOverride(self: Separator_22$23,arrangeBounds: Size) -> Size
"""
pass
def GetLayoutClip(self,*args):
"""
GetLayoutClip(self: FrameworkElement,layoutSlotSize: Size) -> Geometry
Returns a geometry for a clipping mask. The mask applies if the layout system
attempts to arrange an element that is larger than the available display space.
layoutSlotSize: The size of the part of the element that does visual presentation.
Returns: The clipping geometry.
GetLayoutClip(self: Window_16$17,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Label_17$18,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: TextBox_18$19,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Button_19$20,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: CheckBox_20$21,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: ComboBox_21$22,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Separator_22$23,layoutSlotSize: Size) -> Geometry
"""
pass
def GetTemplateChild(self,*args):
"""
GetTemplateChild(self: FrameworkElement,childName: str) -> DependencyObject
Returns the named element in the visual tree of an instantiated
System.Windows.Controls.ControlTemplate.
childName: Name of the child to find.
Returns: The requested element. May be null if no element of the requested name exists.
GetTemplateChild(self: Window_16$17,childName: str) -> DependencyObject
GetTemplateChild(self: Label_17$18,childName: str) -> DependencyObject
GetTemplateChild(self: TextBox_18$19,childName: str) -> DependencyObject
GetTemplateChild(self: Button_19$20,childName: str) -> DependencyObject
GetTemplateChild(self: CheckBox_20$21,childName: str) -> DependencyObject
GetTemplateChild(self: ComboBox_21$22,childName: str) -> DependencyObject
GetTemplateChild(self: Separator_22$23,childName: str) -> DependencyObject
"""
pass
def GetUIParentCore(self,*args):
"""
GetUIParentCore(self: FrameworkElement) -> DependencyObject
Returns an alternative logical parent for this element if there is no visual
parent.
Returns: Returns something other than null whenever a WPF framework-level implementation
of this method has a non-visual parent connection.
GetUIParentCore(self: Window_16$17) -> DependencyObject
GetUIParentCore(self: Label_17$18) -> DependencyObject
GetUIParentCore(self: TextBox_18$19) -> DependencyObject
GetUIParentCore(self: Button_19$20) -> DependencyObject
GetUIParentCore(self: CheckBox_20$21) -> DependencyObject
GetUIParentCore(self: ComboBox_21$22) -> DependencyObject
GetUIParentCore(self: Separator_22$23) -> DependencyObject
"""
pass
def GetVisualChild(self,*args):
"""
GetVisualChild(self: FrameworkElement,index: int) -> Visual
Overrides System.Windows.Media.Visual.GetVisualChild(System.Int32),and returns
a child at the specified index from a collection of child elements.
index: The zero-based index of the requested child element in the collection.
Returns: The requested child element. This should not return null; if the provided index
is out of range,an exception is thrown.
GetVisualChild(self: Window_16$17,index: int) -> Visual
GetVisualChild(self: Label_17$18,index: int) -> Visual
GetVisualChild(self: TextBox_18$19,index: int) -> Visual
GetVisualChild(self: Button_19$20,index: int) -> Visual
GetVisualChild(self: CheckBox_20$21,index: int) -> Visual
GetVisualChild(self: ComboBox_21$22,index: int) -> Visual
GetVisualChild(self: Separator_22$23,index: int) -> Visual
"""
pass
def HitTestCore(self,*args):
"""
HitTestCore(self: UIElement,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.GeometryHitTestPara
meters) to supply base element hit testing behavior (returning
System.Windows.Media.GeometryHitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated geometry.
HitTestCore(self: UIElement,hitTestParameters: PointHitTestParameters) -> HitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.PointHitTestParamet
ers) to supply base element hit testing behavior (returning
System.Windows.Media.HitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated point.
HitTestCore(self: Window_16$17,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Window_16$17,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Label_17$18,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Label_17$18,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: TextBox_18$19,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: TextBox_18$19,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Button_19$20,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Button_19$20,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: CheckBox_20$21,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: CheckBox_20$21,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: ComboBox_21$22,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: ComboBox_21$22,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Separator_22$23,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Separator_22$23,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
"""
pass
def MeasureCore(self,*args):
"""
MeasureCore(self: FrameworkElement,availableSize: Size) -> Size
Implements basic measure-pass layout system behavior for
System.Windows.FrameworkElement.
availableSize: The available size that the parent element can give to the child elements.
Returns: The desired size of this element in layout.
MeasureCore(self: Window_16$17,availableSize: Size) -> Size
MeasureCore(self: Label_17$18,availableSize: Size) -> Size
MeasureCore(self: TextBox_18$19,availableSize: Size) -> Size
MeasureCore(self: Button_19$20,availableSize: Size) -> Size
MeasureCore(self: CheckBox_20$21,availableSize: Size) -> Size
MeasureCore(self: ComboBox_21$22,availableSize: Size) -> Size
MeasureCore(self: Separator_22$23,availableSize: Size) -> Size
"""
pass
def MeasureOverride(self,*args):
"""
MeasureOverride(self: Control,constraint: Size) -> Size
Called to remeasure a control.
constraint: The maximum size that the method can return.
Returns: The size of the control,up to the maximum specified by constraint.
MeasureOverride(self: Window_16$17,availableSize: Size) -> Size
MeasureOverride(self: Label_17$18,constraint: Size) -> Size
MeasureOverride(self: TextBox_18$19,constraint: Size) -> Size
MeasureOverride(self: Button_19$20,constraint: Size) -> Size
MeasureOverride(self: CheckBox_20$21,constraint: Size) -> Size
MeasureOverride(self: ComboBox_21$22,constraint: Size) -> Size
MeasureOverride(self: Separator_22$23,constraint: Size) -> Size
"""
pass
def OnAccessKey(self,*args):
"""
OnAccessKey(self: UIElement,e: AccessKeyEventArgs)
Provides class handling for when an access key that is meaningful for this
element is invoked.
e: The event data to the access key event. The event data reports which key was
invoked,and indicate whether the System.Windows.Input.AccessKeyManager object
that controls the sending of these events also sent this access key invocation
to other elements.
OnAccessKey(self: Window_16$17,e: AccessKeyEventArgs)OnAccessKey(self: Label_17$18,e: AccessKeyEventArgs)OnAccessKey(self: TextBox_18$19,e: AccessKeyEventArgs)OnAccessKey(self: Button_19$20,e: AccessKeyEventArgs)OnAccessKey(self: CheckBox_20$21,e: AccessKeyEventArgs)OnAccessKey(self: ComboBox_21$22,e: AccessKeyEventArgs)OnAccessKey(self: Separator_22$23,e: AccessKeyEventArgs)
"""
pass
def OnChildDesiredSizeChanged(self,*args):
"""
OnChildDesiredSizeChanged(self: UIElement,child: UIElement)
Supports layout behavior when a child element is resized.
child: The child element that is being resized.
OnChildDesiredSizeChanged(self: Window_16$17,child: Window_16$17)OnChildDesiredSizeChanged(self: Label_17$18,child: Label_17$18)OnChildDesiredSizeChanged(self: TextBox_18$19,child: TextBox_18$19)OnChildDesiredSizeChanged(self: Button_19$20,child: Button_19$20)OnChildDesiredSizeChanged(self: CheckBox_20$21,child: CheckBox_20$21)OnChildDesiredSizeChanged(self: ComboBox_21$22,child: ComboBox_21$22)OnChildDesiredSizeChanged(self: Separator_22$23,child: Separator_22$23)
"""
pass
def OnContentChanged(self,*args):
"""
OnContentChanged(self: ContentControl,oldContent: object,newContent: object)
Called when the System.Windows.Controls.ContentControl.Content property changes.
oldContent: The old value of the System.Windows.Controls.ContentControl.Content property.
newContent: The new value of the System.Windows.Controls.ContentControl.Content property.
OnContentChanged(self: Window_16$17,oldContent: object,newContent: object)OnContentChanged(self: Label_17$18,oldContent: object,newContent: object)OnContentChanged(self: Button_19$20,oldContent: object,newContent: object)OnContentChanged(self: CheckBox_20$21,oldContent: object,newContent: object)
"""
pass
def OnContentStringFormatChanged(self,*args):
"""
OnContentStringFormatChanged(self: ContentControl,oldContentStringFormat: str,newContentStringFormat: str)
Occurs when the System.Windows.Controls.ContentControl.ContentStringFormat
property changes.
oldContentStringFormat: The old value of System.Windows.Controls.ContentControl.ContentStringFormat.
newContentStringFormat: The new value of System.Windows.Controls.ContentControl.ContentStringFormat.
OnContentStringFormatChanged(self: Window_16$17,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: Label_17$18,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: Button_19$20,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: CheckBox_20$21,oldContentStringFormat: str,newContentStringFormat: str)
"""
pass
def OnContentTemplateChanged(self,*args):
"""
OnContentTemplateChanged(self: ContentControl,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)
Called when the System.Windows.Controls.ContentControl.ContentTemplate property
changes.
oldContentTemplate: The old value of the System.Windows.Controls.ContentControl.ContentTemplate
property.
newContentTemplate: The new value of the System.Windows.Controls.ContentControl.ContentTemplate
property.
OnContentTemplateChanged(self: Window_16$17,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: Label_17$18,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: Button_19$20,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: CheckBox_20$21,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)
"""
pass
def OnContentTemplateSelectorChanged(self,*args):
"""
OnContentTemplateSelectorChanged(self: ContentControl,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)
Called when the System.Windows.Controls.ContentControl.ContentTemplateSelector
property changes.
oldContentTemplateSelector: The old value of the
System.Windows.Controls.ContentControl.ContentTemplateSelector property.
newContentTemplateSelector: The new value of the
System.Windows.Controls.ContentControl.ContentTemplateSelector property.
OnContentTemplateSelectorChanged(self: Window_16$17,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: Label_17$18,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: Button_19$20,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: CheckBox_20$21,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)
"""
pass
def OnContextMenuClosing(self,*args):
"""
OnContextMenuClosing(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled
System.Windows.FrameworkElement.ContextMenuClosing routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
OnContextMenuClosing(self: Window_16$17,e: ContextMenuEventArgs)OnContextMenuClosing(self: Label_17$18,e: ContextMenuEventArgs)OnContextMenuClosing(self: TextBox_18$19,e: ContextMenuEventArgs)OnContextMenuClosing(self: Button_19$20,e: ContextMenuEventArgs)OnContextMenuClosing(self: CheckBox_20$21,e: ContextMenuEventArgs)OnContextMenuClosing(self: ComboBox_21$22,e: ContextMenuEventArgs)OnContextMenuClosing(self: Separator_22$23,e: ContextMenuEventArgs)
"""
pass
def OnContextMenuOpening(self,*args):
"""
OnContextMenuOpening(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled
System.Windows.FrameworkElement.ContextMenuOpening routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: The System.Windows.RoutedEventArgs that contains the event data.
OnContextMenuOpening(self: Window_16$17,e: ContextMenuEventArgs)OnContextMenuOpening(self: Label_17$18,e: ContextMenuEventArgs)OnContextMenuOpening(self: TextBox_18$19,e: ContextMenuEventArgs)OnContextMenuOpening(self: Button_19$20,e: ContextMenuEventArgs)OnContextMenuOpening(self: CheckBox_20$21,e: ContextMenuEventArgs)OnContextMenuOpening(self: ComboBox_21$22,e: ContextMenuEventArgs)OnContextMenuOpening(self: Separator_22$23,e: ContextMenuEventArgs)
"""
pass
def OnCreateAutomationPeer(self,*args):
"""
OnCreateAutomationPeer(self: UserControl) -> AutomationPeer
Creates and returns an System.Windows.Automation.Peers.AutomationPeer for this
System.Windows.Controls.UserControl.
Returns: A new System.Windows.Automation.Peers.UserControlAutomationPeer for this
System.Windows.Controls.UserControl.
"""
pass
def OnDpiChanged(self,*args):
""" OnDpiChanged(self: Visual,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Window_16$17,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Label_17$18,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: TextBox_18$19,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Button_19$20,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: CheckBox_20$21,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: ComboBox_21$22,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Separator_22$23,oldDpi: DpiScale,newDpi: DpiScale) """
pass
 def OnDragEnter(self,*args):
  """
  OnDragEnter(self: UIElement,e: DragEventArgs)
  Invoked when an unhandled System.Windows.DragDrop.DragEnter attached event
  reaches an element in its route that is derived from this class. Implement this
  method to add class handling for this event.
  e: The System.Windows.DragEventArgs that contains the event data.
  OnDragEnter(self: Window_16$17,e: DragEventArgs)OnDragEnter(self: Label_17$18,e: DragEventArgs)OnDragEnter(self: TextBox_18$19,e: DragEventArgs)OnDragEnter(self: Button_19$20,e: DragEventArgs)OnDragEnter(self: CheckBox_20$21,e: DragEventArgs)OnDragEnter(self: ComboBox_21$22,e: DragEventArgs)OnDragEnter(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnDragLeave(self,*args):
  """
  OnDragLeave(self: UIElement,e: DragEventArgs)
  Invoked when an unhandled System.Windows.DragDrop.DragLeave attached event
  reaches an element in its route that is derived from this class. Implement this
  method to add class handling for this event.
  e: The System.Windows.DragEventArgs that contains the event data.
  OnDragLeave(self: Window_16$17,e: DragEventArgs)OnDragLeave(self: Label_17$18,e: DragEventArgs)OnDragLeave(self: TextBox_18$19,e: DragEventArgs)OnDragLeave(self: Button_19$20,e: DragEventArgs)OnDragLeave(self: CheckBox_20$21,e: DragEventArgs)OnDragLeave(self: ComboBox_21$22,e: DragEventArgs)OnDragLeave(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnDragOver(self,*args):
  """
  OnDragOver(self: UIElement,e: DragEventArgs)
  Invoked when an unhandled System.Windows.DragDrop.DragOver attached event
  reaches an element in its route that is derived from this class. Implement this
  method to add class handling for this event.
  e: The System.Windows.DragEventArgs that contains the event data.
  OnDragOver(self: Window_16$17,e: DragEventArgs)OnDragOver(self: Label_17$18,e: DragEventArgs)OnDragOver(self: TextBox_18$19,e: DragEventArgs)OnDragOver(self: Button_19$20,e: DragEventArgs)OnDragOver(self: CheckBox_20$21,e: DragEventArgs)OnDragOver(self: ComboBox_21$22,e: DragEventArgs)OnDragOver(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnDrop(self,*args):
  """
  OnDrop(self: UIElement, e: DragEventArgs)

  Invoked when an unhandled System.Windows.DragDrop.Drop attached event
  reaches an element in its route that is derived from this class. Implement
  this method to add class handling for this event.
  (The generated text named DragDrop.DragEnter here; this stub handles Drop.)

  e: The System.Windows.DragEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGiveFeedback(self,*args):
  """
  OnGiveFeedback(self: UIElement, e: GiveFeedbackEventArgs)

  Invoked when an unhandled System.Windows.DragDrop.GiveFeedback attached
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.GiveFeedbackEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGotFocus(self,*args):
  """
  OnGotFocus(self: FrameworkElement, e: RoutedEventArgs)

  Invoked whenever an unhandled System.Windows.UIElement.GotFocus event
  reaches this element in its route.

  e: The System.Windows.RoutedEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGotKeyboardFocus(self,*args):
  """
  OnGotKeyboardFocus(self: UIElement, e: KeyboardFocusChangedEventArgs)

  Invoked when an unhandled System.Windows.Input.Keyboard.GotKeyboardFocus
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGotMouseCapture(self,*args):
  """
  OnGotMouseCapture(self: UIElement, e: MouseEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.GotMouseCapture
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGotStylusCapture(self,*args):
  """
  OnGotStylusCapture(self: UIElement, e: StylusEventArgs)

  Invoked when an unhandled System.Windows.Input.Stylus.GotStylusCapture
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.StylusEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnGotTouchCapture(self,*args):
  """
  OnGotTouchCapture(self: UIElement, e: TouchEventArgs)

  Provides class handling for the System.Windows.UIElement.GotTouchCapture
  routed event that occurs when a touch is captured to this element.

  e: A System.Windows.Input.TouchEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnInitialized(self,*args):
  """
  OnInitialized(self: FrameworkElement, e: EventArgs)

  Raises the System.Windows.FrameworkElement.Initialized event. This method
  is invoked whenever System.Windows.FrameworkElement.IsInitialized is set to
  true internally.

  e: The System.Windows.RoutedEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsKeyboardFocusedChanged(self,*args):
  """
  OnIsKeyboardFocusedChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsKeyboardFocusedChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsKeyboardFocusWithinChanged(self,*args):
  """
  OnIsKeyboardFocusWithinChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked just before the System.Windows.UIElement.IsKeyboardFocusWithinChanged
  event is raised by this element. Implement this method to add class
  handling for this event.

  e: A System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsMouseCapturedChanged(self,*args):
  """
  OnIsMouseCapturedChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsMouseCapturedChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsMouseCaptureWithinChanged(self,*args):
  """
  OnIsMouseCaptureWithinChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsMouseCaptureWithinChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: A System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsMouseDirectlyOverChanged(self,*args):
  """
  OnIsMouseDirectlyOverChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsMouseDirectlyOverChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsStylusCapturedChanged(self,*args):
  """
  OnIsStylusCapturedChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsStylusCapturedChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: A System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsStylusCaptureWithinChanged(self,*args):
  """
  OnIsStylusCaptureWithinChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsStylusCaptureWithinChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnIsStylusDirectlyOverChanged(self,*args):
  """
  OnIsStylusDirectlyOverChanged(self: UIElement, e: DependencyPropertyChangedEventArgs)

  Invoked when an unhandled System.Windows.UIElement.IsStylusDirectlyOverChanged
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.DependencyPropertyChangedEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnKeyDown(self,*args):
  """
  OnKeyDown(self: UIElement, e: KeyEventArgs)

  Invoked when an unhandled System.Windows.Input.Keyboard.KeyDown attached
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.Input.KeyEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnKeyUp(self,*args):
  """
  OnKeyUp(self: UIElement, e: KeyEventArgs)

  Invoked when an unhandled System.Windows.Input.Keyboard.KeyUp attached
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.Input.KeyEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnLostFocus(self,*args):
  """
  OnLostFocus(self: UIElement, e: RoutedEventArgs)

  Raises the System.Windows.UIElement.LostFocus routed event by using the
  event data that is provided.

  e: A System.Windows.RoutedEventArgs that contains event data. This event
     data must contain the identifier for the
     System.Windows.UIElement.LostFocus event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnLostKeyboardFocus(self,*args):
  """
  OnLostKeyboardFocus(self: UIElement, e: KeyboardFocusChangedEventArgs)

  Invoked when an unhandled System.Windows.Input.Keyboard.LostKeyboardFocus
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnLostMouseCapture(self,*args):
  """
  OnLostMouseCapture(self: UIElement, e: MouseEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.LostMouseCapture
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseEventArgs that contains event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnLostStylusCapture(self,*args):
  """
  OnLostStylusCapture(self: UIElement, e: StylusEventArgs)

  Invoked when an unhandled System.Windows.Input.Stylus.LostStylusCapture
  attached event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.StylusEventArgs that contains event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnLostTouchCapture(self,*args):
  """
  OnLostTouchCapture(self: UIElement, e: TouchEventArgs)

  Provides class handling for the System.Windows.UIElement.LostTouchCapture
  routed event that occurs when this element loses a touch capture.

  e: A System.Windows.Input.TouchEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationBoundaryFeedback(self,*args):
  """
  OnManipulationBoundaryFeedback(self: UIElement, e: ManipulationBoundaryFeedbackEventArgs)

  Called when the System.Windows.UIElement.ManipulationBoundaryFeedback event
  occurs.

  e: The data for the event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationCompleted(self,*args):
  """
  OnManipulationCompleted(self: UIElement, e: ManipulationCompletedEventArgs)

  Called when the System.Windows.UIElement.ManipulationCompleted event occurs.

  e: The data for the event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationDelta(self,*args):
  """
  OnManipulationDelta(self: UIElement, e: ManipulationDeltaEventArgs)

  Called when the System.Windows.UIElement.ManipulationDelta event occurs.

  e: The data for the event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationInertiaStarting(self,*args):
  """
  OnManipulationInertiaStarting(self: UIElement, e: ManipulationInertiaStartingEventArgs)

  Called when the System.Windows.UIElement.ManipulationInertiaStarting event
  occurs.

  e: The data for the event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationStarted(self,*args):
  """
  OnManipulationStarted(self: UIElement, e: ManipulationStartedEventArgs)

  Called when the System.Windows.UIElement.ManipulationStarted event occurs.

  e: The data for the event.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnManipulationStarting(self,*args):
  """
  OnManipulationStarting(self: UIElement, e: ManipulationStartingEventArgs)

  Provides class handling for the System.Windows.UIElement.ManipulationStarting
  routed event that occurs when the manipulation processor is first created.

  e: A System.Windows.Input.ManipulationStartingEventArgs that contains the
     event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseDoubleClick(self,*args):
  """
  OnMouseDoubleClick(self: Control, e: MouseButtonEventArgs)

  Raises the System.Windows.Controls.Control.MouseDoubleClick routed event.

  e: The event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseDown(self,*args):
  """
  OnMouseDown(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.MouseDown attached
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. This event data reports details about the mouse button that was
     pressed and the handled state.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseEnter(self,*args):
  """
  OnMouseEnter(self: UIElement, e: MouseEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.MouseEnter attached
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.Input.MouseEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseLeave(self,*args):
  """
  OnMouseLeave(self: UIElement, e: MouseEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.MouseLeave attached
  event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.Input.MouseEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseLeftButtonDown(self,*args):
  """
  OnMouseLeftButtonDown(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonDown
  routed event is raised on this element. Implement this method to add class
  handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. The event data reports that the left mouse button was pressed.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseLeftButtonUp(self,*args):
  """
  OnMouseLeftButtonUp(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonUp routed
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. The event data reports that the left mouse button was released.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseMove(self,*args):
  """
  OnMouseMove(self: UIElement, e: MouseEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.MouseMove attached
  event reaches an element in its route that is derived from this class.
  Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseEventArgs that contains the event data.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseRightButtonDown(self,*args):
  """
  OnMouseRightButtonDown(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.UIElement.MouseRightButtonDown
  routed event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. The event data reports that the right mouse button was pressed.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseRightButtonUp(self,*args):
  """
  OnMouseRightButtonUp(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.UIElement.MouseRightButtonUp
  routed event reaches an element in its route that is derived from this
  class. Implement this method to add class handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. The event data reports that the right mouse button was released.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseUp(self,*args):
  """
  OnMouseUp(self: UIElement, e: MouseButtonEventArgs)

  Invoked when an unhandled System.Windows.Input.Mouse.MouseUp routed event
  reaches an element in its route that is derived from this class. Implement
  this method to add class handling for this event.

  e: The System.Windows.Input.MouseButtonEventArgs that contains the event
     data. The event data reports that the mouse button was released.

  Overloads are generated for each designer type defined in this module.
  """
  pass
 def OnMouseWheel(self,*args):
  """
  OnMouseWheel(self: UIElement,e: MouseWheelEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseWheel attached event
    reaches an element in its route that is derived from this class. Implement this
    method to add class handling for this event.
   e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
  OnMouseWheel(self: Window_16$17,e: MouseWheelEventArgs)OnMouseWheel(self: Label_17$18,e: MouseWheelEventArgs)OnMouseWheel(self: TextBox_18$19,e: MouseWheelEventArgs)OnMouseWheel(self: Button_19$20,e: MouseWheelEventArgs)OnMouseWheel(self: CheckBox_20$21,e: MouseWheelEventArgs)OnMouseWheel(self: Separator_22$23,e: MouseWheelEventArgs)
  """
  pass
 def OnPreviewDragEnter(self,*args):
  """
  OnPreviewDragEnter(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewDragEnter attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.DragEventArgs that contains the event data.
  OnPreviewDragEnter(self: Window_16$17,e: DragEventArgs)OnPreviewDragEnter(self: Label_17$18,e: DragEventArgs)OnPreviewDragEnter(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragEnter(self: Button_19$20,e: DragEventArgs)OnPreviewDragEnter(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragEnter(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragEnter(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnPreviewDragLeave(self,*args):
  """
  OnPreviewDragLeave(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewDragLeave attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.DragEventArgs that contains the event data.
  OnPreviewDragLeave(self: Window_16$17,e: DragEventArgs)OnPreviewDragLeave(self: Label_17$18,e: DragEventArgs)OnPreviewDragLeave(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragLeave(self: Button_19$20,e: DragEventArgs)OnPreviewDragLeave(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragLeave(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragLeave(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnPreviewDragOver(self,*args):
  """
  OnPreviewDragOver(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewDragOver attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.DragEventArgs that contains the event data.
  OnPreviewDragOver(self: Window_16$17,e: DragEventArgs)OnPreviewDragOver(self: Label_17$18,e: DragEventArgs)OnPreviewDragOver(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragOver(self: Button_19$20,e: DragEventArgs)OnPreviewDragOver(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragOver(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragOver(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnPreviewDrop(self,*args):
  """
  OnPreviewDrop(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewDrop attached event
    reaches an element in its route that is derived from this class. Implement this
    method to add class handling for this event.
   e: The System.Windows.DragEventArgs that contains the event data.
  OnPreviewDrop(self: Window_16$17,e: DragEventArgs)OnPreviewDrop(self: Label_17$18,e: DragEventArgs)OnPreviewDrop(self: TextBox_18$19,e: DragEventArgs)OnPreviewDrop(self: Button_19$20,e: DragEventArgs)OnPreviewDrop(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDrop(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDrop(self: Separator_22$23,e: DragEventArgs)
  """
  pass
 def OnPreviewGiveFeedback(self,*args):
  """
  OnPreviewGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewGiveFeedback attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
  OnPreviewGiveFeedback(self: Window_16$17,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Label_17$18,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: TextBox_18$19,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Button_19$20,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: CheckBox_20$21,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: ComboBox_21$22,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Separator_22$23,e: GiveFeedbackEventArgs)
  """
  pass
 def OnPreviewGotKeyboardFocus(self,*args):
  """
  OnPreviewGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.PreviewGotKeyboardFocus
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event
    data.
  OnPreviewGotKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
  """
  pass
 def OnPreviewKeyDown(self,*args):
  """
  OnPreviewKeyDown(self: UIElement,e: KeyEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.KeyEventArgs that contains the event data.
  OnPreviewKeyDown(self: Window_16$17,e: KeyEventArgs)OnPreviewKeyDown(self: Label_17$18,e: KeyEventArgs)OnPreviewKeyDown(self: TextBox_18$19,e: KeyEventArgs)OnPreviewKeyDown(self: Button_19$20,e: KeyEventArgs)OnPreviewKeyDown(self: CheckBox_20$21,e: KeyEventArgs)OnPreviewKeyDown(self: ComboBox_21$22,e: KeyEventArgs)OnPreviewKeyDown(self: Separator_22$23,e: KeyEventArgs)
  """
  pass
 def OnPreviewKeyUp(self,*args):
  """
  OnPreviewKeyUp(self: UIElement,e: KeyEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyUp attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.KeyEventArgs that contains the event data.
  OnPreviewKeyUp(self: Window_16$17,e: KeyEventArgs)OnPreviewKeyUp(self: Label_17$18,e: KeyEventArgs)OnPreviewKeyUp(self: TextBox_18$19,e: KeyEventArgs)OnPreviewKeyUp(self: Button_19$20,e: KeyEventArgs)OnPreviewKeyUp(self: CheckBox_20$21,e: KeyEventArgs)OnPreviewKeyUp(self: ComboBox_21$22,e: KeyEventArgs)OnPreviewKeyUp(self: Separator_22$23,e: KeyEventArgs)
  """
  pass
 def OnPreviewLostKeyboardFocus(self,*args):
  """
  OnPreviewLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event
    data.
  OnPreviewLostKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
  """
  pass
def OnPreviewMouseDoubleClick(self,*args):
"""
OnPreviewMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.PreviewMouseDoubleClick routed event.
e: The event data.
OnPreviewMouseDoubleClick(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
 def OnPreviewMouseDown(self,*args):
  """
  OnPreviewMouseDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseDown attached
    routed event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that one or more mouse buttons were pressed.
  OnPreviewMouseDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseLeftButtonDown(self,*args):
  """
  OnPreviewMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonDown
    routed event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that the left mouse button was pressed.
  OnPreviewMouseLeftButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseLeftButtonUp(self,*args):
  """
  OnPreviewMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonUp
    routed event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that the left mouse button was released.
  OnPreviewMouseLeftButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseMove(self,*args):
  """
  OnPreviewMouseMove(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseMove attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseEventArgs that contains the event data.
  OnPreviewMouseMove(self: Window_16$17,e: MouseEventArgs)OnPreviewMouseMove(self: Label_17$18,e: MouseEventArgs)OnPreviewMouseMove(self: TextBox_18$19,e: MouseEventArgs)OnPreviewMouseMove(self: Button_19$20,e: MouseEventArgs)OnPreviewMouseMove(self: CheckBox_20$21,e: MouseEventArgs)OnPreviewMouseMove(self: ComboBox_21$22,e: MouseEventArgs)OnPreviewMouseMove(self: Separator_22$23,e: MouseEventArgs)
  """
  pass
 def OnPreviewMouseRightButtonDown(self,*args):
  """
  OnPreviewMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonDown
    routed event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that the right mouse button was pressed.
  OnPreviewMouseRightButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseRightButtonUp(self,*args):
  """
  OnPreviewMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonUp
    routed event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that the right mouse button was released.
  OnPreviewMouseRightButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseUp(self,*args):
  """
  OnPreviewMouseUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseUp attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
    event data reports that one or more mouse buttons were released.
  OnPreviewMouseUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Separator_22$23,e: MouseButtonEventArgs)
  """
  pass
 def OnPreviewMouseWheel(self,*args):
  """
  OnPreviewMouseWheel(self: UIElement,e: MouseWheelEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseWheel attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
  OnPreviewMouseWheel(self: Window_16$17,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Label_17$18,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: TextBox_18$19,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Button_19$20,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: CheckBox_20$21,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: ComboBox_21$22,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Separator_22$23,e: MouseWheelEventArgs)
  """
  pass
 def OnPreviewQueryContinueDrag(self,*args):
  """
  OnPreviewQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewQueryContinueDrag
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
  OnPreviewQueryContinueDrag(self: Window_16$17,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Label_17$18,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: TextBox_18$19,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Button_19$20,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: CheckBox_20$21,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: ComboBox_21$22,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Separator_22$23,e: QueryContinueDragEventArgs)
  """
  pass
 def OnPreviewStylusButtonDown(self,*args):
  """
  OnPreviewStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonDown
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
  OnPreviewStylusButtonDown(self: Window_16$17,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Label_17$18,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: TextBox_18$19,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Button_19$20,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: CheckBox_20$21,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: ComboBox_21$22,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Separator_22$23,e: StylusButtonEventArgs)
  """
  pass
 def OnPreviewStylusButtonUp(self,*args):
  """
  OnPreviewStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonUp
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
  OnPreviewStylusButtonUp(self: Window_16$17,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Label_17$18,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: TextBox_18$19,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Button_19$20,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: CheckBox_20$21,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: ComboBox_21$22,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Separator_22$23,e: StylusButtonEventArgs)
  """
  pass
 def OnPreviewStylusDown(self,*args):
  """
  OnPreviewStylusDown(self: UIElement,e: StylusDownEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusDown
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
  OnPreviewStylusDown(self: Window_16$17,e: StylusDownEventArgs)OnPreviewStylusDown(self: Label_17$18,e: StylusDownEventArgs)OnPreviewStylusDown(self: TextBox_18$19,e: StylusDownEventArgs)OnPreviewStylusDown(self: Button_19$20,e: StylusDownEventArgs)OnPreviewStylusDown(self: CheckBox_20$21,e: StylusDownEventArgs)OnPreviewStylusDown(self: ComboBox_21$22,e: StylusDownEventArgs)OnPreviewStylusDown(self: Separator_22$23,e: StylusDownEventArgs)
  """
  pass
 def OnPreviewStylusInAirMove(self,*args):
  """
  OnPreviewStylusInAirMove(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInAirMove
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusEventArgs that contains the event data.
  OnPreviewStylusInAirMove(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusInAirMove(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusInAirMove(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusInAirMove(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Separator_22$23,e: StylusEventArgs)
  """
  pass
 def OnPreviewStylusInRange(self,*args):
  """
  OnPreviewStylusInRange(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInRange
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusEventArgs that contains the event data.
  OnPreviewStylusInRange(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusInRange(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusInRange(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusInRange(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusInRange(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusInRange(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusInRange(self: Separator_22$23,e: StylusEventArgs)
  """
  pass
 def OnPreviewStylusMove(self,*args):
  """
  OnPreviewStylusMove(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusMove
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusEventArgs that contains the event data.
  OnPreviewStylusMove(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusMove(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusMove(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusMove(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusMove(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusMove(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusMove(self: Separator_22$23,e: StylusEventArgs)
  """
  pass
 def OnPreviewStylusOutOfRange(self,*args):
  """
  OnPreviewStylusOutOfRange(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusOutOfRange
    attached event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusEventArgs that contains the event data.
  OnPreviewStylusOutOfRange(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Separator_22$23,e: StylusEventArgs)
  """
  pass
 def OnPreviewStylusSystemGesture(self,*args):
  """
  OnPreviewStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
   Invoked when an unhandled
    System.Windows.Input.Stylus.PreviewStylusSystemGesture attached event reaches
    an element in its route that is derived from this class. Implement this method
    to add class handling for this event.
   e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event
    data.
  OnPreviewStylusSystemGesture(self: Window_16$17,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Label_17$18,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: TextBox_18$19,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Button_19$20,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: CheckBox_20$21,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: ComboBox_21$22,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Separator_22$23,e: StylusSystemGestureEventArgs)
  """
  pass
 def OnPreviewStylusUp(self,*args):
  """
  OnPreviewStylusUp(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusUp attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.Input.StylusEventArgs that contains the event data.
  OnPreviewStylusUp(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusUp(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusUp(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusUp(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusUp(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusUp(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusUp(self: Separator_22$23,e: StylusEventArgs)
  """
  pass
 def OnPreviewTextInput(self,*args):
  """
  OnPreviewTextInput(self: UIElement,e: TextCompositionEventArgs)
   Invoked when an unhandled
    System.Windows.Input.TextCompositionManager.PreviewTextInput attached event
    reaches an element in its route that is derived from this class. Implement this
    method to add class handling for this event.
   e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
  OnPreviewTextInput(self: Window_16$17,e: TextCompositionEventArgs)OnPreviewTextInput(self: Label_17$18,e: TextCompositionEventArgs)OnPreviewTextInput(self: TextBox_18$19,e: TextCompositionEventArgs)OnPreviewTextInput(self: Button_19$20,e: TextCompositionEventArgs)OnPreviewTextInput(self: CheckBox_20$21,e: TextCompositionEventArgs)OnPreviewTextInput(self: ComboBox_21$22,e: TextCompositionEventArgs)OnPreviewTextInput(self: Separator_22$23,e: TextCompositionEventArgs)
  """
  pass
def OnPreviewTouchDown(self,*args):
"""
OnPreviewTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchDown
routed event that occurs when a touch presses this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchDown(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchDown(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchDown(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchDown(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchDown(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchDown(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchDown(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnPreviewTouchMove(self,*args):
"""
OnPreviewTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchMove
routed event that occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchMove(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchMove(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchMove(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchMove(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchMove(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchMove(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchMove(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnPreviewTouchUp(self,*args):
"""
OnPreviewTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchUp routed
event that occurs when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchUp(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchUp(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchUp(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchUp(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchUp(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchUp(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchUp(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: FrameworkElement,e: DependencyPropertyChangedEventArgs)
Invoked whenever the effective value of any dependency property on this
System.Windows.FrameworkElement has been updated. The specific dependency
property that changed is reported in the arguments parameter. Overrides
System.Windows.DependencyObject.OnPropertyChanged(System.Windows.DependencyPrope
rtyChangedEventArgs).
e: The event data that describes the property that changed,as well as old and new
values.
OnPropertyChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
 def OnQueryContinueDrag(self,*args):
  """
  OnQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.QueryContinueDrag attached
    event reaches an element in its route that is derived from this class.
    Implement this method to add class handling for this event.
   e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
  OnQueryContinueDrag(self: Window_16$17,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Label_17$18,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: TextBox_18$19,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Button_19$20,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: CheckBox_20$21,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: ComboBox_21$22,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Separator_22$23,e: QueryContinueDragEventArgs)
  """
  pass
 def OnQueryCursor(self,*args):
  """
  OnQueryCursor(self: UIElement,e: QueryCursorEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.QueryCursor attached event
    reaches an element in its route that is derived from this class. Implement this
    method to add class handling for this event.
   e: The System.Windows.Input.QueryCursorEventArgs that contains the event data.
  OnQueryCursor(self: Window_16$17,e: QueryCursorEventArgs)OnQueryCursor(self: Label_17$18,e: QueryCursorEventArgs)OnQueryCursor(self: TextBox_18$19,e: QueryCursorEventArgs)OnQueryCursor(self: Button_19$20,e: QueryCursorEventArgs)OnQueryCursor(self: CheckBox_20$21,e: QueryCursorEventArgs)OnQueryCursor(self: ComboBox_21$22,e: QueryCursorEventArgs)OnQueryCursor(self: Separator_22$23,e: QueryCursorEventArgs)
  """
  pass
def OnRender(self,*args):
"""
OnRender(self: UIElement,drawingContext: DrawingContext)
When overridden in a derived class,participates in rendering operations that
are directed by the layout system. The rendering instructions for this element
are not used directly when this method is invoked,and are instead preserved
for later asynchronous use by layout and drawing.
drawingContext: The drawing instructions for a specific element. This context is provided to
the layout system.
OnRender(self: Window_16$17,drawingContext: DrawingContext)OnRender(self: Label_17$18,drawingContext: DrawingContext)OnRender(self: TextBox_18$19,drawingContext: DrawingContext)OnRender(self: Button_19$20,drawingContext: DrawingContext)OnRender(self: CheckBox_20$21,drawingContext: DrawingContext)OnRender(self: ComboBox_21$22,drawingContext: DrawingContext)OnRender(self: Separator_22$23,drawingContext: DrawingContext)
"""
pass
def OnRenderSizeChanged(self,*args):
"""
OnRenderSizeChanged(self: FrameworkElement,sizeInfo: SizeChangedInfo)
Raises the System.Windows.FrameworkElement.SizeChanged event,using the
specified information as part of the eventual event data.
sizeInfo: Details of the old and new size involved in the change.
OnRenderSizeChanged(self: Window_16$17,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Label_17$18,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: TextBox_18$19,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Button_19$20,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: CheckBox_20$21,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: ComboBox_21$22,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Separator_22$23,sizeInfo: SizeChangedInfo)
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: FrameworkElement,oldStyle: Style,newStyle: Style)
Invoked when the style in use on this element changes,which will invalidate
the layout.
oldStyle: The old style.
newStyle: The new style.
OnStyleChanged(self: Window_16$17,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Label_17$18,oldStyle: Style,newStyle: Style)OnStyleChanged(self: TextBox_18$19,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Button_19$20,oldStyle: Style,newStyle: Style)OnStyleChanged(self: CheckBox_20$21,oldStyle: Style,newStyle: Style)OnStyleChanged(self: ComboBox_21$22,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Separator_22$23,oldStyle: Style,newStyle: Style)
"""
pass
def OnStylusButtonDown(self,*args):
"""
OnStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonDown attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnStylusButtonDown(self: Window_16$17,e: StylusButtonEventArgs)OnStylusButtonDown(self: Label_17$18,e: StylusButtonEventArgs)OnStylusButtonDown(self: TextBox_18$19,e: StylusButtonEventArgs)OnStylusButtonDown(self: Button_19$20,e: StylusButtonEventArgs)OnStylusButtonDown(self: CheckBox_20$21,e: StylusButtonEventArgs)OnStylusButtonDown(self: ComboBox_21$22,e: StylusButtonEventArgs)OnStylusButtonDown(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnStylusButtonUp(self,*args):
"""
OnStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonUp attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnStylusButtonUp(self: Window_16$17,e: StylusButtonEventArgs)OnStylusButtonUp(self: Label_17$18,e: StylusButtonEventArgs)OnStylusButtonUp(self: TextBox_18$19,e: StylusButtonEventArgs)OnStylusButtonUp(self: Button_19$20,e: StylusButtonEventArgs)OnStylusButtonUp(self: CheckBox_20$21,e: StylusButtonEventArgs)OnStylusButtonUp(self: ComboBox_21$22,e: StylusButtonEventArgs)OnStylusButtonUp(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnStylusDown(self,*args):
"""
OnStylusDown(self: UIElement,e: StylusDownEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusDown attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
OnStylusDown(self: Window_16$17,e: StylusDownEventArgs)OnStylusDown(self: Label_17$18,e: StylusDownEventArgs)OnStylusDown(self: TextBox_18$19,e: StylusDownEventArgs)OnStylusDown(self: Button_19$20,e: StylusDownEventArgs)OnStylusDown(self: CheckBox_20$21,e: StylusDownEventArgs)OnStylusDown(self: ComboBox_21$22,e: StylusDownEventArgs)OnStylusDown(self: Separator_22$23,e: StylusDownEventArgs)
"""
pass
def OnStylusEnter(self,*args):
"""
OnStylusEnter(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusEnter attached
event is raised by this element. Implement this method to add class handling
for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusEnter(self: Window_16$17,e: StylusEventArgs)OnStylusEnter(self: Label_17$18,e: StylusEventArgs)OnStylusEnter(self: TextBox_18$19,e: StylusEventArgs)OnStylusEnter(self: Button_19$20,e: StylusEventArgs)OnStylusEnter(self: CheckBox_20$21,e: StylusEventArgs)OnStylusEnter(self: ComboBox_21$22,e: StylusEventArgs)OnStylusEnter(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusInAirMove(self,*args):
"""
OnStylusInAirMove(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusInAirMove attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusInAirMove(self: Window_16$17,e: StylusEventArgs)OnStylusInAirMove(self: Label_17$18,e: StylusEventArgs)OnStylusInAirMove(self: TextBox_18$19,e: StylusEventArgs)OnStylusInAirMove(self: Button_19$20,e: StylusEventArgs)OnStylusInAirMove(self: CheckBox_20$21,e: StylusEventArgs)OnStylusInAirMove(self: ComboBox_21$22,e: StylusEventArgs)OnStylusInAirMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusInRange(self,*args):
"""
OnStylusInRange(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusInRange attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusInRange(self: Window_16$17,e: StylusEventArgs)OnStylusInRange(self: Label_17$18,e: StylusEventArgs)OnStylusInRange(self: TextBox_18$19,e: StylusEventArgs)OnStylusInRange(self: Button_19$20,e: StylusEventArgs)OnStylusInRange(self: CheckBox_20$21,e: StylusEventArgs)OnStylusInRange(self: ComboBox_21$22,e: StylusEventArgs)OnStylusInRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusLeave(self,*args):
"""
OnStylusLeave(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusLeave attached
event is raised by this element. Implement this method to add class handling
for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusLeave(self: Window_16$17,e: StylusEventArgs)OnStylusLeave(self: Label_17$18,e: StylusEventArgs)OnStylusLeave(self: TextBox_18$19,e: StylusEventArgs)OnStylusLeave(self: Button_19$20,e: StylusEventArgs)OnStylusLeave(self: CheckBox_20$21,e: StylusEventArgs)OnStylusLeave(self: ComboBox_21$22,e: StylusEventArgs)OnStylusLeave(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusMove(self,*args):
"""
OnStylusMove(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusMove attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusMove(self: Window_16$17,e: StylusEventArgs)OnStylusMove(self: Label_17$18,e: StylusEventArgs)OnStylusMove(self: TextBox_18$19,e: StylusEventArgs)OnStylusMove(self: Button_19$20,e: StylusEventArgs)OnStylusMove(self: CheckBox_20$21,e: StylusEventArgs)OnStylusMove(self: ComboBox_21$22,e: StylusEventArgs)OnStylusMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusOutOfRange(self,*args):
"""
OnStylusOutOfRange(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusOutOfRange attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusOutOfRange(self: Window_16$17,e: StylusEventArgs)OnStylusOutOfRange(self: Label_17$18,e: StylusEventArgs)OnStylusOutOfRange(self: TextBox_18$19,e: StylusEventArgs)OnStylusOutOfRange(self: Button_19$20,e: StylusEventArgs)OnStylusOutOfRange(self: CheckBox_20$21,e: StylusEventArgs)OnStylusOutOfRange(self: ComboBox_21$22,e: StylusEventArgs)OnStylusOutOfRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusSystemGesture(self,*args):
"""
OnStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusSystemGesture
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event
data.
OnStylusSystemGesture(self: Window_16$17,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Label_17$18,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: TextBox_18$19,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Button_19$20,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: CheckBox_20$21,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: ComboBox_21$22,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Separator_22$23,e: StylusSystemGestureEventArgs)
"""
pass
def OnStylusUp(self,*args):
"""
OnStylusUp(self: UIElement,e: StylusEventArgs)
        Invoked when an unhandled System.Windows.Input.Stylus.StylusUp attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusUp(self: Window_16$17,e: StylusEventArgs)OnStylusUp(self: Label_17$18,e: StylusEventArgs)OnStylusUp(self: TextBox_18$19,e: StylusEventArgs)OnStylusUp(self: Button_19$20,e: StylusEventArgs)OnStylusUp(self: CheckBox_20$21,e: StylusEventArgs)OnStylusUp(self: ComboBox_21$22,e: StylusEventArgs)OnStylusUp(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnTemplateChanged(self,*args):
"""
OnTemplateChanged(self: Control,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
Called whenever the control's template changes.
oldTemplate: The old template.
newTemplate: The new template.
OnTemplateChanged(self: Window_16$17,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Label_17$18,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: TextBox_18$19,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Button_19$20,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: CheckBox_20$21,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: ComboBox_21$22,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Separator_22$23,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
"""
pass
def OnTextInput(self,*args):
"""
OnTextInput(self: UIElement,e: TextCompositionEventArgs)
        Invoked when an unhandled System.Windows.Input.TextCompositionManager.TextInput
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
OnTextInput(self: Window_16$17,e: TextCompositionEventArgs)OnTextInput(self: Label_17$18,e: TextCompositionEventArgs)OnTextInput(self: TextBox_18$19,e: TextCompositionEventArgs)OnTextInput(self: Button_19$20,e: TextCompositionEventArgs)OnTextInput(self: CheckBox_20$21,e: TextCompositionEventArgs)OnTextInput(self: ComboBox_21$22,e: TextCompositionEventArgs)OnTextInput(self: Separator_22$23,e: TextCompositionEventArgs)
"""
pass
def OnToolTipClosing(self,*args):
"""
OnToolTipClosing(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ToolTipClosing
routed event reaches this class in its route. Implement this method to add
class handling for this event.
e: Provides data about the event.
OnToolTipClosing(self: Window_16$17,e: ToolTipEventArgs)OnToolTipClosing(self: Label_17$18,e: ToolTipEventArgs)OnToolTipClosing(self: TextBox_18$19,e: ToolTipEventArgs)OnToolTipClosing(self: Button_19$20,e: ToolTipEventArgs)OnToolTipClosing(self: CheckBox_20$21,e: ToolTipEventArgs)OnToolTipClosing(self: ComboBox_21$22,e: ToolTipEventArgs)OnToolTipClosing(self: Separator_22$23,e: ToolTipEventArgs)
"""
pass
def OnToolTipOpening(self,*args):
"""
OnToolTipOpening(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever the System.Windows.FrameworkElement.ToolTipOpening routed
event reaches this class in its route. Implement this method to add class
handling for this event.
e: Provides data about the event.
OnToolTipOpening(self: Window_16$17,e: ToolTipEventArgs)OnToolTipOpening(self: Label_17$18,e: ToolTipEventArgs)OnToolTipOpening(self: TextBox_18$19,e: ToolTipEventArgs)OnToolTipOpening(self: Button_19$20,e: ToolTipEventArgs)OnToolTipOpening(self: CheckBox_20$21,e: ToolTipEventArgs)OnToolTipOpening(self: ComboBox_21$22,e: ToolTipEventArgs)OnToolTipOpening(self: Separator_22$23,e: ToolTipEventArgs)
"""
pass
def OnTouchDown(self,*args):
"""
OnTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchDown routed event
that occurs when a touch presses inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchDown(self: Window_16$17,e: TouchEventArgs)OnTouchDown(self: Label_17$18,e: TouchEventArgs)OnTouchDown(self: TextBox_18$19,e: TouchEventArgs)OnTouchDown(self: Button_19$20,e: TouchEventArgs)OnTouchDown(self: CheckBox_20$21,e: TouchEventArgs)OnTouchDown(self: ComboBox_21$22,e: TouchEventArgs)OnTouchDown(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchEnter(self,*args):
"""
OnTouchEnter(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchEnter routed
event that occurs when a touch moves from outside to inside the bounds of this
element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchEnter(self: Window_16$17,e: TouchEventArgs)OnTouchEnter(self: Label_17$18,e: TouchEventArgs)OnTouchEnter(self: TextBox_18$19,e: TouchEventArgs)OnTouchEnter(self: Button_19$20,e: TouchEventArgs)OnTouchEnter(self: CheckBox_20$21,e: TouchEventArgs)OnTouchEnter(self: ComboBox_21$22,e: TouchEventArgs)OnTouchEnter(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchLeave(self,*args):
"""
OnTouchLeave(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchLeave routed
event that occurs when a touch moves from inside to outside the bounds of this
System.Windows.UIElement.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchLeave(self: Window_16$17,e: TouchEventArgs)OnTouchLeave(self: Label_17$18,e: TouchEventArgs)OnTouchLeave(self: TextBox_18$19,e: TouchEventArgs)OnTouchLeave(self: Button_19$20,e: TouchEventArgs)OnTouchLeave(self: CheckBox_20$21,e: TouchEventArgs)OnTouchLeave(self: ComboBox_21$22,e: TouchEventArgs)OnTouchLeave(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchMove(self,*args):
"""
OnTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchMove routed event
that occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchMove(self: Window_16$17,e: TouchEventArgs)OnTouchMove(self: Label_17$18,e: TouchEventArgs)OnTouchMove(self: TextBox_18$19,e: TouchEventArgs)OnTouchMove(self: Button_19$20,e: TouchEventArgs)OnTouchMove(self: CheckBox_20$21,e: TouchEventArgs)OnTouchMove(self: ComboBox_21$22,e: TouchEventArgs)OnTouchMove(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchUp(self,*args):
"""
OnTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchUp routed event
that occurs when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchUp(self: Window_16$17,e: TouchEventArgs)OnTouchUp(self: Label_17$18,e: TouchEventArgs)OnTouchUp(self: TextBox_18$19,e: TouchEventArgs)OnTouchUp(self: Button_19$20,e: TouchEventArgs)OnTouchUp(self: CheckBox_20$21,e: TouchEventArgs)OnTouchUp(self: ComboBox_21$22,e: TouchEventArgs)OnTouchUp(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnVisualChildrenChanged(self,*args):
"""
OnVisualChildrenChanged(self: Visual,visualAdded: DependencyObject,visualRemoved: DependencyObject)
Called when the System.Windows.Media.VisualCollection of the visual object is
modified.
visualAdded: The System.Windows.Media.Visual that was added to the collection
visualRemoved: The System.Windows.Media.Visual that was removed from the collection
OnVisualChildrenChanged(self: Window_16$17,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Label_17$18,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: TextBox_18$19,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Button_19$20,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: CheckBox_20$21,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: ComboBox_21$22,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Separator_22$23,visualAdded: DependencyObject,visualRemoved: DependencyObject)
"""
pass
def OnVisualParentChanged(self,*args):
"""
OnVisualParentChanged(self: FrameworkElement,oldParent: DependencyObject)
Invoked when the parent of this element in the visual tree is changed.
Overrides
System.Windows.UIElement.OnVisualParentChanged(System.Windows.DependencyObject).
oldParent: The old parent element. May be null to indicate that the element did not have a
visual parent previously.
OnVisualParentChanged(self: Window_16$17,oldParent: DependencyObject)OnVisualParentChanged(self: Label_17$18,oldParent: DependencyObject)OnVisualParentChanged(self: TextBox_18$19,oldParent: DependencyObject)OnVisualParentChanged(self: Button_19$20,oldParent: DependencyObject)OnVisualParentChanged(self: CheckBox_20$21,oldParent: DependencyObject)OnVisualParentChanged(self: ComboBox_21$22,oldParent: DependencyObject)OnVisualParentChanged(self: Separator_22$23,oldParent: DependencyObject)
"""
pass
def ParentLayoutInvalidated(self,*args):
"""
ParentLayoutInvalidated(self: FrameworkElement,child: UIElement)
Supports incremental layout implementations in specialized subclasses of
System.Windows.FrameworkElement.
System.Windows.FrameworkElement.ParentLayoutInvalidated(System.Windows.UIElement
) is invoked when a child element has invalidated a property that is marked in
metadata as affecting the parent's measure or arrange passes during layout.
child: The child element reporting the change.
ParentLayoutInvalidated(self: Window_16$17,child: UIElement)ParentLayoutInvalidated(self: Label_17$18,child: UIElement)ParentLayoutInvalidated(self: TextBox_18$19,child: UIElement)ParentLayoutInvalidated(self: Button_19$20,child: UIElement)ParentLayoutInvalidated(self: CheckBox_20$21,child: UIElement)ParentLayoutInvalidated(self: ComboBox_21$22,child: UIElement)ParentLayoutInvalidated(self: Separator_22$23,child: UIElement)
"""
pass
def RemoveLogicalChild(self,*args):
"""
RemoveLogicalChild(self: FrameworkElement,child: object)
Removes the provided object from this element's logical tree.
System.Windows.FrameworkElement updates the affected logical tree parent
pointers to keep in sync with this deletion.
child: The element to remove.
RemoveLogicalChild(self: Window_16$17,child: object)RemoveLogicalChild(self: Label_17$18,child: object)RemoveLogicalChild(self: TextBox_18$19,child: object)RemoveLogicalChild(self: Button_19$20,child: object)RemoveLogicalChild(self: CheckBox_20$21,child: object)RemoveLogicalChild(self: ComboBox_21$22,child: object)RemoveLogicalChild(self: Separator_22$23,child: object)
"""
pass
def RemoveVisualChild(self,*args):
"""
RemoveVisualChild(self: Visual,child: Visual)
Removes the parent-child relationship between two visuals.
child: The child visual object to remove from the parent visual.
RemoveVisualChild(self: Window_16$17,child: Window_16$17)RemoveVisualChild(self: Label_17$18,child: Label_17$18)RemoveVisualChild(self: TextBox_18$19,child: TextBox_18$19)RemoveVisualChild(self: Button_19$20,child: Button_19$20)RemoveVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)RemoveVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)RemoveVisualChild(self: Separator_22$23,child: Separator_22$23)
"""
pass
def ShouldSerializeProperty(self,*args):
"""
ShouldSerializeProperty(self: DependencyObject,dp: DependencyProperty) -> bool
Returns a value that indicates whether serialization processes should serialize
the value for the provided dependency property.
dp: The identifier for the dependency property that should be serialized.
Returns: true if the dependency property that is supplied should be value-serialized;
otherwise,false.
ShouldSerializeProperty(self: Window_16$17,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Label_17$18,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: TextBox_18$19,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Button_19$20,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: CheckBox_20$21,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: ComboBox_21$22,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Separator_22$23,dp: DependencyProperty) -> bool
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
DefaultStyleKey=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the key to use to reference the style for this control,when theme styles are used or defined.
"""
HandlesScrolling=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether a control supports scrolling.
"""
HasEffectiveKeyboardFocus=property(lambda self: object(),lambda self,v: None,lambda self: None)
InheritanceBehavior=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the scope limits for property value inheritance,resource key lookup,and RelativeSource FindAncestor lookup.
"""
IsEnabledCore=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that becomes the return value of System.Windows.UIElement.IsEnabled in derived classes.
"""
LogicalChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an enumerator to the content control's logical child elements.
"""
StylusPlugIns=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all stylus plug-in (customization) objects associated with this element.
"""
VisualBitmapEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffect value for the System.Windows.Media.Visual.
"""
VisualBitmapEffectInput=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffectInput value for the System.Windows.Media.Visual.
"""
VisualBitmapScalingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.BitmapScalingMode for the System.Windows.Media.Visual.
"""
VisualCacheMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a cached representation of the System.Windows.Media.Visual.
"""
VisualChildrenCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of visual child elements within this element.
"""
VisualClearTypeHint=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.ClearTypeHint that determines how ClearType is rendered in the System.Windows.Media.Visual.
"""
VisualClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the clip region of the System.Windows.Media.Visual as a System.Windows.Media.Geometry value.
"""
VisualEdgeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the edge mode of the System.Windows.Media.Visual as an System.Windows.Media.EdgeMode value.
"""
VisualEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the bitmap effect to apply to the System.Windows.Media.Visual.
"""
VisualOffset=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the offset value of the visual object.
"""
VisualOpacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the opacity of the System.Windows.Media.Visual.
"""
VisualOpacityMask=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Brush value that represents the opacity mask of the System.Windows.Media.Visual.
"""
VisualParent=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the visual tree parent of the visual object.
"""
VisualScrollableAreaClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a clipped scrollable area for the System.Windows.Media.Visual.
"""
VisualTextHintingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextHintingMode of the System.Windows.Media.Visual.
"""
VisualTextRenderingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextRenderingMode of the System.Windows.Media.Visual.
"""
VisualTransform=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Transform value for the System.Windows.Media.Visual.
"""
VisualXSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the x-coordinate (vertical) guideline collection.
"""
VisualYSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the y-coordinate (horizontal) guideline collection.
"""
| 65.599446
| 745
| 0.788197
|
acffaa09bdfc6eea19bf9afaf25d609d4fe906c2
| 160
|
py
|
Python
|
WMIAdventure/backend/WMIAdventure_backend/IngameUsers/apps.py
|
Michael-Czekanski/WMIAdventure-1
|
ea812b13de0cd6c47c541cbede2d016a7837b4b8
|
[
"Apache-2.0"
] | null | null | null |
WMIAdventure/backend/WMIAdventure_backend/IngameUsers/apps.py
|
Michael-Czekanski/WMIAdventure-1
|
ea812b13de0cd6c47c541cbede2d016a7837b4b8
|
[
"Apache-2.0"
] | null | null | null |
WMIAdventure/backend/WMIAdventure_backend/IngameUsers/apps.py
|
Michael-Czekanski/WMIAdventure-1
|
ea812b13de0cd6c47c541cbede2d016a7837b4b8
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class IngameusersConfig(AppConfig):
    """Django AppConfig for the IngameUsers application."""

    # Default primary-key field type for models in this app that don't
    # declare one explicitly (64-bit auto-incrementing integer).
    default_auto_field = 'django.db.models.BigAutoField'
    # Importable package name of the app (as referenced in INSTALLED_APPS).
    name = 'IngameUsers'
| 22.857143
| 57
| 0.74375
|
acffaa86ffb3bbe31312210a23d8e421d8ddda5f
| 11,094
|
py
|
Python
|
d2go/data/dataset_mappers/d2go_dataset_mapper.py
|
wenliangzhao2018/d2go
|
a9dce74e5caf4c2260371a1abb603e3d5f14d763
|
[
"Apache-2.0"
] | null | null | null |
d2go/data/dataset_mappers/d2go_dataset_mapper.py
|
wenliangzhao2018/d2go
|
a9dce74e5caf4c2260371a1abb603e3d5f14d763
|
[
"Apache-2.0"
] | null | null | null |
d2go/data/dataset_mappers/d2go_dataset_mapper.py
|
wenliangzhao2018/d2go
|
a9dce74e5caf4c2260371a1abb603e3d5f14d763
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import logging
from io import BytesIO
import numpy as np
import torch
from d2go.utils.helper import retryable
from detectron2.data import detection_utils as utils, transforms as T
from detectron2.data.transforms.augmentation import (
AugInput,
AugmentationList,
)
from PIL import Image
from .build import D2GO_DATA_MAPPER_REGISTRY
logger = logging.getLogger(__name__)

# dataset_dict keys under which pre-read (prefetched) raw image bytes and
# semantic-segmentation bytes may be stored; consumed by
# read_image_with_prefetch to avoid re-reading from disk.
PREFETCHED_FILE_NAME = "prefetch_image"
PREFETCHED_SEM_SEG_FILE_NAME = "prefetch_sem_seg"
def read_image_with_prefetch(file_name, format=None, prefetched=None):
    """Decode an image from disk or from prefetched in-memory bytes.

    When ``prefetched`` is None, the image is read from ``file_name`` via
    detectron2's ``utils.read_image``. Otherwise ``prefetched`` (a tensor
    holding the raw encoded bytes) is decoded with PIL and converted to a
    numpy array in the requested ``format``.
    """
    if prefetched is not None:
        pil_image = Image.open(BytesIO(prefetched.numpy().view()))
        # work around this bug: https://github.com/python-pillow/Pillow/issues/3973
        pil_image = utils._apply_exif_orientation(pil_image)
        return utils.convert_PIL_to_numpy(pil_image, format)
    return utils.read_image(file_name, format)
@D2GO_DATA_MAPPER_REGISTRY.register()
class D2GoDatasetMapper(object):
    def __init__(self, cfg, is_train=True, image_loader=None, tfm_gens=None):
        """Build the mapper from a detectron2-style config node.

        Args:
            cfg: config node; reads the INPUT, MODEL, DATASETS, DATALOADER
                and D2GO_DATA sections.
            is_train: whether samples are prepared for training (enables
                cropping, keypoint flip indices, annotation processing).
            image_loader: optional custom loader used by _read_image when it
                reports support for a given dataset dict.
            tfm_gens: optional explicit list of transform generators; built
                from the config when None.
        """
        self.tfm_gens = (
            tfm_gens
            if tfm_gens is not None
            else utils.build_transform_gen(cfg, is_train)
        )
        if cfg.INPUT.CROP.ENABLED and is_train:
            self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)
            # D2GO NOTE: when INPUT.CROP.ENABLED, don't allow using RandomCropOp
            assert all(not isinstance(gen, T.RandomCrop) for gen in self.tfm_gens)
        else:
            self.crop_gen = None
        # fmt: off
        self.img_format = cfg.INPUT.FORMAT # noqa
        self.mask_on = cfg.MODEL.MASK_ON # noqa
        self.mask_format = cfg.INPUT.MASK_FORMAT # noqa
        self.keypoint_on = cfg.MODEL.KEYPOINT_ON # noqa
        # fmt: on
        if self.keypoint_on and is_train:
            # Flip only makes sense in training
            self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(
                cfg.DATASETS.TRAIN
            )
        else:
            self.keypoint_hflip_indices = None
        self.load_proposals = cfg.MODEL.LOAD_PROPOSALS
        if self.load_proposals:
            self.proposal_min_box_size = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE
            self.proposal_topk = (
                cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN
                if is_train
                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST
            )
        self.is_train = is_train
        # Setup image loader:
        self.image_loader = image_loader
        self.backfill_size = cfg.D2GO_DATA.MAPPER.BACKFILL_SIZE
        self.retry = cfg.D2GO_DATA.MAPPER.RETRY
        self.catch_exception = cfg.D2GO_DATA.MAPPER.CATCH_EXCEPTION
        if self.backfill_size:
            # Backfilling width/height happens after decoding, so grouping by
            # aspect ratio (which needs sizes up front) may misbehave.
            if cfg.DATALOADER.ASPECT_RATIO_GROUPING:
                logger.warning(
                    "ASPECT_RATIO_GROUPING may not work if image's width & height"
                    " are not given in json dataset when calling extended_coco_load,"
                    " if you encounter issue, consider disable ASPECT_RATIO_GROUPING."
                )
        # Error bookkeeping used by __call__ when catch_exception is enabled.
        self._error_count = 0
        self._total_counts = 0
        self._error_types = {}
    def _original_call(self, dataset_dict):
        """
        Modified from detectron2's original __call__ in DatasetMapper.

        Reads the image, applies (optional crop +) augmentations, converts
        the image to a CHW float32 tensor, transforms proposals, annotations
        and semantic-segmentation labels consistently with the image, and
        returns the updated dict.
        """
        dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below
        image = self._read_image(dataset_dict, format=self.img_format)
        if not self.backfill_size:
            utils.check_image_size(dataset_dict, image)
        # Hook defined elsewhere in this class hierarchy — presumably applies
        # dataset-specific transforms before augmentation; TODO confirm.
        image, dataset_dict = self._custom_transform(image, dataset_dict)
        inputs = AugInput(image=image)
        if "annotations" not in dataset_dict:
            transforms = AugmentationList(
                ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens
            )(inputs)
            image = inputs.image
        else:
            # pass additional arguments, will only be used when the Augmentation
            # takes `annotations` as input
            inputs.annotations = dataset_dict["annotations"]
            # Crop around an instance if there are instances in the image.
            if self.crop_gen:
                crop_tfm = utils.gen_crop_transform_with_instance(
                    self.crop_gen.get_crop_size(image.shape[:2]),
                    image.shape[:2],
                    np.random.choice(dataset_dict["annotations"]),
                )
                inputs.image = crop_tfm.apply_image(image)
            transforms = AugmentationList(self.tfm_gens)(inputs)
            image = inputs.image
            # Prepend the crop so downstream annotation transforms see it too.
            if self.crop_gen:
                transforms = crop_tfm + transforms
        image_shape = image.shape[:2]  # h, w
        # Grayscale images come back 2-D; add a channel axis before CHW transpose.
        if image.ndim == 2:
            image = np.expand_dims(image, 2)
        dataset_dict["image"] = torch.as_tensor(
            image.transpose(2, 0, 1).astype("float32")
        )
        # Can use uint8 if it turns out to be slow some day
        if self.load_proposals:
            utils.transform_proposals(
                dataset_dict,
                image_shape,
                transforms,
                proposal_topk=self.proposal_topk,
                min_box_size=self.proposal_min_box_size,
            )
        # Inference: ground-truth fields are dropped, only the image is kept.
        if not self.is_train:
            dataset_dict.pop("annotations", None)
            dataset_dict.pop("sem_seg_file_name", None)
            return dataset_dict
        if "annotations" in dataset_dict:
            # Strip per-annotation fields that the model config does not use.
            for anno in dataset_dict["annotations"]:
                if not self.mask_on:
                    anno.pop("segmentation", None)
                if not self.keypoint_on:
                    anno.pop("keypoints", None)
            annos = [
                utils.transform_instance_annotations(
                    obj,
                    transforms,
                    image_shape,
                    keypoint_hflip_indices=self.keypoint_hflip_indices,
                )
                for obj in dataset_dict.pop("annotations")
                if obj.get("iscrowd", 0) == 0
            ]
            instances = utils.annotations_to_instances(
                annos, image_shape, mask_format=self.mask_format
            )
            # Create a tight bounding box from masks, useful when image is cropped
            if self.crop_gen and instances.has("gt_masks"):
                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()
            dataset_dict["instances"] = utils.filter_empty_instances(instances)
        if "sem_seg_file_name" in dataset_dict:
            # "L" format yields HxWx1; squeeze to HxW for segmentation transforms.
            sem_seg_gt = read_image_with_prefetch(
                dataset_dict.pop("sem_seg_file_name"),
                "L",
                prefetched=dataset_dict.get(PREFETCHED_SEM_SEG_FILE_NAME, None),
            ).squeeze(2)
            sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)
            # NOTE(review): "long" is a legacy numpy dtype string (platform C
            # long); verify it still resolves on the numpy version in use.
            sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long"))
            dataset_dict["sem_seg"] = sem_seg_gt
        # extend standard D2 semantic segmentation to support multiple segmentation
        # files, each file can represent a class
        if "multi_sem_seg_file_names" in dataset_dict:
            raise NotImplementedError()
        # Optional caller-supplied post-processing hook stored in the dict.
        if "_post_process_" in dataset_dict:
            proc_func = dataset_dict.pop("_post_process_")
            dataset_dict = proc_func(dataset_dict)
        return dataset_dict
def __call__(self, dataset_dict):
self._total_counts += 1
@retryable(num_tries=self.retry, sleep_time=0.1)
def _f():
return self._original_call(dataset_dict)
if not self.catch_exception:
return _f()
try:
return _f()
except Exception as e:
self._error_count += 1
# if self._error_count % 10 == 1:
# # print the stacktrace for easier debugging
# traceback.print_exc()
error_type = type(e).__name__
self._error_types[error_type] = self._error_types.get(error_type, 0) + 1
if self._error_count % 100 == 0:
logger.warning(
"{}Error when applying transform for dataset_dict: {};"
" error rate {}/{} ({:.2f}%), msg: {}".format(
self._get_logging_prefix(),
dataset_dict,
self._error_count,
self._total_counts,
100.0 * self._error_count / self._total_counts,
repr(e),
)
)
self._log_error_type_stats()
# NOTE: the contract with MapDataset allows return `None` such that
# it'll randomly use other element in the dataset. We use this
# feature to handle error.
return None
def _get_logging_prefix(self):
worker_info = torch.utils.data.get_worker_info()
if not worker_info:
return ""
prefix = "[worker: {}/{}] ".format(worker_info.id, worker_info.num_workers)
return prefix
def _log_error_type_stats(self):
error_type_count_msgs = [
"{}: {}/{} ({}%)".format(
k, v, self._total_counts, 100.0 * v / self._total_counts
)
for k, v in self._error_types.items()
]
logger.warning(
"{}Error statistics:\n{}".format(
self._get_logging_prefix(), "\n".join(error_type_count_msgs)
)
)
def _read_image(self, dataset_dict, format=None):
if not (self.image_loader and self.image_loader.support(dataset_dict)):
# fallback to use D2's read_image
image = read_image_with_prefetch(
dataset_dict["file_name"],
format=format,
prefetched=dataset_dict.get(PREFETCHED_FILE_NAME),
)
if self.backfill_size:
h, w, _ = image.shape
dataset_dict["width"] = w
dataset_dict["height"] = h
return image
image = self.image_loader(dataset_dict)
if self.backfill_size:
dataset_dict["width"] = image.width
dataset_dict["height"] = image.height
return utils.convert_PIL_to_numpy(image, format)
def _custom_transform(self, image, dataset_dict):
"""
Override this method to inject custom transform.
"""
return image, dataset_dict
    def __repr__(self):
        """Multi-line summary of the mapper configuration, for logs/debugging."""
        return (
            self.__class__.__name__
            + ":\n"
            + "\n".join(
                [
                    "  is_train: {}".format(self.is_train),
                    "  image_loader: {}".format(self.image_loader),
                    "  tfm_gens: \n{}".format(
                        # One indented line per transform generator.
                        "\n".join(["  - {}".format(x) for x in self.tfm_gens])
                    ),
                ]
            )
        )
| 36.98
| 87
| 0.582387
|
acffab1353105dd932971cc3f0906b37a4a87d31
| 4,289
|
py
|
Python
|
blockchain_client/blockchain_client.py
|
glorin1/blockchain-python-tutorial
|
61b31b353e3b8aa4673764538f5da00ab666cae2
|
[
"MIT"
] | null | null | null |
blockchain_client/blockchain_client.py
|
glorin1/blockchain-python-tutorial
|
61b31b353e3b8aa4673764538f5da00ab666cae2
|
[
"MIT"
] | null | null | null |
blockchain_client/blockchain_client.py
|
glorin1/blockchain-python-tutorial
|
61b31b353e3b8aa4673764538f5da00ab666cae2
|
[
"MIT"
] | null | null | null |
'''
title : blockchain_client.py
description : A blockchain client implemenation, with the following features
- Wallets generation using Public/Private key encryption (based on RSA algorithm)
- Generation of transactions with RSA encryption
author : Adil Moujahid
date_created : 20180212
date_modified : 20180309
version : 0.3
usage : python blockchain_client.py
python blockchain_client.py -p 8080
python blockchain_client.py --port 8080
python_version : 3.6.1
Comments : Wallet generation and transaction signature is based on [1]
References : [1] https://github.com/julienr/ipynb_playground/blob/master/bitcoin/dumbcoin/dumbcoin.ipynb
'''
from collections import OrderedDict
import binascii
import Crypto
import Crypto.Random
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import requests
from flask import Flask, jsonify, request, render_template
class Transaction:
    """A value transfer from a sender to a recipient, signable with the
    sender's RSA private key (hex-encoded DER)."""

    def __init__(self, sender_address, sender_private_key, recipient_address, value):
        self.sender_address = sender_address
        self.sender_private_key = sender_private_key
        self.recipient_address = recipient_address
        self.value = value

    def __getattr__(self, attr):
        # BUG FIX: the original body was `return self.data[attr]`, but no
        # `data` attribute is ever assigned, so looking up `data` re-entered
        # __getattr__ and any missing-attribute access recursed infinitely
        # (RecursionError). Read the instance dict directly and raise the
        # conventional AttributeError instead.
        try:
            return self.__dict__["data"][attr]
        except KeyError:
            raise AttributeError(attr)

    def to_dict(self):
        """Return the signable fields as an ordered mapping."""
        return OrderedDict({'sender_address': self.sender_address,
                            'recipient_address': self.recipient_address,
                            'value': self.value})

    def sign_transaction(self):
        """
        Sign transaction with private key

        Returns the PKCS#1 v1.5 signature of the SHA-1 digest of
        `str(self.to_dict())`, hex-encoded.
        """
        private_key = RSA.importKey(binascii.unhexlify(self.sender_private_key))
        signer = PKCS1_v1_5.new(private_key)
        h = SHA.new(str(self.to_dict()).encode('utf8'))
        return binascii.hexlify(signer.sign(h)).decode('ascii')
# Flask application serving the wallet UI pages and the JSON API endpoints.
app = Flask(__name__)
@app.route('/')
def index():
    """Serve the client landing page."""
    template = './index.html'
    return render_template(template)
@app.route('/make/transaction')
def make_transaction():
    """Serve the page for composing a new transaction."""
    template = './make_transaction.html'
    return render_template(template)
@app.route('/view/transactions')
def view_transaction():
    """Serve the page listing existing transactions."""
    template = './view_transactions.html'
    return render_template(template)
@app.route('/wallet/new', methods=['GET'])
def new_wallet():
    """Generate a fresh RSA-1024 key pair and return it hex-encoded as JSON."""
    def _hexlify(key):
        # DER-serialize the key and hex-encode it for JSON transport.
        return binascii.hexlify(key.exportKey(format='DER')).decode('ascii')

    rng = Crypto.Random.new().read
    key = RSA.generate(1024, rng)
    response = {
        'private_key': _hexlify(key),
        'public_key': _hexlify(key.publickey())
    }
    return jsonify(response), 200
@app.route('/wallet/validate', methods=['GET'])
def validate_wallet():
    """Check that a hex-encoded private/public key pair belongs together.

    Signs a fixed message with the private key and verifies the signature
    with the public key; the pair matches iff verification succeeds.
    Returns JSON {"success": bool}.
    """
    public_hex = request.args['public_key']
    private_hex = request.args['private_key']
    try:
        private_key = RSA.importKey(binascii.unhexlify(private_hex))
        public_key = RSA.importKey(binascii.unhexlify(public_hex))
        digest = SHA.new("hello".encode('utf8'))
        signature = PKCS1_v1_5.new(private_key).sign(digest)
        result = PKCS1_v1_5.new(public_key).verify(digest, signature)
    except Exception:
        # Malformed hex / invalid key material: report a non-matching pair
        # rather than surfacing a 500.
        result = False
    return jsonify({'success': result}), 200
@app.route('/generate/transaction', methods=['POST'])
def generate_transaction():
    """Build and sign a transaction from the posted form fields."""
    form = request.form
    transaction = Transaction(
        form['sender_address'],
        form['sender_private_key'],
        form['recipient_address'],
        form['amount'],
    )
    response = {
        'transaction': transaction.to_dict(),
        'signature': transaction.sign_transaction(),
    }
    return jsonify(response), 200
if __name__ == '__main__':
    from argparse import ArgumentParser

    # Allow overriding the listen port, e.g. `python blockchain_client.py -p 8080`.
    parser = ArgumentParser()
    parser.add_argument('-p', '--port', default=8080, type=int, help='port to listen on')
    args = parser.parse_args()
    app.run(host='127.0.0.1', port=args.port)
| 32.740458
| 110
| 0.660993
|
acffac3efdbcd2ec27b0093267c1124bf983e270
| 999
|
py
|
Python
|
onmt/tests/test_attention.py
|
philhchen/OpenNMT-evidential-softmax
|
87709ce1cf7bda783aed4a64c096fa23282e7aa9
|
[
"MIT"
] | null | null | null |
onmt/tests/test_attention.py
|
philhchen/OpenNMT-evidential-softmax
|
87709ce1cf7bda783aed4a64c096fa23282e7aa9
|
[
"MIT"
] | null | null | null |
onmt/tests/test_attention.py
|
philhchen/OpenNMT-evidential-softmax
|
87709ce1cf7bda783aed4a64c096fa23282e7aa9
|
[
"MIT"
] | null | null | null |
"""
Here come the tests for attention types and their compatibility
"""
import unittest
import torch
from torch.autograd import Variable
import onmt
class TestAttention(unittest.TestCase):
    def test_masked_global_attention(self):
        """GlobalAttention forward pass on a length-masked padded batch."""
        lengths = torch.IntTensor([7, 3, 5, 2])
        # Positions past each sample's length are illegal attention targets;
        # for the lengths above the illegal-weight mask would be:
        #   [[0, 0, 0, 0, 0, 0, 0],
        #    [0, 0, 0, 1, 1, 1, 1],
        #    [0, 0, 0, 0, 0, 1, 1],
        #    [0, 0, 1, 1, 1, 1, 1]]
        batch_size = lengths.size(0)
        dim = 20

        bank = Variable(torch.randn(batch_size, lengths.max(), dim))
        query = Variable(torch.randn(batch_size, dim))

        attention = onmt.modules.GlobalAttention(dim)
        _, alignments = attention(query, bank, memory_lengths=lengths)
        # TODO: fix for pytorch 0.3 — assert the masked weights sum to zero:
        # illegal = alignments.masked_select(illegal_weights_mask)
        # self.assertEqual(0.0, illegal.data.sum())
| 29.382353
| 82
| 0.623624
|
acfface5c3801bc40a95496828c58e88afc826b8
| 13,421
|
py
|
Python
|
src/jenga/compile.py
|
aidnem/jenga-lang
|
c46ca79a85b9a5e19c621fa7cc68af5db8c10ddc
|
[
"MIT"
] | null | null | null |
src/jenga/compile.py
|
aidnem/jenga-lang
|
c46ca79a85b9a5e19c621fa7cc68af5db8c10ddc
|
[
"MIT"
] | null | null | null |
src/jenga/compile.py
|
aidnem/jenga-lang
|
c46ca79a85b9a5e19c621fa7cc68af5db8c10ddc
|
[
"MIT"
] | null | null | null |
"""Compiler for Jenga"""
import logging
import jenga
import jenga.util
from jenga.enums import OP
from jenga.types import Word
def compile(words: list[Word], fn: str):
    """Compile a Jenga program from tokens into an assembly file

    words: the crossreferenced program; IF/ELSE/DO/END words carry a
        `ref` holding the word index their jump targets.
    fn: path of the NASM source file to write (x86-64; the underscore
        prefixes on _printf/_exit/_main suggest macOS-style symbol
        naming — TODO confirm target platform).
    """
    assert len(OP) == 29, "Exhaustive handling of ops in compilation"
    # NOTE(review): this looks like a stack-depth tracker feeding the
    # underflow checks below, but it is initialized to an astronomically
    # large value so none of those checks can ever fire, and the per-op
    # bookkeeping is inconsistent (e.g. OVER adds 2, DROP adjusts
    # nothing). Confirm whether it should start at 0.
    stack_items = 1000000000000000000000000
    # Deduplicated string literals, emitted into .data after codegen.
    str_literals: list[str] = []
    with open(fn, 'w+') as f:
        f.write(
            "section .text\n"
            "extern _printf\n"
            "extern _exit\n"
            "global _main\n"
            "_main:\n"
        )
        for ip, word in enumerate(words):
            # Every word gets its own label so control-flow ops can jump
            # to an arbitrary word index.
            f.write(f"word_{ip}:\n")
            match word.type:
                case OP.DUP:
                    if stack_items >= 1:
                        stack_items += 1
                    else:
                        jenga.util.raise_error(word.loc, "Dup with empty stack")
                    f.write(
                        "    ;; -- dup --\n"
                        "    pop rax\n"
                        "    push rax\n"
                        "    push rax\n"
                    )
                case OP.OVER:
                    if stack_items >= 2:
                        stack_items += 2
                    else:
                        jenga.util.raise_error(word.loc, "over with less than 2 items on stack")
                    f.write(
                        "    ;; -- over --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    push rbx\n"
                        "    push rax\n"
                        "    push rbx\n"
                    )
                case OP.SWAP:
                    if stack_items < 2:
                        jenga.util.raise_error(word.loc, "Swap with less than 2 items on stack")
                    f.write(
                        "    ;; -- swap --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    push rax\n"
                        "    push rbx\n"
                    )
                case OP.ROT:
                    f.write(
                        "    ;; -- rot --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    pop rcx\n"
                        "    push rbx\n"
                        "    push rax\n"
                        "    push rcx\n"
                    )
                case OP.DROP:
                    f.write(
                        "    ;; -- drop --\n"
                        "    pop rax\n"
                    )
                case OP.PUSH:
                    stack_items += 1
                    assert word.value is not None, "PUSH op with value of None"
                    if isinstance(word.value, int):
                        f.write(
                            f"    ;; -- push {word.value} --\n"
                            f"    push {word.value}\n"
                        )
                    elif isinstance(word.value, str):
                        # Reuse an existing literal's index when the same
                        # string was pushed before; otherwise intern it.
                        try:
                            idx = str_literals.index(word.value)
                        except ValueError:
                            idx = len(str_literals)
                            str_literals.append(word.value)
                        f.write(
                            f"    ;; -- push \"{word.value}\"\n"
                            f"    mov rax, str_{idx}\n"
                            f"    push rax\n"
                        )
                    else:
                        assert False, f"Pushing type {type(word.value).__name__} not implemented"
                case OP.ADD:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Add with less than 2 items on stack")
                    f.write(
                        "    ;; -- add --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    add rax, rbx\n"
                        "    push rax\n"
                    )
                case OP.SUB:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Sub with less than 2 items on stack")
                    f.write(
                        "    ;; -- sub --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    sub rbx, rax\n"
                        "    push rbx\n"
                    )
                case OP.MULT:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Mult with less than 2 items on stack")
                    f.write(
                        "    ;; -- mult --\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    imul rbx\n"
                        "    push rax\n"
                    )
                case OP.DIVMOD:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Div with less than 2 items on stack")
                    # Pushes quotient (rax) then remainder (rdx).
                    f.write(
                        "    ;; -- divmod --\n"
                        "    xor edx, edx\n"
                        "    pop rbx\n"
                        "    pop rax\n"
                        "    cqo\n"
                        "    idiv rbx\n"
                        "    push rax\n"
                        "    push rdx\n"
                    )
                case OP.DUMP:
                    if stack_items >= 1:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Dump from empty stack")
                    f.write(
                        "    ;; -- dump --\n"
                        "    mov rdi, dump_msg\n"
                        "    pop rsi\n"
                        "    call _printf\n"
                    )
                case OP.EQUAL:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Equal with less than 2 items on stack")
                    f.write(
                        "    ;; -- equal --\n"
                        "    mov rcx, 0\n"
                        "    mov rdx, 1\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    cmp rax, rbx\n"
                        "    cmove rcx, rdx\n"
                        "    push rcx\n"
                    )
                case OP.LESS:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Less than with less than 2 items on stack")
                    f.write(
                        "    ;; -- less than --\n"
                        "    mov rcx, 0\n"
                        "    mov rdx, 1\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    cmp rbx, rax\n"
                        "    cmovl rcx, rdx\n"
                        "    push rcx\n"
                    )
                case OP.GREATER:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Greater than with less than 2 items on stack")
                    f.write(
                        "    ;; -- greater than --\n"
                        "    mov rcx, 0\n"
                        "    mov rdx, 1\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    cmp rbx, rax\n"
                        "    cmovg rcx, rdx\n"
                        "    push rcx\n"
                    )
                case OP.LESS_OR_EQUAL:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Less or equal with less than 2 items on stack")
                    f.write(
                        "    ;; -- less or equal --\n"
                        "    mov rcx, 0\n"
                        "    mov rdx, 1\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    cmp rbx, rax\n"
                        "    cmovle rcx, rdx\n"
                        "    push rcx\n"
                    )
                case OP.GREATER_OR_EQUAL:
                    if stack_items >= 2:
                        stack_items -= 1
                    else:
                        jenga.util.raise_error(word.loc, "Greater or equal with less than 2 items on stack")
                    # NOTE(review): the asm comment below says "less or
                    # equal" but the cmovge is correct — cosmetic copy/paste.
                    f.write(
                        "    ;; -- less or equal --\n"
                        "    mov rcx, 0\n"
                        "    mov rdx, 1\n"
                        "    pop rax\n"
                        "    pop rbx\n"
                        "    cmp rbx, rax\n"
                        "    cmovge rcx, rdx\n"
                        "    push rcx\n"
                    )
                case OP.NOT:
                    if stack_items < 1:
                        jenga.util.raise_error(word.loc, "'not' with less than 1 item on stack")
                    f.write(
                        "    ;; -- not --\n"
                        "    mov rbx, 0\n"
                        "    mov rcx, 1\n"
                        "    pop rax\n"
                        "    cmp rax, 0\n"
                        "    cmove rbx, rcx\n"
                        "    push rbx\n"
                    )
                case OP.IF:
                    # Falsy top-of-stack jumps past the matching ELSE/END.
                    assert word.ref is not None, f"If not crossreferenced : {word}"
                    f.write(
                        f"    ;; -- if --\n"
                        f"    pop rax\n"
                        f"    cmp rax, 0\n"
                        f"    je word_{word.ref + 1}\n"
                    )
                case OP.ELSE:
                    assert word.ref is not None, f"Else not crossreferenced : {word}"
                    f.write(
                        f"    ;; -- else --\n"
                        f"    jmp word_{word.ref}\n"
                    )
                case OP.WHILE:  # Just here so that it doesn't error on the unreachable assert
                    pass
                case OP.DO:
                    assert word.ref is not None, f"'do' not crossreferenced : {word}"
                    f.write(
                        f"    ;; -- do --\n"
                        f"    pop rax\n"
                        f"    cmp rax, 0\n"
                        f"    je word_{word.ref + 1}\n"
                    )
                case OP.END:
                    # END of a while-loop jumps back; END of an if falls through.
                    f.write("    ;; -- end --\n")
                    if word.ref is not None:
                        f.write(f"    jmp word_{word.ref}\n")
                case OP.MEM:
                    stack_items += 1
                    f.write(
                        "    ;; -- mem --\n"
                        "    mov rax, mem\n"
                        "    push rax\n"
                    )
                case OP.STORE:
                    # Stores the low byte only (byte-addressed memory).
                    f.write(
                        "    ;; -- store --\n"
                        "    pop rbx\n"
                        "    pop rax\n"
                        "    mov [rax], bl\n"
                    )
                case OP.LOAD:
                    # Loads one byte, zero-extended to 64 bits.
                    f.write(
                        "    ;; -- load --\n"
                        "    pop rax\n"
                        "    xor rbx, rbx\n"
                        "    mov bl, [rax]\n"
                        "    push rbx\n"
                    )
                case OP.PRINT:
                    f.write(
                        "    ;; -- print --\n"
                        "    mov rsi, 0\n"
                        "    mov rax, 0\n"
                        "    pop rdi\n"
                        "    call _printf\n"
                    )
                case OP.MACRO:
                    assert False, "'macro' word allowed to reach compile()"
                case OP.NAME:
                    assert False, "un-expanded name allowed to reach compile()"
                case OP.INCLUDE:
                    assert False, "un-expanded include allowed to reach compile()"
                case _:
                    raise NotImplementedError(f"Op {word.type} not implemented")
            f.write("\n")
        # Trailing label so END/ELSE refs one past the last word resolve.
        f.write(
            "    ;; -- EOF --\n"
            f"word_{len(words)}:\n"
            "    xor rdi, rdi\n"
            "    call _exit\n"
        )
        f.write(
            "section .data\n"
            "dump_msg: db `%d\\n`, 0\n"
        )
        # Emit each interned string literal, NUL-terminated.
        for idx, s in enumerate(str_literals):
            f.write(
                f"str_{idx}: db `{s}`, 0\n"
            )
        f.write(
            "section .bss\n"
            "mem: resb {}\n".format(jenga.MEM_SIZE)
        )
    print()
    logging.info("Compilation complete")
| 34.237245
| 108
| 0.301542
|
acffae17a331586a853b109e5a0082d6277e331a
| 34
|
py
|
Python
|
catalogo/models/__init__.py
|
wisi3/eventos-serve
|
e0abbbd143107a15fbad06294de50853efd0adaa
|
[
"MIT"
] | null | null | null |
catalogo/models/__init__.py
|
wisi3/eventos-serve
|
e0abbbd143107a15fbad06294de50853efd0adaa
|
[
"MIT"
] | 3
|
2020-02-11T23:14:05.000Z
|
2021-06-10T18:31:18.000Z
|
catalogo/models/__init__.py
|
jhonnyperalta/trivago-serve
|
50d8394200ae1e9a4b19b55fb6e23395832989aa
|
[
"MIT"
] | 3
|
2017-11-18T14:01:45.000Z
|
2020-10-01T14:32:54.000Z
|
#from .categoria import Categoria
| 17
| 33
| 0.823529
|
acffaedc6bd243f046fbd365dd2a46decf93e7fb
| 4,643
|
py
|
Python
|
cart_venv/Lib/site-packages/tensorflow_core/core/framework/tensor_slice_pb2.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 2
|
2019-08-04T20:28:14.000Z
|
2019-10-27T23:26:42.000Z
|
cart_venv/Lib/site-packages/tensorflow_core/core/framework/tensor_slice_pb2.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | null | null | null |
cart_venv/Lib/site-packages/tensorflow_core/core/framework/tensor_slice_pb2.py
|
juice1000/Synchronous-vs-Asynchronous-Learning-Tensorflow-
|
654be60f7986ac9bb7ce1d080ddee377c3389f93
|
[
"MIT"
] | 1
|
2020-11-04T03:16:29.000Z
|
2020-11-04T03:16:29.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/framework/tensor_slice.proto
import sys
# protoc-generated module (see header: DO NOT EDIT) — only comments added here.
# `_b` makes the serialized descriptor bytes below portable: identity on
# Python 2, latin-1 encoding on Python 3.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File descriptor for tensorflow/core/framework/tensor_slice.proto;
# `serialized_pb` is the wire-encoded FileDescriptorProto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tensorflow/core/framework/tensor_slice.proto',
  package='tensorflow',
  syntax='proto3',
  serialized_options=_b('\n\030org.tensorflow.frameworkB\021TensorSliceProtosP\001Z=github.com/tensorflow/tensorflow/tensorflow/go/core/framework\370\001\001'),
  serialized_pb=_b('\n,tensorflow/core/framework/tensor_slice.proto\x12\ntensorflow\"\x80\x01\n\x10TensorSliceProto\x12\x33\n\x06\x65xtent\x18\x01 \x03(\x0b\x32#.tensorflow.TensorSliceProto.Extent\x1a\x37\n\x06\x45xtent\x12\r\n\x05start\x18\x01 \x01(\x03\x12\x10\n\x06length\x18\x02 \x01(\x03H\x00\x42\x0c\n\nhas_lengthBq\n\x18org.tensorflow.frameworkB\x11TensorSliceProtosP\x01Z=github.com/tensorflow/tensorflow/tensorflow/go/core/framework\xf8\x01\x01\x62\x06proto3')
)


# Nested message TensorSliceProto.Extent: [start, start+length) on one axis;
# `length` lives in a oneof so "unset length" is distinguishable.
_TENSORSLICEPROTO_EXTENT = _descriptor.Descriptor(
  name='Extent',
  full_name='tensorflow.TensorSliceProto.Extent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='start', full_name='tensorflow.TensorSliceProto.Extent.start', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='length', full_name='tensorflow.TensorSliceProto.Extent.length', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='has_length', full_name='tensorflow.TensorSliceProto.Extent.has_length',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=134,
  serialized_end=189,
)

# Top-level message: one Extent per tensor dimension.
_TENSORSLICEPROTO = _descriptor.Descriptor(
  name='TensorSliceProto',
  full_name='tensorflow.TensorSliceProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='extent', full_name='tensorflow.TensorSliceProto.extent', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_TENSORSLICEPROTO_EXTENT, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=61,
  serialized_end=189,
)

# Wire up cross-references that cannot be expressed at construction time.
_TENSORSLICEPROTO_EXTENT.containing_type = _TENSORSLICEPROTO
_TENSORSLICEPROTO_EXTENT.oneofs_by_name['has_length'].fields.append(
  _TENSORSLICEPROTO_EXTENT.fields_by_name['length'])
_TENSORSLICEPROTO_EXTENT.fields_by_name['length'].containing_oneof = _TENSORSLICEPROTO_EXTENT.oneofs_by_name['has_length']
_TENSORSLICEPROTO.fields_by_name['extent'].message_type = _TENSORSLICEPROTO_EXTENT
DESCRIPTOR.message_types_by_name['TensorSliceProto'] = _TENSORSLICEPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Concrete message classes synthesized from the descriptors above.
TensorSliceProto = _reflection.GeneratedProtocolMessageType('TensorSliceProto', (_message.Message,), {

  'Extent' : _reflection.GeneratedProtocolMessageType('Extent', (_message.Message,), {
    'DESCRIPTOR' : _TENSORSLICEPROTO_EXTENT,
    '__module__' : 'tensorflow.core.framework.tensor_slice_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.TensorSliceProto.Extent)
    })
  ,
  'DESCRIPTOR' : _TENSORSLICEPROTO,
  '__module__' : 'tensorflow.core.framework.tensor_slice_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.TensorSliceProto)
  })
_sym_db.RegisterMessage(TensorSliceProto)
_sym_db.RegisterMessage(TensorSliceProto.Extent)


DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 37.144
| 469
| 0.77213
|
acffaf1877690e11e80dcea35d09ec5c74b24bf7
| 1,075
|
py
|
Python
|
google/cloud/security/common/data_access/sql_queries/load_data.py
|
joshiumang107/forseti-security
|
c088e677426ec60daae0e44a5d1acbb337386ddb
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/security/common/data_access/sql_queries/load_data.py
|
joshiumang107/forseti-security
|
c088e677426ec60daae0e44a5d1acbb337386ddb
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/security/common/data_access/sql_queries/load_data.py
|
joshiumang107/forseti-security
|
c088e677426ec60daae0e44a5d1acbb337386ddb
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SQL queries to load data into snapshot tables."""
LOAD_DATA = """
LOAD DATA LOCAL INFILE '{0}'
INTO TABLE {1} FIELDS TERMINATED BY ','
({2});
"""
INSERT_VIOLATION = """
INSERT INTO {0}
(resource_type, resource_id, rule_name, rule_index,
violation_type, violation_data)
VALUES (%s, %s, %s, %s, %s, %s)
"""
INSERT_GROUPS_VIOLATION = """
INSERT INTO {0}
(member_email, group_email, rule_name)
VALUES (%s, %s, %s)
"""
| 30.714286
| 74
| 0.699535
|
acffb0ac98a952ee4506627c5cee0523ff3de1e0
| 6,172
|
py
|
Python
|
homeassistant/components/growatt_server/sensor_types/tlx.py
|
tizzen33/core
|
2a1884a1f7a07848b8b63afd29f59c81f1ffaf62
|
[
"Apache-2.0"
] | 7
|
2019-08-15T13:36:58.000Z
|
2020-03-18T10:46:29.000Z
|
homeassistant/components/growatt_server/sensor_types/tlx.py
|
tizzen33/core
|
2a1884a1f7a07848b8b63afd29f59c81f1ffaf62
|
[
"Apache-2.0"
] | 87
|
2020-07-15T13:43:35.000Z
|
2022-03-23T07:43:10.000Z
|
homeassistant/components/growatt_server/sensor_types/tlx.py
|
marecabo/home-assistant
|
e33774a61e7fcc88aff752dfa4618dd26a746872
|
[
"Apache-2.0"
] | 7
|
2018-10-04T10:12:45.000Z
|
2021-12-29T20:55:40.000Z
|
"""Growatt Sensor definitions for the TLX type."""
from __future__ import annotations
from homeassistant.components.sensor import (
STATE_CLASS_TOTAL,
STATE_CLASS_TOTAL_INCREASING,
)
from homeassistant.const import (
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_VOLTAGE,
ELECTRIC_CURRENT_AMPERE,
ELECTRIC_POTENTIAL_VOLT,
ENERGY_KILO_WATT_HOUR,
FREQUENCY_HERTZ,
POWER_WATT,
TEMP_CELSIUS,
)
from .sensor_entity_description import GrowattSensorEntityDescription
# Sensor descriptions for Growatt TLX-type inverters. `api_key` names the
# field in the Growatt server response each sensor reads; `precision` is the
# number of decimal places reported.
TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
    GrowattSensorEntityDescription(
        key="tlx_energy_today",
        name="Energy today",
        api_key="eacToday",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_energy_total",
        name="Lifetime energy output",
        api_key="eacTotal",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_energy_total_input_1",
        name="Lifetime total energy input 1",
        api_key="epv1Total",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_energy_today_input_1",
        name="Energy Today Input 1",
        api_key="epv1Today",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_voltage_input_1",
        name="Input 1 voltage",
        api_key="vpv1",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_amperage_input_1",
        name="Input 1 Amperage",
        api_key="ipv1",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        device_class=DEVICE_CLASS_CURRENT,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_wattage_input_1",
        name="Input 1 Wattage",
        api_key="ppv1",
        native_unit_of_measurement=POWER_WATT,
        device_class=DEVICE_CLASS_POWER,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_energy_total_input_2",
        name="Lifetime total energy input 2",
        api_key="epv2Total",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_energy_today_input_2",
        name="Energy Today Input 2",
        api_key="epv2Today",
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        device_class=DEVICE_CLASS_ENERGY,
        state_class=STATE_CLASS_TOTAL_INCREASING,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_voltage_input_2",
        name="Input 2 voltage",
        api_key="vpv2",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_amperage_input_2",
        name="Input 2 Amperage",
        api_key="ipv2",
        native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
        device_class=DEVICE_CLASS_CURRENT,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_wattage_input_2",
        name="Input 2 Wattage",
        api_key="ppv2",
        native_unit_of_measurement=POWER_WATT,
        device_class=DEVICE_CLASS_POWER,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_internal_wattage",
        name="Internal wattage",
        api_key="ppv",
        native_unit_of_measurement=POWER_WATT,
        device_class=DEVICE_CLASS_POWER,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_reactive_voltage",
        name="Reactive voltage",
        api_key="vacrs",
        native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
        device_class=DEVICE_CLASS_VOLTAGE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_frequency",
        name="AC frequency",
        api_key="fac",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_current_wattage",
        name="Output power",
        api_key="pac",
        native_unit_of_measurement=POWER_WATT,
        device_class=DEVICE_CLASS_POWER,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_temperature_1",
        name="Temperature 1",
        api_key="temp1",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_temperature_2",
        name="Temperature 2",
        api_key="temp2",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_temperature_3",
        name="Temperature 3",
        api_key="temp3",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_temperature_4",
        name="Temperature 4",
        api_key="temp4",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        precision=1,
    ),
    GrowattSensorEntityDescription(
        key="tlx_temperature_5",
        name="Temperature 5",
        api_key="temp5",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        precision=1,
    ),
)
| 31.171717
| 69
| 0.675308
|
acffb17617fda26812f3755ca68246c66958ebec
| 1,070
|
py
|
Python
|
feedback/migrations/0001_initial.py
|
Aleccc/gtcrew
|
7e6e7024afdbf48ee796cb1f9a86b913e6843dda
|
[
"MIT"
] | null | null | null |
feedback/migrations/0001_initial.py
|
Aleccc/gtcrew
|
7e6e7024afdbf48ee796cb1f9a86b913e6843dda
|
[
"MIT"
] | 21
|
2019-02-14T02:47:34.000Z
|
2022-01-23T02:22:54.000Z
|
feedback/migrations/0001_initial.py
|
Aleccc/gtcrew
|
7e6e7024afdbf48ee796cb1f9a86b913e6843dda
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.4 on 2020-04-25 21:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the feedback app: creates the
    # Feedback model. Avoid hand-editing once it has been applied.

    initial = True

    dependencies = [
        # Depends on whatever model AUTH_USER_MODEL points at.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Feedback',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('open', 'Open'), ('progress', 'In Progress'), ('closed', 'Closed')], default='open', max_length=8)),
                ('feedback', models.TextField()),
                ('date_added', models.DateTimeField(auto_now_add=True)),
                ('date_updated', models.DateTimeField(auto_now=True)),
                # PROTECT: a user with submitted feedback cannot be deleted.
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='feedback_submitted', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 36.896552
| 159
| 0.631776
|
acffb1e8544f8cff7d976cf882437603ac45303d
| 381
|
py
|
Python
|
src/wee/wsgi.py
|
dipkakwani/wee_app
|
a0f15053ec64a49611d759eaae6d780d608bea46
|
[
"MIT"
] | 2
|
2016-11-18T18:43:10.000Z
|
2018-10-17T18:31:52.000Z
|
src/wee/wsgi.py
|
dipkakwani/wee_app
|
a0f15053ec64a49611d759eaae6d780d608bea46
|
[
"MIT"
] | null | null | null |
src/wee/wsgi.py
|
dipkakwani/wee_app
|
a0f15053ec64a49611d759eaae6d780d608bea46
|
[
"MIT"
] | null | null | null |
"""
WSGI config for wee project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os

# Point Django at the project settings before the application is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wee.settings")

from django.core.wsgi import get_wsgi_application

# The WSGI callable used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| 25.4
| 78
| 0.784777
|
acffb349db22e23e9f53173b486e62ceaaa78d25
| 2,356
|
py
|
Python
|
set_from_is_list.py
|
cedelis/docker-mailman
|
b4e3221a560a20adb20187a4e9d24e7ed58d2020
|
[
"MIT"
] | null | null | null |
set_from_is_list.py
|
cedelis/docker-mailman
|
b4e3221a560a20adb20187a4e9d24e7ed58d2020
|
[
"MIT"
] | null | null | null |
set_from_is_list.py
|
cedelis/docker-mailman
|
b4e3221a560a20adb20187a4e9d24e7ed58d2020
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python
#
# Copyright (C) 2001-2018 by the Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Reset a list's web_page_url attribute to the default setting.
This script is intended to be run as a bin/withlist script, i.e.
% bin/withlist -l -r set_from_is_list listname [options]
Options:
-u value
--from-is-list-value=value
Set from_is_list value. Choices for value:
0 = Accept (default)
1 = Munge From
2 = Wrap Message
3 = Reject
4 = Discard
-v / --verbose
Print what the script is doing.
If run standalone, it prints this help text and exits.
"""
import sys
import getopt
import paths
from Mailman import mm_cfg
from Mailman.i18n import C_
def usage(code, msg=''):
    # Print the module docstring as help text ('%' is escaped for the
    # i18n formatter), optionally followed by `msg`, then exit with `code`.
    # Python 2 source (bare `print` statements).
    print C_(__doc__.replace('%', '%%'))
    if msg:
        print msg
    sys.exit(code)
def set_from_is_list(mlist, *args):
    # bin/withlist entry point: set the list's from_is_list attribute.
    # `mlist` is the MailList object supplied by withlist; `args` are the
    # command-line options documented in the module docstring.
    try:
        opts, args = getopt.getopt(args, 'u:v', ['from-is-list-value=', 'verbose'])
    except getopt.error, msg:
        usage(1, msg)
    verbose = 0
    f_value = 0  # default: 0 = Accept (see module docstring for the mapping)
    for opt, arg in opts:
        if opt in ('-u', '--from-is-list-value'):
            f_value = int(arg)
        elif opt in ('-v', '--verbose'):
            verbose = 1
    # Make sure list is locked.
    if not mlist.Locked():
        if verbose:
            print C_('Locking list')
        mlist.Lock()
    if verbose:
        old_f_value = mlist.from_is_list
        print C_('Setting from_is_list from: %(old_f_value)s to: %(f_value)s')
    mlist.from_is_list = f_value
    # Persist the change and release the lock unconditionally.
    print C_('Saving list')
    mlist.Save()
    mlist.Unlock()
# When run standalone (not via bin/withlist), just print the help and exit.
if __name__ == '__main__':
    usage(0)
| 25.333333
| 83
| 0.647708
|
acffb3df6a4228665a0585c99acada2ab2a78fc5
| 2,522
|
py
|
Python
|
runtests.py
|
DummerDelfin/django-contact-form
|
ef7f7d2492b6bdf9bed6aae2fa5aadc3dd2f3d30
|
[
"BSD-3-Clause"
] | null | null | null |
runtests.py
|
DummerDelfin/django-contact-form
|
ef7f7d2492b6bdf9bed6aae2fa5aadc3dd2f3d30
|
[
"BSD-3-Clause"
] | null | null | null |
runtests.py
|
DummerDelfin/django-contact-form
|
ef7f7d2492b6bdf9bed6aae2fa5aadc3dd2f3d30
|
[
"BSD-3-Clause"
] | null | null | null |
"""
A standalone test runner script, configuring the minimum settings
required for tests to execute.
Re-use at your own risk: many Django applications will require full
settings and/or templates in order to execute their tests.
"""
import os
import sys
# Make sure the app is (at least temporarily) on the import path.
APP_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, APP_DIR)
# Minimum settings required for django-contact-form to work.
# Passed to settings.configure() in run_tests(); deliberately minimal —
# just enough configuration for the app's test suite.
SETTINGS_DICT = {
    'BASE_DIR': APP_DIR,
    'INSTALLED_APPS': (
        'contact_form',
        # contrib apps the contact form depends on (auth, content types,
        # sites framework).
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sites',
    ),
    'ROOT_URLCONF': 'contact_form.urls',
    'DATABASES': {
        'default': {
            # Throwaway on-disk SQLite database in the app directory.
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(APP_DIR, 'db.sqlite3'),
        },
    },
    'MIDDLEWARE': (
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
    ),
    'SITE_ID': 1,
    'DEFAULT_FROM_EMAIL': 'contact@example.com',
    'MANAGERS': [('Manager', 'noreply@example.com')],
    'TEMPLATES': [{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(APP_DIR, 'tests/templates')],
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
}
def run_tests():
    """Configure Django from SETTINGS_DICT, run the 'tests' package, and
    exit the process with a non-zero status if any test failed."""
    # Step 1: hand Django its settings before anything else touches them.
    from django.conf import settings
    settings.configure(**SETTINGS_DICT)

    # Step 2: populate the app registry and related caches.
    import django
    django.setup()

    # Step 3: resolve the configured test runner class and execute the suite.
    from django.test.utils import get_runner
    runner_cls = get_runner(settings)
    runner = runner_cls(verbosity=2, interactive=True)
    failure_count = runner.run_tests(['tests'])
    sys.exit(bool(failure_count))
if __name__ == '__main__':
run_tests()
| 30.385542
| 70
| 0.645123
|
acffb4cb18c771e228067f5ec87d67614dca096c
| 629
|
py
|
Python
|
backend/manage.py
|
crowdbotics-apps/thu-34308
|
0c102b713cee7f454ed0efb54bb531b0a017567d
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/thu-34308
|
0c102b713cee7f454ed0efb54bb531b0a017567d
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/thu-34308
|
0c102b713cee7f454ed0efb54bb531b0a017567d
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def _load_django_entry_point():
    # Single purpose: import Django's CLI dispatcher, translating an
    # ImportError into a message pointing at the usual culprits while
    # preserving the original exception as the cause.
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    return execute_from_command_line


def main():
    """Entry point for Django's administrative command line."""
    # An explicit DJANGO_SETTINGS_MODULE in the environment wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'thu_34308.settings')
    _load_django_entry_point()(sys.argv)


if __name__ == '__main__':
    main()
| 28.590909
| 73
| 0.683625
|
acffb582799c3154c5fdf3aa4703266213ee00ef
| 959
|
py
|
Python
|
yatest/testpage.py
|
boluny/cray
|
b8c628df70a3dd488067bce393123e9686393b33
|
[
"MIT"
] | 1
|
2017-10-13T06:58:25.000Z
|
2017-10-13T06:58:25.000Z
|
yatest/testpage.py
|
boluny/cray
|
b8c628df70a3dd488067bce393123e9686393b33
|
[
"MIT"
] | 3
|
2017-07-21T08:09:39.000Z
|
2018-08-15T08:42:39.000Z
|
yatest/testpage.py
|
boluny/cray
|
b8c628df70a3dd488067bce393123e9686393b33
|
[
"MIT"
] | null | null | null |
import unittest
import os
from cray.craylib.page import Page
PAGE_DIR = os.path.join(os.path.dirname(__file__), "test_site", "_page")
def get_test_suites():
    """Build and return a unittest.TestSuite holding this module's cases."""
    cases = [PageGenerateTestCase()]
    bundle = unittest.TestSuite()
    for case in cases:
        bundle.addTest(case)
    return bundle
class PageGenerateTestCase(unittest.TestCase):
    '''Test case for page generation'''
    def runTest(self):
        '''Run test'''
        # One page file that exists under test_site/_page and one that
        # deliberately does not.
        p_about = Page(os.path.join(PAGE_DIR, "about.md"))
        p_non_existence = Page(os.path.join(PAGE_DIR, "no-existence.markdown"))
        # Front-matter metadata expected to be parsed out of about.md.
        should_meta = {'title': r'about',
            'layout': 'page',
            'permalink': '/about/'
            }
        self.assertFalse(p_non_existence.is_existed())
        self.assertTrue(p_about.is_existed())
        # Parsing must happen before get_meta() returns the parsed mapping.
        p_about.parse_file()
        p_about_meta = p_about.get_meta()
        self.assertEqual(should_meta, p_about_meta)
| 29.96875
| 79
| 0.625652
|
acffb5bec2889ac3924926294f60154cce32413d
| 386
|
py
|
Python
|
utils/logger.py
|
RapDoodle8080/mooli-milk-tea-management-system
|
9b6e31664529ac70d180da2f219baf0eb902017b
|
[
"MIT"
] | 1
|
2021-05-29T09:40:42.000Z
|
2021-05-29T09:40:42.000Z
|
utils/logger.py
|
RapDoodle/Mooli-Ordering-System
|
9b6e31664529ac70d180da2f219baf0eb902017b
|
[
"MIT"
] | 1
|
2020-10-28T02:10:24.000Z
|
2020-10-28T02:10:24.000Z
|
utils/logger.py
|
RapDoodle/Mooli-Ordering-System
|
9b6e31664529ac70d180da2f219baf0eb902017b
|
[
"MIT"
] | 1
|
2021-05-29T09:40:41.000Z
|
2021-05-29T09:40:41.000Z
|
# Unable to use python logger due to its incompatibility with flask at the moment
from datetime import datetime
def log_error(msg):
    """Append a timestamped error line to ./log/error.log.

    Output format, e.g.:
        [2020-05-15 17:08:53.508167] Unable to connect

    The log directory is created on demand so a fresh deployment does not
    crash with FileNotFoundError on the first logged error.
    """
    import os  # local import keeps the module's top-level imports untouched
    os.makedirs('./log', exist_ok=True)
    current_time = datetime.now()
    with open('./log/error.log', 'a') as log:
        log.write("[{}] {}\n".format(str(current_time), str(msg)))
| 38.6
| 81
| 0.678756
|
acffb6acdac5675b29dd1ba5b5398d04a7ea2b37
| 2,237
|
py
|
Python
|
examples/formula/simple_contrast.py
|
bpinsard/nipy
|
d49e8292adad6619e3dac710752131b567efe90e
|
[
"BSD-3-Clause"
] | 1
|
2019-03-23T07:41:30.000Z
|
2019-03-23T07:41:30.000Z
|
examples/formula/simple_contrast.py
|
bpinsard/nipy
|
d49e8292adad6619e3dac710752131b567efe90e
|
[
"BSD-3-Clause"
] | null | null | null |
examples/formula/simple_contrast.py
|
bpinsard/nipy
|
d49e8292adad6619e3dac710752131b567efe90e
|
[
"BSD-3-Clause"
] | 1
|
2020-08-19T17:26:43.000Z
|
2020-08-19T17:26:43.000Z
|
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" A simple contrast for an FMRI HRF model """
from __future__ import print_function # Python 2/3 compatibility
import numpy as np
from nipy.algorithms.statistics.api import Formula, make_recarray
from nipy.modalities.fmri import utils, hrf
from nipy.modalities.fmri.fmristat import hrf as delay
# We take event onsets, and a specified HRF model, and make symbolic functions
# of time
c1 = utils.events([3,7,10], f=hrf.glover) # Symbolic function of time
c2 = utils.events([1,3,9], f=hrf.glover) # Symbolic function of time
c3 = utils.events([3,4,6], f=delay.spectral[0]) # Symbolic function of time
# We can also use a Fourier basis for some other onsets - again making symbolic
# functions of time
d = utils.fourier_basis([3,5,7]) # Formula
# Make a formula for all four sets of onsets
f = Formula([c1,c2,c3]) + d
# A contrast is a formula expressed on the elements of the design formula
contrast = Formula([c1-c2, c1-c3])
# Instantiate actual values of time at which to create the design matrix rows
t = make_recarray(np.linspace(0,20,50), 't')
# Make the design matrix, and get contrast matrices for the design
X, c = f.design(t, return_float=True, contrasts={'C':contrast})
# c is a dictionary, containing a 2 by 9 matrix - the F contrast matrix for our
# contrast of interest
# (9 columns: 3 event regressors + 6 from the Fourier basis formula)
assert X.shape == (50, 9)
assert c['C'].shape == (2, 9)
# In this case the contrast matrix is rather obvious.
np.testing.assert_almost_equal(c['C'],
                               [[1,-1, 0, 0, 0, 0, 0, 0, 0],
                                [1, 0, -1, 0, 0, 0, 0, 0, 0]])
# We can get the design implied by our contrast at our chosen times
preC = contrast.design(t, return_float=True)
np.testing.assert_almost_equal(preC[:, 0], X[:, 0] - X[:, 1])
np.testing.assert_almost_equal(preC[:, 1], X[:, 0] - X[:, 2])
# So, X . c['C'].T \approx preC
np.testing.assert_almost_equal(np.dot(X, c['C'].T), preC)
# So what is the matrix C such that preC = X . C? Yes, it's c['C']
# (recovered here via the pseudo-inverse of the design matrix)
C = np.dot(np.linalg.pinv(X), preC).T
np.testing.assert_almost_equal(C, c['C'])
# The contrast matrix (approx equal to c['C'])
print(C)
| 37.915254
| 79
| 0.680376
|
acffb6c9aeb69fefb2cb5a9d2b42333c6458d544
| 5,179
|
py
|
Python
|
fcm.py
|
Mohammad-Robati/FuzzyCMeans
|
398efa3c913b1cad211f2435764edbad3db62426
|
[
"MIT"
] | null | null | null |
fcm.py
|
Mohammad-Robati/FuzzyCMeans
|
398efa3c913b1cad211f2435764edbad3db62426
|
[
"MIT"
] | null | null | null |
fcm.py
|
Mohammad-Robati/FuzzyCMeans
|
398efa3c913b1cad211f2435764edbad3db62426
|
[
"MIT"
] | null | null | null |
from math import sqrt, pow, log, inf
from point import Point
from random import random
from copy import deepcopy
import numpy as np
import matplotlib.pyplot as plt
class FCM:
    """Fuzzy C-Means clustering over a list of Point objects.

    Runs the FCM algorithm for every cluster count in
    [minCluster, maxCluster) and keeps the partition whose normalised
    entropy is lowest.
    """
    def __init__(self, points, m, minCluster, maxCluster):
        # points: list of Point objects to cluster
        # m: fuzzifier exponent (m > 1) controlling cluster overlap
        # minCluster / maxCluster: cluster-count range to try (max exclusive)
        self.points = points
        self.centers = []
        self.m = m
        self.minCluster = minCluster
        self.maxCluster = maxCluster
        self.c = 2
        # Convergence threshold on the maximum per-coordinate center movement
        # (attribute name is a misspelling of "sensitivity"; kept for
        # compatibility).
        self.sensivity = 0.01
        pass
    def setInitialCenters(self):
        # Re-seed self.c random centers with coordinates drawn uniformly from
        # [0, 1) in the same dimension as the data points.
        self.centers = []
        for i in range(self.c):
            point = Point()
            point.setValue([random() for i in range(len(self.points[0].value))])
            self.centers.append(point)
    def getDistance(self, point1, point2):
        # Euclidean distance between two Points.
        dim = len(point1.value)
        sum = 0
        for i in range(dim):
            sum += pow(point1.value[i]-point2.value[i], 2)
        return sqrt(sum)
    def setNewMembeship(self, clusterIndex, point):
        # Standard FCM membership update: u_ik = 1 / sum_j (d_ik/d_jk)^(2/(m-1)).
        # NOTE(review): raises ZeroDivisionError if a point coincides exactly
        # with a center — apparently acceptable for random float data.
        sum = 0
        for i in range(self.c):
            sum += pow(self.getDistance(point, self.centers[clusterIndex])
                       /self.getDistance(point, self.centers[i]),
                       2/(self.m-1))
        point.setMembership(clusterIndex, 1/sum)
    def setNewCenter(self, clusterIndex):
        # Center update: membership-weighted mean of all points, with
        # memberships raised to the fuzzifier power m.
        points = self.points
        sum1 = np.array([0.0 for i in range(len(points[0].value))])
        sum2 = 0
        for point in points:
            membership = pow(point.membership[clusterIndex], self.m)
            newPoint = np.array([i * membership for i in point.value])
            sum1 = np.add(sum1, newPoint)
            sum2 += membership
        self.centers[clusterIndex].setValue((sum1/sum2).tolist())
    def checkTerminationCondition(self, oldCenters, newCenters):
        # Converged when the largest per-coordinate center movement falls
        # below the threshold.
        allDiffs = []
        for i in range(len(oldCenters)):
            diffs = []
            for j in range(len(oldCenters[0].value)):
                diffs.append(abs(newCenters[i].value[j]-oldCenters[i].value[j]))
            allDiffs.append(max(diffs))
        return max(allDiffs) < self.sensivity
    def getEntropy(self):
        # Partition entropy normalised by log(c); lower is a crisper
        # (better-separated) clustering.
        sum = 0
        for i in range(len(self.centers)):
            for point in self.points:
                sum += point.membership[i] * log(point.membership[i])
        entropy = -sum / log(self.c)
        print("C = " + str(self.c) + " -> Entropy is", entropy)
        return entropy
    def getBestAnswer(self, answers):
        # Select the cluster count with the lowest entropy and adopt its
        # centers as the final result.
        minEntropy = inf
        finalAnswer = None
        for answer in answers:
            if minEntropy > answers[answer]["entropy"]:
                minEntropy = answers[answer]["entropy"]
                finalAnswer = answer
        self.c = finalAnswer
        self.centers = answers[finalAnswer]["centers"]
    def fcmAlgorithm(self):
        # Full sweep: run FCM to convergence for each candidate cluster count,
        # recording the entropy of each converged partition.
        answers = {}
        for c in range(self.minCluster, self.maxCluster):
            self.c = c
            self.setInitialCenters()
            centers = self.centers
            while True:
                # Deep copy: centers are mutated in place by setNewCenter.
                oldCenters = deepcopy(centers)
                for i in range(len(centers)):
                    for point in self.points:
                        self.setNewMembeship(i, point)
                for i in range(len(centers)):
                    self.setNewCenter(i)
                if self.checkTerminationCondition(oldCenters, centers):
                    answers[c] = {"entropy": self.getEntropy(), "centers": centers}
                    break
        self.getBestAnswer(answers)
    def run(self):
        # Convenience driver: cluster, report centers, plot (2-D data only).
        self.fcmAlgorithm()
        for i in range(len(self.centers)):
            print("Cluster Center #" + str(i+1) + " : " + str(self.centers[i]))
        self.plot()
    def getDecisionBoundaries(self, minX, maxX, minY, maxY):
        # Sample a density x density grid over the bounding box and compute
        # memberships for each grid point, for background shading in plot().
        density = 6
        backpointsX = [(minX + j * (maxX-minX)/density) for j in range(density) for i in range(density)]
        backpointsY = [(minY + i * (maxY-minY)/density) for j in range(density) for i in range(density)]
        backpoints = []
        for backpointX in backpointsX:
            for backpointY in backpointsY:
                backpoint = Point()
                backpoint.value = [backpointX, backpointY]
                for c in range(self.c):
                    self.setNewMembeship(c, backpoint)
                backpoints.append(backpoint)
        return backpoints
    def plot(self):
        # 2-D scatter plot: grey background grid shaded by best cluster,
        # data points in red, centers as blue squares.  No-op for data with
        # dimension != 2.
        points = self.points
        if len(points[0].value) == 2:
            centers = self.centers
            pointsX = []
            pointsY = []
            for point in points:
                pointsX.append(point.value[0])
                pointsY.append(point.value[1])
            centersX = []
            centersY = []
            for center in centers:
                centersX.append(center.value[0])
                centersY.append(center.value[1])
            backpoints = self.getDecisionBoundaries(min(pointsX)-0.05, max(pointsX)+0.05, min(pointsY)-0.2, max(pointsY)+0.2)
            for point in backpoints:
                # bestClusterIndex is presumably set by Point.setMembership —
                # TODO confirm against the Point class.
                plt.scatter(point.value[0], point.value[1], color=str(point.bestClusterIndex/self.c), s=400)
            plt.plot(pointsX, pointsY, 'ro')
            plt.plot(centersX, centersY, 'bs')
            plt.show()
| 36.992857
| 125
| 0.555706
|
acffb717a792bf363fc23fb87cd9e30d0fe6f63c
| 364
|
py
|
Python
|
Python/network/inet.py
|
ebouaziz/miscripts
|
9520d31adfd8cf63a06d519b0c308f07dd107b90
|
[
"MIT"
] | null | null | null |
Python/network/inet.py
|
ebouaziz/miscripts
|
9520d31adfd8cf63a06d519b0c308f07dd107b90
|
[
"MIT"
] | null | null | null |
Python/network/inet.py
|
ebouaziz/miscripts
|
9520d31adfd8cf63a06d519b0c308f07dd107b90
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2.7
# Experiment with netifaces module
# Lists every network interface with its IPv4 address(es).  Interfaces with
# no AF_INET entry raise KeyError and are reported as having no address.
# Python 2 only (print statement, `except KeyError, e` syntax).
from netifaces import interfaces, ifaddresses, AF_INET
for ifaceName in interfaces():
    try:
        addresses = [i['addr'] for i in ifaddresses(ifaceName)[AF_INET]]
        print '%s: %s' % (ifaceName, ', '.join(addresses))
    except KeyError, e:
        print "%s: no defined address" % ifaceName
| 30.333333
| 72
| 0.664835
|
acffb8f862a9672ae107f6b6094fd82e15a0dae2
| 2,808
|
py
|
Python
|
server/api/grid/split_task_apis.py
|
wildintellect/tasking-manager
|
373fb231404628e6ae9a1838539b9c3cb23ad73c
|
[
"BSD-2-Clause"
] | 3
|
2018-04-24T08:12:31.000Z
|
2020-09-02T18:11:21.000Z
|
server/api/grid/split_task_apis.py
|
wildintellect/tasking-manager
|
373fb231404628e6ae9a1838539b9c3cb23ad73c
|
[
"BSD-2-Clause"
] | 28
|
2019-01-04T17:39:00.000Z
|
2021-05-06T23:06:24.000Z
|
server/api/grid/split_task_apis.py
|
wildintellect/tasking-manager
|
373fb231404628e6ae9a1838539b9c3cb23ad73c
|
[
"BSD-2-Clause"
] | 3
|
2020-02-29T20:46:09.000Z
|
2020-11-20T19:44:04.000Z
|
from flask_restful import Resource, current_app, request
from schematics.exceptions import DataError
from server.models.dtos.grid_dto import SplitTaskDTO
from server.models.postgis.utils import NotFound
from server.services.grid.split_service import SplitService, SplitServiceError
from server.services.users.authentication_service import token_auth, tm
class SplitTaskAPI(Resource):
    # Flask-RESTful resource exposing task splitting.  Requires a logged-in
    # (non-PM-only) user via the token_auth / tm decorators.
    @tm.pm_only(False)
    @token_auth.login_required
    def post(self, project_id, task_id):
        """
        Split a task
        ---
        tags:
            - grid
        produces:
            - application/json
        parameters:
            - in: header
              name: Authorization
              description: Base64 encoded session token
              required: true
              type: string
              default: Token sessionTokenHere==
            - in: header
              name: Accept-Language
              description: Language user is requesting
              type: string
              required: true
              default: en
            - name: project_id
              in: path
              description: The ID of the project the task is associated with
              required: true
              type: integer
              default: 1
            - name: task_id
              in: path
              description: The unique task ID
              required: true
              type: integer
              default: 1
        responses:
            200:
                description: Task split OK
            400:
                description: Client Error
            401:
                description: Unauthorized - Invalid credentials
            403:
                description: Forbidden
            404:
                description: Task not found
            500:
                description: Internal Server Error
        """
        # Build and validate the DTO from the URL parts, the authenticated
        # user and the request's preferred locale.
        try:
            split_task_dto = SplitTaskDTO()
            split_task_dto.user_id = tm.authenticated_user_id
            split_task_dto.project_id = project_id
            split_task_dto.task_id = task_id
            split_task_dto.preferred_locale = request.environ.get('HTTP_ACCEPT_LANGUAGE')
            split_task_dto.validate()
        except DataError as e:
            current_app.logger.error(f'Error validating request: {str(e)}')
            return str(e), 400
        # Delegate the split to the service layer; map domain exceptions onto
        # HTTP status codes, with a logged 500 as the catch-all.
        try:
            tasks = SplitService.split_task(split_task_dto)
            return tasks.to_primitive(), 200
        except NotFound:
            return {"Error": "Task Not Found"}, 404
        except SplitServiceError as e:
            return {"Error": str(e)}, 403
        except Exception as e:
            error_msg = f'Task Split API - unhandled error: {str(e)}'
            current_app.logger.critical(error_msg)
            return {"Error": error_msg}, 500
| 34.666667
| 89
| 0.565527
|
acffb92b8e8b04befe4b7b9b62e2de43e0f5979d
| 781
|
py
|
Python
|
config.py
|
Blankphrase/News-Highlight
|
efda2300b0c7e021bc88b19f06e76ea1b09f4717
|
[
"MIT"
] | null | null | null |
config.py
|
Blankphrase/News-Highlight
|
efda2300b0c7e021bc88b19f06e76ea1b09f4717
|
[
"MIT"
] | null | null | null |
config.py
|
Blankphrase/News-Highlight
|
efda2300b0c7e021bc88b19f06e76ea1b09f4717
|
[
"MIT"
] | null | null | null |
import os
class Config:
    """Base configuration shared by every environment.

    Holds the News API endpoint templates and reads the API key from the
    NEWS_API_KEY environment variable (None when unset).
    """
    # API key, supplied via the environment.
    NEWS_API_KEY = os.environ.get('NEWS_API_KEY')
    # URL templates: first placeholder is the category/source, second the key.
    NEWS_SOURCE_BASE_URL = 'https://newsapi.org/v2/sources?category={}&language=en&apiKey={}'
    NEWS_ARTICLE_API_BASE_URL = 'https://newsapi.org/v2/everything?sources={}&apiKey={}'
class ProdConfig(Config):
    """Production settings.

    Inherits everything from Config unchanged; exists so the application can
    select a dedicated production configuration class by name.
    """
    pass
class DevConfig(Config):
    """Development settings: identical to Config except debugging is on."""
    # Flask debug mode for local development.
    DEBUG = True
# Lookup table mapping an environment name to its configuration class;
# used to select the active configuration by name.
config_options = {
    'development': DevConfig,
    'production': ProdConfig
}
| 21.694444
| 93
| 0.681178
|
acffb9c17058c6fee8039ee5b5330d8bb5f8db19
| 7,195
|
py
|
Python
|
von_agent/demo_agents.py
|
nrempel/von_agent_test
|
da23056f8385b2b1751f5df21af87f23825db216
|
[
"Apache-2.0"
] | null | null | null |
von_agent/demo_agents.py
|
nrempel/von_agent_test
|
da23056f8385b2b1751f5df21af87f23825db216
|
[
"Apache-2.0"
] | null | null | null |
von_agent/demo_agents.py
|
nrempel/von_agent_test
|
da23056f8385b2b1751f5df21af87f23825db216
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2017 Government of Canada - Public Services and Procurement Canada - buyandsell.gc.ca
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from requests import post
from .agents import AgentRegistrar, Origin, Verifier, Issuer, HolderProver
import json
import logging
class TrustAnchorAgent(AgentRegistrar, Origin):
    """
    Trust anchor register agents and schemata & claim defs onto the distributed ledger
    """

    async def process_post(self, form: dict) -> str:
        """
        Takes a request from service wrapper POST and dispatches the applicable agent action.
        Returns (json) response arising from processing.

        :param form: request form on which to operate
        :return: json response
        """
        logger = logging.getLogger(__name__)
        logger.debug('TrustAnchorAgent.process_post: >>> form: {}'.format(form))

        self.__class__._vet_keys({'type', 'data'}, set(form.keys()))  # all tokens need type and data

        # Try each responder code base from BaseListeningAgent up before trying locally:
        # the first base class that implements this form type produces the response.
        mro = TrustAnchorAgent._mro_dispatch()
        for ResponderClass in mro:
            try:
                rv = await ResponderClass.process_post(self, form)
                logger.debug('TrustAnchorAgent.process_post: <<< {}'.format(rv))
                return rv
            except NotImplementedError:
                pass

        # token-type/proxy: nothing in the MRO handled this form type.
        logger.debug('TrustAnchorAgent.process_post: <!< not this form type: {}'.format(form['type']))
        raise NotImplementedError('{} does not support token type {}'.format(self.__class__.__name__, form['type']))
class SRIAgent(Verifier, Issuer, HolderProver):
    """
    SRI agent is:
    * a Verifier for:
        * Org Book proofs of BC Registrar
        * its own proofs of its own SRI registration claims
    * an Issuer and HolderProver of its own SRI registration claims
    * a Prover for its own SRI registration claims.
    """

    async def reset_wallet(self) -> int:
        """
        Method for SRIAgent to close and delete wallet, then create and open a new one. Delegates to
        HolderProver.reset_wallet() to create wallet and reset master secret, then resets claim_def for
        SRIAgent's Issuer nature.

        Useful for demo purpose so as not to have to shut down and restart the HolderProver from django.
        Precursor to revocation, and issuer/filter-specifiable claim deletion.

        :return: wallet num
        """
        logger = logging.getLogger(__name__)
        logger.debug('SRIAgent.reset_wallet: >>>')

        # Delegate wallet re-creation + master-secret reset to HolderProver ...
        await HolderProver.reset_wallet(self)
        # ... then re-send the claim definition so this agent can issue again.
        schema_json = await self._schema_info({})
        await self.send_claim_def(schema_json) # allow for new claim creation
        rv = self.wallet.num

        logger.debug('SRIAgent.reset_wallet: <<< {}'.format(rv))
        return rv

    async def process_post(self, form: dict) -> str:
        """
        Takes a request from service wrapper POST and dispatches the applicable agent action.
        Returns (json) response arising from processing.

        :param form: request form on which to operate
        :return: json response
        """
        logger = logging.getLogger(__name__)
        logger.debug('SRIAgent.process_post: >>> form: {}'.format(form))

        self.__class__._vet_keys({'type', 'data'}, set(form.keys()))  # all tokens need type and data

        # Try each responder code base from BaseListeningAgent up before trying locally
        mro = SRIAgent._mro_dispatch()
        for ResponderClass in mro:
            try:
                rv = await ResponderClass.process_post(self, form)
                logger.debug('SRIAgent.process_post: <<< {}'.format(rv))
                return rv
            except NotImplementedError:
                pass

        # token-type/proxy: nothing in the MRO handled this form type.
        logger.debug('SRIAgent.process_post: <!< not this form type: {}'.format(form['type']))
        raise NotImplementedError('{} does not support token type {}'.format(self.__class__.__name__, form['type']))
class BCRegistrarAgent(Issuer):
    """
    BC registrar agent is an Issuer of BC registrar claims
    """

    async def process_post(self, form: dict) -> str:
        """
        Takes a request from service wrapper POST and dispatches the applicable agent action.
        Returns (json) response arising from processing.

        :param form: request form on which to operate
        :return: json response
        """
        logger = logging.getLogger(__name__)
        logger.debug('BCRegistrarAgent.process_post: >>> form: {}'.format(form))

        self.__class__._vet_keys({'type', 'data'}, set(form.keys()))  # all tokens need type and data

        # Try each responder code base from BaseListeningAgent up before trying locally.
        # Fix: dispatch on BCRegistrarAgent's own MRO — the original used
        # SRIAgent._mro_dispatch(), a copy/paste slip inconsistent with the
        # sibling agents, which each dispatch on their own class.
        mro = BCRegistrarAgent._mro_dispatch()
        for ResponderClass in mro:
            try:
                rv = await ResponderClass.process_post(self, form)
                logger.debug('BCRegistrarAgent.process_post: <<< {}'.format(rv))
                return rv
            except NotImplementedError:
                pass

        # token-type/proxy: nothing in the MRO handled this form type.
        logger.debug('BCRegistrarAgent.process_post: <!< not this form type: {}'.format(form['type']))
        raise NotImplementedError('{} does not support token type {}'.format(self.__class__.__name__, form['type']))
class OrgBookAgent(HolderProver):
    """
    The Org Book agent is a HolderProver of BC registrar claims
    """

    async def process_post(self, form: dict) -> str:
        """
        Takes a request from service wrapper POST and dispatches the applicable agent action.
        Returns (json) response arising from processing.

        :param form: request form on which to operate
        :return: json response
        """
        logger = logging.getLogger(__name__)
        logger.debug('OrgBookAgent.process_post: >>> form: {}'.format(form))

        # Every token must carry a type and a payload.
        self.__class__._vet_keys({'type', 'data'}, set(form.keys()))  # all tokens need type and data

        # Walk the resolution order from BaseListeningAgent upward; the first
        # responder that implements this form type produces the response.
        for responder_cls in OrgBookAgent._mro_dispatch():
            try:
                rv = await responder_cls.process_post(self, form)
                logger.debug('OrgBookAgent.process_post: <<< {}'.format(rv))
                return rv
            except NotImplementedError:
                pass

        # token-type/proxy
        logger.debug('OrgBookAgent.process_post: <!< not this form type: {}'.format(form['type']))
        raise NotImplementedError('{} does not support token type {}'.format(self.__class__.__name__, form['type']))
| 38.682796
| 116
| 0.64934
|
acffba07ebbf270f030d04de1d8ca73df1ac92a6
| 14,669
|
py
|
Python
|
c7n/cli.py
|
ticketmaster/cloud-custodian
|
0da3866f70f858895af228cc08706d0909a2a324
|
[
"Apache-2.0"
] | null | null | null |
c7n/cli.py
|
ticketmaster/cloud-custodian
|
0da3866f70f858895af228cc08706d0909a2a324
|
[
"Apache-2.0"
] | null | null | null |
c7n/cli.py
|
ticketmaster/cloud-custodian
|
0da3866f70f858895af228cc08706d0909a2a324
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
# PYTHON_ARGCOMPLETE_OK (Must be in first 1024 bytes, so if tab completion
# is failing, move this above the license)
import argcomplete
import argparse
import importlib
import logging
import os
import pdb
import sys
import traceback
from datetime import datetime
from dateutil.parser import parse as date_parse
try:
    from setproctitle import setproctitle
except ImportError:
    # Optional dependency: fall back to a no-op so process-title setting is
    # silently skipped when setproctitle is not installed.
    def setproctitle(t):
        return None
from c7n import utils
from c7n.commands import schema_completer
from c7n.utils import get_account_id_from_sts
DEFAULT_REGION = 'us-east-1'
log = logging.getLogger('custodian.cli')
def _default_options(p, blacklist=""):
    """ Add basic options ot the subparser.

    `blacklist` is a list of options to exclude from the default set.

    e.g.: ['region', 'log-group']
    """
    # NOTE(review): the default blacklist is a string, not a list; `'x' not in
    # blacklist` works for both, though substring matching on a string could
    # match unintended names — callers in this module always pass lists.
    provider = p.add_argument_group(
        "provider", "AWS account information, defaults per the aws cli")
    if 'region' not in blacklist:
        provider.add_argument(
            "-r", "--region", action='append', default=[],
            dest='regions', metavar='REGION',
            help="AWS Region to target. Can be used multiple times")
    provider.add_argument(
        "--profile",
        help="AWS Account Config File Profile to utilize")
    provider.add_argument("--assume", default=None, dest="assume_role",
                          help="Role to assume")
    provider.add_argument("--external-id", default=None, dest="external_id",
                          help="External Id to provide when assuming a role")
    config = p.add_argument_group(
        "config", "Policy config file(s) and policy selectors")
    # -c is deprecated. Supported for legacy reasons
    config.add_argument("-c", "--config", help=argparse.SUPPRESS)
    config.add_argument("configs", nargs='*',
                        help="Policy configuration file(s)")
    config.add_argument("-p", "--policies", default=None, dest='policy_filter',
                        help="Only use named/matched policies")
    config.add_argument("-t", "--resource", default=None, dest='resource_type',
                        help="Only use policies with the given resource type")
    output = p.add_argument_group("output", "Output control")
    output.add_argument("-v", "--verbose", action="count", help="Verbose logging")
    if 'quiet' not in blacklist:
        output.add_argument("-q", "--quiet", action="count",
                            help="Less logging (repeatable, -qqq for no output)")
    else:
        # Keep the flag parseable but hide it from help output.
        output.add_argument("-q", "--quiet", action="count", help=argparse.SUPPRESS)
    output.add_argument("--debug", default=False, help=argparse.SUPPRESS,
                        action="store_true")
    if 'vars' not in blacklist:
        # p.add_argument('--vars', default=None,
        #               help='Vars file to substitute into policy')
        p.set_defaults(vars=None)
    if 'log-group' not in blacklist:
        p.add_argument(
            "-l", "--log-group", default=None,
            help="Cloudwatch Log Group to send policy logs")
    else:
        p.add_argument("--log-group", default=None, help=argparse.SUPPRESS)
    if 'output-dir' not in blacklist:
        p.add_argument("-s", "--output-dir", required=True,
                       help="Directory or S3 URL For policy output")
    if 'cache' not in blacklist:
        p.add_argument(
            "-f", "--cache", default="~/.cache/cloud-custodian.cache",
            help="Cache file (default %(default)s)")
        p.add_argument(
            "--cache-period", default=15, type=int,
            help="Cache validity in minutes (default %(default)i)")
    else:
        p.add_argument("--cache", default=None, help=argparse.SUPPRESS)
def _default_region(options):
    """Fill in options.regions from the boto/profile session when the user
    passed no --region flags.

    No-op when the subcommand has no `regions` option or regions were given
    explicitly; exits the process when no default region can be determined.
    """
    marker = object()
    value = getattr(options, 'regions', marker)
    if value is marker:
        # Subcommand has no region option at all; nothing to default.
        return

    if len(value) > 0:
        # User supplied one or more regions explicitly.
        return

    try:
        options.regions = [utils.get_profile_session(options).region_name]
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed here.
        log.warning('Could not determine default region')
        options.regions = [None]

    if options.regions[0] is None:
        log.error('No default region set. Specify a default via AWS_DEFAULT_REGION '
                  'or setting a region in ~/.aws/config')
        sys.exit(1)

    log.debug("using default region:%s from boto" % options.regions[0])
def _default_account_id(options):
    """Populate options.account_id, preferring the account embedded in the
    assume-role ARN and falling back to an STS lookup via the profile
    session; None when neither source is available."""
    if options.assume_role:
        try:
            # ARN format arn:aws:iam::<account-id>:role/... — index 4 is
            # the account id.
            options.account_id = options.assume_role.split(':')[4]
            return
        except IndexError:
            pass
    try:
        session = utils.get_profile_session(options)
        options.account_id = get_account_id_from_sts(session)
    except Exception:
        # Narrowed from a bare `except:`; the account id is best-effort, so
        # any lookup failure degrades to None rather than crashing.
        options.account_id = None
def _report_options(p):
    """ Add options specific to the report subcommand. """
    # Reports read prior run output, so cache/log-group don't apply; quiet is
    # re-added below only in hidden form via the blacklist.
    _default_options(p, blacklist=['cache', 'log-group', 'quiet'])
    p.add_argument(
        '--days', type=float, default=1,
        help="Number of days of history to consider")
    p.add_argument(
        '--raw', type=argparse.FileType('wb'),
        help="Store raw json of collected records to given file path")
    p.add_argument(
        '--field', action='append', default=[], type=_key_val_pair,
        metavar='HEADER=FIELD',
        help='Repeatable. JMESPath of field to include in the output OR '
        'for a tag use prefix `tag:`. Special case fields `region` and'
        '`policy` are available')
    p.add_argument(
        '--no-default-fields', action="store_true",
        help='Exclude default fields for report.')
    p.add_argument(
        '--format', default='csv', choices=['csv', 'grid', 'simple'],
        help="Format to output data in (default: %(default)s). "
        "Options include simple, grid, rst")
def _metrics_options(p):
    """ Add options specific to metrics subcommand. """
    _default_options(p, blacklist=['log-group', 'output-dir', 'cache', 'quiet'])

    p.add_argument(
        '--start', type=date_parse,
        help='Start date (requires --end, overrides --days)')
    p.add_argument(
        '--end', type=date_parse, help='End date')
    p.add_argument(
        '--days', type=int, default=14,
        help='Number of days of history to consider (default: %(default)i)')
    # CloudWatch metric period in seconds.  NOTE(review): 60*24*24 = 34560s
    # (9.6h) — an unusual period; confirm whether 60*60*24 (1 day) was meant.
    p.add_argument('--period', type=int, default=60 * 24 * 24)
def _logs_options(p):
    """ Add options specific to logs subcommand. """
    _default_options(p, blacklist=['cache', 'quiet'])

    # default time range is 0 to "now" (to include all log entries)
    p.add_argument(
        '--start',
        default='the beginning', # invalid, will result in 0
        help='Start date and/or time',
    )
    p.add_argument(
        '--end',
        # Evaluated once at import time, not per invocation.
        default=datetime.now().strftime('%c'),
        help='End date and/or time',
    )
def _schema_tab_completer(prefix, parsed_args, **kwargs):
    """argcomplete hook: complete resource selectors for the schema command."""
    if not parsed_args.summary:
        return schema_completer(prefix)
    # If we are printing the summary we discard the resource
    return []
def _schema_options(p):
    """ Add options specific to schema subcommand.

    The positional selector is optional (nargs='?') and wired to the tab
    completer; --json and the logging flags are hidden from --help.
    """
    p.add_argument(
        'resource', metavar='selector', nargs='?',
        default=None).completer = _schema_tab_completer
    p.add_argument(
        '--summary', action="store_true",
        help="Summarize counts of available resources, actions and filters")
    p.add_argument('--json', action="store_true", help=argparse.SUPPRESS)
    p.add_argument("-v", "--verbose", action="count", help="Verbose logging")
    p.add_argument("-q", "--quiet", action="count", help=argparse.SUPPRESS)
    p.add_argument("--debug", default=False, help=argparse.SUPPRESS)
def _dryrun_option(p):
    """Register the -d/--dryrun flag: filter resources without acting."""
    p.add_argument(
        "-d", "--dryrun", action="store_true",
        help="Don't execute actions but filter resources")
def _key_val_pair(value):
"""
Type checker to ensure that --field values are of the format key=val
"""
if '=' not in value:
msg = 'values must be of the form `header=field`'
raise argparse.ArgumentTypeError(msg)
return value
def setup_parser():
    """Build the top-level custodian argument parser.

    Creates one subparser per command (report, logs, metrics, version,
    validate, schema, run); each subparser sets a dotted `command` path
    that main() resolves via importlib and calls with the parsed options.
    """
    c7n_desc = "Cloud fleet management"
    parser = argparse.ArgumentParser(description=c7n_desc)
    # Setting `dest` means we capture which subparser was used.
    subs = parser.add_subparsers(dest='subparser')
    report_desc = ("Report of resources that a policy matched/ran on. "
                   "The default output format is csv, but other formats "
                   "are available.")
    report = subs.add_parser(
        "report", description=report_desc, help=report_desc)
    report.set_defaults(command="c7n.commands.report")
    _report_options(report)
    logs_desc = "Get policy execution logs from s3 or cloud watch logs"
    logs = subs.add_parser(
        'logs', help=logs_desc, description=logs_desc)
    logs.set_defaults(command="c7n.commands.logs")
    _logs_options(logs)
    metrics_desc = "Retrieve metrics for policies from CloudWatch Metrics"
    metrics = subs.add_parser(
        'metrics', description=metrics_desc, help=metrics_desc)
    metrics.set_defaults(command="c7n.commands.metrics_cmd")
    _metrics_options(metrics)
    version = subs.add_parser(
        'version', help="Display installed version of custodian")
    version.set_defaults(command='c7n.commands.version_cmd')
    version.add_argument('-v', '--verbose', action="count", help="Verbose logging")
    version.add_argument("-q", "--quiet", action="count", help=argparse.SUPPRESS)
    version.add_argument(
        "--debug", action="store_true",
        help="Print info for bug reports")
    validate_desc = (
        "Validate config files against the json schema")
    validate = subs.add_parser(
        'validate', description=validate_desc, help=validate_desc)
    validate.set_defaults(command="c7n.commands.validate")
    # -c is deprecated (hidden); main() folds it into `configs`.
    validate.add_argument(
        "-c", "--config", help=argparse.SUPPRESS)
    validate.add_argument("configs", nargs='*',
                          help="Policy Configuration File(s)")
    validate.add_argument("-v", "--verbose", action="count", help="Verbose Logging")
    validate.add_argument("-q", "--quiet", action="count", help="Less logging (repeatable)")
    validate.add_argument("--debug", default=False, help=argparse.SUPPRESS)
    schema_desc = ("Browse the available vocabularies (resources, filters, and "
                   "actions) for policy construction. The selector "
                   "is specified with RESOURCE[.CATEGORY[.ITEM]] "
                   "examples: s3, ebs.actions, or ec2.filters.instance-age")
    schema = subs.add_parser(
        'schema', description=schema_desc,
        help="Interactive cli docs for policy authors")
    schema.set_defaults(command="c7n.commands.schema_cmd")
    _schema_options(schema)
    # access_desc = ("Show permissions needed to execute the policies")
    # access = subs.add_parser(
    #     'access', description=access_desc, help=access_desc)
    # access.set_defaults(command='c7n.commands.access')
    # _default_options(access)
    # access.add_argument(
    #     '-m', '--access', default=False, action='store_true')
    run_desc = "\n".join((
        "Execute the policies in a config file",
        "",
        "Multiple regions can be passed in, as can the symbolic region 'all'. ",
        "",
        "When running across multiple regions, policies targeting resources in ",
        "regions where they do not exist will not be run. The output directory ",
        "when passing multiple regions is suffixed with the region. Resources ",
        "with global endpoints are run just once and are suffixed with the first ",
        "region passed in or us-east-1 if running against 'all' regions.",
        ""
    ))
    run = subs.add_parser(
        "run", description=run_desc, help=run_desc,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    run.set_defaults(command="c7n.commands.run")
    _default_options(run)
    _dryrun_option(run)
    run.add_argument(
        "-m", "--metrics-enabled",
        default=False, action="store_true",
        help="Emit metrics to CloudWatch Metrics")
    return parser
def _setup_logger(options):
level = 3 + (options.verbose or 0) - (options.quiet or 0)
if level <= 0:
# print nothing
log_level = logging.CRITICAL + 1
elif level == 1:
log_level = logging.ERROR
elif level == 2:
log_level = logging.WARNING
elif level == 3:
# default
log_level = logging.INFO
else:
log_level = logging.DEBUG
logging.basicConfig(
level=log_level,
format="%(asctime)s: %(name)s:%(levelname)s %(message)s")
external_log_level = logging.ERROR
if level <= 0:
external_log_level = logging.CRITICAL + 1
elif level >= 5:
external_log_level = logging.INFO
logging.getLogger('botocore').setLevel(external_log_level)
logging.getLogger('s3transfer').setLevel(external_log_level)
def main():
    """CLI entry point: parse args, resolve the subcommand, and run it.

    The `command` default set by setup_parser() is a dotted path such as
    "c7n.commands.run"; it is imported lazily here.  With --debug an
    exception drops into pdb post-mortem; without it the exception
    propagates (note the inverted `if not options.debug: raise`).
    """
    parser = setup_parser()
    argcomplete.autocomplete(parser)
    options = parser.parse_args()
    _setup_logger(options)
    # Support the deprecated -c option
    if getattr(options, 'config', None) is not None:
        options.configs.append(options.config)
    # Only these subcommands talk to AWS and need region/account defaults.
    if options.subparser in ('report', 'logs', 'metrics', 'run'):
        _default_region(options)
        _default_account_id(options)
    try:
        command = options.command
        if not callable(command):
            # Resolve "pkg.module.func" -> func.
            command = getattr(
                importlib.import_module(command.rsplit('.', 1)[0]),
                command.rsplit('.', 1)[-1])
        # Set the process name to something cleaner
        process_name = [os.path.basename(sys.argv[0])]
        process_name.extend(sys.argv[1:])
        setproctitle(' '.join(process_name))
        command(options)
    except Exception:
        if not options.debug:
            raise
        traceback.print_exc()
        pdb.post_mortem(sys.exc_info()[-1])
# Script entry point: delegate to main() when executed directly.
if __name__ == '__main__':
    main()
| 35.953431
| 92
| 0.645511
|
acffba0cb82baa2acdee4a471b09864424965c13
| 75
|
py
|
Python
|
version.py
|
redhat-developer/moksha-monitor-exporter
|
be681023b8ac9a7ad87d4e3cd5a8fce087d63aa2
|
[
"MIT"
] | null | null | null |
version.py
|
redhat-developer/moksha-monitor-exporter
|
be681023b8ac9a7ad87d4e3cd5a8fce087d63aa2
|
[
"MIT"
] | null | null | null |
version.py
|
redhat-developer/moksha-monitor-exporter
|
be681023b8ac9a7ad87d4e3cd5a8fce087d63aa2
|
[
"MIT"
] | null | null | null |
# Package version string; bump this for releases.  Importable without
# side effects so tooling can read it.
_VERSION_ = '0.0.2'
# Running the module directly prints the version prefixed with 'v'.
if __name__ == '__main__':
    print('v' + _VERSION_)
| 15
| 26
| 0.6
|
acffba1fff308eaa14d039a6531b921e7589727e
| 203
|
py
|
Python
|
models/__init__.py
|
ejmejm/GoHeuristics
|
9336d661abd48aa31ff5c9ed50cc2fbbd4472ebe
|
[
"Apache-2.0"
] | 1
|
2017-07-18T22:24:30.000Z
|
2017-07-18T22:24:30.000Z
|
models/__init__.py
|
ejmejm/GoHeuristics
|
9336d661abd48aa31ff5c9ed50cc2fbbd4472ebe
|
[
"Apache-2.0"
] | null | null | null |
models/__init__.py
|
ejmejm/GoHeuristics
|
9336d661abd48aa31ff5c9ed50cc2fbbd4472ebe
|
[
"Apache-2.0"
] | null | null | null |
from os.path import dirname, basename, isfile
import glob
modules = glob.glob(dirname(__file__)+"/*.py")
__all__ = [ basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]
| 40.6
| 94
| 0.704433
|
acffbd6d1bfd98b7fa71dda3d9b7bc95d0afb775
| 413
|
py
|
Python
|
Python/flipping-an-image.py
|
se77enn/LeetCode-Solution
|
d29ef5358cae592b63952c3d293897a176fb75e1
|
[
"MIT"
] | 1
|
2020-10-27T03:22:31.000Z
|
2020-10-27T03:22:31.000Z
|
Python/flipping-an-image.py
|
se77enn/LeetCode-Solution
|
d29ef5358cae592b63952c3d293897a176fb75e1
|
[
"MIT"
] | null | null | null |
Python/flipping-an-image.py
|
se77enn/LeetCode-Solution
|
d29ef5358cae592b63952c3d293897a176fb75e1
|
[
"MIT"
] | 1
|
2021-03-22T18:58:23.000Z
|
2021-03-22T18:58:23.000Z
|
# Time: O(n^2)
# Space: O(1)
# Python 2/3 compatibility shim: on Python 3 `xrange` does not exist, so
# alias it to `range`; the solution below can then use it unconditionally.
try:
    xrange          # Python 2
except NameError:
    xrange = range  # Python 3
class Solution(object):
    def flipAndInvertImage(self, A):
        """
        :type A: List[List[int]]
        :rtype: List[List[int]]

        Reverse each row and invert every 0/1 bit, in place, returning A.
        Uses the built-in `range` instead of the module-level `xrange`
        shim, so the class is self-contained and works unchanged on both
        Python 2 and 3 (row halves are small bounded loops).
        """
        for row in A:
            # Walk the two halves toward the middle; ~i is the mirror
            # index (-1 - i).  XOR with 1 inverts a 0/1 bit.  For an odd
            # length the middle element pairs with itself and is
            # inverted exactly once.
            for i in range((len(row) + 1) // 2):
                row[i], row[~i] = row[~i] ^ 1, row[i] ^ 1
        return A
| 20.65
| 57
| 0.469734
|
acffbe5e277a9e36d75f0230d3eee460accdd990
| 721
|
py
|
Python
|
theano/gans/BGAN/old/DISC-MNIST/plot_control.py
|
dendisuhubdy/ccw1_trainstats
|
0105bddd23366db6b55421e9b3bccd6da2f9aa51
|
[
"MIT"
] | 4
|
2017-10-29T17:41:17.000Z
|
2020-04-07T12:51:44.000Z
|
theano/gans/BGAN/old/DISC-MNIST/plot_control.py
|
dendisuhubdy/ccw1_trainstats
|
0105bddd23366db6b55421e9b3bccd6da2f9aa51
|
[
"MIT"
] | null | null | null |
theano/gans/BGAN/old/DISC-MNIST/plot_control.py
|
dendisuhubdy/ccw1_trainstats
|
0105bddd23366db6b55421e9b3bccd6da2f9aa51
|
[
"MIT"
] | 4
|
2017-11-26T21:53:28.000Z
|
2020-04-07T12:51:45.000Z
|
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from disc_mnist import train
def main():
    """Train the BGAN discriminator once per learning rate and plot the
    generator-loss curves on a single labelled figure."""
    plt.title("Boundary-seeking GAN")
    plt.xlabel("Training batches")
    plt.ylabel("Generator loss")
    # One curve per learning rate, each drawn in its own colour.
    for eta, colour in ((1e-5, "-b"), (1e-4, "-r"), (1e-3, "-g")):
        train(num_epochs=1, n_samples=20, initial_eta=eta, plot_colour=colour)
    plt.grid()
    art = []
    legend = plt.legend(loc=9, bbox_to_anchor=(0.5, -0.1))
    art.append(legend)
    plt.draw()
    plt.savefig('gen_plots/RWGAN.png',
                additional_artists=art,
                bbox_inches="tight")
# Generate and save the comparison plot when run as a script.
if __name__ == '__main__':
    main()
| 30.041667
| 73
| 0.658807
|
acffbe86cca0b741cc7ab50eb5652401f519999b
| 1,692
|
py
|
Python
|
mak/libs/pyxx/cxx/grammar/expression/primary/lambda_expr/general.py
|
bugengine/BugEngine
|
1b3831d494ee06b0bd74a8227c939dd774b91226
|
[
"BSD-3-Clause"
] | 4
|
2015-05-13T16:28:36.000Z
|
2017-05-24T15:34:14.000Z
|
mak/libs/pyxx/cxx/grammar/expression/primary/lambda_expr/general.py
|
bugengine/BugEngine
|
1b3831d494ee06b0bd74a8227c939dd774b91226
|
[
"BSD-3-Clause"
] | null | null | null |
mak/libs/pyxx/cxx/grammar/expression/primary/lambda_expr/general.py
|
bugengine/BugEngine
|
1b3831d494ee06b0bd74a8227c939dd774b91226
|
[
"BSD-3-Clause"
] | 1
|
2017-03-21T08:28:07.000Z
|
2017-03-21T08:28:07.000Z
|
"""
lambda-expression:
lambda-introducer lambda-declarator compound-statement
lambda-introducer < template-parameter-list > requires-clause? lambda-declarator compound-statement
lambda-introducer:
[ lambda-capture? ]
lambda-declarator:
lambda-specifiers
( parameter-declaration-clause ) lambda-specifiers requires-clause?
lambda-specifiers:
decl-specifier-seq? noexcept-specifier? attribute-specifier-seq? trailing-return-type?
"""
import glrp
from .....parser import cxx98
from be_typing import TYPE_CHECKING
# Grammar productions for a full lambda-expression (with and without an
# explicit template-parameter-list).  The semantic action is currently a
# no-op (`pass`).
@glrp.rule('lambda-expression : lambda-introducer lambda-declarator compound-statement')
@glrp.rule(
    'lambda-expression : lambda-introducer "<" template-parameter-list ">" requires-clause? lambda-declarator compound-statement'
)
@cxx98
def lambda_expression(self, p):
    # type: (CxxParser, glrp.Production) -> None
    pass
#@glrp.rule('lambda-introducer : "[" lambda-capture? "]"')
# The optional-capture rule above is replaced by two explicit variants
# with [split] markers (empty brackets vs. a capture list); the semantic
# action itself is a no-op.
@glrp.rule('lambda-introducer : [split] "[" lambda-capture "]"')
@glrp.rule('lambda-introducer[split] : [split] "[" [split] "]"')
@cxx98
def lambda_introducer(self, p):
    # type: (CxxParser, glrp.Production) -> None
    pass
# lambda-declarator: bare specifiers, or a parenthesized parameter clause
# followed by specifiers and an optional requires-clause.  No-op action.
@glrp.rule('lambda-declarator : lambda-specifiers')
@glrp.rule('lambda-declarator : "(" parameter-declaration-clause ")" lambda-specifiers requires-clause?')
@cxx98
def lambda_declarator(self, p):
    # type: (CxxParser, glrp.Production) -> None
    pass
# lambda-specifiers: all four components are optional.  No-op action.
@glrp.rule('lambda-specifiers : decl-specifier-seq? noexcept-specifier? attribute-specifier-seq? trailing-return-type?')
@cxx98
def lambda_specifiers(self, p):
    # type: (CxxParser, glrp.Production) -> None
    pass
if TYPE_CHECKING:
from .....parser import CxxParser
| 29.684211
| 129
| 0.728132
|
acffbf8df22aa84f1b784d1e556c549105e3bf69
| 744
|
py
|
Python
|
Vocoder_WaveRNN/vocoder_utils/text/symbols.py
|
madhavmk/QA_VoiceBot_Desktop_Application
|
89128f1e75314d585b126c19e592d0e88061d5ea
|
[
"Apache-2.0"
] | 7
|
2019-11-21T12:42:16.000Z
|
2021-08-28T08:22:30.000Z
|
Vocoder_WaveRNN/vocoder_utils/text/symbols.py
|
madhavmk/QA_VoiceBot_Desktop_Application
|
89128f1e75314d585b126c19e592d0e88061d5ea
|
[
"Apache-2.0"
] | 1
|
2019-12-28T15:12:14.000Z
|
2020-01-08T21:28:00.000Z
|
Vocoder_WaveRNN/vocoder_utils/text/symbols.py
|
madhavmk/QA_VoiceBot_Desktop_Application
|
89128f1e75314d585b126c19e592d0e88061d5ea
|
[
"Apache-2.0"
] | 6
|
2019-09-03T05:10:24.000Z
|
2021-07-08T03:27:11.000Z
|
""" from https://github.com/keithito/tacotron """
'''
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text that has been run through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details. '''
from Vocoder_WaveRNN.vocoder_utils.text import cmudict

# Building blocks of the symbol inventory used for text input.
_pad = '_'
_punctuation = '!\'(),.:;? '
_special = '-'
_letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'

# Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters):
_arpabet = ['@' + s for s in cmudict.valid_symbols]

# Export all symbols:
# The pad symbol comes first so its index is 0.
symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet
| 39.157895
| 195
| 0.74328
|
acffbf915123fef1f4d0327bfe7ee0179aa85c62
| 603
|
py
|
Python
|
dotviewer/test/test_translator.py
|
kantai/passe-pypy-taint-tracking
|
b60a3663f8fe89892dc182c8497aab97e2e75d69
|
[
"MIT"
] | 2
|
2016-07-06T23:30:20.000Z
|
2017-05-30T15:59:31.000Z
|
dotviewer/test/test_translator.py
|
kantai/passe-pypy-taint-tracking
|
b60a3663f8fe89892dc182c8497aab97e2e75d69
|
[
"MIT"
] | null | null | null |
dotviewer/test/test_translator.py
|
kantai/passe-pypy-taint-tracking
|
b60a3663f8fe89892dc182c8497aab97e2e75d69
|
[
"MIT"
] | 2
|
2020-07-09T08:14:22.000Z
|
2021-01-15T18:01:25.000Z
|
"""
Test the integration with PyPy.
"""
import py, sys
from dotviewer.conftest import option
def setup_module(mod):
    """Module-level pytest hook: skip the whole module unless the --pygame
    option was given and the `pypy` package can be imported."""
    if not option.pygame:
        py.test.skip("--pygame not enabled")
    try:
        import pypy
    except ImportError:
        py.test.skip("cannot import pypy")
# ____________________________________________________________
def is_prime(n):
    """Return True iff *n* is a prime number.

    Trial division up to sqrt(n) instead of the original O(n) scan that
    built a list of every divisor.  Results are identical for all ints:
    anything below 2 (including 0 and negatives) is not prime.
    """
    if n < 2:
        return False
    d = 2
    while d * d <= n:
        if n % d == 0:
            return False
        d += 1
    return True
def test_annotated():
    """Annotate is_prime for int input with RPython's interactive
    Translation and invoke t.viewcg() (exercises the dotviewer UI)."""
    from rpython.translator.interactive import Translation
    t = Translation(is_prime)
    t.annotate([int])
    t.viewcg()
| 21.535714
| 62
| 0.679934
|
acffc031e781cb8e49599eea11db5a4b80a35c9a
| 2,110
|
py
|
Python
|
tarnish-server/tornado-celery/setup.py
|
bbhunter/tarnish
|
0a5ac30bf0e88f3caa433ba6edbf88b0c35078c0
|
[
"MIT"
] | 464
|
2015-01-02T14:19:43.000Z
|
2022-03-19T14:28:22.000Z
|
setup.py
|
andrey-bkstg/tornado-celery
|
3a9c573ee803a187c5591b8481a23133f1f15327
|
[
"BSD-3-Clause"
] | 27
|
2015-02-23T06:16:54.000Z
|
2017-04-20T10:02:18.000Z
|
setup.py
|
andrey-bkstg/tornado-celery
|
3a9c573ee803a187c5591b8481a23133f1f15327
|
[
"BSD-3-Clause"
] | 134
|
2015-01-04T17:38:28.000Z
|
2021-08-03T09:27:21.000Z
|
#!/usr/bin/env python
import os
import sys
import re
from setuptools import setup, find_packages
# Matches lines like "VERSION = (0, 1, 0)" and captures the tuple body.
version = re.compile(r'VERSION\s*=\s*\((.*?)\)')


def get_package_version():
    """Return the package version string without importing the package.

    Scans tcelery/__init__.py (relative to this file) for the first
    VERSION tuple and joins its components with dots.  Returns None if
    no such line is found.
    """
    base = os.path.abspath(os.path.dirname(__file__))
    init_path = os.path.join(base, "tcelery/__init__.py")
    with open(init_path) as initf:
        for raw_line in initf:
            matched = version.match(raw_line.strip())
            if matched:
                return ".".join(matched.groups()[0].split(", "))
install_requires = ['celery', 'tornado']
dependency_links = []
# Pick the pika AMQP client variant matching the running interpreter;
# the Python 3 fork is fetched from GitHub via a dependency link.
if sys.version_info[0] >= 3:
    dependency_links.append(
        'https://github.com/renshawbay/pika-python3/archive/python3.zip#egg=pika-python3'
    )
    install_requires.append('python3-pika')
else:
    install_requires.append('pika')
classes = """
Development Status :: 3 - Alpha
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Topic :: System :: Distributed Computing
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Programming Language :: Python :: Implementation :: CPython
Operating System :: OS Independent
"""
classifiers = [s.strip() for s in classes.split('\n') if s]
# Package metadata; the version is read from tcelery/__init__.py and the
# long description from README.rst at build time.
setup(
    name='tornado-celery',
    version=get_package_version(),
    description='Celery integration with Tornado',
    long_description=open('README.rst').read(),
    author='Mher Movsisyan',
    author_email='mher.movsisyan@gmail.com',
    url='https://github.com/mher/tornado-celery',
    license='BSD',
    classifiers=classifiers,
    packages=find_packages(exclude=['tests', 'tests.*']),
    dependency_links=dependency_links,
    install_requires=install_requires,
    extras_require={
        'redis': ["tornado-redis"]
    },
    entry_points={
        'console_scripts': [
            'tcelery = tcelery.__main__:main',
        ]
    },
)
| 28.513514
| 89
| 0.651185
|
acffc0c905436bac883676cb162684a1752b0d24
| 19,786
|
py
|
Python
|
exps/algos/GDAS.py
|
yuezhixiong/AutoDL-Projects
|
0d3c63bdbe2d648c2119ffe8d0491f8a07cf85cb
|
[
"MIT"
] | null | null | null |
exps/algos/GDAS.py
|
yuezhixiong/AutoDL-Projects
|
0d3c63bdbe2d648c2119ffe8d0491f8a07cf85cb
|
[
"MIT"
] | null | null | null |
exps/algos/GDAS.py
|
yuezhixiong/AutoDL-Projects
|
0d3c63bdbe2d648c2119ffe8d0491f8a07cf85cb
|
[
"MIT"
] | 1
|
2021-02-26T06:26:48.000Z
|
2021-02-26T06:26:48.000Z
|
##################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2020 #
###########################################################################
# Searching for A Robust Neural Architecture in Four GPU Hours, CVPR 2019 #
###########################################################################
import sys, time, random, argparse
from copy import deepcopy
import torch
from pathlib import Path
lib_dir = (Path(__file__).parent / '..' / '..' / 'lib').resolve()
if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir))
from config_utils import load_config, dict2config
from datasets import get_datasets, get_nas_search_loaders
from procedures import prepare_seed, prepare_logger, save_checkpoint, copy_checkpoint, get_optim_scheduler
from utils import get_model_infos, obtain_accuracy
from log_utils import AverageMeter, time_string, convert_secs2time
from models import get_cell_based_tiny_net, get_search_spaces
from nas_201_api import NASBench201API as API
from datasets.get_dataset_with_transform import CUTOUT
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torch.nn.functional as F
from utils.min_norm_solvers import MinNormSolver, gradient_normalizers
def clamp(X, lower_limit, upper_limit):
  """Clamp tensor X elementwise into [lower_limit, upper_limit].

  Applies the upper bound first, then the lower bound, matching
  torch.max(torch.min(X, upper), lower): if a lower bound ever exceeds
  its upper bound, the lower bound wins.
  """
  capped_above = torch.min(X, upper_limit)
  return torch.max(capped_above, lower_limit)
def search_func(xloader, network, criterion, scheduler, w_optimizer, a_optimizer, epoch_str, xargs, logger, ood_loader=None):
  """Run one epoch of GDAS bi-level search.

  Per batch: (1) update network weights on base_inputs (optionally adding
  an OOD uniform-KL term), then (2) update architecture parameters on
  arch_inputs, combining up to five objectives (accuracy, #params, FLOPs,
  OOD, adversarial) with uniform weights or MGDA min-norm weights.

  Changes vs. the original:
  - the bare `except:` guarding the OOD iterator now catches only
    NameError (first use) and StopIteration (loader exhausted), so
    unrelated errors are no longer swallowed;
  - F.log_softmax on the outer OOD branch passes dim=-1 explicitly,
    matching the inner branch and silencing the deprecation warning.

  Returns (base_loss, base_top1, base_top5, arch_loss, arch_top1,
  arch_top5) epoch averages.
  """
  data_time, batch_time = AverageMeter(), AverageMeter()
  base_losses, base_top1, base_top5 = AverageMeter(), AverageMeter(), AverageMeter()
  arch_losses, arch_top1, arch_top5 = AverageMeter(), AverageMeter(), AverageMeter()
  network.train()
  end = time.time()
  for step, (base_inputs, base_targets, arch_inputs, arch_targets) in enumerate(xloader):
    base_inputs = base_inputs.cuda(non_blocking=True)
    arch_inputs = arch_inputs.cuda(non_blocking=True)
    if xargs.adv_outer:
      arch_inputs.requires_grad = True
    scheduler.update(None, 1.0 * step / len(xloader))
    base_targets = base_targets.cuda(non_blocking=True)
    arch_targets = arch_targets.cuda(non_blocking=True)
    if xargs.ood_inner or xargs.ood_outer:
      try:
        ood_input, _ = next(ood_loader_iter)
      except (NameError, StopIteration):
        # NameError on the first batch (iterator not yet created),
        # StopIteration once the OOD loader is exhausted: (re)start it.
        ood_loader_iter = iter(ood_loader)
        ood_input, _ = next(ood_loader_iter)
      ood_input = ood_input.cuda(non_blocking=True)
    # measure data loading time
    data_time.update(time.time() - end)

    # update the weights
    w_optimizer.zero_grad()
    _, logits, _, _ = network(base_inputs)
    base_loss = criterion(logits, base_targets)
    if xargs.ood_inner and ood_loader is not None:
      # Push OOD predictions toward the uniform distribution.
      _, ood_logits, _, _ = network(ood_input)
      ood_loss = F.kl_div(input=F.log_softmax(ood_logits, dim=-1), target=torch.ones_like(ood_logits)/ood_logits.size()[-1])
      base_loss += ood_loss
    base_loss.backward()
    torch.nn.utils.clip_grad_norm_(network.parameters(), 5)
    w_optimizer.step()
    # record
    base_prec1, base_prec5 = obtain_accuracy(logits.data, base_targets.data, topk=(1, 5))
    base_losses.update(base_loss.item(), base_inputs.size(0))
    base_top1.update (base_prec1.item(), base_inputs.size(0))
    base_top5.update (base_prec5.item(), base_inputs.size(0))

    # update the architecture-weight
    a_optimizer.zero_grad()
    grads = {}
    loss_data = {}
    # ---- acc loss ----
    _, acc_logits, nop_loss, flp_loss = network(arch_inputs)
    acc_loss = criterion(acc_logits, arch_targets)
    loss_data['acc'] = acc_loss.item()
    grads['acc'] = list(torch.autograd.grad(acc_loss, network.get_alphas(), retain_graph=True))
    # ---- end ----
    # ---- nop loss (parameter-count objective) ----
    if xargs.nop_outer:
      if xargs.nop_constrain == 'abs':
        nop_loss = torch.abs(xargs.nop_constrain_min - nop_loss)
      loss_data['nop'] = nop_loss.item()
      grads['nop'] = list(torch.autograd.grad(nop_loss, network.get_alphas(), retain_graph=True))
    # ---- end ----
    # ---- flp loss (FLOPs objective) ----
    if xargs.flp_outer:
      if xargs.flp_constrain == 'abs':
        flp_loss = torch.abs(xargs.flp_constrain_min - flp_loss)
      loss_data['flp'] = flp_loss.item()
      grads['flp'] = list(torch.autograd.grad(flp_loss, network.get_alphas(), retain_graph=True))
    # ---- end ----
    # ---- ood loss ----
    if xargs.ood_outer and ood_loader is not None:
      _, ood_logits, _, _ = network(ood_input)
      ood_loss = F.kl_div(input=F.log_softmax(ood_logits, dim=-1), target=torch.ones_like(ood_logits)/ood_logits.size()[-1])
      loss_data['ood'] = ood_loss.item()
      grads['ood'] = list(torch.autograd.grad(ood_loss, network.get_alphas(), retain_graph=True))
      del ood_logits
    # ---- end ----
    # ---- adv loss (single FGSM-style step around arch_inputs) ----
    if xargs.adv_outer:
      if xargs.dataset == 'cifar10':
        mean = (0.4914, 0.4822, 0.4465)
        std = (0.2471, 0.2435, 0.2616)
      elif xargs.dataset == 'cifar100':
        mean = (0.5071, 0.4867, 0.4408)
        std = (0.2675, 0.2565, 0.2761)
      mean = torch.FloatTensor(mean).view(3,1,1)
      std = torch.FloatTensor(std).view(3,1,1)
      # Bounds/epsilon are expressed in normalized-pixel space.
      upper_limit = ((1 - mean)/ std).cuda()
      lower_limit = ((0 - mean)/ std).cuda()
      epsilon = ((xargs.epsilon / 255.) / std).cuda()
      step_size = epsilon * 1.25
      delta = ((torch.rand(arch_inputs.size())-0.5)*2).cuda() * epsilon
      adv_grad = torch.autograd.grad(acc_loss, arch_inputs, retain_graph=True, create_graph=False)[0]
      adv_grad = adv_grad.detach().data
      delta = clamp(delta + step_size * torch.sign(adv_grad), -epsilon, epsilon)
      delta = clamp(delta, lower_limit - arch_inputs.data, upper_limit - arch_inputs.data)
      adv_input = (arch_inputs.data + delta).cuda()
      _, adv_logits, _, _ = network(adv_input)
      adv_loss = criterion(adv_logits, arch_targets)
      loss_data['adv'] = adv_loss.item()
      grads['adv'] = list(torch.autograd.grad(adv_loss, network.get_alphas(), retain_graph=True))
      del mean, std, upper_limit, lower_limit, epsilon, step_size, delta, adv_grad, adv_input, adv_logits
    # ---- end ----
    # ---- MGDA: normalize per-objective gradients, then solve for weights ----
    gn = gradient_normalizers(grads, loss_data, normalization_type=xargs.grad_norm) # loss+, loss, l2
    for t in grads:
      for gr_i in range(len(grads[t])):
        grads[t][gr_i] = grads[t][gr_i] / (gn[t]+1e-7)
    if xargs.MGDA and (len(grads)>1):
      sol, _ = MinNormSolver.find_min_norm_element([grads[t] for t in grads])
      print(sol) # acc, adv, nop
    else:
      sol = [1] * len(grads)
    arch_loss = 0
    for kk, t in enumerate(grads):
      if t == 'acc':
        arch_loss += float(sol[kk]) * acc_loss
      elif t == 'adv':
        arch_loss += float(sol[kk]) * adv_loss
      elif t == 'nop':
        arch_loss += float(sol[kk]) * nop_loss
      elif t == 'ood':
        arch_loss += float(sol[kk]) * ood_loss
      elif t == 'flp':
        arch_loss += float(sol[kk]) * flp_loss
    # ---- end ----
    arch_loss.backward()
    a_optimizer.step()
    # record
    arch_prec1, arch_prec5 = obtain_accuracy(acc_logits.data, arch_targets.data, topk=(1, 5))
    arch_losses.update(arch_loss.item(), arch_inputs.size(0))
    arch_top1.update (arch_prec1.item(), arch_inputs.size(0))
    arch_top5.update (arch_prec5.item(), arch_inputs.size(0))

    # measure elapsed time
    batch_time.update(time.time() - end)
    end = time.time()

    if step % xargs.print_freq == 0 or step + 1 == len(xloader):
      Sstr = '*SEARCH* ' + time_string() + ' [{:}][{:03d}/{:03d}]'.format(epoch_str, step, len(xloader))
      Tstr = 'Time {batch_time.val:.2f} ({batch_time.avg:.2f}) Data {data_time.val:.2f} ({data_time.avg:.2f})'.format(batch_time=batch_time, data_time=data_time)
      Wstr = 'Base [Loss {loss.val:.3f} ({loss.avg:.3f})  Prec@1 {top1.val:.2f} ({top1.avg:.2f}) Prec@5 {top5.val:.2f} ({top5.avg:.2f})]'.format(loss=base_losses, top1=base_top1, top5=base_top5)
      Astr = 'Arch [Loss {loss.val:.3f} ({loss.avg:.3f})  Prec@1 {top1.val:.2f} ({top1.avg:.2f}) Prec@5 {top5.val:.2f} ({top5.avg:.2f})]'.format(loss=arch_losses, top1=arch_top1, top5=arch_top5)
      logger.log(Sstr + ' ' + Tstr + ' ' + Wstr + ' ' + Astr)
  return base_losses.avg, base_top1.avg, base_top5.avg, arch_losses.avg, arch_top1.avg, arch_top5.avg
def main(xargs):
  """Top-level GDAS search driver.

  Sets up determinism, data loaders (plus an optional SVHN OOD loader),
  the GDAS search model and its two optimizers, optionally resumes from a
  checkpoint, then runs search_func for every epoch while annealing the
  Gumbel temperature tau, checkpointing and tracking the best genotype.
  """
  assert torch.cuda.is_available(), 'CUDA is not available.'
  torch.backends.cudnn.enabled = True
  torch.backends.cudnn.benchmark = False
  torch.backends.cudnn.deterministic = True
  torch.set_num_threads( xargs.workers )
  prepare_seed(xargs.rand_seed)
  # NOTE(review): uses the module-level `args`, not the `xargs` parameter
  # (they are the same object at the only call site) — confirm intent.
  logger = prepare_logger(args)

  train_data, valid_data, xshape, class_num = get_datasets(xargs.dataset, xargs.data_path, -1)
  #config_path = 'configs/nas-benchmark/algos/GDAS.config'
  config = load_config(xargs.config_path, {'class_num': class_num, 'xshape': xshape}, logger)
  search_loader, _, valid_loader = get_nas_search_loaders(train_data, valid_data, xargs.dataset, 'configs/nas-benchmark/', config.batch_size, xargs.workers)
  if xargs.ood_inner or xargs.ood_outer:
    # SVHN as the out-of-distribution set, normalized with CIFAR stats
    # and subsampled to the size of the training set.
    mean = [x / 255 for x in [125.3, 123.0, 113.9]]
    std = [x / 255 for x in [63.0, 62.1, 66.7]]
    # lists = [transforms.RandomHorizontalFlip(), transforms.RandomCrop(32, padding=4), transforms.ToTensor(), transforms.Normalize(mean, std)]
    lists = [transforms.ToTensor(), transforms.Normalize(mean, std)]
    # lists += [CUTOUT(-1)]
    ood_transform = transforms.Compose(lists)
    ood_data = dset.SVHN(root=args.data_path, split='train', download=True, transform=ood_transform)
    ood_loader = torch.utils.data.DataLoader(ood_data, batch_size=config.batch_size,
                                             sampler=torch.utils.data.sampler.SubsetRandomSampler(list(range(len(ood_data)))[:len(train_data)]),
                                             pin_memory=True, num_workers=xargs.workers)
  else:
    ood_loader = None
  logger.log('||||||| {:10s} ||||||| Search-Loader-Num={:}, batch size={:}'.format(xargs.dataset, len(search_loader), config.batch_size))
  logger.log('||||||| {:10s} ||||||| Config={:}'.format(xargs.dataset, config))

  global search_space
  search_space = get_search_spaces('cell', xargs.search_space_name)
  if xargs.model_config is None:
    model_config = dict2config({'name': 'GDAS', 'C': xargs.channel, 'N': xargs.num_cells,
                                'max_nodes': xargs.max_nodes, 'num_classes': class_num,
                                'space'    : search_space,
                                'affine'   : False, 'track_running_stats': bool(xargs.track_running_stats),}, None)
  else:
    model_config = load_config(xargs.model_config, {'num_classes': class_num, 'space'    : search_space,
                                                    'affine'     : False, 'track_running_stats': bool(xargs.track_running_stats)}, None)
  search_model = get_cell_based_tiny_net(model_config)
  # logger.log('search-model :\n{:}'.format(search_model))
  logger.log('model-config : {:}'.format(model_config))

  # Separate optimizers: SGD-style for weights, Adam for architecture alphas.
  w_optimizer, w_scheduler, criterion = get_optim_scheduler(search_model.get_weights(), config)
  a_optimizer = torch.optim.Adam(search_model.get_alphas(), lr=xargs.arch_learning_rate, betas=(0.5, 0.999), weight_decay=xargs.arch_weight_decay)
  logger.log('w-optimizer : {:}'.format(w_optimizer))
  logger.log('a-optimizer : {:}'.format(a_optimizer))
  logger.log('w-scheduler : {:}'.format(w_scheduler))
  logger.log('criterion   : {:}'.format(criterion))
  flop, param = get_model_infos(search_model, xshape)
  logger.log('FLOP = {:.2f} M, Params = {:.2f} MB'.format(flop, param))
  logger.log('search-space [{:} ops] : {:}'.format(len(search_space), search_space))
  if xargs.arch_nas_dataset is None:
    api = None
  else:
    api = API(xargs.arch_nas_dataset)
  logger.log('{:} create API = {:} done'.format(time_string(), api))

  last_info, model_base_path, model_best_path = logger.path('info'), logger.path('model'), logger.path('best')
  # network, criterion = torch.nn.DataParallel(search_model).cuda(), criterion.cuda()
  network, criterion = search_model.cuda(), criterion.cuda()

  if last_info.exists(): # automatically resume from previous checkpoint
    logger.log("=> loading checkpoint of the last-info '{:}' start".format(last_info))
    last_info   = torch.load(last_info)
    start_epoch = last_info['epoch']
    checkpoint  = torch.load(last_info['last_checkpoint'])
    genotypes   = checkpoint['genotypes']
    valid_accuracies = checkpoint['valid_accuracies']
    search_model.load_state_dict( checkpoint['search_model'] )
    w_scheduler.load_state_dict ( checkpoint['w_scheduler'] )
    w_optimizer.load_state_dict ( checkpoint['w_optimizer'] )
    a_optimizer.load_state_dict ( checkpoint['a_optimizer'] )
    logger.log("=> loading checkpoint of the last-info '{:}' start with {:}-th epoch.".format(last_info, start_epoch))
  else:
    logger.log("=> do not find the last-info file : {:}".format(last_info))
    start_epoch, valid_accuracies, genotypes = 0, {'best': -1}, {-1: search_model.genotype()}

  # start training
  start_time, search_time, epoch_time, total_epoch = time.time(), AverageMeter(), AverageMeter(), config.epochs + config.warmup
  for epoch in range(start_epoch, total_epoch):
    w_scheduler.update(epoch, 0.0)
    need_time = 'Time Left: {:}'.format( convert_secs2time(epoch_time.val * (total_epoch-epoch), True) )
    epoch_str = '{:03d}-{:03d}'.format(epoch, total_epoch)
    # Linearly anneal the Gumbel-softmax temperature from tau_max to tau_min.
    search_model.set_tau( xargs.tau_max - (xargs.tau_max-xargs.tau_min) * epoch / (total_epoch-1) )
    logger.log('\n[Search the {:}-th epoch] {:}, tau={:}, LR={:}'.format(epoch_str, need_time, search_model.get_tau(), min(w_scheduler.get_lr())))

    search_w_loss, search_w_top1, search_w_top5, valid_a_loss , valid_a_top1 , valid_a_top5 \
                = search_func(search_loader, network, criterion, w_scheduler, w_optimizer, a_optimizer, epoch_str, xargs, logger, ood_loader)
    search_time.update(time.time() - start_time)
    logger.log('[{:}] searching : loss={:.2f}, accuracy@1={:.2f}%, accuracy@5={:.2f}%, time-cost={:.1f} s'.format(epoch_str, search_w_loss, search_w_top1, search_w_top5, search_time.sum))
    logger.log('[{:}] evaluate  : loss={:.2f}, accuracy@1={:.2f}%, accuracy@5={:.2f}%'.format(epoch_str, valid_a_loss , valid_a_top1 , valid_a_top5 ))
    # check the best accuracy
    valid_accuracies[epoch] = valid_a_top1
    if valid_a_top1 > valid_accuracies['best']:
      valid_accuracies['best'] = valid_a_top1
      genotypes['best']        = search_model.genotype()
      find_best = True
    else: find_best = False

    genotypes[epoch] = search_model.genotype()
    logger.log('<<<--->>> The {:}-th epoch : {:}'.format(epoch_str, genotypes[epoch]))
    # save checkpoint
    save_path = save_checkpoint({'epoch' : epoch + 1,
                'args'  : deepcopy(xargs),
                'search_model': search_model.state_dict(),
                'w_optimizer' : w_optimizer.state_dict(),
                'a_optimizer' : a_optimizer.state_dict(),
                'w_scheduler' : w_scheduler.state_dict(),
                'genotypes'   : genotypes,
                'valid_accuracies' : valid_accuracies},
                model_base_path, logger)
    last_info = save_checkpoint({
          'epoch': epoch + 1,
          'args' : deepcopy(args),
          'last_checkpoint': save_path,
          }, logger.path('info'), logger)
    if find_best:
      logger.log('<<<--->>> The {:}-th epoch : find the highest validation accuracy : {:.2f}%.'.format(epoch_str, valid_a_top1))
      copy_checkpoint(model_base_path, model_best_path, logger)
    with torch.no_grad():
      logger.log('{:}'.format(search_model.show_alphas()))
      if api is not None: logger.log('{:}'.format(api.query_by_arch(genotypes[epoch], '200')))
    # measure elapsed time
    epoch_time.update(time.time() - start_time)
    start_time = time.time()

  logger.log('\n' + '-'*100)
  # check the performance from the architecture dataset
  logger.log('GDAS : run {:} epochs, cost {:.1f} s, last-geno is {:}.'.format(total_epoch, search_time.sum, genotypes[total_epoch-1]))
  if api is not None: logger.log('{:}'.format(api.query_by_arch(genotypes[total_epoch-1], '200')))
  logger.close()
# Command-line interface: parse all search options, randomize the seed
# when none (or a negative one) is given, and launch main().
if __name__ == '__main__':
  parser = argparse.ArgumentParser("GDAS")
  parser.add_argument('--data_path',          type=str,   help='Path to dataset')
  parser.add_argument('--dataset',            type=str,   choices=['cifar10', 'cifar100', 'ImageNet16-120'], help='Choose between Cifar10/100 and ImageNet-16.')
  # channels and number-of-cells
  parser.add_argument('--search_space_name',  type=str,   help='The search space name.')
  parser.add_argument('--max_nodes',          type=int,   help='The maximum number of nodes.')
  parser.add_argument('--channel',            type=int,   help='The number of channels.')
  parser.add_argument('--num_cells',          type=int,   help='The number of cells in one stage.')
  parser.add_argument('--track_running_stats',type=int,   choices=[0,1],help='Whether use track_running_stats or not in the BN layer.')
  parser.add_argument('--config_path',        type=str,   help='The path of the configuration.')
  parser.add_argument('--model_config',       type=str,   help='The path of the model configuration. When this arg is set, it will cover max_nodes / channels / num_cells.')
  # architecture leraning rate
  parser.add_argument('--arch_learning_rate', type=float, default=3e-4, help='learning rate for arch encoding')
  parser.add_argument('--arch_weight_decay',  type=float, default=1e-3, help='weight decay for arch encoding')
  parser.add_argument('--tau_min',            type=float, help='The minimum tau for Gumbel')
  parser.add_argument('--tau_max',            type=float, help='The maximum tau for Gumbel')
  # log
  parser.add_argument('--workers',            type=int,   default=2,    help='number of data loading workers (default: 2)')
  parser.add_argument('--save_dir',           type=str,   help='Folder to save checkpoints and log.')
  parser.add_argument('--arch_nas_dataset',   type=str,   help='The path to load the architecture dataset (tiny-nas-benchmark).')
  parser.add_argument('--print_freq',         type=int,   help='print frequency (default: 200)')
  parser.add_argument('--rand_seed',          type=int,   help='manual seed')
  # E2RNAS: flags selecting which extra objectives enter the outer loop.
  parser.add_argument('--nop_outer', default=False, action='store_true', help='use nop in outer loop')
  parser.add_argument('--flp_outer', default=False, action='store_true', help='use flp in outer loop')
  parser.add_argument('--adv_outer', default=False, action='store_true', help='use adv in outer loop')
  parser.add_argument('--ood_outer', default=False, action='store_true', help='use ood in outer loop')
  parser.add_argument('--ood_inner', default=False, action='store_true', help='use ood in inner loop')
  parser.add_argument('--MGDA', default=False, action='store_true', help='use MGDA')
  parser.add_argument('--grad_norm', type=str, default='none', choices=['none', 'lossplus', 'loss', 'l2'], help='use gradient normalization in MGDA')
  parser.add_argument('--nop_constrain', type=str, default='none', choices=['max', 'min', 'both', 'abs', 'none'], help='use constraint in model size')
  parser.add_argument('--nop_constrain_min', type=float, default=0, help='constrain the model size')
  parser.add_argument('--flp_constrain', type=str, default='none', choices=['max', 'min', 'both', 'abs', 'none'], help='use constraint in model size')
  parser.add_argument('--flp_constrain_min', type=float, default=0, help='constrain the model size')
  parser.add_argument('--epsilon', default=2, type=int)
  args = parser.parse_args()
  if args.rand_seed is None or args.rand_seed < 0: args.rand_seed = random.randint(1, 100000)
  main(args)
| 54.808864
| 194
| 0.66638
|
acffc187b0f6247290379df3813f9acfe3dab6f8
| 11,128
|
py
|
Python
|
duckling/test/test_duckling.py
|
spenly/python-duckling
|
e40ecd13125779c1f774d624ab3b296b5d8c6035
|
[
"Apache-2.0"
] | 129
|
2016-10-15T07:18:52.000Z
|
2021-12-29T19:41:49.000Z
|
duckling/test/test_duckling.py
|
spenly/python-duckling
|
e40ecd13125779c1f774d624ab3b296b5d8c6035
|
[
"Apache-2.0"
] | 45
|
2016-10-17T16:33:17.000Z
|
2021-12-02T17:54:53.000Z
|
duckling/test/test_duckling.py
|
spenly/python-duckling
|
e40ecd13125779c1f774d624ab3b296b5d8c6035
|
[
"Apache-2.0"
] | 29
|
2017-03-20T12:59:27.000Z
|
2021-11-10T10:07:38.000Z
|
import pytest
import jpype
from datetime import datetime, timedelta
from dateutil import parser
from dateutil.tz import tzlocal
from duckling import Duckling, Dim, Language
@pytest.fixture
def test_input():
return '2pm'
@pytest.fixture
def test_time_input():
return 'Let\'s meet tomorrow'
@pytest.fixture
def dec_30():
return '1990-12-30'
@pytest.fixture
def answer_to_the_ultimate_question_of_life_the_universe_and_everything():
return 42
@pytest.fixture
def two_pm():
return datetime.now(tzlocal()).replace(
hour=14, minute=0, second=0, microsecond=0)
@pytest.fixture
def two_pm_str(two_pm):
return two_pm.strftime('%Y-%m-%dT%H:%M:%S%z')
@pytest.fixture(scope='module')
def clojure():
return jpype.JClass('clojure.java.api.Clojure')
@pytest.fixture
def java_symbol():
return jpype.JClass('clojure.lang.Symbol')
@pytest.fixture
def java_boolean():
return jpype.JClass('java.lang.Boolean')
@pytest.fixture
def java_string():
return jpype.JClass('java.lang.String')
@pytest.fixture
def java_long():
return jpype.JClass('java.lang.Long')
@pytest.fixture
def java_int():
return jpype.JClass('java.lang.Integer')
@pytest.fixture
def java_arrays():
return jpype.JClass('java.util.Arrays')
@pytest.fixture
def java_persistant_array_map():
return jpype.JClass('clojure.lang.PersistentArrayMap')
@pytest.fixture
def java_map_entry():
return jpype.JClass('clojure.lang.MapEntry')
@pytest.fixture
def java_keyword():
return jpype.JClass('clojure.lang.Keyword')
@pytest.fixture
def java_hash_map():
return jpype.JClass('java.util.HashMap')
@pytest.fixture(scope='module')
def clojure_loaded(clojure):
duckling_load = clojure.var("duckling.core", "load!")
duckling_load.invoke()
return clojure
@pytest.fixture
def clojure_parse(clojure_loaded):
return clojure_loaded.var("duckling.core", "parse")
@pytest.fixture(scope='module')
def duckling():
return Duckling()
@pytest.fixture(scope='module')
def duckling_loaded(duckling):
duckling.load()
return duckling
def test_load(duckling):
duckling.load()
assert duckling._is_loaded is True
def test_not_load():
    """A freshly constructed Duckling must refuse to parse before load()."""
    duckling = Duckling()
    assert duckling._is_loaded is False
    # parse() raises instead of returning garbage while the JVM-backed
    # pipeline has not been initialised.
    with pytest.raises(RuntimeError):
        duckling.parse('')
def test_parse(duckling_loaded, test_input):
result = duckling_loaded.parse(test_input)
assert len(result) == 5
def test_parse_with_reference_time(duckling_loaded, test_time_input, dec_30):
result = duckling_loaded.parse(test_time_input, reference_time=dec_30)
assert parser.parse(u'1990-12-30').date() + timedelta(days=1) == parser.parse(
result[0][u'value'][u'values'][0][u'value']).date()
def test_parse_with_filter(duckling_loaded, test_input, two_pm):
result = duckling_loaded.parse(test_input, dim_filter=Dim.TIME)
assert len(result) == 1
assert result[0][u'dim'] == Dim.TIME
result_val = result[0][u'value'][u'values'][0][u'value']
result_datetime = parser.parse(result_val)
assert result_datetime.time() == two_pm.time()
def test_parse_result(clojure_parse, duckling, test_input):
result = clojure_parse.invoke(Language.ENGLISH, test_input)
duckling_result = duckling._parse_result(result)
assert type(duckling_result) == list
assert len(duckling_result) == 5
def test_parse_dict(duckling, java_symbol, java_keyword, java_map_entry, java_persistant_array_map, two_pm_str):
input_symbol_type = java_symbol.create(u'type')
input_keyword_type = java_keyword.intern(input_symbol_type)
input_symbol_value = java_symbol.create(u'value')
input_keyword_value = java_keyword.intern(input_symbol_value)
input_symbol_grain = java_symbol.create(u'grain')
input_keyword_grain = java_keyword.intern(input_symbol_grain)
input_symbol_hour = java_symbol.create(u'hour')
input_keyword_hour = java_keyword.intern(input_symbol_hour)
test_array_map = java_persistant_array_map([
input_keyword_type, 'value',
input_keyword_value, two_pm_str,
input_keyword_grain, input_keyword_hour
])
result = duckling._parse_dict(test_array_map)
assert type(result) == dict
assert len(result) == 3
def test_parse_list(duckling, java_arrays, java_persistant_array_map, java_symbol, java_keyword):
input_symbol = java_symbol.create(u'type')
input_keyword = java_keyword.intern(input_symbol)
test_array_map = java_persistant_array_map([input_keyword, 'value'])
test_list = java_arrays.asList([test_array_map])
result = duckling._parse_list(test_list)
assert type(result) == list
assert len(result) == 1
def test_parse_value(duckling, two_pm_str, java_long, java_string):
    """_parse_value converts Java values to Python types according to the dim."""
    # Time dims pass the ISO timestamp string through unchanged.
    assert duckling._parse_value(two_pm_str, Dim.TIME) == two_pm_str
    # Numeric dims unwrap java.lang.Long into Python numbers.
    number_dims = {Dim.TEMPERATURE, Dim.NUMBER, Dim.ORDINAL,
                   Dim.DISTANCE, Dim.VOLUME, Dim.AMOUNTOFMONEY, Dim.DURATION}
    for dim in number_dims:
        assert duckling._parse_value(java_long(2), dim) == 2
    # String dims — and the no-dim default — decode java.lang.String.
    string_dims = {Dim.EMAIL, Dim.URL, Dim.PHONENUMBER}
    for dim in string_dims:
        assert duckling._parse_value(java_string(u'test'), dim) == u'test'
    assert duckling._parse_value(java_string(u'test')) == u'test'
def test_parse_float(duckling, java_long):
assert duckling._parse_float(java_long(2)) == 2
def test_parse_int(duckling, java_int):
assert duckling._parse_int(java_int(2)) == 2
def test_parse_time(duckling, two_pm, two_pm_str):
assert duckling._parse_time(two_pm_str) == two_pm_str
duckling.parse_datetime = True
assert duckling._parse_time(two_pm_str) == two_pm
duckling.parse_datetime = False
def test_parse_string(duckling, java_string):
assert duckling._parse_string(java_string(u'test')) == u'test'
def test_parse_symbol(duckling, java_symbol):
input_symbol = java_symbol.create(u':test')
assert duckling._parse_symbol(input_symbol) == u'test'
def test_parse_boolean(duckling, java_boolean):
assert duckling._parse_boolean(java_boolean(True)) is True
assert duckling._parse_boolean(java_boolean(False)) is False
def test_parse_time_input(duckling_loaded):
result = duckling_loaded.parse(
'the day before labor day 2020', dim_filter=Dim.TIME)
assert len(result) == 2
assert result[0][u'dim'] == Dim.TIME
result_val = result[0][u'value'][u'values'][0][u'value']
result_datetime = parser.parse(result_val)
assert result_datetime == datetime(
2020, 9, 6, 0, 0, 0, 0, tzinfo=tzlocal())
def test_parse_temperature_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'42 degrees', dim_filter=Dim.TEMPERATURE)
assert len(result) == 1
assert result[0][u'dim'] == Dim.TEMPERATURE
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_number_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'forty-two', dim_filter=Dim.NUMBER)
assert len(result) == 1
assert result[0][u'dim'] == Dim.NUMBER
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_ordinal_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse('second', dim_filter=Dim.ORDINAL)
assert len(result) == 1
assert result[0][u'dim'] == Dim.ORDINAL
result_val = result[0][u'value']['value']
assert result_val == 2
def test_parse_distance_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'42km', dim_filter=Dim.DISTANCE)
assert len(result) == 1
assert result[0][u'dim'] == Dim.DISTANCE
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_volume_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'42liters', dim_filter=Dim.VOLUME)
assert len(result) == 1
assert result[0][u'dim'] == Dim.VOLUME
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_amount_of_money_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'$42', dim_filter=Dim.AMOUNTOFMONEY)
assert len(result) == 1
assert result[0][u'dim'] == Dim.AMOUNTOFMONEY
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_duration_input(duckling_loaded,
answer_to_the_ultimate_question_of_life_the_universe_and_everything):
result = duckling_loaded.parse(
'42 days', dim_filter=Dim.DURATION)
assert len(result) == 1
assert result[0][u'dim'] == Dim.DURATION
result_val = result[0][u'value']['value']
assert result_val == answer_to_the_ultimate_question_of_life_the_universe_and_everything
def test_parse_email_input(duckling_loaded):
test_input = 'contact@frank-blechschmidt.com'
result = duckling_loaded.parse(
'contact me at {input}'.format(input=test_input),
dim_filter=Dim.EMAIL
)
assert len(result) == 1
assert result[0][u'dim'] == Dim.EMAIL
result_val = result[0][u'value']['value']
assert result_val == test_input
def test_parse_url_input(duckling_loaded):
test_input = 'sap.com'
result = duckling_loaded.parse(
'website under construction: {input}'.format(input=test_input),
dim_filter=Dim.URL
)
assert len(result) == 1
assert result[0][u'dim'] == Dim.URL
result_val = result[0][u'value']['value']
assert result_val == test_input
def test_parse_phone_number_input(duckling_loaded):
test_input = '(650)-424-4242 '
result = duckling_loaded.parse(
'{input}is a random phone number'.format(input=test_input),
dim_filter=Dim.PHONENUMBER
)
assert len(result) == 1
assert result[0][u'dim'] == Dim.PHONENUMBER
result_val = result[0][u'value']['value']
assert result_val == test_input
def test_multiple_dims(duckling_loaded):
test_input = '42'
result = duckling_loaded.parse(
'it will be ready in {input} weeks'.format(input=test_input),
dim_filter=[Dim.DISTANCE, Dim.NUMBER]
)
assert len(result) == 2
assert result[0][u'value']['value'] == float(test_input)
assert result[1][u'value']['value'] == float(test_input)
| 28.533333
| 112
| 0.717739
|
acffc231bd9d4b9a6886afb4cfcfdd88c9c494c1
| 603
|
py
|
Python
|
main.py
|
we-wow/pf-work
|
e40a4c20833d83db34e6737cd2b885a903034934
|
[
"MIT"
] | null | null | null |
main.py
|
we-wow/pf-work
|
e40a4c20833d83db34e6737cd2b885a903034934
|
[
"MIT"
] | null | null | null |
main.py
|
we-wow/pf-work
|
e40a4c20833d83db34e6737cd2b885a903034934
|
[
"MIT"
] | null | null | null |
from tools import *
# Run the fast-decoupled power-flow solver over the network / power data files.
# NOTE(review): 'brach_path' matches the keyword expected by decoupling_method
# (presumably a misspelling of 'branch_path') — confirm in tools before renaming.
result, times, source = decoupling_method(brach_path='./data_network.txt', power_path='./data_power.txt')
short_circuit_bus = 4  # index of the faulted (short-circuit) bus
# Exact short-circuit voltages/currents at the faulted bus ...
sc_voltage, sc_current, info = accurate_short_circuit_param(result=result, source=source,
                                                            short_circuit_no=short_circuit_bus)
# ... and the rough (approximate) estimate for comparison.
rough_sc_voltage, rough_info = rough_short_circuit_param(source=source, short_circuit_no=short_circuit_bus)
write2txt(result, source, info, rough_info, sc_voltage,
          sc_current, rough_sc_voltage, short_circuit_no=short_circuit_bus)  # write the results to a txt file
| 54.818182
| 107
| 0.731343
|
acffc257f4aec5a1b5c2f37f9921487fb330d660
| 1,228
|
py
|
Python
|
tests/pymcell4_positive/0185_model_deepcopy/model.py
|
mcellteam/mcell-tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | 1
|
2021-08-13T20:40:54.000Z
|
2021-08-13T20:40:54.000Z
|
tests/pymcell4_positive/0185_model_deepcopy/model.py
|
mcellteam/mcell_tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | null | null | null |
tests/pymcell4_positive/0185_model_deepcopy/model.py
|
mcellteam/mcell_tests
|
34d2d967b75d56edbae999bf0090641850f4f4fe
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
import os
import math
import copy
# Locate the MCell Python library through the MCELL_PATH environment variable.
MCELL_PATH = os.environ.get('MCELL_PATH', '')
if MCELL_PATH:
    sys.path.append(os.path.join(MCELL_PATH, 'lib'))
else:
    print("Error: variable MCELL_PATH that is used to find the mcell library was not set.")
    sys.exit(1)
import mcell as m
from parameters import *
if len(sys.argv) == 3 and sys.argv[1] == '-seed':
    # overwrite value SEED defined in module parameters
    SEED = int(sys.argv[2])
model = m.Model()
# ---- configuration ----
model.config.time_step = TIME_STEP
model.config.seed = 1
model.config.total_iterations = ITERATIONS
model.config.partition_dimension = 10
model.config.subpartition_dimension = 2.5
# ---- default configuration overrides ----
# ---- add components ----
model.load_bngl('model.bngl')
# The deep copy must be fully independent of the original: giving it a
# different seed below should yield a different trajectory (and count).
model2 = copy.deepcopy(model)
model2.config.seed = 4
# ---- initialization and execution ----
model.initialize()
model2.initialize()
model.run_iterations(ITERATIONS)
model2.run_iterations(ITERATIONS)
# Regression values for seeds 1 and 4 — equal counts would indicate the
# deepcopy shared state with the original model.
print(model.find_count('c').get_current_value())
assert model.find_count('c').get_current_value() == 34
print(model2.find_count('c').get_current_value())
assert model2.find_count('c').get_current_value() == 25
| 21.172414
| 91
| 0.722313
|
acffc263b77d3574f920568bf53b4b144993f40b
| 885
|
py
|
Python
|
model/tensorflow_hub/examples/tfhub_text_classifier/textclassifier.py
|
sk-ip/dffml
|
1ef5a169327d71baecd5eccae83ad4a9999ccad1
|
[
"MIT"
] | null | null | null |
model/tensorflow_hub/examples/tfhub_text_classifier/textclassifier.py
|
sk-ip/dffml
|
1ef5a169327d71baecd5eccae83ad4a9999ccad1
|
[
"MIT"
] | null | null | null |
model/tensorflow_hub/examples/tfhub_text_classifier/textclassifier.py
|
sk-ip/dffml
|
1ef5a169327d71baecd5eccae83ad4a9999ccad1
|
[
"MIT"
] | null | null | null |
from dffml import CSVSource, Features, Feature
from dffml.noasync import train, accuracy, predict
from dffml_model_tensorflow_hub.text_classifier import TextClassificationModel
from dffml_model_tensorflow_hub.text_classifier_accuracy import (
    TextClassifierAccuracy,
)
# Three-class sentiment classifier over a single text feature ("sentence").
model = TextClassificationModel(
    features=Features(Feature("sentence", str, 1)),
    predict=Feature("sentiment", int, 1),
    classifications=[0, 1, 2],
    clstype=int,
    location="tempdir",  # model state is persisted under this directory
)
# Train the model
train(model, "train.csv")
# Assess accuracy (alternate way of specifying data source)
scorer = TextClassifierAccuracy()
print("Accuracy:", accuracy(model, scorer, CSVSource(filename="test.csv")))
# Make prediction
for i, features, prediction in predict(
    model, {"sentence": "This track is horrible"},
):
    # Copy the predicted class into the feature dict before printing.
    features["sentiment"] = prediction["sentiment"]["value"]
    print(features)
| 30.517241
| 78
| 0.750282
|
acffc2d2df7b10b36bc78806241e0a971d85881c
| 1,956
|
pyw
|
Python
|
MobileMouse/VINI_server.pyw
|
yugrocks/MobileMouse
|
f60515633a8e03551f2ae618e39c82f49617cc16
|
[
"MIT"
] | 40
|
2017-04-11T09:33:39.000Z
|
2021-12-29T03:21:55.000Z
|
MobileMouse/VINI_server.pyw
|
yugrocks/MobileMouse
|
f60515633a8e03551f2ae618e39c82f49617cc16
|
[
"MIT"
] | 1
|
2019-12-02T05:25:40.000Z
|
2019-12-02T05:25:40.000Z
|
MobileMouse/VINI_server.pyw
|
yugrocks/MobileMouse
|
f60515633a8e03551f2ae618e39c82f49617cc16
|
[
"MIT"
] | 5
|
2017-07-22T16:48:35.000Z
|
2020-11-16T10:49:16.000Z
|
__author__='Yugal'
from socket import SOL_SOCKET,SO_REUSEADDR,AF_INET,SOCK_STREAM,socket
from time import strftime,sleep
from threading import Thread
import os
class viniServer:
    """Minimal single-client TCP server used by MobileMouse.

    Listens on port 9999, accepts one client at a time, and exchanges
    newline-terminated text messages. A background thread sends a
    heartbeat message every second and clears ``conn`` when the client
    disconnects.
    """

    conn = None  # the single active client socket (None when disconnected)
    s = None     # the listening socket

    def __init__(self):
        # SO_REUSEADDR allows immediate restarts after a crash.
        self.s = socket(AF_INET, SOCK_STREAM)
        self.s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self.s.bind(("", 9999))  # Let's Bind to port
        self.s.listen(0)  # backlog 0 still allows a single pending client

    @staticmethod
    def send(conn, message):
        """Send *message* (newline-terminated) on *conn*; errors are ignored.

        Best-effort by design: a broken connection is detected by the
        heartbeat thread, so failures here are deliberately swallowed.
        """
        try:
            message += "\n"
            conn.send(message.encode())
        except Exception:
            pass

    @staticmethod
    def refine(string):
        """Return *string* with its first two characters removed."""
        # Idiomatic slice replaces the original character-by-character loop.
        return string[2:]

    def getConn(self):
        """Return the current client socket (or None)."""
        return self.conn

    def run_server(self):
        """Block until a client connects; then start the heartbeat thread.

        Returns True once a client is connected.  (The original dead
        ``if True: ... else: return False`` scaffolding has been removed.)
        """
        print("Waiting for connection")
        self.conn, addr = self.s.accept()
        print("Now connected to ", addr)
        Thread(target=self.startConnCheck).start()
        return True

    def wait_for_response(self):
        """Receive one message from the client.

        Returns the decoded text, or None when the connection is closed
        or a receive error occurs (the socket is closed in that case).
        """
        try:
            msg = self.conn.recv(1024)
            if msg == b'':
                # Orderly shutdown by the peer.
                self.conn.close()
                return None
            return msg.decode('utf-8')
        except Exception:
            self.conn.close()
            self.conn = None
            return None

    def isConnected(self):
        """True while the client socket has not been closed."""
        # repr() of a closed socket object contains the word "closed".
        return "closed" not in str(self.conn)

    def startConnCheck(self):
        """Send a heartbeat every second until the connection dies."""
        while True:
            sleep(1)
            if self.isConnected():
                self.send(self.conn, "__ccheck_404")
            else:
                # Bug fix: the original re-checked isConnected() here and
                # called conn.close() in a branch that could never execute
                # (isConnected() had just returned False). Just drop the
                # reference and stop the thread.
                self.conn = None
                break
| 23.566265
| 70
| 0.496421
|
acffc34b0c1c793d59a365bd9eb8eca2f5fe33af
| 5,866
|
py
|
Python
|
CGAT/scripts/liftover.py
|
CGATOxford/cgat
|
326aad4694bdfae8ddc194171bb5d73911243947
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 87
|
2015-01-01T03:48:19.000Z
|
2021-11-23T16:23:24.000Z
|
CGAT/scripts/liftover.py
|
CGATOxford/cgat
|
326aad4694bdfae8ddc194171bb5d73911243947
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 189
|
2015-01-06T15:53:11.000Z
|
2019-05-31T13:19:45.000Z
|
CGAT/scripts/liftover.py
|
CGATOxford/cgat
|
326aad4694bdfae8ddc194171bb5d73911243947
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 56
|
2015-01-13T02:18:50.000Z
|
2022-01-05T10:00:59.000Z
|
'''
liftover.py - simple liftover script
======================================================
:Tags: Python
Purpose
-------
liftover coordinates using a liftover formatted file from the ucsc.
Usage
-----
Example::
python liftover.py --help
Type::
python liftover.py --help
for command line help.
Command line options
--------------------
'''
import sys
import numpy
import CGAT.Experiment as E
import CGAT.IOTools as IOTools
def readLiftOver(infile, chromosome,
                 chromosome_size=250000000,
                 report_step=1000000,
                 loglevel=0):
    """Read a UCSC "chain" liftover file and build per-base lookup arrays.

    Args:
        infile: iterable of lines in UCSC chain format.
        chromosome: only chains whose source chromosome equals this are kept.
        chromosome_size: length of the lookup arrays (one slot per base).
        report_step: progress-report interval in lines (when loglevel >= 2).
        loglevel: verbosity. This parameter replaces the original references
            to a module-global ``options``, which was actually a *local*
            of main() and therefore raised NameError at call time.

    Returns:
        Tuple ``(map_position, map_chromosome, map_chromosome2id,
        map_id2chromosome)``. Target coordinates are stored 1-based so that
        0 in ``map_position`` flags an unmappable base.
    """
    if loglevel >= 2:
        print("## started reading mapping information")
        sys.stdout.flush()

    # numpy.int was removed in NumPy 1.24 — use the builtin int dtype.
    map_position = numpy.zeros((chromosome_size,), int)
    # signed chromosome id per base; negative values mean negative strand
    map_chromosome = numpy.zeros((chromosome_size,), numpy.int8)
    map_id2chromosome = ["", ]
    map_chromosome2id = {}

    n = 0
    keep = False   # bug fix: initialized so data before a chain header is skipped
    invert = False
    x = y = 0
    for line in infile:
        n += 1
        if not (n % report_step):
            if loglevel >= 2:
                print("# iteration %i" % n)
                sys.stdout.flush()

        if line[:5] == "chain":
            # Chain header: skip "chain <score>", then source (x) interval
            # fields followed by target (y) interval fields and the chain id.
            (chr_x, size_x, strand_x, first_x, last_x,
             chr_y, size_y, strand_y, first_y, last_y,
             dontknow) = line[:-1].split(" ")[2:]

            if strand_x == "-":
                raise ValueError("what shall I do with negative strands?")

            x = int(first_x)

            # revert coordinates for negative strands (it seems that
            # the mapping file uses reverse coordinates, while liftover
            # output doesn't)
            # add 1 to coordinates, because 0 is flag for unmappable.
            if strand_y == "-":
                invert = True
                # no +1, because already one past current residue (due to open
                # bracket)
                y = int(size_y) - int(first_y)
            else:
                invert = False
                y = int(first_y) + 1

            keep = chr_x == chromosome
            if loglevel >= 3:
                print("# adding alignment", line[:-1])
            continue

        elif line.strip() == "":
            # blank line terminates a chain block
            keep = False
            continue

        elif keep:
            data = list(map(int, line[:-1].split("\t")))
            if len(data) == 3:
                size, increment_x, increment_y = data
            else:
                # the last line of a chain carries only the block size
                size, increment_x, increment_y = data[0], 0, 0

            # Record target positions. Bug fix: numpy.arrayrange was the
            # ancient Numeric-era name and no longer exists; numpy.arange
            # is the correct call.
            if invert:
                map_position[x:x + size] = numpy.arange(y, y - size, -1)
            else:
                map_position[x:x + size] = numpy.arange(y, y + size, 1)

            if chr_y not in map_id2chromosome:
                map_chromosome2id[chr_y] = len(map_id2chromosome)
                map_id2chromosome.append(chr_y)

            id = map_chromosome2id[chr_y]
            if strand_y == "-":
                id = -id

            # add chromosome id for this block
            map_chromosome[x:x + size] = id

            x += increment_x + size
            if invert:
                y -= increment_y + size
            else:
                y += increment_y + size

            if y < 0:
                raise ValueError(
                    "illegal mapping: %i -> %i for %s %s:%s-%s(%s) "
                    "to %s %s: %s-%s(%s)" % (
                        x, y,
                        chr_x, strand_x, first_x, last_x, size_x,
                        chr_y, strand_y, first_y, last_y, size_y))

    return map_position, map_chromosome, map_chromosome2id, map_id2chromosome
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.

    Reads coordinate intervals (chrom, start, end, tab-separated) from
    stdin and, for each base of each interval on the selected chromosome,
    prints the lifted-over target chromosome, strand and position.
    """
    if argv is None:
        argv = sys.argv
    parser = E.OptionParser(
        version="%prog version: $Id$")
    parser.add_option("-c", "--chromosome", dest="chromosome", type="string",
                      help="chromosome to take.")
    parser.add_option("-m", "--map", dest="filename_map", type="string",
                      help="filename with mapping info.",
                      metavar="FILE")
    parser.set_defaults(
        filename_map="",
        chromosome=None,
    )
    (options, args) = E.Start(parser)
    # Both the chain file and a chromosome are mandatory.
    if options.filename_map == "":
        raise ValueError("please specify the file with the "
                         "liftover mapping information")
    if not options.chromosome:
        raise ValueError("please give a chromosome")
    map_position, map_chromosome, map_chromosome2id, \
        map_id2chromosome = readLiftOver(
            IOTools.openFile(options.filename_map, "r"),
            options.chromosome)
    l = 0  # input line counter (used only for verbose echo below)
    for line in options.stdin:
        if line[0] == "#":
            continue
        data = line[:-1].split("\t")
        chromosome = data[0]
        range_from, range_to = int(data[1]), int(data[2])
        l += 1
        if chromosome == options.chromosome:
            if options.loglevel >= 1:
                print("#", l, ":", line[:-1])
            for x in range(range_from, range_to):
                # map_position == 0 flags an unmappable base (positions are
                # stored 1-based by readLiftOver), hence the truthiness test
                # and the "- 1" when printing.
                if map_position[x]:
                    id = map_chromosome[x]
                    # Sign of the chromosome id encodes the target strand.
                    if id > 0:
                        c = "+"
                    else:
                        c = "-"
                        id = -id
                    print("%s\t%i\t%s\t%s\t%i" % (
                        chromosome, x, map_id2chromosome[id], c, map_position[x] - 1))
                else:
                    pass
                    # print "%s\t%i\tna" % (chromosome, x )
    E.Stop()
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| 26.663636
| 86
| 0.498125
|
acffc3640677c5c1b7b8f1759656c0fe0ae79365
| 4,712
|
py
|
Python
|
tests/bootstrap_task_test.py
|
danieldiamond/dbt-sugar
|
0645722cb52cf9eb685d65b556beb9b4c2d3cbcf
|
[
"Apache-2.0"
] | 94
|
2020-12-21T20:00:38.000Z
|
2022-03-31T13:53:00.000Z
|
tests/bootstrap_task_test.py
|
danieldiamond/dbt-sugar
|
0645722cb52cf9eb685d65b556beb9b4c2d3cbcf
|
[
"Apache-2.0"
] | 410
|
2020-12-19T09:25:59.000Z
|
2022-03-23T04:20:40.000Z
|
tests/bootstrap_task_test.py
|
danieldiamond/dbt-sugar
|
0645722cb52cf9eb685d65b556beb9b4c2d3cbcf
|
[
"Apache-2.0"
] | 16
|
2021-03-29T23:11:35.000Z
|
2022-03-10T11:27:26.000Z
|
from collections import OrderedDict
from pathlib import Path
import pytest
FIXTURE_DIR = Path(__file__).resolve().parent
@pytest.mark.datafiles(FIXTURE_DIR)
def test_build_all_models_dict(datafiles):
from dbt_sugar.core.clients.dbt import DbtProfile
from dbt_sugar.core.config.config import DbtSugarConfig
from dbt_sugar.core.flags import FlagParser
from dbt_sugar.core.main import parser
from dbt_sugar.core.task.bootstrap import BootstrapTask, DbtModelsDict
config_filepath = Path(datafiles).joinpath("sugar_config.yml")
flag_parser = FlagParser(parser)
cli_args = ["bootstrap", "--config-path", str(config_filepath)]
flag_parser.consume_cli_arguments(test_cli_args=cli_args)
config = DbtSugarConfig(flag_parser)
config.load_config()
profile = DbtProfile(
flags=flag_parser,
profile_name="dbt_sugar_test",
target_name=str(),
profiles_dir=Path(datafiles),
)
profile.read_profile()
task = BootstrapTask(
flags=flag_parser,
dbt_path=Path("tests/test_dbt_project/dbt_sugar_test"),
sugar_config=config,
dbt_profile=profile,
)
task.build_all_models_dict()
print("BUILT")
print(task.dbt_models_data)
expectation = [
DbtModelsDict(
model_name="my_first_dbt_model",
model_path=Path(
"tests/test_dbt_project/dbt_sugar_test/models/example/my_first_dbt_model.sql"
),
model_columns=[],
),
DbtModelsDict(
model_name="my_second_dbt_model",
model_path=Path(
"tests/test_dbt_project/dbt_sugar_test/models/example/my_second_dbt_model.sql"
),
model_columns=[],
),
]
print("EXPT")
print(expectation)
assert task.dbt_models_data == expectation
@pytest.mark.datafiles(FIXTURE_DIR)
def test_add_or_update_model_descriptor_placeholders(datafiles):
from dbt_sugar.core.clients.dbt import DbtProfile
from dbt_sugar.core.config.config import DbtSugarConfig
from dbt_sugar.core.flags import FlagParser
from dbt_sugar.core.main import parser
from dbt_sugar.core.task.bootstrap import BootstrapTask, DbtModelsDict
config_filepath = Path(datafiles).joinpath("sugar_config.yml")
flag_parser = FlagParser(parser)
cli_args = ["bootstrap", "--config-path", str(config_filepath)]
flag_parser.consume_cli_arguments(test_cli_args=cli_args)
config = DbtSugarConfig(flag_parser)
config.load_config()
profile = DbtProfile(
flags=flag_parser,
profile_name="dbt_sugar_test",
target_name=str(),
profiles_dir=Path(datafiles),
)
profile.read_profile()
task = BootstrapTask(
flags=flag_parser,
dbt_path=Path("tests/test_dbt_project"),
sugar_config=config,
dbt_profile=profile,
)
task.dbt_models_data = [
DbtModelsDict(model_name="my_first_dbt_model", model_path=None, model_columns=[])
]
rez = task.add_or_update_model_descriptor_placeholders(is_test=True)
expectation = OrderedDict(
[
("version", 2),
(
"models",
[
OrderedDict(
[
("name", "my_first_dbt_model"),
("description", "No description for this model."),
(
"columns",
[
OrderedDict(
[
("name", "answer"),
("description", "No description for this column."),
]
),
OrderedDict(
[
("name", "id"),
("description", "No description for this column."),
]
),
OrderedDict(
[
("name", "question"),
("description", "No description for this column."),
]
),
],
),
]
)
],
),
]
)
assert rez == expectation
| 34.903704
| 95
| 0.523769
|
acffc3c4325c7c1e2ca199cc4221f5c4c74b9ede
| 11,253
|
py
|
Python
|
mltk/batch_agg.py
|
haowen-xu/ml-essentials
|
ca44186be37887461205227c32995f1485b4ff41
|
[
"MIT"
] | 4
|
2019-08-06T03:23:14.000Z
|
2019-11-08T10:58:54.000Z
|
mltk/batch_agg.py
|
haowen-xu/ml-essentials
|
ca44186be37887461205227c32995f1485b4ff41
|
[
"MIT"
] | null | null | null |
mltk/batch_agg.py
|
haowen-xu/ml-essentials
|
ca44186be37887461205227c32995f1485b4ff41
|
[
"MIT"
] | 2
|
2019-12-03T08:09:05.000Z
|
2020-10-15T06:50:20.000Z
|
import operator
from enum import Enum
from functools import reduce
from typing import *
import numpy as np
from .stage import StageType
from .utils import ALL, NOT_SET
__all__ = [
'BatchAggregationMode',
'BatchAggregator', 'BatchAggregatorDict',
]
class BatchAggregationMode(str, Enum):
    """How a batch aggregator combines the arrays it collects."""

    CONCAT = 'CONCAT'
    """Join the collected batch arrays along the chosen axis."""

    SUM = 'SUM'
    """Accumulate the element-wise sum of the batch arrays along the chosen axis."""

    AVERAGE = 'AVERAGE'
    """Accumulate the (weighted) mean of the batch arrays along the chosen axis."""
class BatchAggregator(object):
    """
    Class to aggregate batch arrays.
    >>> agg = BatchAggregator(BatchAggregationMode.CONCAT)
    >>> agg
    BatchAggregator(mode=CONCAT, axis=0)
    >>> agg.add(np.array([1, 2, 3, 4]))
    >>> agg.add(np.array([5, 6]))
    >>> agg.get()
    array([1, 2, 3, 4, 5, 6])
    >>> agg = BatchAggregator(BatchAggregationMode.AVERAGE)
    >>> agg
    BatchAggregator(mode=AVERAGE, axis=None)
    >>> agg.add(np.array([1, 2, 3, 4]))
    >>> agg.add(np.array([5, 6]))
    >>> agg.get()
    3.5
    >>> agg = BatchAggregator(BatchAggregationMode.SUM)
    >>> agg
    BatchAggregator(mode=SUM, axis=None)
    >>> agg.add(np.array([1, 2, 3, 4]))
    >>> agg.add(np.array([5, 6]))
    >>> agg.get()
    21
    """
    # Instance attributes, assigned in __init__.
    mode: BatchAggregationMode
    axis: Union[int, Tuple[int, ...]]
    def __init__(self,
                 mode: Union[str, BatchAggregationMode],
                 axis: Optional[Union[int, Tuple[int, ...], List[int]]] = NOT_SET):
        """
        Construct a new :class:`BatchAggregator`.
        Args:
            mode: Aggregation mode.
            axis: The axis to aggregate. Defaults to `0` for `CONCAT` mode,
                while :obj:`None` for `SUM` and `AVERAGE` mode.
        """
        mode = BatchAggregationMode(mode)
        # Mode-dependent default: CONCAT joins along the batch axis (0),
        # SUM/AVERAGE reduce over all axes (None).
        if axis is NOT_SET:
            axis = 0 if mode == BatchAggregationMode.CONCAT else None
        if mode == BatchAggregationMode.CONCAT:
            if not isinstance(axis, int):
                raise TypeError('`axis` must be a int when `mode` is CONCAT.')
        if axis is not None:
            # Normalize: iterables become tuples; a one-element tuple
            # collapses back to a plain int.
            if hasattr(axis, '__iter__'):
                axis = tuple(int(v) for v in axis)
                if len(axis) == 1:
                    axis = axis[0]
            else:
                axis = int(axis)
        self.mode = mode
        self.axis = axis
        self._buf = None          # list of arrays (CONCAT) or running array (SUM/AVERAGE)
        self._weight_sum = 0.     # total weight seen so far (AVERAGE mode only)
    def __repr__(self):
        return f'{self.__class__.__qualname__}' \
            f'(mode={self.mode.value}, axis={self.axis})'
    def get(self) -> Optional[np.ndarray]:
        """
        Get the aggregation result.
        Returns:
            The result, or :obj:`None` if no value has been collected.
        """
        if self._buf is not None:
            if self.mode == BatchAggregationMode.CONCAT:
                # CONCAT defers the join until the result is requested.
                return np.concatenate(self._buf, axis=self.axis)
            else:
                # SUM/AVERAGE maintain the running result incrementally.
                return self._buf
    def add(self,
            values: np.ndarray,
            weight: Optional[float] = 1.):
        """
        Add a batch array to the aggregator.
        Args:
            values: The batch array.
            weight: The batch weight, used only in `AVERAGE` mode.
        """
        # CONCAT: append the values to the buf
        if self.mode == BatchAggregationMode.CONCAT:
            if self._buf is None:
                self._buf = []
            self._buf.append(values)
        # SUM
        elif self.mode == BatchAggregationMode.SUM:
            batch_sum = np.sum(values, axis=self.axis)
            if self._buf is None:
                self._buf = batch_sum
            else:
                self._buf += batch_sum
        # AVERAGE: maintain the `total_weight` state and update the buf
        else:
            # infer the batch size and weight
            batch_shape = np.shape(values)
            if self.axis is None:
                # reducing over all axes: batch size is the element count
                batch_size = float(reduce(operator.mul, np.shape(values), 1.))
            elif isinstance(self.axis, tuple):
                # product of the reduced dimensions
                batch_size = 1.
                for a in self.axis:
                    batch_size *= batch_shape[a]
            else:
                batch_size = batch_shape[self.axis]
            batch_weight = weight * batch_size
            # do update the weight
            self._weight_sum += batch_weight
            r1 = weight / self._weight_sum
            batch_sum = np.sum(values, axis=self.axis)
            if self._buf is None:
                self._buf = r1 * batch_sum
            else:
                # Incremental weighted mean:
                #   new = old * (W_old / W_new) + weight * batch_sum / W_new
                # rewritten as old + r1 * batch_sum - r2 * old, so only the
                # running mean and the weight sum need to be stored.
                r2 = batch_weight / self._weight_sum
                self._buf += r1 * batch_sum - r2 * self._buf
class BatchAggregatorDict(Mapping[str, BatchAggregator]):
"""
Maintain a dict of :class:`BatchAggregator` instances, maybe with
a default factory to construct :class:`BatchAggregator` instance
for new keys.
>>> agg_dict = BatchAggregatorDict.new()
>>> agg_dict['acc'].add(np.array([0.75, 0.875]))
>>> agg_dict['loss'].add(np.array([0.125, 0.2]))
>>> len(agg_dict)
2
>>> list(agg_dict)
['acc', 'loss']
>>> agg_dict['acc'].get()
0.8125
>>> agg_dict['loss'].get()
0.1625
"""
@staticmethod
def new(metrics: Union[Sequence[str], type(ALL)] = ALL,
outputs: Union[Sequence[str], type(ALL)] = (),
aggregators: Optional[Mapping[str, BatchAggregator]] = None,
excludes: Sequence[str] = (),
stage_type: Optional[StageType] = None) -> 'BatchAggregatorDict':
"""
Construct a new :class:`BatchAggregatorDict` according to the field
settings `metrics`, `outputs` and `aggregators`.
Args:
metrics: The names of the batch arrays, which should be aggregated
by ``BatchAggregator('AVERAGE', axis=None)``. :obj:`ALL`
indicates that an array is by default a metric if it is neither
specified in `outputs` nor in `aggregator`.
outputs: The names of the batch arrays, which should be aggregated
by ``BatchAggregator('CONCAT', axis=0)``. :obj:`ALL`
indicates that an array is by default an output if it is neither
specified in `outputs` nor in `aggregator`.
aggregators: The dict of names and their corresponding aggregators.
excludes: The names to exclude. If a name is excluded, no
aggregator will be designated to this name, i.e., ``get(name)``
returns None, and ``__getitem__(name)`` raises `KeyError`.
stage_type: If specified, will add stage metric prefix to the keys
of `metrics`, `outputs` and `aggregators`.
Returns:
The aggregator dict.
Notes:
:obj:`ALL` could be specified to at most one of `metrics`
and `outputs`. The argument `aggregators` has higher priority
than `outputs`, and so does `outputs` have higher priority than
`metrics`. That is to say, if a name is specified in both
`aggregators` and `outputs`, then the aggregator specified in
`aggregators` will be chosen; this is also true if a name is
specified in both `outputs` and `metrics`.
"""
# the aggregator factories
average_aggregator_factory = lambda: \
BatchAggregator(mode=BatchAggregationMode.AVERAGE, axis=None)
concat_aggregator_factory = lambda: \
BatchAggregator(mode=BatchAggregationMode.CONCAT, axis=0)
# determine the default factory
if metrics == ALL and outputs == ALL:
raise ValueError('Only one of `metrics` and `outputs` can be '
'`ALL`.')
elif metrics == ALL:
default_factory = average_aggregator_factory
elif outputs == ALL:
default_factory = concat_aggregator_factory
else:
default_factory = None
# build the aggregator instances
agg_dict = {}
if metrics != ALL and metrics:
for key in metrics:
if stage_type is not None:
key = stage_type.add_metric_prefix(key)
agg_dict[key] = average_aggregator_factory()
if outputs != ALL and outputs:
for key in outputs:
if stage_type is not None:
key = stage_type.add_metric_prefix(key)
agg_dict[key] = concat_aggregator_factory()
if aggregators:
for key, agg in aggregators.items():
if stage_type is not None:
key = stage_type.add_metric_prefix(key)
agg_dict[key] = agg
# build the excludes names
if excludes and stage_type is not None:
excludes = [stage_type.add_metric_prefix(n) for n in excludes]
# now construct the `BatchAggregatorDict` instance
return BatchAggregatorDict(
agg_dict, excludes=excludes, default_factory=default_factory)
def __init__(self,
aggregators: Mapping[str, BatchAggregator],
excludes: Sequence[str] = (),
default_factory: Optional[
Callable[[], BatchAggregator]] = None):
"""
Construct a new :class:`BatchAggregatorDict`.
Args:
aggregators: The mapping from names to aggregators.
excludes: The names to exclude from this dict. If a name is
excluded, no aggregator will be designated to this name,
i.e., ``get(name)`` returns None, and ``__getitem__(name)``
raises :class:`KeyError`.
default_factory: The default factory, which is used to create
new :class:`BatchAggregator` instances if the aggregator
to a requested name does not exist. If not specified,
accessing non-existing name will raise an error.
"""
self._aggregators = {}
self._excludes = set(excludes or ())
self._default_factory = default_factory
for key in aggregators:
if key not in self._excludes:
agg = aggregators[key]
if not isinstance(agg, BatchAggregator):
raise TypeError(f'Item {key!r} is not an instance of '
f'{BatchAggregator.__qualname__}: '
f'{agg!r}')
self._aggregators[key] = agg
def get(self, item: str, default: Any = None) -> Optional[BatchAggregator]:
if item not in self._excludes:
if item not in self._aggregators:
if self._default_factory is not None:
self._aggregators[item] = self._default_factory()
else:
return default
return self._aggregators[item]
def __getitem__(self, item: str) -> BatchAggregator:
ret = self.get(item)
if ret is None:
raise KeyError(item)
return ret
    def __len__(self) -> int:
        """Number of names with a materialized aggregator entry.

        Excluded names and not-yet-created default aggregators are not counted.
        """
        return len(self._aggregators)
    def __iter__(self) -> Iterator[str]:
        """Iterate over the names that currently have an aggregator entry."""
        return iter(self._aggregators)
| 36.3
| 83
| 0.566071
|
acffc5a1abdedb2406bea684a208712892b1b205
| 1,934
|
py
|
Python
|
animation_unitest.py
|
sillybun/pyctlib
|
d4c8b191fd270923ad73924760847f2bdbdd9116
|
[
"MIT"
] | 3
|
2021-07-03T17:27:44.000Z
|
2021-09-26T20:48:19.000Z
|
animation_unitest.py
|
sillybun/pyctlib
|
d4c8b191fd270923ad73924760847f2bdbdd9116
|
[
"MIT"
] | null | null | null |
animation_unitest.py
|
sillybun/pyctlib
|
d4c8b191fd270923ad73924760847f2bdbdd9116
|
[
"MIT"
] | 1
|
2022-03-08T08:54:03.000Z
|
2022-03-08T08:54:03.000Z
|
import sys
import os
from sys import getsizeof
sys.path.append(os.path.abspath("."))
import zytlib
import pathlib
import numpy as np
from zytlib import vector, IndexMapping, scope, vhelp
from zytlib.vector import chain_function
from zytlib.filemanager import path, get_relative_path, file
from zytlib import touch
from zytlib.wrapper import generate_typehint_wrapper
import argparse
from time import sleep
from zytlib.utils import totuple
from zytlib.touch import once
import seaborn as sns
import matplotlib.pyplot as plt
from zytlib.visual.animation import TimeStamp, ScatterAnimation
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from matplotlib import animation
import torch
# NOTE(review): ``%matplotlib notebook`` is an IPython/Jupyter magic, not valid
# plain-Python syntax — this file only runs when pasted into a notebook cell.
%matplotlib notebook
# One shared figure: a 2x2 grid of three scatter panels plus one time panel.
fig, ax = plt.subplots(figsize=(8, 8), tight_layout=True)
# lnn_vector = vector()
ttl = fig.suptitle("PCA @ ")
ax_vector = vector()
sa_vector = vector()
for rank_colored in range(3):
    s_ax = plt.subplot(2, 2, rank_colored+1)
    ax_vector.append(s_ax)
    # 126 = number of animation frames; registered data appears to be
    # (frames, points, xy) — TODO confirm against ScatterAnimation.register.
    sa = ScatterAnimation(s_ax, 126)
    for item in range(6):
        sa.register(torch.randn(126, 64, 2))
    sa.set_xlim(-1, 1)
    sa.set_ylim(-1, 1)
    sa_vector.append(sa)
# Fourth panel: a time-stamp view spanning the same 126 frames.
ax_t = plt.subplot(2, 2, 4)
ts = TimeStamp(ax_t, 126)
ts.register(vector.rand(126))
ts.register(vector.rand(126))
def init():
    """Initialize every scatter panel plus the time-stamp axis.

    Returns the combined tuple of artists, as required by FuncAnimation's
    ``init_func`` when blitting is enabled.
    """
    artists = []
    for anim in sa_vector:
        artists.extend(anim.init())
    artists.extend(ts.init())
    return tuple(artists)
def update(frame):
    """Advance every panel to *frame* and refresh the figure title.

    Returns the tuple of redrawn artists (needed because blit=True).
    """
    artists = []
    for anim in sa_vector:
        artists.extend(anim.update(frame))
    artists.extend(ts.update(frame))
    ttl.set_text("PCA @ {}".format(frame))
    return tuple(artists)
# blit=True requires init()/update() to return the artists to redraw.
ani = FuncAnimation(fig, update, frames=np.arange(126),
                    init_func=init, blit=True)
plt.show()
# Optional ffmpeg export, kept for reference:
# Writer = animation.writers['ffmpeg']
# writer = Writer(fps=5, metadata=dict(artist='Me'), bitrate=18000)
# ani.save(f'PCA colored by different rank.mp4', writer="ffmpeg", fps=5, dpi=600)
| 24.794872
| 81
| 0.711479
|
acffc65a0769530f0fab63689fceaf3fcbca5133
| 13,790
|
py
|
Python
|
PythonVirtEnv/Lib/site-packages/plotly/validators/_scattermapbox.py
|
zuhorski/EPL_Project
|
2d2417652879cfbe33c44c003ad77b7222590849
|
[
"MIT"
] | 2
|
2021-07-18T11:39:56.000Z
|
2021-11-06T17:13:05.000Z
|
venv/Lib/site-packages/plotly/validators/_scattermapbox.py
|
wakisalvador/constructed-misdirection
|
74779e9ec640a11bc08d5d1967c85ac4fa44ea5e
|
[
"Unlicense"
] | null | null | null |
venv/Lib/site-packages/plotly/validators/_scattermapbox.py
|
wakisalvador/constructed-misdirection
|
74779e9ec640a11bc08d5d1967c85ac4fa44ea5e
|
[
"Unlicense"
] | null | null | null |
import _plotly_utils.basevalidators
class ScattermapboxValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for the top-level "scattermapbox" trace type.

    NOTE(review): this file looks auto-generated by plotly's codegen; the
    ``data_docs`` literal below is runtime help text, not a comment — it must
    not be edited by hand.
    """

    def __init__(self, plotly_name="scattermapbox", parent_name="", **kwargs):
        """Initialize the validator; *kwargs* may override codegen defaults."""
        # data_class_str / data_docs are pop()'d so any remaining kwargs pass
        # through to CompoundValidator unchanged.
        super(ScattermapboxValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "Scattermapbox"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            below
                Determines if this scattermapbox trace's layers
                are to be inserted before the layer with the
                specified ID. By default, scattermapbox layers
                are inserted above all the base layers. To
                place the scattermapbox layers above every
                other layer, set `below` to "''".
            connectgaps
                Determines whether or not gaps (i.e. {nan} or
                missing values) in the provided data arrays are
                connected.
            customdata
                Assigns extra data each datum. This may be
                useful when listening to hover, click and
                selection events. Note that, "scatter" traces
                also appends customdata items in the markers
                DOM elements
            customdatasrc
                Sets the source reference on Chart Studio Cloud
                for customdata .
            fill
                Sets the area to fill with a solid color. Use
                with `fillcolor` if not "none". "toself"
                connects the endpoints of the trace (or each
                segment of the trace if it has gaps) into a
                closed shape.
            fillcolor
                Sets the fill color. Defaults to a half-
                transparent variant of the line color, marker
                color, or marker line color, whichever is
                available.
            hoverinfo
                Determines which trace information appear on
                hover. If `none` or `skip` are set, no
                information is displayed upon hovering. But, if
                `none` is set, click and hover events are still
                fired.
            hoverinfosrc
                Sets the source reference on Chart Studio Cloud
                for hoverinfo .
            hoverlabel
                :class:`plotly.graph_objects.scattermapbox.Hove
                rlabel` instance or dict with compatible
                properties
            hovertemplate
                Template string used for rendering the
                information that appear on hover box. Note that
                this will override `hoverinfo`. Variables are
                inserted using %{variable}, for example "y:
                %{y}" as well as %{xother}, {%_xother},
                {%_xother_}, {%xother_}. When showing info for
                several points, "xother" will be added to those
                with different x positions from the first
                point. An underscore before or after
                "(x|y)other" will add a space on that side,
                only when this field is shown. Numbers are
                formatted using d3-format's syntax
                %{variable:d3-format}, for example "Price:
                %{y:$.2f}". https://github.com/d3/d3-3.x-api-
                reference/blob/master/Formatting.md#d3_format
                for details on the formatting syntax. Dates are
                formatted using d3-time-format's syntax
                %{variable|d3-time-format}, for example "Day:
                %{2019-01-01|%A}".
                https://github.com/d3/d3-time-
                format#locale_format for details on the date
                formatting syntax. The variables available in
                `hovertemplate` are the ones emitted as event
                data described at this link
                https://plotly.com/javascript/plotlyjs-
                events/#event-data. Additionally, every
                attributes that can be specified per-point (the
                ones that are `arrayOk: true`) are available.
                Anything contained in tag `<extra>` is
                displayed in the secondary box, for example
                "<extra>{fullData.name}</extra>". To hide the
                secondary box completely, use an empty tag
                `<extra></extra>`.
            hovertemplatesrc
                Sets the source reference on Chart Studio Cloud
                for hovertemplate .
            hovertext
                Sets hover text elements associated with each
                (lon,lat) pair If a single string, the same
                string appears over all the data points. If an
                array of string, the items are mapped in order
                to the this trace's (lon,lat) coordinates. To
                be seen, trace `hoverinfo` must contain a
                "text" flag.
            hovertextsrc
                Sets the source reference on Chart Studio Cloud
                for hovertext .
            ids
                Assigns id labels to each datum. These ids for
                object constancy of data points during
                animation. Should be an array of strings, not
                numbers or any other type.
            idssrc
                Sets the source reference on Chart Studio Cloud
                for ids .
            lat
                Sets the latitude coordinates (in degrees
                North).
            latsrc
                Sets the source reference on Chart Studio Cloud
                for lat .
            legendgroup
                Sets the legend group for this trace. Traces
                part of the same legend group hide/show at the
                same time when toggling legend items.
            legendgrouptitle
                :class:`plotly.graph_objects.scattermapbox.Lege
                ndgrouptitle` instance or dict with compatible
                properties
            legendrank
                Sets the legend rank for this trace. Items and
                groups with smaller ranks are presented on
                top/left side while with `*reversed*
                `legend.traceorder` they are on bottom/right
                side. The default legendrank is 1000, so that
                you can use ranks less than 1000 to place
                certain items before all unranked items, and
                ranks greater than 1000 to go after all
                unranked items.
            line
                :class:`plotly.graph_objects.scattermapbox.Line
                ` instance or dict with compatible properties
            lon
                Sets the longitude coordinates (in degrees
                East).
            lonsrc
                Sets the source reference on Chart Studio Cloud
                for lon .
            marker
                :class:`plotly.graph_objects.scattermapbox.Mark
                er` instance or dict with compatible properties
            meta
                Assigns extra meta information associated with
                this trace that can be used in various text
                attributes. Attributes such as trace `name`,
                graph, axis and colorbar `title.text`,
                annotation `text` `rangeselector`,
                `updatemenues` and `sliders` `label` text all
                support `meta`. To access the trace `meta`
                values in an attribute in the same trace,
                simply use `%{meta[i]}` where `i` is the index
                or key of the `meta` item in question. To
                access trace `meta` in layout attributes, use
                `%{data[n[.meta[i]}` where `i` is the index or
                key of the `meta` and `n` is the trace index.
            metasrc
                Sets the source reference on Chart Studio Cloud
                for meta .
            mode
                Determines the drawing mode for this scatter
                trace. If the provided `mode` includes "text"
                then the `text` elements appear at the
                coordinates. Otherwise, the `text` elements
                appear on hover.
            name
                Sets the trace name. The trace name appear as
                the legend item and on hover.
            opacity
                Sets the opacity of the trace.
            selected
                :class:`plotly.graph_objects.scattermapbox.Sele
                cted` instance or dict with compatible
                properties
            selectedpoints
                Array containing integer indices of selected
                points. Has an effect only for traces that
                support selections. Note that an empty array
                means an empty selection where the `unselected`
                are turned on for all points, whereas, any
                other non-array values means no selection all
                where the `selected` and `unselected` styles
                have no effect.
            showlegend
                Determines whether or not an item corresponding
                to this trace is shown in the legend.
            stream
                :class:`plotly.graph_objects.scattermapbox.Stre
                am` instance or dict with compatible properties
            subplot
                Sets a reference between this trace's data
                coordinates and a mapbox subplot. If "mapbox"
                (the default value), the data refer to
                `layout.mapbox`. If "mapbox2", the data refer
                to `layout.mapbox2`, and so on.
            text
                Sets text elements associated with each
                (lon,lat) pair If a single string, the same
                string appears over all the data points. If an
                array of string, the items are mapped in order
                to the this trace's (lon,lat) coordinates. If
                trace `hoverinfo` contains a "text" flag and
                "hovertext" is not set, these elements will be
                seen in the hover labels.
            textfont
                Sets the icon text font
                (color=mapbox.layer.paint.text-color,
                size=mapbox.layer.layout.text-size). Has an
                effect only when `type` is set to "symbol".
            textposition
                Sets the positions of the `text` elements with
                respects to the (x,y) coordinates.
            textsrc
                Sets the source reference on Chart Studio Cloud
                for text .
            texttemplate
                Template string used for rendering the
                information text that appear on points. Note
                that this will override `textinfo`. Variables
                are inserted using %{variable}, for example "y:
                %{y}". Numbers are formatted using d3-format's
                syntax %{variable:d3-format}, for example
                "Price: %{y:$.2f}".
                https://github.com/d3/d3-3.x-api-
                reference/blob/master/Formatting.md#d3_format
                for details on the formatting syntax. Dates are
                formatted using d3-time-format's syntax
                %{variable|d3-time-format}, for example "Day:
                %{2019-01-01|%A}".
                https://github.com/d3/d3-time-
                format#locale_format for details on the date
                formatting syntax. Every attributes that can be
                specified per-point (the ones that are
                `arrayOk: true`) are available. variables
                `lat`, `lon` and `text`.
            texttemplatesrc
                Sets the source reference on Chart Studio Cloud
                for texttemplate .
            uid
                Assign an id to this trace, Use this to provide
                object constancy between traces during
                animations and transitions.
            uirevision
                Controls persistence of some user-driven
                changes to the trace: `constraintrange` in
                `parcoords` traces, as well as some `editable:
                true` modifications such as `name` and
                `colorbar.title`. Defaults to
                `layout.uirevision`. Note that other user-
                driven trace attribute changes are controlled
                by `layout` attributes: `trace.visible` is
                controlled by `layout.legend.uirevision`,
                `selectedpoints` is controlled by
                `layout.selectionrevision`, and
                `colorbar.(x|y)` (accessible with `config:
                {editable: true}`) is controlled by
                `layout.editrevision`. Trace changes are
                tracked by `uid`, which only falls back on
                trace index if no `uid` is provided. So if your
                app can add/remove traces before the end of the
                `data` array, such that the same trace has a
                different index, you can still preserve user-
                driven changes if you give each trace a `uid`
                that stays with it as it moves.
            unselected
                :class:`plotly.graph_objects.scattermapbox.Unse
                lected` instance or dict with compatible
                properties
            visible
                Determines whether or not this trace is
                visible. If "legendonly", the trace is not
                drawn, but can appear as a legend item
                (provided that the legend itself is visible).
            """,
            ),
            **kwargs
        )
| 48.556338
| 78
| 0.54583
|
acffc65eb04a01dd578a1a7020c65a845dd6c593
| 12,726
|
py
|
Python
|
SWIM-Executables/Windows/pyinstaller-2.0 for windows/PyInstaller/lib/macholib/MachO.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | 47
|
2020-03-08T08:43:28.000Z
|
2022-03-18T18:51:55.000Z
|
SWIM-Executables/Windows/pyinstaller-2.0 for windows/PyInstaller/lib/macholib/MachO.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | null | null | null |
SWIM-Executables/Windows/pyinstaller-2.0 for windows/PyInstaller/lib/macholib/MachO.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | 16
|
2020-03-08T08:43:30.000Z
|
2022-01-10T22:05:57.000Z
|
"""
Utilities for reading and writing Mach-O headers
"""
from __future__ import print_function
import sys
import struct
from macholib.mach_o import *
from macholib.dyld import dyld_find, framework_info
from macholib.util import fileview
try:
from macholib.compat import bytes
except ImportError:
pass
try:
unicode
except NameError:
unicode = str
__all__ = ['MachO']
_RELOCATABLE = set((
    # relocatable commands that should be used for dependency walking
    LC_LOAD_DYLIB,
    LC_LOAD_WEAK_DYLIB,
    LC_PREBOUND_DYLIB,
    LC_REEXPORT_DYLIB,
))
# Human-readable names for the relocatable load commands above, used when
# reporting dependencies from MachOHeader.walkRelocatables().
_RELOCATABLE_NAMES = {
    LC_LOAD_DYLIB: 'load_dylib',
    LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
    LC_PREBOUND_DYLIB: 'prebound_dylib',
    LC_REEXPORT_DYLIB: 'reexport_dylib',
}
def _shouldRelocateCommand(cmd):
    """Return True when *cmd* is a load command relevant for relocation."""
    return cmd in _RELOCATABLE
class MachO(object):
    """
    Provides reading/writing the Mach-O header of a specific existing file
    """
    # filename - the original filename of this mach-o
    # sizediff - the current deviation from the initial mach-o size
    # header - the mach-o header
    # commands - a list of (load_command, somecommand, data)
    #   data is either a str, or a list of segment structures
    # total_size - the current mach-o header size (including header)
    # low_offset - essentially, the maximum mach-o header size
    # id_cmd - the index of my id command, or None
    def __init__(self, filename):
        """Open *filename* and parse its (possibly fat) Mach-O header(s)."""
        # supports the ObjectGraph protocol
        self.graphident = filename
        self.filename = filename
        # initialized by load
        self.fat = None
        self.headers = []
        with open(filename, 'rb') as fp:
            self.load(fp)
    def __repr__(self):
        return "<MachO filename=%r>" % (self.filename,)
    def load(self, fh):
        """Dispatch to fat or thin parsing based on the leading magic word."""
        assert fh.tell() == 0
        # The FAT magic is stored big-endian regardless of architecture.
        header = struct.unpack('>I', fh.read(4))[0]
        fh.seek(0)
        if header == FAT_MAGIC:
            self.load_fat(fh)
        else:
            # Thin binary: the single header's slice spans the whole file.
            fh.seek(0, 2)
            size = fh.tell()
            fh.seek(0)
            self.load_header(fh, 0, size)
    def load_fat(self, fh):
        """Parse a fat (multi-architecture) binary: one header per arch."""
        self.fat = fat_header.from_fileobj(fh)
        archs = [fat_arch.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
        for arch in archs:
            self.load_header(fh, arch.offset, arch.size)
    def rewriteLoadCommands(self, *args, **kw):
        """Forward to every header; return True if any of them changed."""
        changed = False
        for header in self.headers:
            if header.rewriteLoadCommands(*args, **kw):
                changed = True
        return changed
    def load_header(self, fh, offset, size):
        """Parse one architecture slice at *offset*, detecting byte order."""
        fh.seek(offset)
        header = struct.unpack('>I', fh.read(4))[0]
        fh.seek(offset)
        # A byte-swapped (CIGAM) magic means the slice uses the opposite byte
        # order; normalize to the canonical magic plus an endian flag.
        if header == MH_MAGIC:
            magic, hdr, endian = MH_MAGIC, mach_header, '>'
        elif header == MH_CIGAM:
            magic, hdr, endian = MH_MAGIC, mach_header, '<'
        elif header == MH_MAGIC_64:
            magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
        elif header == MH_CIGAM_64:
            magic, hdr, endian = MH_MAGIC_64, mach_header_64, '<'
        else:
            raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
                header, fh))
        hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
        self.headers.append(hdr)
    def write(self, f):
        """Serialize every parsed header back into file object *f*."""
        for header in self.headers:
            header.write(f)
class MachOHeader(object):
    """
    Provides reading/writing the Mach-O header of a specific existing file
    """
    # filename - the original filename of this mach-o
    # sizediff - the current deviation from the initial mach-o size
    # header - the mach-o header
    # commands - a list of (load_command, somecommand, data)
    #   data is either a str, or a list of segment structures
    # total_size - the current mach-o header size (including header)
    # low_offset - essentially, the maximum mach-o header size
    # id_cmd - the index of my id command, or None
    def __init__(self, parent, fh, offset, size, magic, hdr, endian):
        """Parse one architecture slice of *parent* starting at *offset*."""
        self.MH_MAGIC = magic
        self.mach_header = hdr
        # These are all initialized by self.load()
        self.parent = parent
        self.offset = offset
        self.size = size
        self.endian = endian
        self.header = None
        self.commands = None
        self.id_cmd = None
        self.sizediff = None
        self.total_size = None
        self.low_offset = None
        self.filetype = None
        self.headers = []
        self.load(fh)
    def __repr__(self):
        return "<%s filename=%r offset=%d size=%d endian=%r>" % (
            type(self).__name__, self.parent.filename, self.offset, self.size,
            self.endian)
    def load(self, fh):
        """Read the mach header plus every load command from *fh*."""
        # Restrict all reads to this slice's [offset, offset+size) window.
        fh = fileview(fh, self.offset, self.size)
        fh.seek(0)
        self.sizediff = 0
        kw = {'_endian_': self.endian}
        header = self.mach_header.from_fileobj(fh, **kw)
        self.header = header
        if header.magic != self.MH_MAGIC:
            raise ValueError("header has magic %08x, expecting %08x" % (
                header.magic, self.MH_MAGIC))
        cmd = self.commands = []
        self.filetype = MH_FILETYPE_SHORTNAMES[header.filetype]
        read_bytes = 0
        # low_offset tracks the smallest file offset occupied by segment
        # data; the header may later grow up to (but not past) that point.
        low_offset = sys.maxsize
        for i in range(header.ncmds):
            # read the load command
            cmd_load = load_command.from_fileobj(fh, **kw)
            # read the specific command
            klass = LC_REGISTRY.get(cmd_load.cmd, None)
            if klass is None:
                raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
            cmd_cmd = klass.from_fileobj(fh, **kw)
            if cmd_load.cmd == LC_ID_DYLIB:
                # remember where this command was
                if self.id_cmd is not None:
                    raise ValueError("This dylib already has an id")
                self.id_cmd = i
            if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
                # for segment commands, read the list of segments
                segs = []
                # assert that the size makes sense
                if cmd_load.cmd == LC_SEGMENT:
                    section_cls = section
                else: # LC_SEGMENT_64
                    section_cls = section_64
                expected_size = (
                    sizeof(klass) + sizeof(load_command) +
                    (sizeof(section_cls) * cmd_cmd.nsects)
                )
                if cmd_load.cmdsize != expected_size:
                    raise ValueError("Segment size mismatch")
                # this is a zero block or something
                # so the beginning is wherever the fileoff of this command is
                if cmd_cmd.nsects == 0:
                    if cmd_cmd.filesize != 0:
                        low_offset = min(low_offset, cmd_cmd.fileoff)
                else:
                    # this one has multiple segments
                    for j in range(cmd_cmd.nsects):
                        # read the segment
                        seg = section_cls.from_fileobj(fh, **kw)
                        # if the segment has a size and is not zero filled
                        # then its beginning is the offset of this segment
                        not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
                        if seg.offset > 0 and seg.size > 0 and not_zerofill:
                            low_offset = min(low_offset, seg.offset)
                        segs.append(seg)
                # data is a list of segments
                cmd_data = segs
            else:
                # data is a raw str
                data_size = (
                    cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
                )
                cmd_data = fh.read(data_size)
            cmd.append((cmd_load, cmd_cmd, cmd_data))
            read_bytes += cmd_load.cmdsize
        # make sure the header made sense
        if read_bytes != header.sizeofcmds:
            raise ValueError("Read %d bytes, header reports %d bytes" % (
                read_bytes, header.sizeofcmds))
        self.total_size = sizeof(self.mach_header) + read_bytes
        self.low_offset = low_offset
        # this header overwrites a segment, what the heck?
        if self.total_size > low_offset:
            raise ValueError("total_size > low_offset (%d > %d)" % (
                self.total_size, low_offset))
    def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
        """
        for all relocatable commands
        yield (command_index, command_name, filename)
        """
        for (idx, (lc, cmd, data)) in enumerate(self.commands):
            if shouldRelocateCommand(lc.cmd):
                name = _RELOCATABLE_NAMES[lc.cmd]
                # cmd.name is an offset into the command's trailing data
                # where the NUL-terminated dylib path starts.
                ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
                yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
                    sys.getfilesystemencoding())
    def rewriteInstallNameCommand(self, loadcmd):
        """Rewrite the load command of this dylib"""
        if self.id_cmd is not None:
            self.rewriteDataForCommand(self.id_cmd, loadcmd)
            return True
        return False
    def changedHeaderSizeBy(self, bytes):
        """Track a header size change of *bytes*; warn when it may not fit."""
        self.sizediff += bytes
        if (self.total_size + self.sizediff) > self.low_offset:
            print("WARNING: Mach-O header may be too large to relocate")
    def rewriteLoadCommands(self, changefunc):
        """
        Rewrite the load commands based upon a change dictionary
        """
        data = changefunc(self.parent.filename)
        changed = False
        if data is not None:
            if self.rewriteInstallNameCommand(
                    data.encode(sys.getfilesystemencoding())):
                changed = True
        for idx, name, filename in self.walkRelocatables():
            data = changefunc(filename)
            if data is not None:
                if self.rewriteDataForCommand(idx, data.encode(
                        sys.getfilesystemencoding())):
                    changed = True
        return changed
    def rewriteDataForCommand(self, idx, data):
        """Replace command *idx*'s trailing data, NUL-padding to alignment."""
        lc, cmd, old_data = self.commands[idx]
        hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
        align = struct.calcsize('L')
        data = data + (b'\x00' * (align - (len(data) % align)))
        newsize = hdrsize + len(data)
        self.commands[idx] = (lc, cmd, data)
        self.changedHeaderSizeBy(newsize - lc.cmdsize)
        lc.cmdsize, cmd.name = newsize, hdrsize
        return True
    def synchronize_size(self):
        """Fold the accumulated sizediff into the header, or fail loudly."""
        if (self.total_size + self.sizediff) > self.low_offset:
            raise ValueError("New Mach-O header is too large to relocate")
        self.header.sizeofcmds += self.sizediff
        self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
        self.sizediff = 0
    def write(self, fileobj):
        """Serialize the header and all load commands back to *fileobj*."""
        fileobj = fileview(fileobj, self.offset, self.size)
        fileobj.seek(0)
        # serialize all the mach-o commands
        self.synchronize_size()
        self.header.to_fileobj(fileobj)
        for lc, cmd, data in self.commands:
            lc.to_fileobj(fileobj)
            cmd.to_fileobj(fileobj)
            if isinstance(data, unicode):
                fileobj.write(data.encode(sys.getfilesystemencoding()))
            elif isinstance(data, (bytes, str)):
                fileobj.write(data)
            else:
                # segments..
                for obj in data:
                    obj.to_fileobj(fileobj)
        # zero out the unused space, doubt this is strictly necessary
        # and is generally probably already the case
        fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
    def getSymbolTableCommand(self):
        """Return the LC_SYMTAB command, or None if absent."""
        for lc, cmd, data in self.commands:
            if lc.cmd == LC_SYMTAB:
                return cmd
        return None
    def getDynamicSymbolTableCommand(self):
        """Return the LC_DYSYMTAB command, or None if absent."""
        for lc, cmd, data in self.commands:
            if lc.cmd == LC_DYSYMTAB:
                return cmd
        return None
def main(fn):
    """Print each distinct relocatable dependency of the Mach-O file *fn*."""
    macho = MachO(fn)
    reported = set()
    for hdr in macho.headers:
        for _idx, cmd_name, dep in hdr.walkRelocatables():
            if dep in reported:
                continue
            reported.add(dep)
            print('\t' + cmd_name + ": " + dep)
if __name__ == '__main__':
    # NOTE: ``sys`` is already imported at module level; this local import is
    # redundant but harmless.
    import sys
    # Default to /bin/ls when no files are given on the command line.
    files = sys.argv[1:] or ['/bin/ls']
    for fn in files:
        print(fn)
        main(fn)
| 35.154696
| 79
| 0.572057
|
acffc94af86a02a08259879f16a6964f096dc012
| 8,487
|
py
|
Python
|
lte/gateway/python/magma/pipelined/rule_mappers.py
|
veshkemburu/magma
|
44660c281cece41f2c3f8ef9b9d88d17d47ba312
|
[
"BSD-3-Clause"
] | null | null | null |
lte/gateway/python/magma/pipelined/rule_mappers.py
|
veshkemburu/magma
|
44660c281cece41f2c3f8ef9b9d88d17d47ba312
|
[
"BSD-3-Clause"
] | null | null | null |
lte/gateway/python/magma/pipelined/rule_mappers.py
|
veshkemburu/magma
|
44660c281cece41f2c3f8ef9b9d88d17d47ba312
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import threading
from collections import namedtuple
from lte.protos.mobilityd_pb2 import IPAddress
from magma.common.redis.client import get_default_client
from magma.common.redis.containers import RedisFlatDict, RedisHashDict
from magma.common.redis.serializers import (
RedisSerde,
get_json_deserializer,
get_json_serializer,
)
from magma.pipelined.imsi import encode_imsi
# JSON-serialized to form the redis key for per-(subscriber, ip, rule) state.
SubscriberRuleKey = namedtuple('SubscriberRuleKey', 'key_type imsi ip_addr rule_id')
class RuleIDToNumMapper:
    """
    Rule ID to Number Mapper

    Assigns small integers to rule ids so they can be carried in an OpenFlow
    register.  All mutation is serialized through a single write lock, so the
    public methods are safe to call from multiple threads.
    """

    def __init__(self):
        self.redis_cli = get_default_client()
        self._curr_rule_num = 1
        self._rule_nums_by_rule = {}
        self._rules_by_rule_num = {}
        self._lock = threading.Lock()  # write lock

    def setup_redis(self):
        # Swap the in-memory dicts for redis-backed ones so that assignments
        # persist across restarts.
        self._rule_nums_by_rule = RuleIDDict()
        self._rules_by_rule_num = RuleNameDict()

    def _register_rule(self, rule_id):
        """NOT thread safe — caller must already hold self._lock."""
        existing = self._rule_nums_by_rule.get(rule_id)
        if existing is not None:
            return existing
        assigned = self._curr_rule_num
        self._rule_nums_by_rule[rule_id] = assigned
        self._rules_by_rule_num[assigned] = rule_id
        self._curr_rule_num += 1
        return assigned

    def get_rule_num(self, rule_id):
        """Look up the number already assigned to *rule_id*."""
        with self._lock:
            return self._rule_nums_by_rule[rule_id]

    def get_or_create_rule_num(self, rule_id):
        """Return *rule_id*'s number, assigning a fresh one if needed."""
        with self._lock:
            known = self._rule_nums_by_rule.get(rule_id)
            if known is not None:
                return known
            return self._register_rule(rule_id)

    def get_rule_id(self, rule_num):
        """Reverse lookup: the rule id assigned to *rule_num*."""
        with self._lock:
            return self._rules_by_rule_num[rule_num]
class SessionRuleToVersionMapper:
    """
    Session & Rule to Version Mapper

    This class assigns version numbers to rule id & subscriber id combinations
    that can be used in an openflow register. The methods can be called from
    multiple threads.
    """

    def __init__(self):
        self._version_by_imsi_and_rule = {}
        self._lock = threading.Lock()  # write lock

    @staticmethod
    def _ip_addr_to_str(ip_addr) -> str:
        """Normalize an optional IPAddress proto into a plain string key part.

        Returns "" when no address is present; otherwise the decoded,
        whitespace-stripped address.  (Shared by all public methods — the
        original code duplicated this four times.)
        """
        if ip_addr is None or ip_addr.address is None:
            return ""
        return ip_addr.address.decode('utf-8').strip()

    def _save_version_unsafe(self, imsi: str, ip_addr: str, rule_id: str,
                             version):
        # NOT thread safe — caller must already hold self._lock.
        key = self._get_json_key(encode_imsi(imsi), ip_addr, rule_id)
        self._version_by_imsi_and_rule[key] = version

    def remove_all_ue_versions(self, imsi: str, ip_addr: IPAddress):
        """
        Remove every stored version for the given subscriber. If an IP
        address is provided, only entries for that address are removed;
        otherwise all of the subscriber's entries are removed.
        """
        encoded_imsi = encode_imsi(imsi)
        ip_addr_str = self._ip_addr_to_str(ip_addr)
        del_list = []
        with self._lock:
            # Collect first, then delete: the dict cannot be mutated while
            # it is being iterated.
            for k in self._version_by_imsi_and_rule.keys():
                _, cur_imsi, cur_ip_addr_str, _ = SubscriberRuleKey(*json.loads(k))
                if cur_imsi == encoded_imsi and (ip_addr_str == "" or
                                                 ip_addr_str == cur_ip_addr_str):
                    del_list.append(k)
            for k in del_list:
                del self._version_by_imsi_and_rule[k]

    def save_version(self, imsi: str, ip_addr: IPAddress,
                     rule_id: str, version: int):
        """
        Store *version* for the given subscriber/IP/rule combination,
        overwriting any previous value.
        """
        ip_addr_str = self._ip_addr_to_str(ip_addr)
        with self._lock:
            self._save_version_unsafe(imsi, ip_addr_str, rule_id, version)

    def get_version(self, imsi: str, ip_addr: IPAddress, rule_id: str) -> int:
        """
        Return the version stored for a subscriber and rule, or -1 if none.
        """
        ip_addr_str = self._ip_addr_to_str(ip_addr)
        key = self._get_json_key(encode_imsi(imsi), ip_addr_str, rule_id)
        with self._lock:
            version = self._version_by_imsi_and_rule.get(key)
            if version is None:
                version = -1
        return version

    def remove(self, imsi: str, ip_addr: IPAddress, rule_id: str, version: int):
        """
        Remove the entry only if *version* matches the currently stored one.
        A None version is a no-op (it would otherwise match a missing entry).
        """
        ip_addr_str = self._ip_addr_to_str(ip_addr)
        key = self._get_json_key(encode_imsi(imsi), ip_addr_str, rule_id)
        with self._lock:
            cur_version = self._version_by_imsi_and_rule.get(key)
            if version is None:
                return
            if cur_version == version:
                del self._version_by_imsi_and_rule[key]

    def _get_json_key(self, imsi: str, ip_addr: str, rule_id: str):
        # Stable JSON form of SubscriberRuleKey, used as the dict key.
        return json.dumps(SubscriberRuleKey('imsi_rule', imsi, ip_addr,
                                            rule_id))
class RuleIDDict(RedisFlatDict):
    """
    Redis-backed mapping from rule name to rule id.

    Assignments and deletions write straight through to redis; looking up an
    unknown key yields None instead of raising.
    """
    _DICT_HASH = "pipelined:rule_ids"

    def __init__(self):
        serde = RedisSerde(self._DICT_HASH, get_json_serializer(),
                           get_json_deserializer())
        super().__init__(get_default_client(), serde, writethrough=True)

    def __missing__(self, key):
        """Return None instead of raising KeyError for unknown keys."""
        return None
class RuleNameDict(RedisHashDict):
    """
    Redis-backed mapping from rule id to rule name.

    Assignments and deletions write straight through to redis; looking up an
    unknown key yields None instead of raising.
    """
    _DICT_HASH = "pipelined:rule_names"

    def __init__(self):
        super().__init__(
            get_default_client(),
            self._DICT_HASH,
            get_json_serializer(),
            get_json_deserializer(),
        )

    def __missing__(self, key):
        """Return None instead of raising KeyError for unknown keys."""
        return None
class RuleVersionDict(RedisFlatDict):
    """
    Redis-backed mapping from subscriber+rule_id to rule version.

    Assignments and deletions write straight through to redis; looking up an
    unknown key yields None instead of raising.
    """
    _DICT_HASH = "pipelined:rule_versions"

    def __init__(self):
        serde = RedisSerde(self._DICT_HASH, get_json_serializer(),
                           get_json_deserializer())
        super().__init__(get_default_client(), serde, writethrough=True)

    def __missing__(self, key):
        """Return None instead of raising KeyError for unknown keys."""
        return None
class RestartInfoStore(RedisHashDict):
    """
    RestartInfoStore uses the RedisHashDict collection to store the
    latest ovs pid.
    Setting and deleting items in the dictionary syncs with Redis automatically
    """
    _DICT_HASH = "pipelined:enforcement_stats_info"
    def __init__(self):
        client = get_default_client()
        super().__init__(client, self._DICT_HASH,
                         get_json_serializer(), get_json_deserializer())
    def __missing__(self, key):
        """Instead of throwing a key error, return 0 when key not found"""
        return 0
| 35.215768
| 84
| 0.648875
|
acffc9a960a11ee590e2bb5f20ad0329c346a629
| 352
|
py
|
Python
|
401-500/401-410/406-queueReconstructionByHeight/queueReconstructionByHeight.py
|
xuychen/Leetcode
|
c8bf33af30569177c5276ffcd72a8d93ba4c402a
|
[
"MIT"
] | null | null | null |
401-500/401-410/406-queueReconstructionByHeight/queueReconstructionByHeight.py
|
xuychen/Leetcode
|
c8bf33af30569177c5276ffcd72a8d93ba4c402a
|
[
"MIT"
] | null | null | null |
401-500/401-410/406-queueReconstructionByHeight/queueReconstructionByHeight.py
|
xuychen/Leetcode
|
c8bf33af30569177c5276ffcd72a8d93ba4c402a
|
[
"MIT"
] | null | null | null |
class Solution(object):
    def reconstructQueue(self, people):
        """Rebuild the queue described by [height, k] pairs, where k is
        the number of people of height >= height standing in front.

        :type people: List[List[int]]
        :rtype: List[List[int]]
        """
        # Stable two-pass sort: first by k ascending, then by height
        # descending.  Equal heights keep their k-ascending order, which
        # is equivalent to a single sort on the key (-height, k).
        ordered = sorted(people, key=lambda p: p[1])
        ordered.sort(key=lambda p: p[0], reverse=True)

        # Insert each person at index k: everyone already placed is at
        # least as tall, so exactly k such people end up in front.
        queue = []
        for person in ordered:
            queue.insert(person[1], person)
        return queue
| 25.142857
| 67
| 0.542614
|
acffc9f0b0a57d1024d9539b0ee93b5c718deb8e
| 1,066
|
py
|
Python
|
scripts/sra_table_to_samples.py
|
stajichlab/PopGenomics_Afumigatus_Global
|
12e4dfab5df016142fc9845c908525cd3ae3de36
|
[
"MIT"
] | null | null | null |
scripts/sra_table_to_samples.py
|
stajichlab/PopGenomics_Afumigatus_Global
|
12e4dfab5df016142fc9845c908525cd3ae3de36
|
[
"MIT"
] | null | null | null |
scripts/sra_table_to_samples.py
|
stajichlab/PopGenomics_Afumigatus_Global
|
12e4dfab5df016142fc9845c908525cd3ae3de36
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Merge SRA run accessions into the samples table, keyed by strain name.

Reads lib/SRA_samples.csv and the existing samples.csv, and writes the
merged result to samples.csv.2 (the original samples.csv is untouched).
"""
import csv,sys,re

# Directory prefix used for the generated fastq path globs.
outdir="SRA"

with open("lib/SRA_samples.csv","rt") as inp, open("samples.csv","rt") as existing, open("samples.csv.2","w",newline="",encoding="utf-8") as outf:
    outcsv = csv.writer(outf,delimiter=",")
    csvin = csv.reader(inp,delimiter=",")
    sampin = csv.reader(existing,delimiter=",")
    # strain name -> set of fastq path globs (column 2 is ';'-separated).
    table = {}
    for row in sampin:
        # Skip the header row.
        if row[0].startswith("Strain"):
            continue
        table[row[0]] = set(row[1].split(";"))
    for row in csvin:
        # Skip the header row.
        if row[0].startswith("RunAcc"):
            continue
        strain = row[1]
        # Sanitize strain names: spaces -> '_', '/' -> '-'.
        strain = re.sub(r' ','_',strain)
        strain = re.sub(r'\/','-',strain)
        srr = row[0]
        # Glob matching both mates (_1/_2) of the paired-end run.
        srr = "{}/{}_[12].fastq.gz".format(outdir,srr)
        if strain in table:
            table[strain].add(srr)
        else:
            table[strain] = set([srr])
    # NOTE(review): redundant — the 'with' statement closes these handles
    # on exit anyway; harmless to close early here.
    inp.close()
    existing.close()
    # Sort by the directory prefix of an arbitrary path from each set,
    # then by strain name.  NOTE(review): sets are unordered, so the
    # "first" path is not deterministic when a strain mixes prefixes.
    for d in sorted(table,key=lambda strain: ((next(iter(table[strain]))).split('/')[0],strain) ):
        outcsv.writerow([d,";".join(sorted(table[d]))])
| 32.30303
| 146
| 0.550657
|
acffca4d555683675dbbb3ce2196f01ad76d01e9
| 4,942
|
py
|
Python
|
nuitka/codegen/templates/CodeTemplatesConstants.py
|
RESP3CT88/Nuitka
|
0fcc25d9f00c4fc78c79a863c4b7987f573962e1
|
[
"Apache-2.0"
] | null | null | null |
nuitka/codegen/templates/CodeTemplatesConstants.py
|
RESP3CT88/Nuitka
|
0fcc25d9f00c4fc78c79a863c4b7987f573962e1
|
[
"Apache-2.0"
] | null | null | null |
nuitka/codegen/templates/CodeTemplatesConstants.py
|
RESP3CT88/Nuitka
|
0fcc25d9f00c4fc78c79a863c4b7987f573962e1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Templates for the constants handling.
"""
# C source template for the generated constants module.  Instantiated by
# the code generator via %-substitution: global_constants_count,
# sys_executable, sys_prefix, sys_exec_prefix, sys_base_prefix,
# sys_base_exec_prefix, and the nuitka_version_* fields.  The string body
# is runtime data and must not be edited cosmetically.
template_constants_reading = r"""
#include "nuitka/prelude.h"
#include "structseq.h"
// Global constants storage
PyObject *global_constants[%(global_constants_count)d];
// Sentinel PyObject to be used for all our call iterator endings. It will
// become a PyCObject pointing to NULL. It's address is unique, and that's
// enough for us to use it as sentinel value.
PyObject *_sentinel_value = NULL;
PyObject *Nuitka_dunder_compiled_value = NULL;
#ifdef _NUITKA_STANDALONE
extern PyObject *getStandaloneSysExecutablePath(PyObject *basename);
#endif
static void _createGlobalConstants(void) {
// The empty name means global.
loadConstantsBlob(&global_constants[0], "", %(global_constants_count)d);
#if _NUITKA_EXE
/* Set the "sys.executable" path to the original CPython executable or point to inside the
distribution for standalone. */
PySys_SetObject(
(char *)"executable",
#ifndef _NUITKA_STANDALONE
%(sys_executable)s
#else
getStandaloneSysExecutablePath(%(sys_executable)s)
#endif
);
#ifndef _NUITKA_STANDALONE
/* Set the "sys.prefix" path to the original one. */
PySys_SetObject(
(char *)"prefix",
%(sys_prefix)s
);
/* Set the "sys.prefix" path to the original one. */
PySys_SetObject(
(char *)"exec_prefix",
%(sys_exec_prefix)s
);
#if PYTHON_VERSION >= 0x300
/* Set the "sys.base_prefix" path to the original one. */
PySys_SetObject(
(char *)"base_prefix",
%(sys_base_prefix)s
);
/* Set the "sys.exec_base_prefix" path to the original one. */
PySys_SetObject(
(char *)"base_exec_prefix",
%(sys_base_exec_prefix)s
);
#endif
#endif
#endif
static PyTypeObject Nuitka_VersionInfoType;
// Same fields as "sys.version_info" except no serial number.
static PyStructSequence_Field Nuitka_VersionInfoFields[] = {
{(char *)"major", (char *)"Major release number"},
{(char *)"minor", (char *)"Minor release number"},
{(char *)"micro", (char *)"Micro release number"},
{(char *)"releaselevel", (char *)"'alpha', 'beta', 'candidate', or 'release'"},
{0}
};
static PyStructSequence_Desc Nuitka_VersionInfoDesc = {
(char *)"__nuitka_version__", /* name */
(char *)"__compiled__\\n\\nVersion information as a named tuple.", /* doc */
Nuitka_VersionInfoFields, /* fields */
4
};
PyStructSequence_InitType(&Nuitka_VersionInfoType, &Nuitka_VersionInfoDesc);
Nuitka_dunder_compiled_value = PyStructSequence_New(&Nuitka_VersionInfoType);
assert(Nuitka_dunder_compiled_value != NULL);
PyStructSequence_SET_ITEM(Nuitka_dunder_compiled_value, 0, PyInt_FromLong(%(nuitka_version_major)s));
PyStructSequence_SET_ITEM(Nuitka_dunder_compiled_value, 1, PyInt_FromLong(%(nuitka_version_minor)s));
PyStructSequence_SET_ITEM(Nuitka_dunder_compiled_value, 2, PyInt_FromLong(%(nuitka_version_micro)s));
PyStructSequence_SET_ITEM(Nuitka_dunder_compiled_value, 3, Nuitka_String_FromString("%(nuitka_version_level)s"));
// Prevent users from creating the Nuitka version type object.
Nuitka_VersionInfoType.tp_init = NULL;
Nuitka_VersionInfoType.tp_new = NULL;
}
// In debug mode we can check that the constants were not tampered with in any
// given moment. We typically do it at program exit, but we can add extra calls
// for sanity.
#ifndef __NUITKA_NO_ASSERT__
void checkGlobalConstants(void) {
// TODO: Ask constant code to check values.
}
#endif
void createGlobalConstants(void) {
if (_sentinel_value == NULL) {
#if PYTHON_VERSION < 0x300
_sentinel_value = PyCObject_FromVoidPtr(NULL, NULL);
#else
// The NULL value is not allowed for a capsule, so use something else.
_sentinel_value = PyCapsule_New((void *)27, "sentinel", NULL);
#endif
assert(_sentinel_value);
_createGlobalConstants();
}
}
"""

# Hook the module's templates into the debug-checking wrapper when enabled.
from . import TemplateDebugWrapper # isort:skip
TemplateDebugWrapper.checkDebug(globals())
| 32.513158
| 117
| 0.694051
|
acffca8dc1bc939e9585c4cabc2a9c061e24d1f5
| 1,344
|
py
|
Python
|
omaha_server/crash/tests/test_managers.py
|
dentalwings/omaha-server
|
3d8e18c8f4aac4eb16445c0f3160ed1fc2fc8de5
|
[
"Apache-2.0"
] | 2
|
2019-06-13T20:47:18.000Z
|
2022-03-31T03:14:54.000Z
|
omaha_server/crash/tests/test_managers.py
|
dentalwings/omaha-server
|
3d8e18c8f4aac4eb16445c0f3160ed1fc2fc8de5
|
[
"Apache-2.0"
] | 1
|
2020-02-26T20:03:27.000Z
|
2020-02-26T20:03:27.000Z
|
omaha_server/crash/tests/test_managers.py
|
dentalwings/omaha-server
|
3d8e18c8f4aac4eb16445c0f3160ed1fc2fc8de5
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf8
"""
This software is licensed under the Apache 2 license, quoted below.
Copyright 2014 Crystalnix Limited
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""
from django.test import TestCase
from crash.models import Crash, Symbols
from crash.factories import CrashFactory, SymbolsFactory
class CrashManagerTest(TestCase):
    """Crash.objects.get_size() sums archive + minidump sizes over rows."""

    def test_get_size(self):
        archive_bytes = 10
        minidump_bytes = 20
        count = 10
        CrashFactory.create_batch(
            count, archive_size=archive_bytes, minidump_size=minidump_bytes)
        self.assertEqual(
            Crash.objects.get_size(),
            (archive_bytes + minidump_bytes) * count)
class SymbolsManagerTest(TestCase):
    """Symbols.objects.get_size() sums file sizes over all rows."""

    def test_get_size(self):
        size_per_file = 42
        count = 10
        SymbolsFactory.create_batch(count, file_size=size_per_file)
        self.assertEqual(Symbols.objects.get_size(), size_per_file * count)
| 32
| 93
| 0.751488
|
acffcb0326edecd277d5a5cbcdd43400e82907b5
| 997
|
py
|
Python
|
cardinal_pythonlib/sql/__init__.py
|
RudolfCardinal/pythonlib
|
4c583ad1aae3c1166a4e6f964df87eb6c02a73cb
|
[
"Apache-2.0"
] | 10
|
2015-09-30T02:46:48.000Z
|
2021-07-23T05:03:38.000Z
|
cardinal_pythonlib/sql/__init__.py
|
RudolfCardinal/pythonlib
|
4c583ad1aae3c1166a4e6f964df87eb6c02a73cb
|
[
"Apache-2.0"
] | 9
|
2019-07-04T11:10:31.000Z
|
2021-09-23T21:11:42.000Z
|
cardinal_pythonlib/sql/__init__.py
|
RudolfCardinal/pythonlib
|
4c583ad1aae3c1166a4e6f964df87eb6c02a73cb
|
[
"Apache-2.0"
] | 4
|
2017-07-17T15:17:44.000Z
|
2021-07-23T05:03:41.000Z
|
#!/usr/bin/env python
# cardinal_pythonlib/sql/__init__.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
The mere existence of this file makes Python treat the directory as a
package.
"""
| 34.37931
| 79
| 0.620863
|
acffcb533d398c77a18c9fb1813a132fa2e06665
| 5,311
|
py
|
Python
|
topi/python/topi/nn/pad.py
|
wix-playground/incubator-tvm
|
c9e2cc2c3daa8065257c76fce42d9c22e06ebb54
|
[
"Apache-2.0"
] | 286
|
2020-06-23T06:40:44.000Z
|
2022-03-30T01:27:49.000Z
|
topi/python/topi/nn/pad.py
|
SunicYosen/tvm-rram
|
99734d29b77ef88fe81b0fd0cb2b71db8dc2608e
|
[
"Apache-2.0"
] | 10
|
2020-07-31T03:26:59.000Z
|
2021-12-27T15:00:54.000Z
|
topi/python/topi/nn/pad.py
|
SunicYosen/tvm-rram
|
99734d29b77ef88fe81b0fd0cb2b71db8dc2608e
|
[
"Apache-2.0"
] | 30
|
2020-07-17T01:04:14.000Z
|
2021-12-27T14:05:19.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Pad the data by constant value """
from __future__ import absolute_import as _abs
import tvm
from ..util import equal_const_int
from .. import tag
@tvm.tag_scope(tag=tag.INJECTIVE+",pad")
def pad(data, pad_before, pad_after=None, pad_value=0.0, name="PadInput"):
    """Pad Input with a constant value (zeros by default).

    Parameters
    ----------
    data : tvm.Tensor
        n-D input, can be any layout.

    pad_before : list / tuple of n ints
        Pad width on each dimension, before the axis begin.

    pad_after : list / tuple of n ints, optional
        Pad width on each dimension, after the axis end.
        Defaults to ``pad_before`` when not given.

    pad_value : float, optional
        The value to be padded.

    name : str, optional
        The name prefix operators generated

    Returns
    -------
    Output : tvm.Tensor
        n-D, the same layout as Input.
    """
    n = len(data.shape)
    pad_after = pad_after if pad_after else pad_before
    if len(pad_before) != n:
        raise ValueError("Input dimension and pad_before dismatch : %d vs %d" % (
            n, len(pad_before)))
    if len(pad_after) != n:
        # Bug fix: this branch previously reported len(pad_before),
        # printing a misleading length for a pad_after mismatch.
        raise ValueError("Input dimension and pad_after dismatch : %d vs %d" % (
            n, len(pad_after)))
    # Output extent grows by the padding on both sides of every axis.
    out_shape = tuple(
        tvm.ir_pass.Simplify(
            (data.shape[i] + pad_before[i] + pad_after[i])) for i in range(n))
    pad_value = (pad_value if isinstance(pad_value, tvm.expr.Expr)
                 else tvm.const(pad_value, data.dtype))

    def _pad(*indices):
        # Per output element: select the shifted input value when the index
        # falls inside the original extent, else pad_value.
        not_zero = []
        index_tuple = []
        for i in range(n):
            if equal_const_int(pad_before[i], 0) and equal_const_int(pad_after[i], 0):
                # Axis is not padded: pass the index through unchanged.
                index_tuple.append(indices[i])
            else:
                index_tuple.append(indices[i] - pad_before[i])
                not_zero.append(indices[i] >= pad_before[i])
                not_zero.append(indices[i] < data.shape[i] + pad_before[i])
        if not_zero:
            not_zero = tvm.all(*not_zero)
            return tvm.if_then_else(not_zero, data(*index_tuple), pad_value)
        return data(*index_tuple)
    return tvm.compute(out_shape, _pad, name=name)
@tvm.tag_scope(tag=tag.INJECTIVE + ",pad")
def mirror_pad(data,
               pad_before,
               pad_after=None,
               mode='SYMMETRIC',
               name="MirrorPadInput"):
    """Pad Input with mirroring, either symmetric or reflected.

    Parameters
    ----------
    data : tvm.Tensor
        n-D input, can be any layout.

    pad_before : list / tuple of n ints
        Pad width on each dimension, before the axis begin.

    pad_after : list / tuple of n ints, optional
        Pad width on each dimension, after the axis end.
        Defaults to ``pad_before`` when not given.

    mode: str, optional
        Type of mirror padding to apply. Must be SYMMETRIC or REFLECT

    name : str, optional
        The name prefix operators generated

    Returns
    -------
    Output : tvm.Tensor
        n-D, the same layout as Input.
    """
    n = len(data.shape)
    pad_after = pad_after if pad_after else pad_before
    if len(pad_before) != n:
        raise ValueError("Input dimension and pad_before dismatch : %d vs %d" %
                         (n, len(pad_before)))
    if len(pad_after) != n:
        # Bug fix: this branch previously reported len(pad_before),
        # printing a misleading length for a pad_after mismatch.
        raise ValueError("Input dimension and pad_after dismatch : %d vs %d" %
                         (n, len(pad_after)))
    out_shape = tuple(
        tvm.ir_pass.Simplify((data.shape[i] + pad_before[i] + pad_after[i]))
        for i in range(n))
    assert mode in ('SYMMETRIC', 'REFLECT')
    # SYMMETRIC repeats the edge element, REFLECT does not; encoded as the
    # 0/1 offset used in the index mirroring below.
    mode = int(mode == 'SYMMETRIC')

    def _pad(*indices):
        index_tuple = []
        above = []
        below = []
        for i in range(n):
            if equal_const_int(pad_before[i], 0) and equal_const_int(
                    pad_after[i], 0):
                # Axis is not padded: pass the index through unchanged.
                index_tuple.append(indices[i])
                above.append(False)
                below.append(False)
            else:
                index_tuple.append(indices[i] - pad_before[i])
                above.append(indices[i] >= data.shape[i] + pad_before[i])
                below.append(indices[i] < pad_before[i])
        # Mirror out-of-range indices back into the valid extent.
        mapped_tuple = []
        for i, axis in enumerate(index_tuple):
            mapped_axis = tvm.if_then_else(below[i], -axis - mode, axis)
            mapped_axis = tvm.if_then_else(
                above[i], (2 * (data.shape[i] - 1)) - axis + mode, mapped_axis)
            mapped_tuple.append(mapped_axis)
        return data(*mapped_tuple)
    return tvm.compute(out_shape, _pad, name=name)
| 36.376712
| 86
| 0.615703
|
acffcba1eec411a3d2965d2218e8595ee0cca912
| 116,857
|
py
|
Python
|
build/releases/release-0.492/ob/lisp-core.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 26
|
2019-02-01T19:12:21.000Z
|
2022-03-25T04:40:38.000Z
|
build/releases/release-0.492/ob/lisp-core.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 3
|
2019-10-29T17:49:19.000Z
|
2022-03-20T21:21:31.000Z
|
build/releases/release-0.492/ob/lisp-core.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 4
|
2019-02-02T16:50:48.000Z
|
2020-10-29T03:10:58.000Z
|
# -----------------------------------------------------------------------------
#
# Copyright 2013-2019 lispers.net - Dino Farinacci <farinacci@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -----------------------------------------------------------------------------
#
# lisp-core.py
#
# This is the core process that is used to demux to the specific LISP
# functional components. The 4342 listen socket is centralized here.
#
#
# +------------- data encapsulation via network --------------+
# | |
# | IPC when mr & ms colocated |
# | +--------------------------------+ |
# | | | |
# | | IPC when mr & ddt colo | |
# | | +------------+ | |
# | | | | | |
# | | | v v v 4341
# +-------------+ +----------+ +----------+ +----------+ +----------+
# | lisp-[ir]tr | | lisp-mr | | lisp-ddt | | lisp-ms | | lisp-etr |
# +-------------+ +----------+ +----------+ +----------+ +----------+
# ^ IPC ^ IPC ^ IPC ^ IPC ^ IPC
# | | | | |
# | | | | |
# | | | | |
# +--------------+--------------+--------------+--------------+
# |
# | for dispatching control messages
# +-----------+
# | lisp-core |
# +-----------+
# | 4342
# |
# via network
#
# -----------------------------------------------------------------------------
if 64 - 64: i11iIiiIii
import lisp
import lispconfig
import multiprocessing
import threading
import commands
import time
import os
import bottle
from cherrypy import wsgiserver
from cherrypy . wsgiserver . ssl_pyopenssl import pyOpenSSLAdapter
if 65 - 65: O0 / iIii1I11I1II1 % OoooooooOO - i1IIi
import json
import sys
import socket
import thread
if 73 - 73: II111iiii
if 22 - 22: I1IiiI * Oo0Ooo / OoO0O00 . OoOoOO00 . o0oOOo0O0Ooo / I1ii11iIi11i
if 48 - 48: oO0o / OOooOOo / I11i / Ii1I
if 48 - 48: iII111i % IiII + I1Ii111 / ooOoO0o * Ii1I
if 46 - 46: ooOoO0o * I11i - OoooooooOO
if 30 - 30: o0oOOo0O0Ooo - O0 % o0oOOo0O0Ooo - OoooooooOO * O0 * OoooooooOO
# Module-level state shared by the route handlers below.  Names come from
# the release obfuscator; roles inferred from use — TODO confirm.
Oo0o = ""
if 60 - 60: I1ii11iIi11i + I1Ii111 - I11i / i1IIi
Ii1iI = None
# NOTE(review): Oo is passed to lisp.lisp_ipc()/lisp_receive() by the
# handlers below — presumably the IPC listen socket; verify at setup code.
Oo = None
I1Ii11I1Ii1i = None
Ooo = [ None , None , None ]
o0oOoO00o = None
if 43 - 43: Ii1I . oO0o
if 27 - 27: OoO0O00 - O0 . I1Ii111 * iII111i - I1ii11iIi11i
if 15 - 15: I1IiiI
if 90 - 90: IiII * i1IIi / Ii1I . OoO0O00 * oO0o
if 16 - 16: ooOoO0o * IiII % I11i . I1Ii111 / IiII % iII111i
if 27 - 27: IiII . i1IIi * OoOoOO00 % Ii1I / i1IIi
if 3 - 3: IiII / ooOoO0o
if 28 - 28: ooOoO0o + I1Ii111 - ooOoO0o . OoooooooOO
@bottle.route('/lisp/api', method="get")
@bottle.route('/lisp/api/<command>', method="get")
@bottle.route('/lisp/api/<command>/<data_structure>', method="get")
def oO0(command="", data_structure=""):
    """GET handler for the lisp REST API.

    Authenticates via HTTP basic auth (programmatic clients) or, failing
    that, via the browser login session.  Returns either a named data
    structure (command == "data") or a configuration clause, JSON-encoded.
    Note: nesting reconstructed from a whitespace-mangled, obfuscated
    source; dead obfuscator filler lines were removed.
    """
    not_auth = [{"?": [{"?": "not-auth"}]}]

    if (bottle.request.auth != None):
        username, password = bottle.request.auth
        if (lispconfig.lisp_find_user_account(username, password) == False):
            return (json.dumps(not_auth))
    else:
        # No basic auth: refuse python API clients, then require a
        # validated browser login session.
        if (bottle.request.headers["User-Agent"].find("python") != -1):
            return (json.dumps(not_auth))
        if (lispconfig.lisp_validate_user() == False):
            return (json.dumps(not_auth))

    # "data" request: optional JSON body narrows/filters the query.
    if (command == "data" and data_structure != ""):
        body = bottle.request.body.readline()
        payload = json.loads(body) if body != "" else ""
        if (payload != ""): payload = payload.values()[0]
        if (payload == []): payload = ""
        if (type(payload) == dict and type(payload.values()[0]) == dict):
            payload = payload.values()[0]
        return (o0o00ooo0(data_structure, payload))

    # Otherwise return a configuration clause by name.
    if (command != ""):
        command = "lisp " + command
    else:
        body = bottle.request.body.readline()
        if (body == ""):
            return (json.dumps([{"?": [{"?": "no-body"}]}]))
        command = json.loads(body).keys()[0]

    return (json.dumps(lispconfig.lisp_get_clause_for_api(command)))
if 77 - 77: OoOoOO00 / I11i
if 98 - 98: iIii1I11I1II1 / i1IIi / i11iIiiIii / o0oOOo0O0Ooo
if 28 - 28: OOooOOo - IiII . IiII + OoOoOO00 - OoooooooOO + O0
if 95 - 95: OoO0O00 % oO0o . O0
if 15 - 15: ooOoO0o / Ii1I . Ii1I - i1IIi
if 53 - 53: IiII + I1IiiI * oO0o
if 61 - 61: i1IIi * OOooOOo / OoooooooOO . i11iIiiIii . OoOoOO00
def o00O():
    """Return a JSON-encoded dict of system status for the "system" query.

    Includes hostname, uptimes, version, whether a traceback log exists,
    and the two local RLOC addresses.
    """
    info = {}
    info["hostname"] = socket.gethostname()
    info["system-uptime"] = commands.getoutput("uptime")
    info["lisp-uptime"] = lisp.lisp_print_elapsed(lisp.lisp_uptime)
    info["lisp-version"] = lisp.lisp_version

    info["traceback-log"] = \
        "yes" if os.path.exists("./logs/lisp-traceback.log") else "no"

    rloc_a = lisp.lisp_myrlocs[0]
    rloc_b = lisp.lisp_myrlocs[1]
    rloc_a = "none" if (rloc_a == None) else rloc_a.print_address_no_iid()
    rloc_b = "none" if (rloc_b == None) else rloc_b.print_address_no_iid()
    info["lisp-rlocs"] = [rloc_a, rloc_b]
    return (json.dumps(info))
if 35 - 35: IiII % I1IiiI
if 70 - 70: iII111i * I1ii11iIi11i
if 46 - 46: ooOoO0o / OoO0O00
if 52 - 52: o0oOOo0O0Ooo - OoooooooOO + Ii1I + Ii1I - o0oOOo0O0Ooo / I1Ii111
if 44 - 44: ooOoO0o . i1IIi - I1ii11iIi11i . O0 - ooOoO0o
if 92 - 92: iII111i . I11i + o0oOOo0O0Ooo
if 28 - 28: i1IIi * Oo0Ooo - o0oOOo0O0Ooo * IiII * Ii1I / OoO0O00
if 94 - 94: II111iiii % I1ii11iIi11i / OoOoOO00 * iIii1I11I1II1
if 54 - 54: o0oOOo0O0Ooo - I1IiiI + OoooooooOO
if 70 - 70: Ii1I / I11i . iII111i % Oo0Ooo
if 67 - 67: OoOoOO00 * o0oOOo0O0Ooo . IiII - OoO0O00 * o0oOOo0O0Ooo
if 46 - 46: OOooOOo + OoOoOO00 . I1IiiI * oO0o % IiII
if 86 - 86: I1IiiI + Ii1I % i11iIiiIii * oO0o . ooOoO0o * I11i
if 44 - 44: oO0o
if 88 - 88: I1Ii111 % Ii1I . II111iiii
def o0o00ooo0(data_structure, data):
    """Fetch one named data structure from the owning LISP component.

    Sends an IPC request to the component that holds the structure
    (ITR/RTR, ETR, or MS), then blocks until the reply arrives and
    returns it (a JSON string).  Returns "[]" for unknown structures or
    when the owning component is not running.
    """
    known = ["site-cache", "map-cache", "system", "map-resolver",
             "map-server", "database-mapping"]
    if (data_structure not in known): return (json.dumps([]))

    # "system" is answered locally, no IPC round-trip needed.
    if (data_structure == "system"): return (o00O())

    if (data != ""): data = json.dumps(data)
    request = lisp.lisp_api_ipc("lisp-core", data_structure + "%" + data)

    if (data_structure in ["map-cache", "map-resolver"]):
        # Prefer the RTR when co-located with an ITR.
        if (lisp.lisp_is_running("lisp-rtr")):
            lisp.lisp_ipc_lock.acquire()
            lisp.lisp_ipc(request, Oo, "lisp-rtr")
        elif (lisp.lisp_is_running("lisp-itr")):
            lisp.lisp_ipc_lock.acquire()
            lisp.lisp_ipc(request, Oo, "lisp-itr")
        else:
            return (json.dumps([]))

    if (data_structure in ["map-server", "database-mapping"]):
        if (lisp.lisp_is_running("lisp-etr")):
            lisp.lisp_ipc_lock.acquire()
            lisp.lisp_ipc(request, Oo, "lisp-etr")
        else:
            return (json.dumps([]))

    if (data_structure == "site-cache"):
        if (lisp.lisp_is_running("lisp-ms")):
            lisp.lisp_ipc_lock.acquire()
            lisp.lisp_ipc(request, Oo, "lisp-ms")
        else:
            return (json.dumps([]))

    lisp.lprint("Waiting for api get-data '{}', parmameters: '{}'".format(
        data_structure, data))

    opcode, source, port, reply = lisp.lisp_receive(Oo, True)
    lisp.lisp_ipc_lock.release()
    return (reply)
if 61 - 61: II111iiii
if 64 - 64: ooOoO0o / OoOoOO00 - O0 - I11i
if 86 - 86: I11i % OoOoOO00 / I1IiiI / OoOoOO00
if 42 - 42: OoO0O00
if 67 - 67: I1Ii111 . iII111i . O0
if 10 - 10: I1ii11iIi11i % I1ii11iIi11i - iIii1I11I1II1 / OOooOOo + Ii1I
if 87 - 87: oO0o * I1ii11iIi11i + OOooOOo / iIii1I11I1II1 / iII111i
@bottle.route('/lisp/api', method="put")
@bottle.route('/lisp/api/<command>', method="put")
@bottle.route('/lisp/api/<command>', method="delete")
def I1111IIi(command=""):
    """PUT/DELETE handler for the lisp REST API.

    Requires HTTP basic auth.  Adds/replaces (PUT) or removes (DELETE)
    one configuration clause supplied as a JSON body.  Note: nesting
    reconstructed from a whitespace-mangled, obfuscated source; dead
    obfuscator filler lines were removed.
    """
    not_auth = [{"?": [{"?": "not-auth"}]}]
    if (bottle.request.auth == None): return (not_auth)

    if (bottle.request.auth != None):
        username, password = bottle.request.auth
        if (lispconfig.lisp_find_user_account(username, password) == False):
            return (json.dumps(not_auth))
    else:
        # Unreachable after the auth==None early return above; kept to
        # mirror the GET handler's structure.
        if (bottle.request.headers["User-Agent"].find("python") != -1):
            return (json.dumps(not_auth))
        if (lispconfig.lisp_validate_user() == False):
            return (json.dumps(not_auth))

    # Only superusers may modify user accounts.
    if (command == "user-account"):
        if (lispconfig.lisp_is_user_superuser(username) == False):
            return (json.dumps([{"user-account": [{"?": "not-auth"}]}]))

    body = bottle.request.body.readline()
    if (body == ""):
        return (json.dumps([{"?": [{"?": "no-body"}]}]))

    clause = json.loads(body)
    if (command != ""):
        command = "lisp " + command
    else:
        command = clause[0].keys()[0]

    # Serialize configuration changes with the other IPC users.
    lisp.lisp_ipc_lock.acquire()
    if (bottle.request.method == "DELETE"):
        clause = lispconfig.lisp_remove_clause_for_api(clause)
    else:
        clause = lispconfig.lisp_put_clause_for_api(clause)
    lisp.lisp_ipc_lock.release()
    return (json.dumps(clause))
if 77 - 77: iIii1I11I1II1 * OoO0O00
if 95 - 95: I1IiiI + i11iIiiIii
if 6 - 6: ooOoO0o / i11iIiiIii + iII111i * oO0o
if 80 - 80: II111iiii
if 83 - 83: I11i . i11iIiiIii + II111iiii . o0oOOo0O0Ooo * I11i
@bottle.route('/lisp/show/api-doc', method="get")
def oooO0():
    """Serve pydoc-generated documentation for the lispapi module."""
    # Regenerate the text file when the module source is present.
    if (os.path.exists("lispapi.py")):
        os.system("pydoc lispapi > lispapi.txt")
    if (os.path.exists("lispapi.txt") == False):
        return ("lispapi.txt file not found")
    return (bottle.static_file("lispapi.txt", root="./"))
if 60 - 60: o0oOOo0O0Ooo
if 25 - 25: OoO0O00
if 62 - 62: OOooOOo + O0
if 98 - 98: o0oOOo0O0Ooo
if 51 - 51: Oo0Ooo - oO0o + II111iiii * Ii1I . I11i + oO0o
@bottle.route('/lisp/show/command-doc', method="get")
def OoO0o():
    """Serve the annotated example configuration file as plain text."""
    return (bottle.static_file("lisp.config.example", root="./",
                               mimetype="text/plain"))
if 78 - 78: oO0o % O0 % Ii1I
if 46 - 46: OoooooooOO . i11iIiiIii
if 94 - 94: o0oOOo0O0Ooo * Ii1I / Oo0Ooo / Ii1I
if 87 - 87: Oo0Ooo . IiII
if 75 - 75: ooOoO0o + OoOoOO00 + o0oOOo0O0Ooo * I11i % oO0o . iII111i
if 55 - 55: OOooOOo . I1IiiI
if 61 - 61: Oo0Ooo % IiII . Oo0Ooo
@bottle.route('/lisp/show/lisp-xtr', method="get")
def o0oOO000oO0oo():
    """Render the xTR 'show' output file as simple HTML."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    # Prefer the ztr output file when it exists, else the xtr one.
    show_file = "./show-ztr" if os.path.exists("./show-ztr") else "./show-xtr"
    handle = open(show_file, "r")
    contents = handle.read()
    handle.close()

    html = ""
    for line in contents.split("\n"):
        # Re-create leading indentation with HTML spacing.  NOTE(review):
        # the 4- and 2-space prefixes are reconstructed from the [0:4] and
        # [0:2] slices in a whitespace-mangled source — confirm widths.
        if (line[0:4] == "    "): html += lisp.lisp_space(4)
        if (line[0:2] == "  "): html += lisp.lisp_space(2)
        html += line + "<br>"

    html = lisp.convert_font(html)
    return (lisp.lisp_print_sans(html))
if 57 - 57: II111iiii + i1IIi
if 10 - 10: oO0o + i1IIi
if 87 - 87: I1IiiI
if 58 - 58: OoOoOO00 % o0oOOo0O0Ooo
if 50 - 50: I1Ii111 . o0oOOo0O0Ooo
if 97 - 97: O0 + OoOoOO00
if 89 - 89: o0oOOo0O0Ooo + OoO0O00 * I11i * Ii1I
@bottle.route('/lisp/show/<xtr>/keys', method="get")
def iiIiI1i1(xtr):
    """Show security keys for one xTR component (itr/etr/rtr).

    Superuser-only; forwards a "show <xtr>-keys" command over IPC.
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    if (lispconfig.lisp_is_user_superuser(None) == False):
        message = "Permission denied"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(message)))

    if (xtr not in ["itr", "etr", "rtr"]):
        message = "Invalid URL"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(message)))

    return (lispconfig.lisp_process_show_command(
        Oo, "show {}-keys".format(xtr)))
if 11 - 11: I1IiiI
if 16 - 16: Ii1I + IiII * O0 % i1IIi . I1IiiI
if 67 - 67: OoooooooOO / I1IiiI * Ii1I + I11i
if 65 - 65: OoooooooOO - I1ii11iIi11i / ooOoO0o / II111iiii / i1IIi
if 71 - 71: I1Ii111 + Ii1I
if 28 - 28: OOooOOo
if 38 - 38: ooOoO0o % II111iiii % I11i / OoO0O00 + OoOoOO00 / i1IIi
if 54 - 54: iIii1I11I1II1 % I1ii11iIi11i - OOooOOo / oO0o - OoO0O00 . I11i
@bottle.route('/lisp/geo-map/<geo_prefix>')
def IIo0Oo0oO0oOO00(geo_prefix):
    """Render a map page centered on a geo-prefix with its radius drawn."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    #
    # The URL encodes the mask after the last "-"; rebuild the
    # "coords/mask" form that parse_geo_string() expects.
    #
    pieces = geo_prefix.split("-")
    geo_prefix = "-".join(pieces[0:-1]) + "/" + pieces[-1]

    geo = lisp.lisp_geo("")
    geo.parse_geo_string(geo_prefix)
    lat, lon = geo.dms_to_decimal()
    radius_meters = geo.radius * 1000

    #
    # Substitute coordinates and radius into the map template page.
    #
    fd = open("./lispers.net-geo.html", "r")
    page = fd.read()
    fd.close()

    page = page.replace("$LAT", str(lat))
    page = page.replace("$LON", str(lon))
    page = page.replace("$RADIUS", str(radius_meters))
    return (page)
if 61 - 61: OoOoOO00 - OOooOOo - i1IIi
if 25 - 25: O0 * I11i + I1ii11iIi11i . o0oOOo0O0Ooo . o0oOOo0O0Ooo
if 58 - 58: I1IiiI
if 53 - 53: i1IIi
if 59 - 59: o0oOOo0O0Ooo
if 81 - 81: OoOoOO00 - OoOoOO00 . iII111i
if 73 - 73: I11i % i11iIiiIii - I1IiiI
@bottle.route('/lisp/login', method="get")
def oOO00O():
    """Display the login page."""
    return (lispconfig.lisp_login_page())
if 7 - 7: O0 * i11iIiiIii * Ii1I + ooOoO0o % OoO0O00 - ooOoO0o
if 39 - 39: Oo0Ooo * OOooOOo % OOooOOo - OoooooooOO + o0oOOo0O0Ooo - I11i
if 23 - 23: i11iIiiIii
if 30 - 30: o0oOOo0O0Ooo - i1IIi % II111iiii + I11i * iIii1I11I1II1
if 81 - 81: IiII % i1IIi . iIii1I11I1II1
if 4 - 4: i11iIiiIii % OoO0O00 % i1IIi / IiII
if 6 - 6: iII111i / I1IiiI % OOooOOo - I1IiiI
if 31 - 31: OOooOOo
@bottle.route('/lisp/login', method="post")
def i1():
    """Process a login attempt; on success show the landing page."""
    if (lispconfig.lisp_validate_user()):
        return (lispconfig.lisp_landing_page())

    # Authentication failed — send the user back to the login page.
    return (oOO00O())
if 76 - 76: I1IiiI * iII111i % I1Ii111
if 57 - 57: iIii1I11I1II1 - i1IIi / I1Ii111 - O0 * OoooooooOO % II111iiii
if 68 - 68: OoooooooOO * I11i % OoOoOO00 - IiII
if 34 - 34: I1Ii111 . iIii1I11I1II1 * OoOoOO00 * oO0o / I1Ii111 / I1ii11iIi11i
if 78 - 78: Oo0Ooo - o0oOOo0O0Ooo / OoOoOO00
if 10 - 10: iII111i + Oo0Ooo * I1ii11iIi11i + iIii1I11I1II1 / I1Ii111 / I1ii11iIi11i
if 42 - 42: I1IiiI
@bottle.route('/lisp')
def II1i11I():
    """Top-level landing page (login required)."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    return (lispconfig.lisp_landing_page())
if 49 - 49: oO0o - i11iIiiIii . I1Ii111 * Ii1I % iII111i + i1IIi
if 71 - 71: o0oOOo0O0Ooo
if 38 - 38: oO0o % OoOoOO00 + I1ii11iIi11i . i11iIiiIii
if 53 - 53: i11iIiiIii * iII111i
if 68 - 68: iIii1I11I1II1 * iIii1I11I1II1 . o0oOOo0O0Ooo / II111iiii % Oo0Ooo
if 38 - 38: ooOoO0o - OOooOOo / iII111i
if 66 - 66: O0 % I1ii11iIi11i + i11iIiiIii . OoOoOO00 / Ii1I + I1ii11iIi11i
@bottle.route('/lisp/traceback')
def ooo00Ooo():
    """Display Python tracebacks logged by the LISP subsystem.

    Shows ./logs/lisp-traceback.log if it has content; otherwise greps
    every log file for "Traceback" and reports which files contain any.
    Returns the wrapped HTML status page.
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    no_tracebacks = True

    if (os.path.exists("./logs/lisp-traceback.log")):
        output = commands.getoutput("cat ./logs/lisp-traceback.log")
        if (output):
            output = output.replace("----------", "<b>----------</b>")
            output = output.replace("\n", "<br>")
            no_tracebacks = False

    #
    # Nothing in the dedicated traceback log — look in the other log files.
    #
    if (no_tracebacks):
        output = ""
        grep_cmd = "egrep --with-filename Traceback ./logs/*.log"
        grep_out = commands.getoutput(grep_cmd)

        #
        # BUG FIX: the original iterated "for line in grep_out" over the
        # returned *string*, walking it character by character, so every
        # ":" character produced a bogus match entry.  Iterate over lines.
        #
        for line in grep_out.split("\n"):
            if (line.find(":") == -1): continue
            fields = line.split(":")
            if (fields[1] == "0"): continue
            output += "Found Tracebacks in log file {}<br>".format(fields[0])
            no_tracebacks = False

        # Strip the trailing "<br>" from the last entry.
        output = output[0:-4]

    if (no_tracebacks):
        output = "No Tracebacks found - a stable system is a happy system"

    output = lisp.lisp_print_cour(output)
    return (lispconfig.lisp_show_wrapper(output))
if 95 - 95: I1IiiI
if 46 - 46: OoOoOO00 + OoO0O00
if 70 - 70: iII111i / iIii1I11I1II1
if 85 - 85: OoooooooOO % i1IIi * OoooooooOO / I1ii11iIi11i
if 96 - 96: OoooooooOO + oO0o
if 44 - 44: oO0o
if 20 - 20: I11i + Ii1I / O0 % iIii1I11I1II1
@bottle.route('/lisp/show/not-supported')
def oOo0O():
    """Display the page shown for unsupported show commands."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    return (lispconfig.lisp_not_supported())
if 30 - 30: iIii1I11I1II1 . I1IiiI . OOooOOo / o0oOOo0O0Ooo
if 42 - 42: Oo0Ooo
if 19 - 19: oO0o % I1ii11iIi11i * iIii1I11I1II1 + I1IiiI
if 46 - 46: Oo0Ooo
if 1 - 1: iII111i
if 97 - 97: OOooOOo + iII111i + O0 + i11iIiiIii
if 77 - 77: o0oOOo0O0Ooo / OoooooooOO
@ bottle . route ( '/lisp/show/status' )
def IIii11I1i1I ( ) :
 """Render the LISP subsystem status page.

 Collects version, uptime, process list, and system-resource data and
 formats it into an HTML table.  Superusers additionally get a row of
 administrative action buttons.
 """
 if ( lispconfig . lisp_validate_user ( ) == False ) :
  return ( oOO00O ( ) )
 if 99 - 99: iII111i
 if 76 - 76: OoO0O00 * I1IiiI
 if 82 - 82: Ii1I * iII111i / I1ii11iIi11i
 if 36 - 36: OoooooooOO - i1IIi . O0 / II111iiii + o0oOOo0O0Ooo
 if 33 - 33: II111iiii / ooOoO0o * O0 % Ii1I * I1Ii111
 # Administrative buttons are shown only to superusers.
 i1I1ii = ""
 I11iII = lispconfig . lisp_is_user_superuser ( None )
 if ( I11iII ) :
  O0o = lisp . lisp_button ( "show configuration" , "/lisp/show/conf" )
  O0OOoOOO0oO = lisp . lisp_button ( "show configuration diff" , "/lisp/show/diff" )
  I1ii11 = lisp . lisp_button ( "archive configuration" , "/lisp/archive/conf" )
  oOoOoOoo0 = lisp . lisp_button ( "clear configuration" , "/lisp/clear/conf/verify" )
  OOOOoO00o0O = lisp . lisp_button ( "log flows" , "/lisp/log/flows" )
  III1ii1I = lisp . lisp_button ( "install LISP software" , "/lisp/install/image" )
  Ii1i1iI = lisp . lisp_button ( "restart LISP subsystem" , "/lisp/restart/verify" )
  if 16 - 16: OOooOOo / Oo0Ooo / OoooooooOO * I1IiiI + i1IIi % OOooOOo
  i1I1ii = "<center>{}{}{}{}{}{}{}</center><hr>" . format ( O0o , O0OOoOOO0oO , I1ii11 , oOoOoOoo0 ,
  OOOOoO00o0O , III1ii1I , Ii1i1iI )
 if 71 - 71: OoOoOO00
 if 14 - 14: i11iIiiIii % OOooOOo
 # Gather host facts: uptime, platform string, LISP version (sans "+").
 OooO0oo = commands . getoutput ( "uptime" )
 o0o0oOoOO0O = commands . getoutput ( "uname -pv" )
 i1ii1II1ii = lisp . lisp_version . replace ( "+" , "" )
 if 28 - 28: I1ii11iIi11i
 if 61 - 61: OOooOOo % OOooOOo * o0oOOo0O0Ooo / o0oOOo0O0Ooo
 if 75 - 75: IiII . ooOoO0o
 if 50 - 50: OoOoOO00
 if 60 - 60: ooOoO0o * iIii1I11I1II1 * I1ii11iIi11i * Oo0Ooo
 O0ooooo0OOOO0 = multiprocessing . cpu_count ( )
 if 9 - 9: II111iiii - o0oOOo0O0Ooo / iII111i / o0oOOo0O0Ooo
 # Trim uptime output at ", load" — only the uptime portion is shown.
 I1i111iiIIIi = OooO0oo . find ( ", load" )
 OooO0oo = OooO0oo [ 0 : I1i111iiIIIi ]
 O00 = lisp . lisp_print_elapsed ( lisp . lisp_uptime )
 if 17 - 17: Ii1I - OoooooooOO % Ii1I . IiII / i11iIiiIii % iII111i
 iIiIIIIIii = "Not available"
 if 58 - 58: o0oOOo0O0Ooo / IiII . OoOoOO00 / OoooooooOO + I1Ii111
 if 86 - 86: I11i * I1IiiI + I11i + II111iiii
 if 8 - 8: I1Ii111 - iII111i / ooOoO0o
 if 96 - 96: OoOoOO00
 # Find the LISP python processes; ps flags differ on macOS.
 OOoOO0o0o0 = "ps auww" if lisp . lisp_is_macos ( ) else "ps aux"
 IIiiI = commands . getoutput ( "{} | egrep 'PID|python lisp|python -O lisp' | egrep -v grep" . format ( OOoOO0o0o0 ) )
 if 31 - 31: I1ii11iIi11i + Ii1I + I1Ii111 / Ii1I
 if 25 - 25: OoO0O00
 IIiiI = IIiiI . replace ( " " , lisp . space ( 1 ) )
 IIiiI = IIiiI . replace ( "\n" , "<br>" )
 if 24 - 24: IiII * i11iIiiIii * OOooOOo
 if 85 - 85: o0oOOo0O0Ooo . OoOoOO00 / ooOoO0o . O0 % I1Ii111
 if 90 - 90: Oo0Ooo % O0 * iIii1I11I1II1 . iII111i
 if 8 - 8: ooOoO0o + II111iiii / iII111i / I11i
 # macOS path: halve the (hyperthreaded) CPU count and reformat the
 # "top -l 1" sections with <br> separators for HTML display.
 if ( o0o0oOoOO0O . find ( "Darwin" ) != - 1 ) :
  O0ooooo0OOOO0 = O0ooooo0OOOO0 / 2
  iIiIIIIIii = commands . getoutput ( "top -l 1 | head -50" )
  iIiIIIIIii = iIiIIIIIii . split ( "PID" )
  iIiIIIIIii = iIiIIIIIii [ 0 ]
  if 74 - 74: O0 / i1IIi
  if 78 - 78: OoooooooOO . OoO0O00 + ooOoO0o - i1IIi
  if 31 - 31: OoooooooOO . OOooOOo
  if 83 - 83: iII111i . O0 / Oo0Ooo / OOooOOo - II111iiii
  if 100 - 100: OoO0O00
  I1i111iiIIIi = iIiIIIIIii . find ( "Load Avg" )
  II1i = iIiIIIIIii [ 0 : I1i111iiIIIi ] . find ( "threads" )
  Ii1IIIIi1ii1I = iIiIIIIIii [ 0 : II1i + 7 ]
  iIiIIIIIii = Ii1IIIIi1ii1I + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "CPU usage" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "SharedLibs:" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "MemRegions" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "PhysMem" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "VM:" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "Networks" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
  I1i111iiIIIi = iIiIIIIIii . find ( "Disks" )
  iIiIIIIIii = iIiIIIIIii [ 0 : I1i111iiIIIi ] + "<br>" + iIiIIIIIii [ I1i111iiIIIi : : ]
 else :
  if 13 - 13: I1IiiI % OoOoOO00 . I1ii11iIi11i / Oo0Ooo % OOooOOo . OoooooooOO
  if 22 - 22: IiII / i11iIiiIii
  if 62 - 62: OoO0O00 / I1ii11iIi11i
  if 7 - 7: OoooooooOO . IiII
  # Linux path: batch-mode top, HTML-spaced after the "PID" header.
  I111iI = commands . getoutput ( "top -b -n 1 | head -50" )
  I111iI = I111iI . split ( "PID" )
  I111iI [ 1 ] = I111iI [ 1 ] . replace ( " " , lisp . space ( 1 ) )
  I111iI = I111iI [ 0 ] + I111iI [ 1 ]
  iIiIIIIIii = I111iI . replace ( "\n" , "<br>" )
 if 53 - 53: Ii1I % Ii1I * o0oOOo0O0Ooo + OoOoOO00
 if 92 - 92: OoooooooOO + i1IIi / Ii1I * O0
 O00oOo00o0o = commands . getoutput ( "cat release-notes.txt" )
 O00oOo00o0o = O00oOo00o0o . replace ( "\n" , "<br>" )
 if 85 - 85: iII111i + OoooooooOO * iII111i - I1Ii111 % i11iIiiIii
 # NOTE(review): Oo0o below is a module-level value defined elsewhere in
 # this file — presumably the build-date string; confirm before relying
 # on this documentation.
 i1I1ii += '''
    <br><table align="center" border="1" cellspacing="3x" cellpadding="5x">
    <tr>
    <td width="20%"><i>LISP Subsystem Version:<br>
    LISP Release {} Build Date:</i></td>
    <td width="80%"><font face="Courier New">{}<br>
    {}</font></td>
    </tr>
    <tr>
    <td width="20%"><i>LISP Subsystem Uptime:<br>System Uptime:</i></td>
    <td width="80%"><font face="Courier New">{}<br>
    {}</font></td>
    </tr>
    <tr>
    <td width="20%"><i>System Architecture:<br>
    Number of CPUs:<font face="Courier New">{}{}</font></td>
    <td width="80%"><font face="Courier New">{}</font></td>
    </tr>
    <tr>
    <td width="20%" valign="top"><i>LISP Process Status:</i></td>
    <td width="80%">
    <div style="height: 100px; overflow: auto">
    <font size="2" face="Courier New">{}</font></div></td>
    </tr>
    <tr>
    <td width="20%" valign="top"><i>System Resource Utilization:</i></td>
    <td width="80%">
    <div style="height: 200px; overflow: auto">
    <font face="Courier New">{}</font></td>
    </tr>
    <tr>
    <td width="20%" valign="top"><i>Release Notes:</i></td>
    <td width="80%">
    <div style="height: 300px; overflow: auto">
    <font size="2" face="Courier New">{}</font></div></td>
    </tr>
    </table>
    ''' . format ( i1ii1II1ii , lisp . lisp_version , Oo0o , O00 ,
 OooO0oo , lisp . lisp_space ( 1 ) , O0ooooo0OOOO0 , o0o0oOoOO0O , IIiiI , iIiIIIIIii ,
 O00oOo00o0o )
 if 71 - 71: I1ii11iIi11i - ooOoO0o / OoOoOO00 * OoOoOO00 / i1IIi . i1IIi
 return ( lispconfig . lisp_show_wrapper ( i1I1ii ) )
if 53 - 53: I1Ii111
if 21 - 21: I11i
if 92 - 92: i11iIiiIii / I1Ii111 - iII111i % ooOoO0o * I1Ii111 + Oo0Ooo
if 11 - 11: OoooooooOO . I1Ii111
if 80 - 80: OoooooooOO - OOooOOo * Ii1I * I1ii11iIi11i / I1IiiI / OOooOOo
if 13 - 13: I1Ii111 * ooOoO0o + i11iIiiIii * I1Ii111 - ooOoO0o
if 23 - 23: iIii1I11I1II1 * i1IIi % OoooooooOO * IiII
@bottle.route('/lisp/show/conf')
def I1Iiiiiii():
    """Serve the running configuration file as plain text."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    return (bottle.static_file("lisp.config", root="./",
        mimetype="text/plain"))
if 69 - 69: O0
if 85 - 85: ooOoO0o / O0
if 18 - 18: o0oOOo0O0Ooo % O0 * I1ii11iIi11i
if 62 - 62: I1Ii111 . IiII . OoooooooOO
if 11 - 11: OOooOOo / I11i
if 73 - 73: i1IIi / i11iIiiIii
if 58 - 58: Oo0Ooo . II111iiii + oO0o - i11iIiiIii / II111iiii / O0
@bottle.route('/lisp/show/diff')
def oOOoOo():
    """Serve the configuration diff file as plain text."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    return (bottle.static_file("lisp.config.diff", root="./",
        mimetype="text/plain"))
if 7 - 7: O0 % o0oOOo0O0Ooo + I1ii11iIi11i * iII111i - iII111i
if 42 - 42: OoOoOO00 * OoOoOO00 * I1Ii111 . I11i
if 51 - 51: OOooOOo % iIii1I11I1II1 - OoooooooOO % ooOoO0o * iIii1I11I1II1 % OoO0O00
if 99 - 99: oO0o * II111iiii * I1Ii111
if 92 - 92: Oo0Ooo
if 40 - 40: OoOoOO00 / IiII
if 79 - 79: OoO0O00 - iIii1I11I1II1 + Ii1I - I1Ii111
@bottle.route('/lisp/archive/conf')
def OoO():
    """Copy lisp.config to lisp.config.archive under the IPC lock."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    # Hold the IPC lock so nothing rewrites the file mid-copy.
    lisp.lisp_ipc_lock.acquire()
    os.system("cp ./lisp.config ./lisp.config.archive")
    lisp.lisp_ipc_lock.release()

    msg = lisp.lisp_print_sans("Configuration file saved to ")
    msg += lisp.lisp_print_cour("./lisp.config.archive")
    return (lispconfig.lisp_show_wrapper(msg))
if 31 - 31: OOooOOo + O0
if 87 - 87: ooOoO0o
if 45 - 45: OoO0O00 / OoooooooOO - iII111i / Ii1I % IiII
if 83 - 83: I1IiiI . iIii1I11I1II1 - IiII * i11iIiiIii
if 20 - 20: i1IIi * I1Ii111 + II111iiii % o0oOOo0O0Ooo % oO0o
if 13 - 13: Oo0Ooo
if 60 - 60: I1ii11iIi11i * I1IiiI
@bottle.route('/lisp/clear/conf')
def I1iIiI11I1():
    """Back up lisp.config, then clear the configuration under the lock."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    # Keep a safety copy before wiping the running configuration.
    os.system("cp ./lisp.config ./lisp.config.before-clear")
    lisp.lisp_ipc_lock.acquire()
    iI11I()
    lisp.lisp_ipc_lock.release()

    msg = lisp.lisp_print_sans(
        "Configuration cleared, a backup copy is stored in ")
    msg += lisp.lisp_print_cour("./lisp.config.before-clear")
    return (lispconfig.lisp_show_wrapper(msg))
if 7 - 7: IiII - I11i / II111iiii * Ii1I . iII111i * iII111i
if 61 - 61: I11i % ooOoO0o - OoO0O00 / Oo0Ooo
if 4 - 4: OoooooooOO - i1IIi % Ii1I - OOooOOo * o0oOOo0O0Ooo
if 85 - 85: OoooooooOO * iIii1I11I1II1 . iII111i / OoooooooOO % I1IiiI % O0
if 36 - 36: Ii1I / II111iiii / IiII / IiII + I1ii11iIi11i
if 95 - 95: IiII
if 51 - 51: II111iiii + IiII . i1IIi . I1ii11iIi11i + OoOoOO00 * I1IiiI
@bottle.route('/lisp/clear/conf/verify')
def OOoOoo0():
    """Ask the user to confirm clearing the configuration."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    prompt = lisp.lisp_print_sans(
        "<br>Are you sure you want to clear the configuration?")

    confirm = lisp.lisp_button("yes", "/lisp/clear/conf")
    cancel = lisp.lisp_button("cancel", "/lisp")
    prompt += confirm + cancel + "<br>"
    return (lispconfig.lisp_show_wrapper(prompt))
if 19 - 19: OoO0O00 * I11i / I11i . OoooooooOO - OOooOOo + i11iIiiIii
if 88 - 88: i11iIiiIii - ooOoO0o
if 67 - 67: OOooOOo . Oo0Ooo + OoOoOO00 - OoooooooOO
if 70 - 70: OOooOOo / II111iiii - iIii1I11I1II1 - iII111i
if 11 - 11: iIii1I11I1II1 . OoooooooOO . II111iiii / i1IIi - I11i
if 30 - 30: OoOoOO00
if 21 - 21: i11iIiiIii / I1Ii111 % OOooOOo * O0 . I11i - iIii1I11I1II1
if 26 - 26: II111iiii * OoOoOO00
if 10 - 10: II111iiii . iII111i
def I1iOOOO():
    """Return the port lisp-core was started with, or "" if not running.

    Scans ps output for a "lisp-core.pyo <port>" process among the known
    startup ports.
    """
    found_port = ""

    for port in ["443", "-8080", "8080"]:
        ps_cmd = 'ps auxww | egrep "lisp-core.pyo {}" | egrep -v grep' . \
            format(port)
        ps_out = commands.getoutput(ps_cmd)
        if (ps_out == ""): continue

        # Examine only the first matching process line.
        fields = ps_out.split("\n")[0].split(" ")
        if (fields[-2] == "lisp-core.pyo" and fields[-1] == port):
            found_port = port

        # NOTE: loop exits after the first non-empty ps match, whether or
        # not the fields test above succeeded (original behavior).
        break

    return (found_port)
if 67 - 67: OOooOOo + Oo0Ooo
if 84 - 84: O0 * OoooooooOO - IiII * IiII
if 8 - 8: ooOoO0o / i1IIi . oO0o
if 41 - 41: iII111i + OoO0O00
if 86 - 86: OoOoOO00 . iIii1I11I1II1 - OoO0O00
if 56 - 56: O0
if 61 - 61: o0oOOo0O0Ooo / OOooOOo / Oo0Ooo * O0
@bottle.route('/lisp/restart')
def iIII1i1i():
    """Restart the LISP subsystem via the RESTART-LISP script."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    #
    # sudo from a daemon fails when /etc/sudoers insists on a tty.
    #
    words = commands.getoutput("egrep requiretty /etc/sudoers").split(" ")
    if (words[-1] == "requiretty" and words[0] == "Defaults"):
        msg = lisp.lisp_print_sans(
            "Need to remove 'requiretty' from /etc/sudoers")
        return (lispconfig.lisp_show_wrapper(msg))

    lisp.lprint(lisp.bold("LISP subsystem restart request received", False))

    #
    # Pass the port lisp-core was started on to the restart script.
    #
    port = I1iOOOO()

    #
    # Kick off the restart in a background thread so this request can
    # return a page before the web server goes down.
    #
    restart_cmd = "sleep 1; sudo ./RESTART-LISP {}".format(port)
    thread.start_new_thread(os.system, (restart_cmd, ))

    msg = lisp.lisp_print_sans("Restarting LISP subsystem ...")
    return (lispconfig.lisp_show_wrapper(msg))
if 22 - 22: ooOoO0o - ooOoO0o % OOooOOo . I1Ii111 + oO0o
if 63 - 63: I1IiiI % I1Ii111 * o0oOOo0O0Ooo + I1Ii111 / Oo0Ooo % iII111i
if 45 - 45: IiII
if 20 - 20: OoooooooOO * o0oOOo0O0Ooo * O0 . OOooOOo
if 78 - 78: iIii1I11I1II1 + I11i - Ii1I * I1Ii111 - OoooooooOO % OoOoOO00
if 34 - 34: O0
if 80 - 80: i1IIi - Oo0Ooo / OoO0O00 - i11iIiiIii
@bottle.route('/lisp/restart/verify')
def OO0O0o0o0():
    """Ask the user to confirm restarting the LISP subsystem."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    prompt = lisp.lisp_print_sans(
        "<br>Are you sure you want to restart the LISP subsystem?")

    confirm = lisp.lisp_button("yes", "/lisp/restart")
    cancel = lisp.lisp_button("cancel", "/lisp")
    prompt += confirm + cancel + "<br>"
    return (lispconfig.lisp_show_wrapper(prompt))
if 95 - 95: iIii1I11I1II1 - I1ii11iIi11i . I1Ii111 - I1IiiI
if 75 - 75: OoO0O00 + o0oOOo0O0Ooo - i1IIi . OoooooooOO * Ii1I / IiII
if 86 - 86: OoOoOO00 * II111iiii - O0 . OoOoOO00 % iIii1I11I1II1 / OOooOOo
if 11 - 11: I1IiiI * oO0o + I1ii11iIi11i / I1ii11iIi11i
if 37 - 37: i11iIiiIii + i1IIi
if 23 - 23: iII111i + I11i . OoOoOO00 * I1IiiI + I1ii11iIi11i
if 18 - 18: IiII * o0oOOo0O0Ooo . IiII / O0
@bottle.route('/lisp/install', method="post")
def iiIII1II():
    """Download and install the lispers.net tarball named in the form."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    url = bottle.request.forms.get("image_url")
    if (url.find("lispers.net") == -1 or url.find(".tgz") == -1):
        log_msg = "Invalid install request for file {}".format(url)
        lisp.lprint(lisp.bold(log_msg, False))
        msg = lisp.lisp_print_sans("Invalid lispers.net tarball file name")
        return (lispconfig.lisp_show_wrapper(msg))

    # Shell redirection syntax differs between Ubuntu's shell and others.
    if (lisp.lisp_is_ubuntu()):
        get_cmd = "python lisp-get-bits.pyo {} force 2>&1 > /dev/null" . \
            format(url)
    else:
        get_cmd = "python lisp-get-bits.pyo {} force >& /dev/null".format(url)

    status = os.system(get_cmd)

    tarball = url.split("/")[-1]

    if (os.path.exists(tarball)):
        # Extract "<release>" out of ".../release-<release>.tgz".
        release = url.split("release-")[1]
        release = release.split(".tgz")[0]

        msg = lisp.lisp_print_sans(
            "Install completed for release {}".format(release))

        msg += "<br><br>" + lisp.lisp_button("restart LISP subsystem",
            "/lisp/restart/verify") + "<br>"
    else:
        file_str = lisp.lisp_print_cour(url)
        msg = lisp.lisp_print_sans(
            "Install failed for file {}".format(file_str))

    log_msg = "Install request for file {} {}".format(url,
        "succeeded" if (status == 0) else "failed")
    lisp.lprint(lisp.bold(log_msg, False))
    return (lispconfig.lisp_show_wrapper(msg))
if 62 - 62: Ii1I
if 51 - 51: OoOoOO00
if 14 - 14: IiII % oO0o % Oo0Ooo - i11iIiiIii
if 53 - 53: Ii1I % Oo0Ooo
if 59 - 59: OOooOOo % iIii1I11I1II1 . i1IIi + II111iiii * IiII
if 41 - 41: Ii1I % I1ii11iIi11i
if 12 - 12: OOooOOo
@ bottle . route ( '/lisp/install/image' )
def ooOo0O ( ) :
 """Display the form that asks for a lispers.net tarball URL.

 The form posts to /lisp/install, which performs the download/install.
 """
 if ( lispconfig . lisp_validate_user ( ) == False ) :
  return ( oOO00O ( ) )
 if 37 - 37: Ii1I % OoO0O00
 if 79 - 79: I1ii11iIi11i + I1IiiI / I1IiiI
 I11IIiIiI = lisp . lisp_print_sans ( "<br>Enter lispers.net tarball URL:" )
 i1I1ii = '''
    <form action="/lisp/install" method="post" style="display: inline;">
    {}
    <input type="text" name="image_url" size="75" required/>
    <input type="submit" style="background-color:transparent;border-radius:10px;" value="Submit" />
    </form><br>''' . format ( I11IIiIiI )
 if 71 - 71: OOooOOo * OoO0O00 % OoooooooOO % OoO0O00 / I1IiiI
 return ( lispconfig . lisp_show_wrapper ( i1I1ii ) )
if 56 - 56: OoooooooOO % i11iIiiIii * iIii1I11I1II1 . OoO0O00 * O0
if 23 - 23: i11iIiiIii
if 39 - 39: o0oOOo0O0Ooo - I1ii11iIi11i % iII111i * OoO0O00 - OOooOOo / iII111i
if 29 - 29: I1ii11iIi11i
if 52 - 52: i11iIiiIii / i1IIi
if 1 - 1: ooOoO0o
if 78 - 78: I1ii11iIi11i + I11i - O0
if 10 - 10: I1Ii111 % I1IiiI
@bottle.route('/lisp/log/flows')
def oo0OoOooo():
    """Ask the data plane to dump flow data by touching ./log-flows."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    os.system("touch ./log-flows")

    msg = lisp.lisp_print_sans("Flow data appended to file ")
    link = "<a href='/lisp/show/log/lisp-flow/100'>logs/lisp-flows.log</a>"
    msg += lisp.lisp_print_cour(link)
    return (lispconfig.lisp_show_wrapper(msg))
if 50 - 50: IiII % i1IIi
if 21 - 21: OoooooooOO - iIii1I11I1II1
if 93 - 93: oO0o - o0oOOo0O0Ooo % OoOoOO00 . OoOoOO00 - ooOoO0o
if 90 - 90: ooOoO0o + II111iiii * I1ii11iIi11i / Ii1I . o0oOOo0O0Ooo + o0oOOo0O0Ooo
if 40 - 40: ooOoO0o / OoOoOO00 % i11iIiiIii % I1ii11iIi11i / I1IiiI
if 62 - 62: i1IIi - OoOoOO00
if 62 - 62: i1IIi + Oo0Ooo % IiII
if 28 - 28: I1ii11iIi11i . i1IIi
@bottle.route('/lisp/search/log/<name>/<num>/<keyword>')
def iIIi(name="", num="", keyword=""):
    """Grep the tail of logs/<name>.log for keyword, with context lines."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    grep_cmd = "tail -n {} logs/{}.log | egrep -B10 -A10 {}".format(num, name,
        keyword)
    out = commands.getoutput(grep_cmd)

    if (out):
        count = out.count(keyword)
        out = lisp.convert_font(out)
        out = out.replace("--\n--\n", "--\n")
        out = out.replace("\n", "<br>")
        out = out.replace("--<br>", "<hr>")
        out = "Found <b>{}</b> occurences<hr>".format(count) + out
    else:
        out = "Keyword {} not found".format(keyword)

    #
    # Highlight each keyword occurrence in blue bold.  The first replace
    # inserts the opening tags; the second appends </font> after the
    # keyword text those tags now wrap (original two-pass behavior).
    #
    open_tags = "<font color='blue'><b>{}</b>".format(keyword)
    out = out.replace(keyword, open_tags)
    out = out.replace(keyword, keyword + "</font>")

    out = lisp.lisp_print_cour(out)
    return (lispconfig.lisp_show_wrapper(out))
if 14 - 14: OOooOOo . I1IiiI * ooOoO0o + II111iiii - ooOoO0o + OOooOOo
if 18 - 18: oO0o - o0oOOo0O0Ooo - I1IiiI - I1IiiI
if 54 - 54: Oo0Ooo + I1IiiI / iII111i . I1IiiI * OoOoOO00
if 1 - 1: OoOoOO00 * OoO0O00 . i1IIi / Oo0Ooo . I1ii11iIi11i + Oo0Ooo
if 17 - 17: Oo0Ooo + OoO0O00 / Ii1I / iII111i * OOooOOo
if 29 - 29: OoO0O00 % OoooooooOO * oO0o / II111iiii - oO0o
if 19 - 19: i11iIiiIii
@bottle.post('/lisp/search/log/<name>/<num>')
def oo0oOO(name="", num=""):
    """Form-post wrapper: pull the keyword and delegate to the search page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    keyword = bottle.request.forms.get("keyword")
    return (iIIi(name, num, keyword))
if 4 - 4: o0oOOo0O0Ooo % OoOoOO00 * OOooOOo
if 32 - 32: i11iIiiIii - I1Ii111
if 53 - 53: OoooooooOO - IiII
if 87 - 87: oO0o . I1IiiI
if 17 - 17: Ii1I . i11iIiiIii
if 5 - 5: I1ii11iIi11i + O0 + O0 . I1Ii111 - ooOoO0o
if 63 - 63: oO0o
@ bottle . route ( '/lisp/show/log/<name>/<num>' )
def Oo0 ( name = "" , num = "" ) :
 """Display the last <num> lines of logs/<name>.log with a search form.

 Defaults to 100 lines when <num> is empty.  Shows an error message when
 the log file does not exist.
 """
 if ( lispconfig . lisp_validate_user ( ) == False ) :
  return ( oOO00O ( ) )
 if 79 - 79: OoO0O00 % OOooOOo / iIii1I11I1II1 + OoOoOO00 * OoO0O00
 if 30 - 30: OoooooooOO / I11i + iII111i / I1ii11iIi11i * O0
 if 16 - 16: Oo0Ooo / i11iIiiIii
 if 64 - 64: i11iIiiIii / Ii1I * i1IIi
 if 73 - 73: Oo0Ooo - OoOoOO00 - oO0o - I1IiiI
 if ( num == "" ) : num = 100
 if 65 - 65: o0oOOo0O0Ooo
 I1ii1II1iII = '''
    <form action="/lisp/search/log/{}/{}" method="post">
    <i>Keyword search:</i>
    <input type="text" name="keyword" />
    <input style="background-color:transparent;border-radius:10px;" type="submit" value="Submit" />
    </form><hr>
    ''' . format ( name , num )
 if 8 - 8: OoOoOO00 / O0 * O0 % I1Ii111 - Oo0Ooo + I11i
 if ( os . path . exists ( "logs/{}.log" . format ( name ) ) ) :
  i1I1ii = commands . getoutput ( "tail -n {} logs/{}.log" . format ( num , name ) )
  i1I1ii = lisp . convert_font ( i1I1ii )
  i1I1ii = i1I1ii . replace ( "\n" , "<br>" )
  i1I1ii = I1ii1II1iII + lisp . lisp_print_cour ( i1I1ii )
 else :
  oo = lisp . lisp_print_sans ( "File" )
  Ii1IiIiIi1IiI = lisp . lisp_print_cour ( "logs/{}.log" . format ( name ) )
  i1iiIIi1I = lisp . lisp_print_sans ( "does not exist" )
  i1I1ii = "{} {} {}" . format ( oo , Ii1IiIiIi1IiI , i1iiIIi1I )
 if 36 - 36: I1IiiI * Oo0Ooo
 return ( lispconfig . lisp_show_wrapper ( i1I1ii ) )
if 77 - 77: oO0o % i1IIi - Ii1I
if 93 - 93: OoO0O00 * Oo0Ooo
if 73 - 73: o0oOOo0O0Ooo - I1IiiI * i1IIi / i11iIiiIii * OOooOOo % II111iiii
if 56 - 56: OoooooooOO * Oo0Ooo . Oo0Ooo . I1ii11iIi11i
if 24 - 24: Oo0Ooo . I11i * Ii1I % iII111i / OOooOOo
if 58 - 58: I1IiiI - I1ii11iIi11i % O0 . I1IiiI % OoO0O00 % IiII
if 87 - 87: oO0o - i11iIiiIii
@ bottle . route ( '/lisp/debug/<name>' )
def ooOoO ( name = "" ) :
 """Toggle a debug setting via the configuration API.

 <name> is "<setting>%<yes|no>", or the special value "disable%all"
 which turns off every "lisp debug" setting and the two logging-related
 "lisp xtr-parameters" settings.  Returns the landing page.
 """
 if ( lispconfig . lisp_validate_user ( ) == False ) :
  return ( oOO00O ( ) )
 if 23 - 23: I11i
 if 40 - 40: o0oOOo0O0Ooo - II111iiii / Oo0Ooo
 if 14 - 14: I1ii11iIi11i
 if 5 - 5: o0oOOo0O0Ooo . iIii1I11I1II1 % iIii1I11I1II1
 if 56 - 56: OoooooooOO - I11i - i1IIi
 # Special case: turn everything off.
 if ( name == "disable%all" ) :
  # Rewrite the "lisp debug" clause with every setting forced to "no".
  IIIi1i1I = lispconfig . lisp_get_clause_for_api ( "lisp debug" )
  if ( IIIi1i1I [ 0 ] . has_key ( "lisp debug" ) ) :
   OoO0OOOOo0O = [ ]
   for I1i1I in IIIi1i1I [ 0 ] [ "lisp debug" ] :
    iii1I1Iii = I1i1I . keys ( ) [ 0 ]
    OoO0OOOOo0O . append ( { iii1I1Iii : "no" } )
   if 82 - 82: Ii1I + IiII
   OoO0OOOOo0O = { "lisp debug" : OoO0OOOOo0O }
   lispconfig . lisp_put_clause_for_api ( OoO0OOOOo0O )
  if 12 - 12: I1Ii111
  if 93 - 93: i11iIiiIii % iIii1I11I1II1 % i11iIiiIii + o0oOOo0O0Ooo / o0oOOo0O0Ooo / II111iiii
  # In "lisp xtr-parameters", only the two logging settings are forced
  # to "no"; all other parameters keep their current values.
  IIIi1i1I = lispconfig . lisp_get_clause_for_api ( "lisp xtr-parameters" )
  if ( IIIi1i1I [ 0 ] . has_key ( "lisp xtr-parameters" ) ) :
   OoO0OOOOo0O = [ ]
   for I1i1I in IIIi1i1I [ 0 ] [ "lisp xtr-parameters" ] :
    iii1I1Iii = I1i1I . keys ( ) [ 0 ]
    if ( iii1I1Iii in [ "data-plane-logging" , "flow-logging" ] ) :
     OoO0OOOOo0O . append ( { iii1I1Iii : "no" } )
    else :
     OoO0OOOOo0O . append ( { iii1I1Iii : I1i1I [ iii1I1Iii ] } )
    if 49 - 49: OOooOOo . I1ii11iIi11i . i11iIiiIii - II111iiii / Ii1I
   if 62 - 62: OOooOOo
   OoO0OOOOo0O = { "lisp xtr-parameters" : OoO0OOOOo0O }
   lispconfig . lisp_put_clause_for_api ( OoO0OOOOo0O )
  if 1 - 1: IiII / IiII - i11iIiiIii
  if 87 - 87: Oo0Ooo / O0 * IiII / o0oOOo0O0Ooo
  return ( lispconfig . lisp_landing_page ( ) )
 if 19 - 19: I1Ii111 + i1IIi . I1IiiI - Oo0Ooo
 if 16 - 16: oO0o + ooOoO0o / o0oOOo0O0Ooo
 if 82 - 82: IiII * i11iIiiIii % II111iiii - OoooooooOO
 if 90 - 90: Oo0Ooo . oO0o * i1IIi - i1IIi
 if 16 - 16: I1IiiI * i1IIi - o0oOOo0O0Ooo . IiII % I11i / o0oOOo0O0Ooo
 # General case: "<setting>%<value>".
 name = name . split ( "%" )
 Ii11iI1ii1111 = name [ 0 ]
 Iii111II = name [ 1 ]
 if 42 - 42: I1Ii111 + I1Ii111 * II111iiii
 # The two logging settings live in "lisp xtr-parameters"; everything
 # else lives in the "lisp debug" clause.
 o0Oo = [ "data-plane-logging" , "flow-logging" ]
 if 57 - 57: OOooOOo / Oo0Ooo
 oO0O0Ooo = "lisp xtr-parameters" if ( Ii11iI1ii1111 in o0Oo ) else "lisp debug"
 if 4 - 4: II111iiii . I11i + Ii1I * I1Ii111 . ooOoO0o
 if 87 - 87: OoOoOO00 / OoO0O00 / i11iIiiIii
 IIIi1i1I = lispconfig . lisp_get_clause_for_api ( oO0O0Ooo )
 if 74 - 74: oO0o / I1ii11iIi11i % o0oOOo0O0Ooo
 # Rewrite the clause with only the named setting changed.
 if ( IIIi1i1I [ 0 ] . has_key ( oO0O0Ooo ) ) :
  OoO0OOOOo0O = { }
  for I1i1I in IIIi1i1I [ 0 ] [ oO0O0Ooo ] :
   OoO0OOOOo0O [ I1i1I . keys ( ) [ 0 ] ] = I1i1I . values ( ) [ 0 ]
  if ( OoO0OOOOo0O . has_key ( Ii11iI1ii1111 ) ) : OoO0OOOOo0O [ Ii11iI1ii1111 ] = Iii111II
  if 88 - 88: OoOoOO00 - i11iIiiIii % o0oOOo0O0Ooo * I11i + I1ii11iIi11i
  OoO0OOOOo0O = { oO0O0Ooo : OoO0OOOOo0O }
  lispconfig . lisp_put_clause_for_api ( OoO0OOOOo0O )
 if 52 - 52: II111iiii . I1IiiI + OoOoOO00 % OoO0O00
 return ( lispconfig . lisp_landing_page ( ) )
if 62 - 62: o0oOOo0O0Ooo
if 15 - 15: I11i + Ii1I . OOooOOo * OoO0O00 . OoOoOO00
if 18 - 18: i1IIi % II111iiii + I1Ii111 % Ii1I
if 72 - 72: iIii1I11I1II1
if 45 - 45: Oo0Ooo - o0oOOo0O0Ooo % I1Ii111
if 38 - 38: I1Ii111 % OOooOOo - OoooooooOO
if 87 - 87: OoO0O00 % I1IiiI
@bottle.route('/lisp/clear/<name>')
@bottle.route('/lisp/clear/etr/<etr_name>/<stats_name>')
@bottle.route('/lisp/clear/rtr/<rtr_name>/<stats_name>')
@bottle.route('/lisp/clear/itr/<itr_name>')
@bottle.route('/lisp/clear/rtr/<rtr_name>')
def ooooOoO0O(name="", itr_name='', rtr_name="", etr_name="", stats_name=""):
    """Clear a cache or decapsulation stats in one LISP component process.

    The matched route decides what is cleared (referral-cache, ITR/RTR
    map-cache, or ETR/RTR decap stats).  The clear itself is performed by
    the component process (lisp-mr, lisp-itr, lisp-rtr, lisp-etr) which
    receives a "clear" command over the lisp-core IPC channel.
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    # Clearing state is destructive, so restrict it to superusers.
    if (lispconfig.lisp_is_user_superuser(None) == False):
        unauthorized = lisp.lisp_print_sans("Not authorized")
        return (lispconfig.lisp_show_wrapper(unauthorized))

    # Build the IPC command and pick the destination process from whichever
    # URL parameter the matched route filled in.
    ipc_command = "clear"
    if (name == "referral"):
        process_name = "lisp-mr"
        cleared_what = "Referral"
    elif (itr_name == "map-cache"):
        process_name = "lisp-itr"
        cleared_what = "ITR <a href='/lisp/show/itr/map-cache'>map-cache</a>"
    elif (rtr_name == "map-cache"):
        process_name = "lisp-rtr"
        cleared_what = "RTR <a href='/lisp/show/rtr/map-cache'>map-cache</a>"
    elif (etr_name == "stats"):
        process_name = "lisp-etr"
        cleared_what = ("ETR '{}' decapsulation <a href='/lisp/show/"
            "database'>stats</a>").format(stats_name)
        ipc_command += "%" + stats_name
    elif (rtr_name == "stats"):
        process_name = "lisp-rtr"
        cleared_what = ("RTR '{}' decapsulation <a href='/lisp/show/"
            "rtr/map-cache'>stats</a>").format(stats_name)
        ipc_command += "%" + stats_name
    else:
        invalid = lisp.lisp_print_sans("Invalid command")
        return (lispconfig.lisp_show_wrapper(invalid))

    # Hand the command to the target process via the lisp-core IPC socket.
    ipc_command = lisp.lisp_command_ipc(ipc_command, "lisp-core")
    lisp.lisp_ipc(ipc_command, Oo, process_name)

    # If static map-cache entries are configured, touch lisp.config so the
    # configuration is re-read and those entries are repopulated.
    grep_output = commands.getoutput("egrep 'lisp map-cache' ./lisp.config")
    if (grep_output != ""):
        os.system("touch ./lisp.config")

    status = lisp.lisp_print_sans("{} cleared".format(cleared_what))
    return (lispconfig.lisp_show_wrapper(status))
if 25 - 25: oO0o
if 34 - 34: OoOoOO00 . iIii1I11I1II1 % O0
if 43 - 43: I1ii11iIi11i - iII111i
if 70 - 70: iII111i / OOooOOo % ooOoO0o - Ii1I
if 47 - 47: iII111i
if 92 - 92: OOooOOo + OoOoOO00 % i1IIi
if 23 - 23: I1Ii111 - OOooOOo + Ii1I - OoOoOO00 * OoOoOO00 . Oo0Ooo
@bottle.route('/lisp/show/map-server')
def iIii11iI1II():
    """Render the 'show map-server' page (requires a logged-in user)."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show map-server"))
if 56 - 56: ooOoO0o / iIii1I11I1II1 . Ii1I % OoOoOO00 + OOooOOo
if 10 - 10: I1Ii111 * i11iIiiIii - iIii1I11I1II1 . Oo0Ooo - I1ii11iIi11i
if 20 - 20: I1ii11iIi11i / I1IiiI * OoO0O00 * I1IiiI * O0
if 1 - 1: iIii1I11I1II1 + Oo0Ooo / O0 - iII111i % IiII + IiII
if 24 - 24: I1IiiI + Oo0Ooo + OOooOOo - OoooooooOO + Oo0Ooo
if 93 - 93: ooOoO0o . iIii1I11I1II1 % i11iIiiIii . OoOoOO00 % ooOoO0o + O0
if 65 - 65: Ii1I + OoO0O00 - OoooooooOO
@bottle.route('/lisp/show/database')
def OOoOO0o():
    """Render the ETR's 'show database-mapping' page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show database-mapping"))
if 12 - 12: iIii1I11I1II1 % ooOoO0o % ooOoO0o
if 78 - 78: IiII . OoOoOO00 . I11i
if 97 - 97: oO0o
if 80 - 80: I1IiiI . Ii1I
if 47 - 47: I11i + ooOoO0o + II111iiii % i11iIiiIii
if 93 - 93: I1ii11iIi11i % OoOoOO00 . O0 / iII111i * oO0o
if 29 - 29: o0oOOo0O0Ooo
@bottle.route('/lisp/show/itr/map-cache')
def oo0iIiI():
    """Render the ITR's map-cache page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show itr-map-cache"))
if 87 - 87: iIii1I11I1II1 . OoooooooOO * OoOoOO00
if 100 - 100: OoO0O00 / i1IIi - I1IiiI % Ii1I - iIii1I11I1II1
if 17 - 17: I11i / o0oOOo0O0Ooo % Oo0Ooo
if 71 - 71: IiII . I1Ii111 . OoO0O00
if 68 - 68: i11iIiiIii % oO0o * OoO0O00 * IiII * II111iiii + O0
if 66 - 66: I11i % I1ii11iIi11i % OoooooooOO
if 34 - 34: o0oOOo0O0Ooo / iII111i % O0 . OoO0O00 . i1IIi
@bottle.route('/lisp/show/itr/rloc-probing')
def ii():
    """Render the ITR's RLOC-probing status page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show itr-rloc-probing"))
if 66 - 66: ooOoO0o - OOooOOo * OoOoOO00 / oO0o * II111iiii * OoO0O00
if 91 - 91: OoooooooOO / Ii1I . I1IiiI + ooOoO0o . II111iiii
if 45 - 45: oO0o * OoOoOO00 / iIii1I11I1II1
if 77 - 77: I1Ii111 - I11i
if 11 - 11: I1ii11iIi11i
if 26 - 26: iIii1I11I1II1 * I1Ii111 - OOooOOo
if 27 - 27: I1ii11iIi11i * I1Ii111 - OoO0O00 + Ii1I * Ii1I
@bottle.post('/lisp/show/itr/map-cache/lookup')
def o0OO0O0OO0oO0():
    """POST handler: look up one EID in the ITR's map-cache."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    if (lispconfig.lisp_validate_input_address_string(eid) == False):
        error = lisp.lisp_print_sans(
            "Address '{}' has invalid format".format(eid))
        return (lispconfig.lisp_show_wrapper(error))

    # The EID rides along in the show command after a "%" separator.
    return (lispconfig.lisp_process_show_command(Oo,
        "show itr-map-cache" + "%" + eid))
if 66 - 66: i11iIiiIii * iIii1I11I1II1 % OoooooooOO
if 5 - 5: OoOoOO00 % OoooooooOO
if 60 - 60: OoOoOO00 . i1IIi % OoO0O00 % ooOoO0o % OOooOOo
if 33 - 33: iIii1I11I1II1 - Ii1I * I1ii11iIi11i % iIii1I11I1II1 + OoO0O00 . OOooOOo
if 56 - 56: i11iIiiIii * iII111i . oO0o
if 78 - 78: OoOoOO00
if 1 - 1: OOooOOo . IiII
@bottle.route('/lisp/show/rtr/map-cache')
@bottle.route('/lisp/show/rtr/map-cache/<dns>')
def I1iIII1IiiI(dns=""):
    """Render the RTR's map-cache; a trailing '/dns' selects the DNS view."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    show_command = ("show rtr-map-cache-dns" if (dns == "dns") else
        "show rtr-map-cache")
    return (lispconfig.lisp_process_show_command(Oo, show_command))
if 48 - 48: i11iIiiIii % oO0o
if 29 - 29: iII111i + i11iIiiIii % I11i
if 93 - 93: OoOoOO00 % iIii1I11I1II1
if 90 - 90: I1IiiI - OOooOOo / Ii1I / O0 / I11i
if 87 - 87: OoOoOO00 / IiII + iIii1I11I1II1
if 93 - 93: iIii1I11I1II1 + oO0o % ooOoO0o
if 21 - 21: OOooOOo
if 6 - 6: IiII
@bottle.route('/lisp/show/rtr/rloc-probing')
def i1I1II():
    """Render the RTR's RLOC-probing status page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show rtr-rloc-probing"))
if 33 - 33: I1ii11iIi11i * I1ii11iIi11i . ooOoO0o . i11iIiiIii
if 48 - 48: o0oOOo0O0Ooo . Ii1I + OoOoOO00 % I1ii11iIi11i / i11iIiiIii
if 74 - 74: II111iiii . O0 - I1IiiI + IiII % i11iIiiIii % OoOoOO00
if 78 - 78: Ii1I + OoOoOO00 + IiII - IiII . i11iIiiIii / OoO0O00
if 27 - 27: Ii1I - O0 % I11i * I1Ii111 . IiII % iIii1I11I1II1
if 37 - 37: OoooooooOO + O0 - i1IIi % ooOoO0o
if 24 - 24: OoOoOO00
@bottle.post('/lisp/show/rtr/map-cache/lookup')
def Oo0oOo0ooOOOo():
    """POST handler: look up one EID in the RTR's map-cache."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    if (lispconfig.lisp_validate_input_address_string(eid) == False):
        error = lisp.lisp_print_sans(
            "Address '{}' has invalid format".format(eid))
        return (lispconfig.lisp_show_wrapper(error))

    # The EID rides along in the show command after a "%" separator.
    return (lispconfig.lisp_process_show_command(Oo,
        "show rtr-map-cache" + "%" + eid))
if 95 - 95: I1IiiI + I1IiiI - OOooOOo - iII111i
if 45 - 45: Ii1I . OoooooooOO
if 27 - 27: Ii1I * Oo0Ooo . OoOoOO00
if 17 - 17: II111iiii % iII111i * OOooOOo % i1IIi . I1IiiI . iIii1I11I1II1
if 27 - 27: i11iIiiIii - I1IiiI
if 35 - 35: OoooooooOO - I1Ii111 / OoO0O00
if 50 - 50: OoOoOO00
@bottle.route('/lisp/show/referral')
def i1i1Ii11Ii():
    """Render the map-resolver's DDT referral-cache page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show referral-cache"))
if 6 - 6: i1IIi - II111iiii * o0oOOo0O0Ooo . OoO0O00
if 68 - 68: o0oOOo0O0Ooo
if 20 - 20: I1Ii111 - I1Ii111
if 37 - 37: IiII
if 37 - 37: Oo0Ooo / IiII * O0
if 73 - 73: iII111i * iII111i / ooOoO0o
if 43 - 43: I1ii11iIi11i . i1IIi . IiII + O0 * Ii1I * O0
@bottle.post('/lisp/show/referral/lookup')
def II11ii():
    """POST handler: look up one EID in the DDT referral-cache."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    if (lispconfig.lisp_validate_input_address_string(eid) == False):
        error = lisp.lisp_print_sans(
            "Address '{}' has invalid format".format(eid))
        return (lispconfig.lisp_show_wrapper(error))

    # The EID rides along in the show command after a "%" separator.
    return (lispconfig.lisp_process_show_command(Oo,
        "show referral-cache" + "%" + eid))
if 6 - 6: iIii1I11I1II1 * OoooooooOO
if 28 - 28: Oo0Ooo * o0oOOo0O0Ooo / I1Ii111
if 52 - 52: O0 / o0oOOo0O0Ooo % iII111i * I1IiiI % OOooOOo
if 69 - 69: I1ii11iIi11i
if 83 - 83: o0oOOo0O0Ooo
if 38 - 38: I1Ii111 + OoooooooOO . i1IIi
if 19 - 19: iII111i - o0oOOo0O0Ooo - Ii1I - OoOoOO00 . iII111i . I1Ii111
@bottle.route('/lisp/show/delegations')
def i11I1I():
    """Render the DDT-node delegations page."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())
    return (lispconfig.lisp_process_show_command(Oo, "show delegations"))
if 23 - 23: i1IIi . iIii1I11I1II1 . OOooOOo . O0 % Ii1I % i11iIiiIii
if 11 - 11: O0 - II111iiii . OOooOOo . Ii1I % I1Ii111
if 21 - 21: Oo0Ooo / iII111i . I1Ii111 * OoooooooOO + I11i - i1IIi
if 58 - 58: I1ii11iIi11i
if 2 - 2: II111iiii / I1Ii111
if 54 - 54: i1IIi . I11i - I1ii11iIi11i + ooOoO0o + Oo0Ooo / Oo0Ooo
if 22 - 22: ooOoO0o . iIii1I11I1II1
@bottle.post('/lisp/show/delegations/lookup')
def i1IiiiiIi1I():
    """POST handler: look up one EID in the DDT delegation table."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    if (lispconfig.lisp_validate_input_address_string(eid) == False):
        error = lisp.lisp_print_sans(
            "Address '{}' has invalid format".format(eid))
        return (lispconfig.lisp_show_wrapper(error))

    # The EID rides along in the show command after a "%" separator.
    return (lispconfig.lisp_process_show_command(Oo,
        "show delegations" + "%" + eid))
if 65 - 65: oO0o + OoOoOO00 + II111iiii
if 77 - 77: II111iiii
if 50 - 50: O0 . O0 . ooOoO0o % Oo0Ooo
if 68 - 68: oO0o
if 10 - 10: Ii1I
if 77 - 77: OOooOOo / II111iiii + IiII + ooOoO0o - i11iIiiIii
if 44 - 44: I1IiiI + OoOoOO00 + I1ii11iIi11i . I1IiiI * OoOoOO00 % iIii1I11I1II1
if 72 - 72: OOooOOo . OOooOOo - I1ii11iIi11i
if 48 - 48: Oo0Ooo - ooOoO0o + Oo0Ooo - I1IiiI * i11iIiiIii . iII111i
@bottle.route('/lisp/show/site')
@bottle.route('/lisp/show/site/<eid_prefix>')
def I1(eid_prefix=""):
    """Render the map-server site table, optionally for one EID-prefix."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    show_command = "show site"
    if (eid_prefix != ""):
        # Fold the URL-supplied EID-prefix into the show command.
        show_command = lispconfig.lisp_parse_eid_in_url(show_command,
            eid_prefix)
    return (lispconfig.lisp_process_show_command(Oo, show_command))
if 68 - 68: Ii1I - oO0o + Oo0Ooo
if 44 - 44: Ii1I * o0oOOo0O0Ooo * II111iiii
if 5 - 5: i1IIi + O0 % O0 * O0 + OoOoOO00 % i1IIi
if 80 - 80: iII111i / o0oOOo0O0Ooo + OoO0O00 / oO0o
if 46 - 46: i11iIiiIii / IiII % i1IIi - I11i * OoOoOO00
if 94 - 94: Ii1I - I1ii11iIi11i + o0oOOo0O0Ooo - Oo0Ooo
if 15 - 15: OOooOOo
@bottle.route('/lisp/show/itr/dynamic-eid/<eid_prefix>')
def i1iiI(eid_prefix=""):
    """Render the ITR's dynamic-EID page for one EID-prefix."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    show_command = "show itr-dynamic-eid"
    if (eid_prefix != ""):
        # Fold the URL-supplied EID-prefix into the show command.
        show_command = lispconfig.lisp_parse_eid_in_url(show_command,
            eid_prefix)
    return (lispconfig.lisp_process_show_command(Oo, show_command))
if 12 - 12: o0oOOo0O0Ooo * I1Ii111 % II111iiii * i1IIi * iIii1I11I1II1
if 81 - 81: Oo0Ooo - I11i
if 24 - 24: OoooooooOO . OoO0O00 * II111iiii
if 59 - 59: I1Ii111 + OoO0O00 / OOooOOo
if 97 - 97: Oo0Ooo * iII111i % ooOoO0o . iII111i - I1Ii111 - OOooOOo
if 79 - 79: I1IiiI - ooOoO0o
if 37 - 37: IiII . Oo0Ooo * Oo0Ooo * II111iiii * O0
@bottle.route('/lisp/show/etr/dynamic-eid/<eid_prefix>')
def o00OOo000O(eid_prefix=""):
    """Render the ETR's dynamic-EID page for one EID-prefix."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    show_command = "show etr-dynamic-eid"
    if (eid_prefix != ""):
        # Fold the URL-supplied EID-prefix into the show command.
        show_command = lispconfig.lisp_parse_eid_in_url(show_command,
            eid_prefix)
    return (lispconfig.lisp_process_show_command(Oo, show_command))
if 19 - 19: IiII * I1Ii111 / oO0o * I1Ii111 - OoooooooOO * I11i
if 17 - 17: II111iiii + Oo0Ooo . I1Ii111
if 12 - 12: I1Ii111 + OOooOOo + I11i . IiII / Ii1I
if 29 - 29: IiII . ooOoO0o - II111iiii
if 68 - 68: iIii1I11I1II1 + II111iiii / oO0o
if 91 - 91: OoOoOO00 % iIii1I11I1II1 . I1IiiI
if 70 - 70: I11i % II111iiii % O0 . i1IIi / I1Ii111
@bottle.post('/lisp/show/site/lookup')
def OO0ooOoOO0OOo():
    """POST handler: longest-match lookup of an EID in the site table."""
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    if (lispconfig.lisp_validate_input_address_string(eid) == False):
        error = lisp.lisp_print_sans(
            "Address '{}' has invalid format".format(eid))
        return (lispconfig.lisp_show_wrapper(error))

    # "@lookup" asks for a longest-match lookup rather than an exact show.
    return (lispconfig.lisp_process_show_command(Oo,
        "show site" + "%" + eid + "@lookup"))
if 66 - 66: iIii1I11I1II1 . i11iIiiIii / I11i / ooOoO0o + I1Ii111
if 5 - 5: OoOoOO00 % iII111i + IiII
if 13 - 13: IiII
if 19 - 19: II111iiii - IiII
if 59 - 59: o0oOOo0O0Ooo * OoO0O00 - Ii1I . OOooOOo
if 89 - 89: OOooOOo
if 69 - 69: ooOoO0o - OoooooooOO * O0
@bottle.post('/lisp/lig')
def O0Oo0O0():
    """POST handler: run the lisp-lig lookup tool and render its output.

    Form fields: "eid" (required), "mr" (map-resolver, defaults to
    localhost), "count" (optional Map-Request count), and "no-nat"
    ("yes" adds the no-info flag so no Info-Request is sent first).
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    map_resolver = bottle.request.forms.get("mr")
    count = bottle.request.forms.get("count")
    no_nat = "no-info" if bottle.request.forms.get("no-nat") == "yes" else ""

    # Default the map-resolver when the form left it blank.
    if (map_resolver == ""): map_resolver = "localhost"

    if (eid == ""):
        error = "Need to supply EID address"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(error)))

    # Prefer the .py source when present; fall back to the compiled .pyo.
    lig_script = ""
    if os.path.exists("lisp-lig.pyo"): lig_script = "-O lisp-lig.pyo"
    if os.path.exists("lisp-lig.py"): lig_script = "lisp-lig.py"

    if (lig_script == ""):
        error = "Cannot find lisp-lig.py or lisp-lig.pyo"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(error)))

    if (count != ""): count = "count {}".format(count)

    command = 'python {} "{}" to {} {} {}'.format(lig_script, eid,
        map_resolver, count, no_nat)

    output = commands.getoutput(command)
    output = output.replace("\n", "<br>")
    output = lisp.convert_font(output)

    # Indent well-known keywords so the HTML rendering lines up nicely.
    for keyword, pad in [("RLOC:", 2), ("Empty,", 2), ("geo:", 4),
        ("elp:", 4), ("rle:", 4)]:
        output = output.replace(keyword, lisp.space(pad) + keyword)

    return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(output)))
if 9 - 9: Oo0Ooo % OoooooooOO - Ii1I
if 43 - 43: OoO0O00 % OoO0O00
if 46 - 46: Oo0Ooo % iIii1I11I1II1 . iII111i . O0 * ooOoO0o / OoooooooOO
if 7 - 7: oO0o - O0 * I11i - o0oOOo0O0Ooo - II111iiii
if 41 - 41: I1IiiI - I1Ii111 % II111iiii . I1Ii111 - I11i
if 45 - 45: Ii1I - OOooOOo
if 70 - 70: OoO0O00 % I1IiiI / I1IiiI . I11i % ooOoO0o . II111iiii
@bottle.post('/lisp/rig')
def I1ii1Ii1():
    """POST handler: run the lisp-rig DDT lookup tool and render its output.

    Form fields: "eid" (required), "ddt" (DDT node, defaults to
    localhost), and "follow" ("yes" makes rig chase all referrals).
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    eid = bottle.request.forms.get("eid")
    ddt_node = bottle.request.forms.get("ddt")
    follow = ("follow-all-referrals" if
        bottle.request.forms.get("follow") == "yes" else "")

    # Default the DDT node when the form left it blank.
    if (ddt_node == ""): ddt_node = "localhost"

    if (eid == ""):
        error = "Need to supply EID address"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(error)))

    # Prefer the .py source when present; fall back to the compiled .pyo.
    rig_script = ""
    if os.path.exists("lisp-rig.pyo"): rig_script = "-O lisp-rig.pyo"
    if os.path.exists("lisp-rig.py"): rig_script = "lisp-rig.py"

    if (rig_script == ""):
        error = "Cannot find lisp-rig.py or lisp-rig.pyo"
        return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(error)))

    command = 'python {} "{}" to {} {}'.format(rig_script, eid, ddt_node,
        follow)

    output = commands.getoutput(command)
    output = output.replace("\n", "<br>")
    output = lisp.convert_font(output)

    # Indent the Referrals keyword so the HTML rendering lines up nicely.
    output = output.replace("Referrals:", lisp.space(2) + "Referrals:")
    return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(output)))
if 57 - 57: iIii1I11I1II1 + iIii1I11I1II1
if 56 - 56: oO0o + ooOoO0o
if 32 - 32: II111iiii + OoOoOO00 % ooOoO0o / OoOoOO00 + I1ii11iIi11i
if 2 - 2: i11iIiiIii - I1Ii111 + OoO0O00 % I11i * Ii1I
if 54 - 54: O0 - iII111i . OOooOOo % iII111i + iII111i
if 36 - 36: OOooOOo % i11iIiiIii
if 47 - 47: i1IIi + II111iiii . Oo0Ooo * oO0o . I11i / i1IIi
if 50 - 50: I1Ii111 / i1IIi % OoooooooOO
def oOOOOO0Ooooo(eid1, eid2):
    """Resolve two EIDs (or geo-strings) to geo-coordinate strings.

    For each of eid1/eid2: a value that is already a geo-string is passed
    through unchanged; anything else is looked up with the lisp-lig tool
    against the first map-resolver found in ./lisp.config, and the
    "geo: " line is extracted from the tool's output.

    Returns a 2-element list of geo-strings; an element is None when no
    geo-coordinate could be found for that EID.  Returns [None, None]
    when the lisp-lig script or a map-resolver address cannot be located.
    """
    lig_script = ""
    if os.path.exists("lisp-lig.pyo"): lig_script = "-O lisp-lig.pyo"
    if os.path.exists("lisp-lig.py"): lig_script = "lisp-lig.py"
    if (lig_script == ""): return ([None, None])

    # Find a map-resolver "address = " (or "dns-name = ") value in the
    # configured 'lisp map-resolver' clause.
    config_text = commands.getoutput(
        "egrep -A 2 'lisp map-resolver {' ./lisp.config")
    map_resolver = None
    for keyword in ["address = ", "dns-name = "]:
        map_resolver = None
        offset = config_text.find(keyword)
        if (offset == -1): continue
        map_resolver = config_text[offset + len(keyword)::]
        offset = map_resolver.find("\n")
        if (offset == -1): continue
        map_resolver = map_resolver[0:offset]
        break

    if (map_resolver == None): return ([None, None])

    checker = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
    geo_strings = []
    for eid in [eid1, eid2]:

        # Already a geo-string, no lookup needed.
        if (checker.is_geo_string(eid)):
            geo_strings.append(eid)
            continue

        command = 'python {} "{}" to {} count 1'.format(lig_script, eid,
            map_resolver)

        # Try a plain lookup first, then retry with "no-info" in case the
        # site is behind a NAT.  Bug fix: the original passed the loop
        # invariant (the plain command) to getoutput() on both iterations,
        # so the "no-info" retry reran the identical command and the NAT
        # fallback never actually happened.
        for attempt in [command, command + " no-info"]:
            output = commands.getoutput(attempt)
            offset = output.find("geo: ")
            if (offset == -1):
                # Record failure only after the last attempt.
                if (attempt != command): geo_strings.append(None)
                continue

            output = output[offset + len("geo: ")::]
            offset = output.find("\n")
            if (offset == -1):
                if (attempt != command): geo_strings.append(None)
                continue

            geo_strings.append(output[0:offset])
            break

    return (geo_strings)
if 33 - 33: I11i % II111iiii + OoO0O00
if 93 - 93: i1IIi . IiII / I1IiiI + IiII
if 58 - 58: I1ii11iIi11i + O0 . Oo0Ooo + OoOoOO00 - OoO0O00 - OoOoOO00
if 41 - 41: Oo0Ooo / i1IIi / Oo0Ooo - iII111i . o0oOOo0O0Ooo
if 65 - 65: O0 * i11iIiiIii . OoooooooOO / I1IiiI / iII111i
if 69 - 69: ooOoO0o % ooOoO0o
if 76 - 76: i11iIiiIii * iII111i / OoO0O00 % I1ii11iIi11i + OOooOOo
@bottle.post('/lisp/geo')
def IiIi1II111I():
    """POST handler: compare a geo-point against a geo-prefix.

    Each form field ("geo-point", "geo-prefix") may be a literal
    geo-string or an EID; EIDs are resolved to geo-coordinates via a
    lisp-lig lookup.  The page reports both coordinates (as URLs and
    decimal degrees), the distance between them, and whether the point
    lies inside or outside the prefix's radius.
    """
    if (lispconfig.lisp_validate_user() == False):
        return (oOO00O())

    point_str = bottle.request.forms.get("geo-point")
    prefix_str = bottle.request.forms.get("geo-prefix")
    output = ""

    # Resolve both form values; a value that is already a geo-string comes
    # back needing no lookup.
    checker = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
    geo_point = lisp.lisp_geo("")
    geo_prefix = lisp.lisp_geo("")
    point_lookup, prefix_lookup = oOOOOO0Ooooo(point_str, prefix_str)

    # Parse the geo-point, either directly or from the lookup result.
    if (checker.is_geo_string(point_str)):
        if (geo_point.parse_geo_string(point_str) == False):
            output = "Could not parse geo-point format"
    elif (point_lookup == None):
        output = "EID {} lookup could not find geo-point".format(
            lisp.bold(point_str, True))
    elif (geo_point.parse_geo_string(point_lookup) == False):
        output = "Could not parse geo-point format returned from lookup"

    # Parse the geo-prefix the same way (only if no error so far).
    if (output == ""):
        if (checker.is_geo_string(prefix_str)):
            if (geo_prefix.parse_geo_string(prefix_str) == False):
                output = "Could not parse geo-prefix format"
        elif (prefix_lookup == None):
            output = "EID-prefix {} lookup could not find geo-prefix".format(
                lisp.bold(prefix_str, True))
        elif (geo_prefix.parse_geo_string(prefix_lookup) == False):
            output = "Could not parse geo-prefix format returned from lookup"

    # Both parsed cleanly: build the comparison report.
    if (output == ""):
        # Show the original EID only when a lookup actually happened.
        point_str = ("" if (point_str == point_lookup) else
            ", EID {}".format(point_str))
        prefix_str = ("" if (prefix_str == prefix_lookup) else
            ", EID-prefix {}".format(prefix_str))

        point_url = geo_point.print_geo_url()
        prefix_url = geo_prefix.print_geo_url()
        radius = geo_prefix.radius
        point_dd = geo_point.dms_to_decimal()
        point_dd = (round(point_dd[0], 6), round(point_dd[1], 6))
        prefix_dd = geo_prefix.dms_to_decimal()
        prefix_dd = (round(prefix_dd[0], 6), round(prefix_dd[1], 6))
        distance = round(geo_prefix.get_distance(geo_point), 2)
        where = ("inside" if geo_prefix.point_in_circle(geo_point) else
            "outside")

        output = ("Geo-Point:{}{} {}{}<br>Geo-Prefix:{}{} {}, {} "
            "kilometer radius{}<br>").format(lisp.space(2), point_url,
            point_dd, point_str, lisp.space(1), prefix_url, prefix_dd,
            radius, prefix_str)
        output += "Distance:{}{} kilometers, point is {} of circle".format(
            lisp.space(3), distance, lisp.bold(where, True))

    return (lispconfig.lisp_show_wrapper(lisp.lisp_print_cour(output)))
if 26 - 26: oO0o + IiII - II111iiii . II111iiii + I1ii11iIi11i + OoOoOO00
if 68 - 68: O0
if 76 - 76: I1ii11iIi11i
if 99 - 99: o0oOOo0O0Ooo
if 1 - 1: Ii1I * OoOoOO00 * OoO0O00 + Oo0Ooo
if 90 - 90: I1Ii111 % Oo0Ooo - Oo0Ooo . iIii1I11I1II1 / OOooOOo + I11i
if 89 - 89: oO0o
if 87 - 87: iII111i % Oo0Ooo
if 62 - 62: OoO0O00 + ooOoO0o / iII111i * i11iIiiIii
def iiIIIIiI111(addr_str, port, nonce):
    """Find a registered NAT info-source by (address, port) or by nonce.

    When addr_str is supplied the by-address table is scanned and the
    port must match as well; otherwise, when nonce is supplied, the
    by-nonce table is consulted.  Returns the info-source object, or
    None when there is no match.
    """
    if (addr_str != None):
        for candidate in lisp.lisp_info_sources_by_address.values():
            candidate_addr = candidate.address.print_address_no_iid()
            if (candidate_addr == addr_str and candidate.port == port):
                return (candidate)
        return (None)

    if (nonce != None):
        if (nonce in lisp.lisp_info_sources_by_nonce):
            return (lisp.lisp_info_sources_by_nonce[nonce])
        return (None)

    return (None)
if 86 - 86: i11iIiiIii - o0oOOo0O0Ooo . ooOoO0o * Oo0Ooo / Ii1I % o0oOOo0O0Ooo
if 61 - 61: o0oOOo0O0Ooo + OoOoOO00
if 15 - 15: OoOoOO00 * oO0o + OOooOOo . I11i % I1IiiI - ooOoO0o
if 13 - 13: OoOoOO00 % OoOoOO00 % Oo0Ooo % I1IiiI * i1IIi % I11i
if 82 - 82: IiII . OoOoOO00 / ooOoO0o + iII111i - ooOoO0o
if 55 - 55: ooOoO0o % Oo0Ooo % o0oOOo0O0Ooo
if 29 - 29: IiII / iIii1I11I1II1 + I1ii11iIi11i % iII111i % I11i
if 46 - 46: iIii1I11I1II1
def oo0oO00o0O00o(lisp_sockets, info_source, packet):
    #
    # Re-originate a Map-Request that arrived inside an ECM from a cached
    # NAT'd info-source.  The inner Map-Request's ITR-RLOC list is replaced
    # with this system's own RLOC so the Map-Reply comes back here and can
    # be relayed to the NAT'd source.  Returns True when the packet was
    # consumed here, False when one of the ITR-RLOCs is local (caller lets
    # the normal control path handle it).
    #
    # (The "if N - N:" lines throughout are obfuscator no-ops, kept as-is.)
    #
    if 98 - 98: ooOoO0o . OOooOOo
    if 60 - 60: OoO0O00 - i1IIi . OOooOOo + OOooOOo * OOooOOo + Ii1I
    if 66 - 66: OOooOOo * OOooOOo / iIii1I11I1II1 + OoOoOO00 . OOooOOo
    if 51 - 51: I1ii11iIi11i

    # Strip the outer ECM header; bail if it does not parse.
    o0oOOOOoo0 = lisp.lisp_ecm(0)
    packet = o0oOOOOoo0.decode(packet)
    if (packet == None):
        lisp.lprint("Could not decode ECM packet")
        return (True)

    if 80 - 80: i11iIiiIii % I1ii11iIi11i
    if 54 - 54: o0oOOo0O0Ooo + I11i - iIii1I11I1II1 % ooOoO0o % IiII

    # Parse the inner control header; only a Map-Request is expected here.
    I1ii1II1iII = lisp.lisp_control_header()
    if (I1ii1II1iII.decode(packet) == None):
        lisp.lprint("Could not decode control header")
        return (True)

    if 19 - 19: I1ii11iIi11i / iIii1I11I1II1 % i1IIi . OoooooooOO
    if (I1ii1II1iII.type != lisp.LISP_MAP_REQUEST):
        lisp.lprint("Received ECM without Map-Request inside")
        return (True)

    if 57 - 57: ooOoO0o . Oo0Ooo - OoO0O00 - i11iIiiIii * I1Ii111 / o0oOOo0O0Ooo
    if 79 - 79: I1ii11iIi11i + o0oOOo0O0Ooo % Oo0Ooo * o0oOOo0O0Ooo
    if 21 - 21: iII111i
    if 24 - 24: iII111i / ooOoO0o
    if 61 - 61: iIii1I11I1II1 + oO0o

    # Decode the inner Map-Request and note who sent it.
    i1IiiI = lisp.lisp_map_request()
    packet = i1IiiI.decode(packet, None, 0)
    O0OOO0 = i1IiiI.nonce
    o0OIi = info_source.address.print_address_no_iid()

    if 11 - 11: oO0o . I1IiiI + IiII / i1IIi
    if 1 - 1: Oo0Ooo * I1Ii111 . OoooooooOO
    if 73 - 73: OoOoOO00 % o0oOOo0O0Ooo
    if 71 - 71: oO0o - OoooooooOO * Oo0Ooo * I11i + o0oOOo0O0Ooo * I1ii11iIi11i
    i1IiiI.print_map_request()
    if 85 - 85: i11iIiiIii . OoooooooOO - iIii1I11I1II1

    lisp.lprint("Process {} from info-source {}, port {}, nonce 0x{}".format(
        lisp.bold("nat-proxy Map-Request", False),
        lisp.red(o0OIi, False), info_source.port,
        lisp.lisp_hex_string(O0OOO0)))

    if 25 - 25: OoooooooOO % Ii1I * II111iiii - OoO0O00
    if 95 - 95: I1IiiI % I1Ii111 * I1IiiI + O0 . I1Ii111 % OoooooooOO
    if 6 - 6: OoOoOO00 - ooOoO0o * o0oOOo0O0Ooo + OoOoOO00 % o0oOOo0O0Ooo
    if 100 - 100: OoO0O00 % I1Ii111 - I11i % I11i % I11i / ooOoO0o
    if 83 - 83: oO0o - ooOoO0o - IiII % i1IIi - iII111i . o0oOOo0O0Ooo

    # Remember the nonce so the matching Map-Reply can be proxied back.
    info_source.cache_nonce_for_info_source(O0OOO0)

    if 96 - 96: Oo0Ooo + I1Ii111 . i1IIi
    if 54 - 54: II111iiii . i1IIi / I1ii11iIi11i % I1IiiI / I1Ii111
    if 65 - 65: OoOoOO00 . OoOoOO00 - oO0o + Oo0Ooo / i11iIiiIii
    if 90 - 90: iIii1I11I1II1 + OoOoOO00
    if 9 - 9: iIii1I11I1II1 . OoooooooOO + i1IIi - Oo0Ooo

    # A subscribe request pins the info-source (exempt from idle timeout).
    info_source.no_timeout = i1IiiI.subscribe_bit

    if 30 - 30: iII111i / OoO0O00 . iII111i
    if 17 - 17: Oo0Ooo + OoooooooOO * OoooooooOO
    if 5 - 5: I1Ii111 % OoooooooOO . OoOoOO00
    if 67 - 67: I1ii11iIi11i + Ii1I
    if 72 - 72: IiII % o0oOOo0O0Ooo
    if 93 - 93: iIii1I11I1II1 + i11iIiiIii . o0oOOo0O0Ooo . i1IIi % I1IiiI % ooOoO0o

    # If any ITR-RLOC is local, this request is ours, not a NAT'd one -
    # let the normal processing path handle it.
    for oO0oo in i1IiiI.itr_rlocs:
        if (oO0oo.is_local()): return (False)

    if 52 - 52: IiII % ooOoO0o
    if 25 - 25: I11i / I11i % OoooooooOO - I1ii11iIi11i * oO0o
    if 23 - 23: i11iIiiIii
    if 100 - 100: oO0o + O0 . I1IiiI + i1IIi - OoOoOO00 + o0oOOo0O0Ooo
    if 65 - 65: II111iiii / Oo0Ooo

    # Replace the ITR-RLOC list with our own RLOC and re-encode the
    # Map-Request.  (Note the re-encode happens BEFORE the IPv6 RLOC
    # selection below - only the ECM source changes for IPv6 EIDs.)
    iiII1i = lisp.lisp_myrlocs[0]
    i1IiiI.itr_rloc_count = 0
    i1IiiI.itr_rlocs = []
    i1IiiI.itr_rlocs.append(iiII1i)
    if 19 - 19: I1IiiI + i11iIiiIii . IiII - I11i / Ii1I + o0oOOo0O0Ooo
    packet = i1IiiI.encode(None, 0)
    i1IiiI.print_map_request()
    if 38 - 38: Oo0Ooo / iIii1I11I1II1 * iIii1I11I1II1 % I1ii11iIi11i

    # For an IPv6 target EID, prefer our IPv6 RLOC (myrlocs[1]) as the
    # ECM source when one exists.
    O00o = i1IiiI.target_eid
    if (O00o.is_ipv6()):
        o0o0ooOo00 = lisp.lisp_myrlocs[1]
        if (o0o0ooOo00 != None): iiII1i = o0o0ooOo00

    if 91 - 91: OoO0O00 * I1Ii111 % OoO0O00 . o0oOOo0O0Ooo * I1ii11iIi11i . OOooOOo
    if 13 - 13: I1ii11iIi11i
    if 80 - 80: Oo0Ooo % IiII % OoooooooOO * Oo0Ooo % Ii1I
    if 41 - 41: OoooooooOO / i1IIi
    if 70 - 70: OoOoOO00 % o0oOOo0O0Ooo % i1IIi / I1ii11iIi11i % i11iIiiIii / i1IIi

    # Send the rebuilt Map-Request in a new ECM.  to_ms is set when a
    # local lisp-ms process is running.
    i1i1Ii1IiIII = lisp.lisp_is_running("lisp-ms")
    lisp.lisp_send_ecm(lisp_sockets, packet, O00o, lisp.LISP_CTRL_PORT,
        O00o, iiII1i, to_ms=i1i1Ii1IiIII, ddt=False)
    return (True)
if 9 - 9: I11i - oO0o + O0 / iII111i % i1IIi
if 97 - 97: o0oOOo0O0Ooo * ooOoO0o
if 78 - 78: I11i . OOooOOo + oO0o * iII111i - i1IIi
if 27 - 27: Ii1I % i1IIi . Oo0Ooo % I1Ii111
if 10 - 10: IiII / OoooooooOO
if 50 - 50: i11iIiiIii - OoooooooOO . oO0o + O0 . i1IIi
if 91 - 91: o0oOOo0O0Ooo . iII111i % Oo0Ooo - iII111i . oO0o % i11iIiiIii
if 25 - 25: iIii1I11I1II1
if 63 - 63: ooOoO0o
def oO0oOOOooo(lisp_sockets, info_source, packet, mr_or_mn):
    """
    Proxy a Map-Reply (mr_or_mn true) or Map-Notify (mr_or_mn false) back
    out to the NAT'd info-source it belongs to, at the source's registered
    address and port.
    """
    dest = info_source.address.print_address_no_iid()
    dport = info_source.port
    nonce = info_source.nonce

    kind = "Reply" if mr_or_mn else "Notify"
    banner = lisp.bold("nat-proxy Map-{}".format(kind), False)

    lisp.lprint("Forward {} to info-source {}, port {}, nonce 0x{}".format(
        banner, lisp.red(dest, False), dport, lisp.lisp_hex_string(nonce)))

    lisp.lisp_send(lisp_sockets, lisp.lisp_convert_4to6(dest), dport, packet)
if 39 - 39: OOooOOo + OoO0O00
if 80 - 80: OOooOOo % OoO0O00 / OoOoOO00
if 54 - 54: Oo0Ooo % OoO0O00 - OOooOOo - I11i
if 71 - 71: ooOoO0o . i11iIiiIii
if 56 - 56: O0 * iII111i + iII111i * iIii1I11I1II1 / ooOoO0o * I1Ii111
if 25 - 25: iIii1I11I1II1 . I11i * i11iIiiIii + Oo0Ooo * I11i
if 67 - 67: iII111i
def oooO0o(lisp_sockets, source, sport, packet):
    """
    Demultiplex a LISP control packet received on port 4342 and hand it to
    the local component process (or a lig/rig client socket) that owns the
    message type.  Map-Replies and Map-Notifies whose nonce matches a
    cached NAT info-source are proxied back out instead.
    """
    global Oo

    header = lisp.lisp_control_header()
    if header.decode(packet) == None:
        lisp.lprint("Could not decode control header")
        return

    #
    # Info-Requests are answered directly by this core process.
    #
    if header.type == lisp.LISP_NAT_INFO:
        if header.info_reply == False:
            lisp.lisp_process_info_request(lisp_sockets, packet, source,
                sport, lisp.lisp_ms_rtr_list)
        return

    raw_packet = packet
    packet = lisp.lisp_packet_ipc(packet, source, sport)

    #
    # Registration traffic always belongs to the map-server process.
    #
    if header.type in (lisp.LISP_MAP_REGISTER, lisp.LISP_MAP_NOTIFY_ACK):
        lisp.lisp_ipc(packet, Oo, "lisp-ms")
        return

    if header.type == lisp.LISP_MAP_REPLY:
        reply = lisp.lisp_map_reply()
        reply.decode(raw_packet)

        nat_source = iiIIIIiI111(None, 0, reply.nonce)
        if nat_source:
            oO0oOOOooo(lisp_sockets, nat_source, raw_packet, True)
        else:
            lig = "/tmp/lisp-lig"
            if os.path.exists(lig):
                lisp.lisp_ipc(packet, Oo, lig)
            else:
                lisp.lisp_ipc(packet, Oo, "lisp-itr")
        return

    if header.type == lisp.LISP_MAP_NOTIFY:
        notify = lisp.lisp_map_notify(lisp_sockets)
        notify.decode(raw_packet)

        nat_source = iiIIIIiI111(None, 0, notify.nonce)
        if nat_source:
            oO0oOOOooo(lisp_sockets, nat_source, raw_packet, False)
        else:
            lig = "/tmp/lisp-lig"
            if os.path.exists(lig):
                lisp.lisp_ipc(packet, Oo, lig)
            else:
                target = ("lisp-rtr" if lisp.lisp_is_running("lisp-rtr")
                    else "lisp-etr")
                lisp.lisp_ipc(packet, Oo, target)
        return

    #
    # Map-Referrals go to a running rig client if present, else the
    # map-resolver process.
    #
    if header.type == lisp.LISP_MAP_REFERRAL:
        rig = "/tmp/lisp-rig"
        if os.path.exists(rig):
            lisp.lisp_ipc(packet, Oo, rig)
        else:
            lisp.lisp_ipc(packet, Oo, "lisp-mr")
        return

    if header.type == lisp.LISP_MAP_REQUEST:
        target = "lisp-itr" if header.is_smr() else "lisp-etr"

        # RLOC-probe Map-Requests are not forwarded to any process.
        if header.rloc_probe: return

        lisp.lisp_ipc(packet, Oo, target)
        return

    if header.type == lisp.LISP_ECM:
        # ECMs from a cached NAT info-source may be consumed by the
        # nat-proxy path; otherwise fall through to normal dispatch.
        nat_source = iiIIIIiI111(source, sport, None)
        if nat_source:
            if oo0oO00o0O00o(lisp_sockets, nat_source, raw_packet): return

        target = "lisp-mr"
        if header.is_to_etr():
            target = "lisp-etr"
        elif header.is_to_ms():
            target = "lisp-ms"
        elif header.is_ddt():
            if lisp.lisp_is_running("lisp-ddt"):
                target = "lisp-ddt"
            elif lisp.lisp_is_running("lisp-ms"):
                target = "lisp-ms"
        elif lisp.lisp_is_running("lisp-mr") == False:
            target = "lisp-etr"

        lisp.lisp_ipc(packet, Oo, target)
        return
if 7 - 7: iII111i
if 73 - 73: OoO0O00 % I1ii11iIi11i
if 32 - 32: OOooOOo + iII111i + iIii1I11I1II1 * Oo0Ooo
if 62 - 62: i11iIiiIii
if 2 - 2: I1IiiI
if 69 - 69: OoooooooOO / Oo0Ooo * I1Ii111
if 99 - 99: II111iiii * iIii1I11I1II1 % O0 * oO0o / II111iiii % OoooooooOO
if 14 - 14: IiII . IiII % ooOoO0o
if 42 - 42: o0oOOo0O0Ooo . OOooOOo - ooOoO0o
if 33 - 33: II111iiii / O0 / IiII - I11i - i1IIi
if 8 - 8: i11iIiiIii . iII111i / iIii1I11I1II1 / I1ii11iIi11i / IiII - Ii1I
if 32 - 32: o0oOOo0O0Ooo . i1IIi * Oo0Ooo
class O0oooo0O(bottle.ServerAdapter):
    """
    Bottle server adapter that serves the web interface over SSL through
    CherryPy's WSGI server, seeding ./lisp-cert.pem from the packaged
    default certificate when it is missing.
    """

    def run(self, hand):
        cert = "./lisp-cert.pem"

        # First run: copy the shipped default cert so SSL can come up
        # without manual provisioning.
        if not os.path.exists(cert):
            os.system("cp ./lisp-cert.pem.default {}".format(cert))
            lisp.lprint(("{} does not exist, creating a copy from lisp-" +
                "cert.pem.default").format(cert))

        server = wsgiserver.CherryPyWSGIServer((self.host, self.port), hand)
        server.ssl_adapter = pyOpenSSLAdapter(cert, cert, None)
        try:
            server.start()
        finally:
            server.stop()
if 60 - 60: OOooOOo
if 73 - 73: ooOoO0o
if 86 - 86: OoOoOO00 . I11i / Oo0Ooo * I11i
if 20 - 20: ooOoO0o - OOooOOo * OoO0O00 * o0oOOo0O0Ooo * OOooOOo / IiII
if 40 - 40: I1IiiI * o0oOOo0O0Ooo . I1IiiI
if 62 - 62: ooOoO0o + II111iiii % ooOoO0o
if 50 - 50: OoooooooOO + oO0o * I1IiiI - Ii1I / i11iIiiIii
if 5 - 5: O0 - I1IiiI
if 44 - 44: II111iiii . II111iiii + OOooOOo * Ii1I
if 16 - 16: II111iiii
if 100 - 100: O0 - i1IIi
if 48 - 48: oO0o % ooOoO0o + O0
if 27 - 27: I1ii11iIi11i / OOooOOo
if 33 - 33: OoooooooOO % I1ii11iIi11i . O0 / I1ii11iIi11i
if 63 - 63: IiII + iIii1I11I1II1 + I1IiiI + I1Ii111
if 72 - 72: OoO0O00 + i11iIiiIii + I1ii11iIi11i
def oOooOoOOo0O(bottle_port):
    """
    Thread entry for the web interface.  A negative port means plain HTTP
    on the port's absolute value; otherwise serve HTTPS through the
    "lisp-ssl-server" adapter, falling back to plain HTTP if the SSL
    server fails to start.
    """
    lisp.lisp_set_exception()

    if bottle_port < 0:
        bottle.run(host="0.0.0.0", port=-bottle_port)
        return

    bottle.server_names["lisp-ssl-server"] = O0oooo0O
    try:
        bottle.run(host="0.0.0.0", port=bottle_port,
            server="lisp-ssl-server", fast=True)
    except:
        bottle.run(host="0.0.0.0", port=bottle_port, fast=True)
if 28 - 28: IiII + I1IiiI - Oo0Ooo % OOooOOo . I11i + I1IiiI
if 72 - 72: Ii1I / Oo0Ooo / oO0o * OoOoOO00 + OOooOOo
if 58 - 58: o0oOOo0O0Ooo % I1IiiI . I1IiiI * OoO0O00 - IiII . OoooooooOO
if 10 - 10: I1Ii111
if 48 - 48: iII111i * i1IIi % OoooooooOO * Ii1I * OoO0O00
if 7 - 7: iII111i . Ii1I . iII111i - I1Ii111
if 33 - 33: ooOoO0o + OoooooooOO - OoO0O00 / i1IIi / OoooooooOO
if 82 - 82: I1ii11iIi11i / OOooOOo - iII111i / Oo0Ooo * OoO0O00
def o00OIIIIIiiI():
    """Placeholder thread body; only installs the exception hook."""
    lisp.lisp_set_exception()
if 79 - 79: i1IIi . oO0o
if 34 - 34: I1Ii111 * II111iiii
if 71 - 71: IiII
if 97 - 97: I1ii11iIi11i
if 86 - 86: Oo0Ooo - OOooOOo . OoOoOO00 . II111iiii * I1IiiI . II111iiii
if 34 - 34: o0oOOo0O0Ooo . I1Ii111 % IiII - O0 / I1Ii111
if 91 - 91: i11iIiiIii % I1Ii111 * oO0o - I1ii11iIi11i . I1Ii111
if 28 - 28: i11iIiiIii
if 51 - 51: I1IiiI + ooOoO0o * O0 . Ii1I
def O00Oo00OOoO0(lisp_socket):
    """
    Poll once per second for the other LISP component processes coming up
    or going down, logging each transition.  When a process appears, its
    stored configuration is replayed over the IPC socket, serialized by
    the global IPC lock.  Runs forever as a daemon thread.
    """
    lisp.lisp_set_exception()

    status = {"lisp-itr": False, "lisp-etr": False, "lisp-rtr": False,
              "lisp-mr": False, "lisp-ms": False, "lisp-ddt": False}

    while True:
        time.sleep(1)

        previous, status = status, {}
        for name in previous:
            running = lisp.lisp_is_running(name)
            status[name] = running
            if previous[name] == running: continue

            lisp.lprint("*** Process '{}' has {} ***".format(name,
                "come up" if running else "gone down"))

            # A newly started process needs its configuration pushed.
            if running:
                lisp.lisp_ipc_lock.acquire()
                lispconfig.lisp_send_commands(lisp_socket, name)
                lisp.lisp_ipc_lock.release()
if 55 - 55: oO0o . I1Ii111 * I1Ii111
if 82 - 82: I1IiiI % OoO0O00 % I11i + I11i
if 6 - 6: Oo0Ooo
if 73 - 73: I1Ii111 * I1ii11iIi11i + o0oOOo0O0Ooo - Oo0Ooo . I11i
if 93 - 93: i11iIiiIii
if 80 - 80: i1IIi . I1IiiI - oO0o + OOooOOo + iII111i % oO0o
if 13 - 13: II111iiii / OoOoOO00 / OoOoOO00 + ooOoO0o
def Ii1i():
    #
    # Periodic (60 second) sweeper for the NAT info-source caches.
    # Candidate entries are collected first and popped after the loop so
    # the by-address dict is never mutated while being iterated; matching
    # by-nonce entries are removed inline.  Runs forever as a daemon
    # thread.  (The "if N - N:" lines are obfuscator no-ops, kept as-is.)
    #
    lisp.lisp_set_exception()
    ooooOoOooo00Oo = 60  # sweep interval, seconds
    if 72 - 72: I11i
    while (True):
        time.sleep(ooooOoOooo00Oo)
        if 26 - 26: IiII % Oo0Ooo
        OoOOoo = []  # address-table keys to delete after iteration
        II1ii1 = lisp.lisp_get_timestamp()
        if 34 - 34: OoOoOO00 - oO0o * OoooooooOO
        if 5 - 5: i11iIiiIii * iII111i - Ii1I - I1ii11iIi11i - i1IIi + iII111i
        if 4 - 4: ooOoO0o + O0 . i1IIi * I1ii11iIi11i - o0oOOo0O0Ooo
        if 42 - 42: o0oOOo0O0Ooo * OoOoOO00 . OoO0O00 - iII111i / II111iiii
        for iii1I1Iii in lisp.lisp_info_sources_by_address:
            OoooOO0Oo0 = lisp.lisp_info_sources_by_address[iii1I1Iii]
            # Subscribed sources are pinned and never timed out.
            if (OoooOO0Oo0.no_timeout): continue
            # NOTE(review): this condition SKIPS (keeps) entries whose
            # uptime is more than one interval old and deletes the
            # younger ones - that looks inverted for an idle timeout;
            # confirm the intended direction against upstream sources.
            if (OoooOO0Oo0.uptime + ooooOoOooo00Oo < II1ii1): continue
            if 25 - 25: Oo0Ooo % OoOoOO00
            OoOOoo.append(iii1I1Iii)
            if 75 - 75: i1IIi
            # Drop the matching nonce-table entry as well.
            O0OOO0 = OoooOO0Oo0.nonce
            if (O0OOO0 == None): continue
            if (O0OOO0 in lisp.lisp_info_sources_by_nonce):
                lisp.lisp_info_sources_by_nonce.pop(O0OOO0)
        if 74 - 74: Oo0Ooo + I1Ii111 - oO0o - OoO0O00 + iII111i - iIii1I11I1II1
        if 54 - 54: I1ii11iIi11i + II111iiii . I1IiiI / OoO0O00 . ooOoO0o
        if 58 - 58: IiII % i11iIiiIii * II111iiii . I1ii11iIi11i
        if 94 - 94: i11iIiiIii . OOooOOo + iIii1I11I1II1 * I1Ii111 * I1Ii111
        if 36 - 36: I11i - IiII . IiII
        if 60 - 60: i11iIiiIii * Oo0Ooo % OoO0O00 + OoO0O00
        # Now it is safe to delete from the dict we iterated above.
        for iii1I1Iii in OoOOoo:
            lisp.lisp_info_sources_by_address.pop(iii1I1Iii)
        if 84 - 84: iIii1I11I1II1 + OoooooooOO
        if 77 - 77: O0 * I1ii11iIi11i * oO0o + OoO0O00 + I1ii11iIi11i - I1Ii111
    return
if 10 - 10: I1ii11iIi11i + IiII
if 58 - 58: I1IiiI + OoooooooOO / iII111i . ooOoO0o % o0oOOo0O0Ooo / I1ii11iIi11i
if 62 - 62: II111iiii
if 12 - 12: IiII + II111iiii
if 92 - 92: I1Ii111 % iIii1I11I1II1 - iII111i / i11iIiiIii % ooOoO0o * o0oOOo0O0Ooo
if 80 - 80: iII111i
if 3 - 3: I1ii11iIi11i * I11i
if 53 - 53: iIii1I11I1II1 / iII111i % OoO0O00 + IiII / ooOoO0o
def oo00oO(lisp_ipc_control_socket, lisp_sockets):
    #
    # Pump loop for the "lisp-core-pkt" IPC socket: other local LISP
    # processes hand control packets to the core process here, which
    # either proxies them to a NAT'd info-source, crosses them over to a
    # sibling process, or sends them out the network sockets.  Runs as a
    # daemon thread; returns only when the IPC receive raises.
    # (The "if N - N:" lines are obfuscator no-ops, kept as-is.)
    #
    lisp.lisp_set_exception()
    while (True):
        try: I11i1I11 = lisp_ipc_control_socket.recvfrom(9000)
        except: return (["", "", "", ""])
        # Datagram payload format: "control-packet@<dest-addr>@<port>@<packet...>".
        # The recvfrom source address is the sending process's socket name
        # (compared against "lisp-etr" below).
        IIIi1i1I = I11i1I11[0].split("@")
        ooo0O = I11i1I11[1]
        if 32 - 32: IiII - oO0o . iIii1I11I1II1 . I1Ii111 + II111iiii % OoooooooOO
        iIii = IIIi1i1I[0]
        i1IiI1ii1i = IIIi1i1I[1]
        oOoO0o00OO0 = int(IIIi1i1I[2])
        Oo000 = IIIi1i1I[3::]
        if 75 - 75: O0
        # A packet body containing "@" was split apart above - rejoin it.
        if (len(Oo000) > 1):
            Oo000 = lisp.lisp_bit_stuff(Oo000)
        else:
            Oo000 = Oo000[0]
        if 56 - 56: OoO0O00 / II111iiii
        if 39 - 39: OoOoOO00 - OoooooooOO - i1IIi / II111iiii
        if (iIii != "control-packet"):
            lisp.lprint(("lisp_core_control_packet_process() received" + "unexpected control-packet, message ignored"))
            if 49 - 49: Oo0Ooo + O0 + IiII . II111iiii % ooOoO0o
            continue
        if 33 - 33: OoOoOO00 . iIii1I11I1II1 / I11i % Ii1I
        if 49 - 49: OoO0O00 + II111iiii / IiII - O0 % Ii1I
        lisp.lprint(("{} {} bytes from {}, dest/port: {}/{}, control-" + "packet: {}").format(lisp.bold("Receive", False), len(Oo000),
            ooo0O, i1IiI1ii1i, oOoO0o00OO0, lisp.lisp_format_packet(Oo000)))
        if 40 - 40: I11i / iII111i + OoO0O00 / OoooooooOO - oO0o / I1Ii111
        if 62 - 62: i11iIiiIii - I11i
        if 81 - 81: I11i
        if 92 - 92: OOooOOo - Oo0Ooo - OoooooooOO / IiII - i1IIi
        if 81 - 81: i1IIi / I1Ii111 % i11iIiiIii . iIii1I11I1II1 * OoOoOO00 + OoooooooOO
        if 31 - 31: i1IIi % II111iiii
        # Map-Replies whose nonce matches a cached NAT info-source are
        # re-dispatched so they get proxied back to that source.
        I1ii1II1iII = lisp.lisp_control_header()
        I1ii1II1iII.decode(Oo000)
        if (I1ii1II1iII.type == lisp.LISP_MAP_REPLY):
            i1I1i1i1I1 = lisp.lisp_map_reply()
            i1I1i1i1I1.decode(Oo000)
            if (iiIIIIiI111(None, 0, i1I1i1i1I1.nonce)):
                oooO0o(lisp_sockets, ooo0O, oOoO0o00OO0, Oo000)
                continue
        if 13 - 13: iIii1I11I1II1 - II111iiii % O0 . Ii1I % OoO0O00
        if 2 - 2: OoooooooOO - Ii1I % oO0o / I1IiiI / o0oOOo0O0Ooo
        if 3 - 3: II111iiii / OOooOOo
        if 48 - 48: ooOoO0o . I1ii11iIi11i
        if 49 - 49: i1IIi - OoOoOO00 . Oo0Ooo + iIii1I11I1II1 - ooOoO0o / Oo0Ooo
        if 24 - 24: oO0o - iII111i / ooOoO0o
        if 10 - 10: OoOoOO00 * i1IIi
        if 15 - 15: I11i + i1IIi - II111iiii % I1IiiI
        # A Map-Notify originated by the local ETR is crossed over to the
        # ITR process instead of being transmitted.
        if (I1ii1II1iII.type == lisp.LISP_MAP_NOTIFY and ooo0O == "lisp-etr"):
            Oo0O00Oo0o0 = lisp.lisp_packet_ipc(Oo000, ooo0O, oOoO0o00OO0)
            lisp.lisp_ipc(Oo0O00Oo0o0, Oo, "lisp-itr")
            continue
        if 34 - 34: I1IiiI
        if 57 - 57: OOooOOo . Ii1I % o0oOOo0O0Ooo
        if 32 - 32: I11i / IiII - O0 * iIii1I11I1II1
        if 70 - 70: OoooooooOO % OoooooooOO % OoO0O00
        if 98 - 98: OoO0O00
        if 18 - 18: I11i + Oo0Ooo - OoO0O00 / I1Ii111 / OOooOOo
        if 53 - 53: OOooOOo + o0oOOo0O0Ooo . oO0o / I11i
        # Build an IPv6(-mapped) destination address and transmit.
        # NOTE(review): the next line's result is immediately overwritten
        # by the lisp_address() call below - the convert_4to6 call appears
        # dead; confirm against upstream before removing.
        OoooOo0 = lisp.lisp_convert_4to6(i1IiI1ii1i)
        OoooOo0 = lisp.lisp_address(lisp.LISP_AFI_IPV6, "", 128, 0)
        if (OoooOo0.is_ipv4_string(i1IiI1ii1i)): i1IiI1ii1i = "::ffff:" + i1IiI1ii1i
        OoooOo0.store_address(i1IiI1ii1i)
        if 52 - 52: I1Ii111 + I1Ii111
        if 73 - 73: o0oOOo0O0Ooo . i11iIiiIii % OoooooooOO + ooOoO0o . OoooooooOO / OOooOOo
        if 54 - 54: OoOoOO00 . OoooooooOO
        if 36 - 36: oO0o / II111iiii * IiII % I1ii11iIi11i
        lisp.lisp_send(lisp_sockets, OoooOo0, oOoO0o00OO0, Oo000)
        if 31 - 31: II111iiii + OOooOOo - OoooooooOO . I11i
    return
if 28 - 28: Ii1I . I1ii11iIi11i
if 77 - 77: I1ii11iIi11i % II111iiii
if 81 - 81: OoOoOO00 % Ii1I / O0 * iIii1I11I1II1 % IiII . I1IiiI
if 90 - 90: o0oOOo0O0Ooo
if 44 - 44: o0oOOo0O0Ooo / I1ii11iIi11i . Oo0Ooo + OoOoOO00
if 32 - 32: IiII - ooOoO0o * iII111i * I11i
if 84 - 84: Ii1I + I1ii11iIi11i % I1IiiI + i11iIiiIii
if 37 - 37: I11i % I1ii11iIi11i / ooOoO0o
def iI11I():
    #
    # Create ./lisp.config from ./lisp.config.example, copying lines up to
    # and including the first divider line of the form "#----#" (a '#', a
    # run of dashes, then '#') - i.e. only the template header.
    #
    with open("./lisp.config.example", "r") as example:
        lines = example.read().split("\n")

    with open("./lisp.config", "w") as config:
        for line in lines:
            config.write(line + "\n")

            # Stop after echoing the first divider line.  Check the length
            # before indexing so an empty line (split() yields "" after a
            # trailing newline) cannot raise IndexError, which the original
            # line[0] indexing did.
            if (len(line) >= 4 and line[0] == "#" and line[-1] == "#"):
                inner = line[1:-2]
                if (inner == len(inner) * "-"): break
    return
if 26 - 26: iII111i * I1Ii111 * oO0o * OoOoOO00
if 48 - 48: iII111i % i11iIiiIii . OoooooooOO * IiII % OoO0O00 . iII111i
if 6 - 6: O0 . ooOoO0o - oO0o / i11iIiiIii
if 84 - 84: I11i / I1ii11iIi11i * o0oOOo0O0Ooo * OoO0O00 * OOooOOo * O0
if 83 - 83: O0 % II111iiii + o0oOOo0O0Ooo / OoooooooOO
if 75 - 75: II111iiii . I1IiiI + OOooOOo - OoOoOO00 - O0 . I11i
if 19 - 19: Ii1I * i1IIi % O0 + I11i
if 25 - 25: I1Ii111 - Ii1I / O0 . OoooooooOO % I1IiiI . i1IIi
def Ii1iIIII1i(bottle_port):
    #
    # Core-process startup: open the 4342 control and 4341 data listen
    # sockets plus the internal IPC sockets, seed ./lisp.config when it is
    # missing, and spawn the worker threads (IPC pump, config processor,
    # web interface, process monitor, info-source sweeper).  Returns False
    # when no local addresses can be determined, True otherwise.
    # Python 2 code: uses the removed `commands` module for shell-outs.
    # (The "if N - N:" lines are obfuscator no-ops, kept as-is.)
    #
    global Oo0o
    global Ii1iI
    global Oo
    global I1Ii11I1Ii1i
    global Ooo
    global o0oOoO00o
    if 84 - 84: i1IIi - I1IiiI % iII111i
    lisp.lisp_i_am("core")
    lisp.lisp_set_exception()
    lisp.lisp_print_banner("core-process starting up")
    lisp.lisp_uptime = lisp.lisp_get_timestamp()
    lisp.lisp_version = commands.getoutput("cat lisp-version.txt")
    Oo0o = commands.getoutput("cat lisp-build-date.txt")
    if 80 - 80: o0oOOo0O0Ooo % iII111i
    if 80 - 80: Ii1I
    if 26 - 26: iIii1I11I1II1 . OoooooooOO - iIii1I11I1II1
    if 59 - 59: I1ii11iIi11i + I11i . oO0o
    if (lisp.lisp_get_local_addresses() == False): return (False)
    if 87 - 87: OoO0O00
    if 34 - 34: I1Ii111 . OoOoOO00 / i11iIiiIii / iII111i
    if 46 - 46: Oo0Ooo + II111iiii * I1IiiI + OOooOOo
    if 31 - 31: Ii1I * o0oOOo0O0Ooo * Ii1I + OoO0O00 * o0oOOo0O0Ooo . I1Ii111
    if 89 - 89: OoooooooOO * Ii1I * I1IiiI . ooOoO0o * Ii1I / iII111i
    # Serializes IPC writes across the threads spawned below.
    lisp.lisp_ipc_lock = multiprocessing.Lock()
    if 46 - 46: i11iIiiIii
    if 15 - 15: O0 / i1IIi / i1IIi . iII111i % OoOoOO00 + I1IiiI
    if 48 - 48: I1Ii111 % iII111i % Ii1I % iIii1I11I1II1 . Ii1I
    if 14 - 14: iII111i * OoO0O00 % O0 + I11i + I1ii11iIi11i
    if 23 - 23: Oo0Ooo % iII111i + Ii1I - I1Ii111
    if 65 - 65: OoooooooOO
    if 22 - 22: OOooOOo + II111iiii + Oo0Ooo
    # A source checkout (lisp.py present) gets a "+" version suffix.
    if (os.path.exists("lisp.py")): lisp.lisp_version += "+"
    if 83 - 83: ooOoO0o
    if 43 - 43: OOooOOo
    if 84 - 84: OOooOOo . IiII . iII111i
    if 2 - 2: Oo0Ooo - OoOoOO00
    if 49 - 49: Ii1I + II111iiii / oO0o - OoOoOO00 % OoOoOO00 + I1IiiI
    if 54 - 54: ooOoO0o % Oo0Ooo - OOooOOo
    # Open the port 4342 control listen socket.  Bind the wildcard
    # address unless LISP_ANYCAST_MR selects the primary RLOC address.
    iIi11IiiiII11 = "0.0.0.0" if lisp.lisp_is_raspbian() else "0::0"
    if (os.getenv("LISP_ANYCAST_MR") == None or lisp.lisp_myrlocs[0] == None):
        Ii1iI = lisp.lisp_open_listen_socket(iIi11IiiiII11,
            str(lisp.LISP_CTRL_PORT))
    else:
        iIi11IiiiII11 = lisp.lisp_myrlocs[0].print_address_no_iid()
        Ii1iI = lisp.lisp_open_listen_socket(iIi11IiiiII11,
            str(lisp.LISP_CTRL_PORT))
    if 26 - 26: iII111i / OoooooooOO - Oo0Ooo
    lisp.lprint("Listen on {}, port 4342".format(iIi11IiiiII11))
    if 2 - 2: I1ii11iIi11i - Oo0Ooo
    if 4 - 4: O0 / I11i . OoO0O00 - ooOoO0o / OOooOOo
    if 25 - 25: I11i * OoOoOO00 - Oo0Ooo . ooOoO0o . oO0o
    if 89 - 89: O0 * I11i * OoO0O00
    if 3 - 3: OOooOOo / iII111i * iIii1I11I1II1 + II111iiii / o0oOOo0O0Ooo / IiII
    if 25 - 25: OoOoOO00 + OoO0O00 % Ii1I % OOooOOo / oO0o
    # Only open the 4341 data socket when no external data plane owns it.
    if (lisp.lisp_external_data_plane() == False):
        o0oOoO00o = lisp.lisp_open_listen_socket(iIi11IiiiII11,
            str(lisp.LISP_DATA_PORT))
        lisp.lprint("Listen on {}, port 4341".format(iIi11IiiiII11))
    if 91 - 91: OoO0O00 / OoO0O00 . II111iiii . ooOoO0o - I1IiiI
    if 23 - 23: I1IiiI
    if 7 - 7: iII111i % I1ii11iIi11i
    if 64 - 64: I1Ii111 + i11iIiiIii
    if 35 - 35: OoOoOO00 + i1IIi % OOooOOo
    if 68 - 68: IiII . ooOoO0o
    # Named IPC send socket for talking to the component processes.
    Oo = lisp.lisp_open_send_socket("lisp-core", "")
    Oo.settimeout(3)
    if 64 - 64: i1IIi + Oo0Ooo * I1IiiI / OOooOOo
    if 3 - 3: Oo0Ooo / ooOoO0o + ooOoO0o . I1ii11iIi11i
    if 50 - 50: iIii1I11I1II1 * oO0o
    if 85 - 85: i1IIi
    if 100 - 100: OoooooooOO / I11i % OoO0O00 + Ii1I
    # IPC socket on which other processes hand us control packets.
    I1Ii11I1Ii1i = lisp.lisp_open_listen_socket("", "lisp-core-pkt")
    if 42 - 42: Oo0Ooo / IiII . Ii1I * I1IiiI
    # Socket list used for sends.  The listen socket appears twice -
    # presumably one slot per address family; confirm against upstream.
    Ooo = [Ii1iI, Ii1iI,
        Oo]
    if 54 - 54: OoOoOO00 * iII111i + OoO0O00
    if 93 - 93: o0oOOo0O0Ooo / I1IiiI
    if 47 - 47: Oo0Ooo * OOooOOo
    if 98 - 98: oO0o - oO0o . ooOoO0o
    if 60 - 60: I1IiiI * I1ii11iIi11i / O0 + I11i + IiII
    # IPC control-packet pump thread.
    threading.Thread(target=oo00oO,
        args=[I1Ii11I1Ii1i, Ooo]).start()
    if 66 - 66: IiII * Oo0Ooo . OoooooooOO * I1Ii111
    if 93 - 93: IiII / i1IIi
    if 47 - 47: ooOoO0o - Ii1I
    if 98 - 98: oO0o . I1Ii111 / OoOoOO00 . ooOoO0o
    if 1 - 1: OOooOOo
    if 87 - 87: O0 * II111iiii + iIii1I11I1II1 % oO0o % i11iIiiIii - OoOoOO00
    # Seed ./lisp.config from the example file on first run.
    if (os.path.exists("./lisp.config") == False):
        lisp.lprint(("./lisp.config does not exist, creating a copy " + "from lisp.config.example"))
        if 73 - 73: iII111i + Ii1I
        iI11I()
    if 37 - 37: oO0o - iIii1I11I1II1 + II111iiii . Ii1I % iIii1I11I1II1
    if 17 - 17: I1Ii111 + i1IIi % O0
    if 65 - 65: IiII
    if 50 - 50: II111iiii / OoO0O00
    if 79 - 79: I1ii11iIi11i - iIii1I11I1II1 % i1IIi / Oo0Ooo + II111iiii
    if 95 - 95: oO0o
    # Check for decentralized-push-xtr configuration (multicast groups).
    i11ii(Ii1iI)
    if 39 - 39: i1IIi . I1ii11iIi11i / I11i / I11i
    # Configuration-file processing thread.
    threading.Thread(target=lispconfig.lisp_config_process,
        args=[Oo]).start()
    if 100 - 100: OoooooooOO - OoooooooOO + IiII
    if 32 - 32: OoOoOO00 * o0oOOo0O0Ooo / OoooooooOO
    if 90 - 90: I1Ii111
    if 35 - 35: II111iiii / Ii1I
    # Web interface thread plus the placeholder thread.
    threading.Thread(target=oOooOoOOo0O,
        args=[bottle_port]).start()
    threading.Thread(target=o00OIIIIIiiI, args=[]).start()
    if 79 - 79: OoOoOO00 + I1Ii111 * iII111i * Ii1I
    if 53 - 53: OOooOOo / Oo0Ooo
    if 10 - 10: I1ii11iIi11i . o0oOOo0O0Ooo
    if 75 - 75: O0 * i1IIi - I11i / OOooOOo % OOooOOo / OoOoOO00
    # Component-process up/down monitor thread.
    threading.Thread(target=O00Oo00OOoO0,
        args=[Oo]).start()
    if 5 - 5: O0 - iII111i / I1Ii111 . o0oOOo0O0Ooo
    if 7 - 7: I1ii11iIi11i - OoOoOO00
    if 54 - 54: oO0o / iIii1I11I1II1 / OoooooooOO . i1IIi - OoOoOO00
    if 57 - 57: iIii1I11I1II1 * Ii1I * iII111i / oO0o
    # NAT info-source sweeper thread.
    threading.Thread(target=Ii1i).start()
    return (True)
if 46 - 46: Ii1I
if 61 - 61: o0oOOo0O0Ooo / ooOoO0o - II111iiii
if 87 - 87: I1ii11iIi11i / I1IiiI
if 45 - 45: OoOoOO00 * ooOoO0o / OoooooooOO + OoO0O00 . I1Ii111 / OoO0O00
if 64 - 64: Ii1I / i1IIi % I1IiiI - o0oOOo0O0Ooo
if 11 - 11: I1ii11iIi11i - OoooooooOO
if 16 - 16: IiII % OoooooooOO - ooOoO0o * Ii1I - Ii1I
def I1iiII1():
    """Close every socket the core process opened, used at shutdown."""
    for sock, name in ((Oo, "lisp-core"), (I1Ii11I1Ii1i, "lisp-core-pkt"),
                       (Ii1iI, ""), (o0oOoO00o, "")):
        lisp.lisp_close_socket(sock, name)
if 49 - 49: Oo0Ooo - iIii1I11I1II1
if 64 - 64: I1Ii111 + iIii1I11I1II1
if 14 - 14: Ii1I / OoooooooOO + II111iiii . O0 / i1IIi
if 58 - 58: o0oOOo0O0Ooo / i11iIiiIii / O0 % I11i % I1IiiI
if 86 - 86: IiII + OoOoOO00 / I1IiiI + I11i % I11i / i11iIiiIii
if 12 - 12: OoOoOO00 + o0oOOo0O0Ooo . I1Ii111
if 52 - 52: OoO0O00
if 4 - 4: Ii1I % I1ii11iIi11i + I11i - I1ii11iIi11i
if 98 - 98: Ii1I - O0 * oO0o * Ii1I * Ii1I
if 44 - 44: IiII + I11i
if 66 - 66: oO0o
if 34 - 34: iII111i % i11iIiiIii + i11iIiiIii - iII111i
def i11ii ( lisp_socket ) :
    """
    Join decentralized-push-xtr multicast groups on the supplied socket.

    Reads ./lisp.config; when "decentralized-push-xtr = yes" is configured,
    collects every "lisp map-server" address in the multicast range
    (224.0.0.0/4) and joins each group, using the IPv4 address of eth0 as
    the outgoing interface address.

    :param lisp_socket: UDP socket on which multicast membership is added.
    :return: None (returns early when the feature is off, no multicast
             map-servers are configured, or eth0 has no inet address).
    """
    config_file = open ( "./lisp.config" , "r" )
    config_lines = config_file . read ( ) . split ( "\n" )
    config_file . close ( )

    # Only proceed when decentralized-push-xtr is enabled in the config.
    enabled = False
    for line in config_lines :
        if ( line [ 0 : 1 ] == "#-" and line [ - 2 : - 1 ] == "-#" ) : break
        if ( line == "" or line [ 0 ] == "#" ) : continue
        if ( line . find ( "decentralized-push-xtr = yes" ) == - 1 ) : continue
        enabled = True
        break
    if ( enabled == False ) : return

    # Collect map-server addresses that fall in the multicast range
    # 224.0.0.0 - 239.255.255.255 (first octet in [224, 240)).
    groups = [ ]
    in_map_server = False
    for line in config_lines :
        if ( line [ 0 : 1 ] == "#-" and line [ - 2 : - 1 ] == "-#" ) : break
        if ( line == "" or line [ 0 ] == "#" ) : continue

        if ( line . find ( "lisp map-server" ) != - 1 ) :
            in_map_server = True
            continue
        if ( line [ 0 ] == "}" ) :
            in_map_server = False
            continue

        if ( in_map_server and line . find ( "address = " ) != - 1 ) :
            address = line . split ( "address = " ) [ 1 ]
            first_octet = int ( address . split ( "." ) [ 0 ] )
            if ( first_octet >= 224 and first_octet < 240 ) : groups . append ( address )

    # Bug fix: the original tested the last parsed address *string* against
    # [] (never true, and NameError when no address line was seen) instead
    # of the collected list of groups.
    if ( groups == [ ] ) : return

    # Determine the local interface address used to source multicast joins.
    ifconfig_output = commands . getoutput ( 'ifconfig eth0 | egrep "inet "' )
    if ( ifconfig_output == "" ) : return
    local_address = ifconfig_output . split ( ) [ 1 ]

    # Join each multicast group on the supplied socket.
    interface = socket . inet_aton ( local_address )
    for address in groups :
        lisp_socket . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , 1 )
        lisp_socket . setsockopt ( socket . IPPROTO_IP , socket . IP_MULTICAST_IF , interface )
        mreq = socket . inet_aton ( address ) + interface
        lisp_socket . setsockopt ( socket . IPPROTO_IP , socket . IP_ADD_MEMBERSHIP , mreq )
        lisp . lprint ( "Setting multicast listen socket for group {}" . format ( address ) )
    return
if 60 - 60: OOooOOo * ooOoO0o * OoO0O00
if 64 - 64: I11i / II111iiii / OoO0O00 - ooOoO0o * iIii1I11I1II1 . iII111i
if 25 - 25: OOooOOo - Ii1I . I11i
if 57 - 57: o0oOOo0O0Ooo + Oo0Ooo * I1ii11iIi11i - ooOoO0o % iIii1I11I1II1 - Ii1I
# Listen port for the web/config interface: first CLI argument, default 8080.
III1I11II11I = int ( sys . argv [ 1 ] ) if ( len ( sys . argv ) > 1 ) else 8080
if 78 - 78: I1ii11iIi11i . I1Ii111 . I1Ii111 . I11i % iII111i
if 26 - 26: ooOoO0o + OoO0O00 / OoOoOO00 . II111iiii * Ii1I
if 21 - 21: I1IiiI - I1IiiI + iII111i % I1IiiI * oO0o
if 74 - 74: iII111i / I11i . I1IiiI - OoooooooOO + II111iiii + I11i
# Start up the lisp-core process; exit with status 1 if startup fails.
if ( Ii1iIIII1i ( III1I11II11I ) == False ) :
 lisp . lprint ( "lisp_core_startup() failed" )
 lisp . lisp_print_banner ( "lisp-core abnormal exit" )
 exit ( 1 )
if 36 - 36: Ii1I * I1IiiI * I1ii11iIi11i . I11i * I1ii11iIi11i
if 76 - 76: OOooOOo + O0 / IiII - OoO0O00
# Main receive loop: block on the lisp-core socket and dispatch each
# received packet until the socket is closed (empty source returned).
while ( True ) :
 if 27 - 27: Oo0Ooo - iIii1I11I1II1 * iII111i * II111iiii * I1ii11iIi11i
 if 9 - 9: i11iIiiIii + OOooOOo - OoOoOO00 / ooOoO0o % i1IIi / oO0o
 if 22 - 22: i1IIi
 if 3 - 3: OoO0O00 * I1ii11iIi11i - iII111i + I1ii11iIi11i
 if 63 - 63: I11i * ooOoO0o % II111iiii % I1Ii111 + I1IiiI * Oo0Ooo
 iIii , ooo0O , oOoO0o00OO0 , Oo000 = lisp . lisp_receive ( Ii1iI , False )
 if 96 - 96: IiII
 # An empty source address means the socket was closed; leave the loop.
 if ( ooo0O == "" ) : break
 if 99 - 99: iIii1I11I1II1 - ooOoO0o
 if 79 - 79: I1IiiI + oO0o % I11i % oO0o
 if 56 - 56: I1ii11iIi11i + oO0o . OoO0O00 + OoooooooOO * I1ii11iIi11i - O0
 if 35 - 35: OOooOOo . I11i . I1Ii111 - I11i % I11i + I1Ii111
 # Normalize IPv6-mapped IPv4 sources, then dispatch the packet.
 ooo0O = lisp . lisp_convert_6to4 ( ooo0O )
 oooO0o ( Ooo , ooo0O , oOoO0o00OO0 , Oo000 )
 if 99 - 99: o0oOOo0O0Ooo + OOooOOo
 if 34 - 34: I1Ii111 * o0oOOo0O0Ooo . I1IiiI % i11iIiiIii
# Clean shutdown: close all sockets and print the exit banner.
I1iiII1 ( )
lisp . lisp_print_banner ( "lisp-core normal exit" )
exit ( 0 )
if 61 - 61: iIii1I11I1II1 + oO0o * I11i - i1IIi % oO0o
if 76 - 76: oO0o / OoOoOO00
# dd678faae9ac167bc83abf78e5cb2f3f0688d3a3
| 45.970496
| 149
| 0.631524
|
acffcc199b0ffec3a1827455e739363b9111eae0
| 13,083
|
py
|
Python
|
openprocurement/auction/worker/auction.py
|
ProzorroUKR/openprocurement.auction.worker
|
26fe9ff4084f634227df7984a8dd7347fc839665
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auction/worker/auction.py
|
ProzorroUKR/openprocurement.auction.worker
|
26fe9ff4084f634227df7984a8dd7347fc839665
|
[
"Apache-2.0"
] | 4
|
2018-09-26T12:06:14.000Z
|
2019-10-31T07:58:44.000Z
|
openprocurement/auction/worker/auction.py
|
ProzorroUKR/openprocurement.auction.worker
|
26fe9ff4084f634227df7984a8dd7347fc839665
|
[
"Apache-2.0"
] | null | null | null |
import logging
from copy import deepcopy
from urlparse import urljoin
from datetime import datetime
from couchdb import Database, Session
from gevent import sleep
from gevent.event import Event
from gevent.lock import BoundedSemaphore
from yaml import safe_dump as yaml_dump
from requests import Session as RequestsSession
from dateutil.tz import tzlocal
from barbecue import cooking
from apscheduler.schedulers.gevent import GeventScheduler
from openprocurement.auction.worker.journal import (
AUCTION_WORKER_SERVICE_AUCTION_RESCHEDULE,
AUCTION_WORKER_SERVICE_AUCTION_NOT_FOUND,
AUCTION_WORKER_SERVICE_AUCTION_STATUS_CANCELED,
AUCTION_WORKER_SERVICE_AUCTION_CANCELED,
AUCTION_WORKER_SERVICE_END_AUCTION,
AUCTION_WORKER_SERVICE_START_AUCTION,
AUCTION_WORKER_SERVICE_STOP_AUCTION_WORKER,
AUCTION_WORKER_SERVICE_PREPARE_SERVER,
AUCTION_WORKER_SERVICE_END_FIRST_PAUSE
)
from openprocurement.auction.worker.server import run_server
from openprocurement.auction.executor import AuctionsExecutor
from openprocurement.auction.worker.mixins import\
DBServiceMixin, RequestIDServiceMixin, AuditServiceMixin,\
DateTimeServiceMixin, BiddersServiceMixin, PostAuctionServiceMixin,\
StagesServiceMixin, ROUNDS, TIMEZONE
from openprocurement.auction.worker.utils import \
prepare_initial_bid_stage, prepare_results_stage
from openprocurement.auction.utils import\
get_latest_bid_for_bidder, sorting_by_amount,\
sorting_start_bids_by_amount, delete_mapping
# Module-wide logger used by the worker and handed to the scheduler.
LOGGER = logging.getLogger('Auction Worker')
# Gevent-based job scheduler driving auction stage transitions; a job may
# fire up to 100 seconds late (misfire_grace_time) before being discarded.
SCHEDULER = GeventScheduler(job_defaults={"misfire_grace_time": 100},
                            executors={'default': AuctionsExecutor()},
                            logger=LOGGER)
# All scheduled run dates are interpreted in the auction timezone.
SCHEDULER.timezone = TIMEZONE
class Auction(DBServiceMixin,
              RequestIDServiceMixin,
              AuditServiceMixin,
              BiddersServiceMixin,
              DateTimeServiceMixin,
              StagesServiceMixin,
              PostAuctionServiceMixin):
    """Auction Worker Class.

    Orchestrates a single auction: schedules stage transitions on the
    module-level SCHEDULER, serves the auction page, records audit data
    and persists the auction document to CouchDB.
    """
    def __init__(self, tender_id,
                 worker_defaults,
                 auction_data=None,
                 lot_id=None):
        """
        :param tender_id: tender identifier; combined with lot_id to form
            the auction document id.
        :param worker_defaults: dict of worker configuration (API server,
            version, resource name, couch DB, document service flag).
        :param auction_data: optional test auction data; when supplied the
            worker runs in debug mode and skips put_auction_data.
        :param lot_id: optional lot identifier for multi-lot tenders.
        """
        super(Auction, self).__init__()
        # Fix: the original used a shared mutable default argument
        # (auction_data={}); None keeps the same falsy semantics safely.
        auction_data = {} if auction_data is None else auction_data
        self.generate_request_id()
        self.tender_id = tender_id
        self.lot_id = lot_id
        if lot_id:
            self.auction_doc_id = tender_id + "_" + lot_id
        else:
            self.auction_doc_id = tender_id
        self.tender_url = urljoin(
            worker_defaults["resource_api_server"],
            '/api/{0}/{1}/{2}'.format(
                worker_defaults["resource_api_version"],
                worker_defaults["resource_name"],
                tender_id
            )
        )
        if auction_data:
            # Debug run: use the supplied data instead of fetching it.
            self.debug = True
            LOGGER.setLevel(logging.DEBUG)
            self._auction_data = auction_data
        else:
            self.debug = False
        # Set by end_auction; wait_to_end blocks on it.
        self._end_auction_event = Event()
        # Serializes bid mutations against stage switches.
        self.bids_actions = BoundedSemaphore()
        self.session = RequestsSession()
        self.worker_defaults = worker_defaults
        if self.worker_defaults.get('with_document_service', False):
            self.session_ds = RequestsSession()
        self._bids_data = {}
        self.db = Database(str(self.worker_defaults["COUCH_DATABASE"]),
                           session=Session(retry_delays=range(10)))
        self.audit = {}
        self.retries = 10
        self.bidders_count = 0
        self.bidders_data = []
        self.bidders_features = {}
        self.bidders_coeficient = {}
        self.features = None
        self.mapping = {}
        self.rounds_stages = []
    def schedule_auction(self):
        """Prepare the auction document/audit and schedule every stage
        transition (start, pauses, bid rounds) on SCHEDULER, then start
        the auction web server."""
        self.generate_request_id()
        self.get_auction_document()
        if self.debug:
            LOGGER.info("Get _auction_data from auction_document")
            self._auction_data = self.auction_document.get('test_auction_data', {})
        self.get_auction_info()
        self.prepare_audit()
        self.prepare_auction_stages()
        self.save_auction_document()
        round_number = 0
        SCHEDULER.add_job(
            self.start_auction, 'date',
            kwargs={"switch_to_round": round_number},
            run_date=self.convert_datetime(
                self.auction_document['stages'][0]['start']
            ),
            name="Start of Auction",
            id="Start of Auction"
        )
        round_number += 1
        SCHEDULER.add_job(
            self.end_first_pause, 'date', kwargs={"switch_to_round": round_number},
            run_date=self.convert_datetime(
                self.auction_document['stages'][1]['start']
            ),
            name="End of Pause Stage: [0 -> 1]",
            id="End of Pause Stage: [0 -> 1]"
        )
        round_number += 1
        # Each remaining stage boundary ends either a bids stage or a pause.
        for index in xrange(2, len(self.auction_document['stages'])):
            if self.auction_document['stages'][index - 1]['type'] == 'bids':
                SCHEDULER.add_job(
                    self.end_bids_stage, 'date',
                    kwargs={"switch_to_round": round_number},
                    run_date=self.convert_datetime(
                        self.auction_document['stages'][index]['start']
                    ),
                    name="End of Bids Stage: [{} -> {}]".format(index - 1, index),
                    id="End of Bids Stage: [{} -> {}]".format(index - 1, index)
                )
            elif self.auction_document['stages'][index - 1]['type'] == 'pause':
                SCHEDULER.add_job(
                    self.next_stage, 'date',
                    kwargs={"switch_to_round": round_number},
                    run_date=self.convert_datetime(
                        self.auction_document['stages'][index]['start']
                    ),
                    name="End of Pause Stage: [{} -> {}]".format(index - 1, index),
                    id="End of Pause Stage: [{} -> {}]".format(index - 1, index)
                )
            round_number += 1
        LOGGER.info(
            "Prepare server ...",
            extra={"JOURNAL_REQUEST_ID": self.request_id,
                   "MESSAGE_ID": AUCTION_WORKER_SERVICE_PREPARE_SERVER}
        )
        self.server = run_server(self, self.convert_datetime(self.auction_document['stages'][-2]['start']), LOGGER)
    def wait_to_end(self):
        """Block until end_auction signals completion, then log shutdown."""
        self._end_auction_event.wait()
        LOGGER.info("Stop auction worker",
                    extra={"JOURNAL_REQUEST_ID": self.request_id,
                           "MESSAGE_ID": AUCTION_WORKER_SERVICE_STOP_AUCTION_WORKER})
    def start_auction(self, switch_to_round=None):
        """Open the auction: record initial bids (with feature coefficients
        when configured), set the current stage and persist the document.

        :param switch_to_round: stage index to jump to; defaults to 0.
        """
        self.generate_request_id()
        self.audit['timeline']['auction_start']['time'] = datetime.now(tzlocal()).isoformat()
        LOGGER.info(
            '---------------- Start auction ----------------',
            extra={"JOURNAL_REQUEST_ID": self.request_id,
                   "MESSAGE_ID": AUCTION_WORKER_SERVICE_START_AUCTION}
        )
        self.get_auction_info()
        self.get_auction_document()
        # Initital Bids
        bids = deepcopy(self.bidders_data)
        self.auction_document["initial_bids"] = []
        bids_info = sorting_start_bids_by_amount(bids, features=self.features)
        for index, bid in enumerate(bids_info):
            amount = bid["value"]["amount"]
            audit_info = {
                "bidder": bid["id"],
                "date": bid["date"],
                "amount": amount
            }
            if self.features:
                # Feature-adjusted amount used for ordering bidders.
                amount_features = cooking(
                    amount,
                    self.features, self.bidders_features[bid["id"]]
                )
                coeficient = self.bidders_coeficient[bid["id"]]
                audit_info["amount_features"] = str(amount_features)
                audit_info["coeficient"] = str(coeficient)
            else:
                coeficient = None
                amount_features = None
            self.audit['timeline']['auction_start']['initial_bids'].append(
                audit_info
            )
            self.auction_document["initial_bids"].append(
                prepare_initial_bid_stage(
                    time=bid["date"] if "date" in bid else self.startDate,
                    bidder_id=bid["id"],
                    bidder_name=self.mapping[bid["id"]],
                    amount=amount,
                    coeficient=coeficient,
                    amount_features=amount_features
                )
            )
        if isinstance(switch_to_round, int):
            self.auction_document["current_stage"] = switch_to_round
        else:
            self.auction_document["current_stage"] = 0
        all_bids = deepcopy(self.auction_document["initial_bids"])
        minimal_bids = []
        for bid_info in self.bidders_data:
            minimal_bids.append(get_latest_bid_for_bidder(
                all_bids, str(bid_info['id'])
            ))
        minimal_bids = self.filter_bids_keys(sorting_by_amount(minimal_bids))
        self.update_future_bidding_orders(minimal_bids)
        self.save_auction_document()
    def end_first_pause(self, switch_to_round=None):
        """Advance out of the first pause stage under the bids semaphore.

        :param switch_to_round: stage index to jump to; defaults to the
            next stage.
        """
        self.generate_request_id()
        LOGGER.info(
            '---------------- End First Pause ----------------',
            extra={"JOURNAL_REQUEST_ID": self.request_id,
                   "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_FIRST_PAUSE}
        )
        self.bids_actions.acquire()
        self.get_auction_document()
        if isinstance(switch_to_round, int):
            self.auction_document["current_stage"] = switch_to_round
        else:
            self.auction_document["current_stage"] += 1
        self.save_auction_document()
        self.bids_actions.release()
    def end_auction(self):
        """Finish the auction: stop the server, compute results from the
        last round's stages, publish audit/results and signal wait_to_end."""
        LOGGER.info(
            '---------------- End auction ----------------',
            extra={"JOURNAL_REQUEST_ID": self.request_id,
                   "MESSAGE_ID": AUCTION_WORKER_SERVICE_END_AUCTION}
        )
        LOGGER.debug("Stop server", extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.server:
            self.server.stop()
        LOGGER.debug(
            "Clear mapping", extra={"JOURNAL_REQUEST_ID": self.request_id}
        )
        delete_mapping(self.worker_defaults,
                       self.auction_doc_id)
        start_stage, end_stage = self.get_round_stages(ROUNDS)
        minimal_bids = deepcopy(
            self.auction_document["stages"][start_stage:end_stage]
        )
        minimal_bids = self.filter_bids_keys(sorting_by_amount(minimal_bids))
        self.auction_document["results"] = []
        for item in minimal_bids:
            self.auction_document["results"].append(prepare_results_stage(**item))
        self.auction_document["current_stage"] = (len(self.auction_document["stages"]) - 1)
        LOGGER.debug(' '.join((
            'Document in end_stage: \n', yaml_dump(dict(self.auction_document))
        )), extra={"JOURNAL_REQUEST_ID": self.request_id})
        self.approve_audit_info_on_announcement()
        LOGGER.info('Audit data: \n {}'.format(yaml_dump(self.audit)), extra={"JOURNAL_REQUEST_ID": self.request_id})
        if self.debug:
            LOGGER.debug(
                'Debug: put_auction_data disabled !!!',
                extra={"JOURNAL_REQUEST_ID": self.request_id}
            )
            sleep(10)
            self.save_auction_document()
        else:
            if self.put_auction_data():
                self.save_auction_document()
        LOGGER.debug(
            "Fire 'stop auction worker' event",
            extra={"JOURNAL_REQUEST_ID": self.request_id}
        )
        # Fix: the event announced by the log line above was never actually
        # set, leaving wait_to_end() blocked forever after the auction ended.
        self._end_auction_event.set()
    def cancel_auction(self):
        """Mark the auction document as canceled (current_stage = -100)."""
        self.generate_request_id()
        if self.get_auction_document():
            LOGGER.info("Auction {} canceled".format(self.auction_doc_id),
                        extra={'MESSAGE_ID': AUCTION_WORKER_SERVICE_AUCTION_CANCELED})
            self.auction_document["current_stage"] = -100
            self.auction_document["endDate"] = datetime.now(tzlocal()).isoformat()
            LOGGER.info("Change auction {} status to 'canceled'".format(self.auction_doc_id),
                        extra={'MESSAGE_ID': AUCTION_WORKER_SERVICE_AUCTION_STATUS_CANCELED})
            self.save_auction_document()
        else:
            LOGGER.info("Auction {} not found".format(self.auction_doc_id),
                        extra={'MESSAGE_ID': AUCTION_WORKER_SERVICE_AUCTION_NOT_FOUND})
    def reschedule_auction(self):
        """Mark a not-started auction for rescheduling (current_stage = -101)."""
        self.generate_request_id()
        if self.get_auction_document():
            LOGGER.info("Auction {} has not started and will be rescheduled".format(self.auction_doc_id),
                        extra={'MESSAGE_ID': AUCTION_WORKER_SERVICE_AUCTION_RESCHEDULE})
            self.auction_document["current_stage"] = -101
            self.save_auction_document()
        else:
            LOGGER.info("Auction {} not found".format(self.auction_doc_id),
                        extra={'MESSAGE_ID': AUCTION_WORKER_SERVICE_AUCTION_NOT_FOUND})
| 41.012539
| 117
| 0.599633
|
acffcd9b42d667025f1fd81718da7ef4c3e1e15b
| 15,237
|
py
|
Python
|
mistral/tests/unit/engine/test_direct_workflow.py
|
rocky11030/mistral
|
fe18ebda9441b513bae9741b7b99db5556ccbbba
|
[
"Apache-2.0"
] | null | null | null |
mistral/tests/unit/engine/test_direct_workflow.py
|
rocky11030/mistral
|
fe18ebda9441b513bae9741b7b99db5556ccbbba
|
[
"Apache-2.0"
] | null | null | null |
mistral/tests/unit/engine/test_direct_workflow.py
|
rocky11030/mistral
|
fe18ebda9441b513bae9741b7b99db5556ccbbba
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from mistral.db.v2 import api as db_api
from mistral import exceptions as exc
from mistral.services import workflows as wf_service
from mistral.tests.unit.engine import base
from mistral.workflow import states
from mistral.workflow import utils as wf_utils
# Use set_default (not an override) so the change persists across test
# cases; disable pecan authentication for this whole test module.
cfg.CONF.set_default('auth_enable', False, group='pecan')
class DirectWorkflowEngineTest(base.EngineTestCase):
    """Engine tests for 'direct' workflows (transition-driven task graphs).

    Fix applied throughout: several assertions used
    ``assertTrue(wf_ex.state, states.ERROR)``, where the second argument is
    only a failure *message* — the call never compared the two values.
    They are replaced with ``assertEqual(states.ERROR, wf_ex.state)``.
    """
    def _run_workflow(self, workflow_yaml, state=states.ERROR):
        """Create the workflow, start it and wait for the given state."""
        wf_service.create_workflows(workflow_yaml)
        wf_ex = self.engine.start_workflow('wf', {})
        self._await(lambda: self.is_execution_in_state(wf_ex.id, state))
        return db_api.get_workflow_execution(wf_ex.id)
    def test_direct_workflow_on_closures(self):
        wf_text = """
        version: '2.0'
        wf:
          # type: direct - 'direct' is default
          tasks:
            task1:
              description: |
                Explicit 'fail' command should lead to workflow failure.
              action: std.echo output="Echo"
              on-success:
                - task2
                - succeed
              on-complete:
                - task3
                - task4
                - fail
                - never_gets_here
            task2:
              action: std.noop
            task3:
              action: std.noop
            task4:
              action: std.noop
            never_gets_here:
              action: std.noop
        """
        wf_ex = self._run_workflow(wf_text)
        tasks = wf_ex.task_executions
        task1 = self._assert_single_item(tasks, name='task1')
        task3 = self._assert_single_item(tasks, name='task3')
        task4 = self._assert_single_item(tasks, name='task4')
        self.assertEqual(3, len(tasks))
        self._await(lambda: self.is_task_success(task1.id))
        self._await(lambda: self.is_task_success(task3.id))
        self._await(lambda: self.is_task_success(task4.id))
        # Fix: was assertTrue(wf_ex.state, states.ERROR) which asserts nothing.
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_direct_workflow_condition_transition_not_triggering(self):
        wf_text = """---
        version: '2.0'
        wf:
          input:
            - var: null
          tasks:
            task1:
              action: std.fail
              on-success:
                - task2
              on-error:
                - task3: <% $.var != null %>
            task2:
              action: std.noop
            task3:
              action: std.noop
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', {})
        self._await(lambda: self.is_execution_error(wf_ex.id))
        wf_ex = db_api.get_workflow_execution(wf_ex.id)
        tasks = wf_ex.task_executions
        task1 = self._assert_single_item(tasks, name='task1')
        self.assertEqual(1, len(tasks))
        self._await(lambda: self.is_task_error(task1.id))
        # Fix: was assertTrue(wf_ex.state, states.ERROR) which asserts nothing.
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_direct_workflow_change_state_after_success(self):
        wf_text = """
        version: '2.0'
        wf:
          tasks:
            task1:
              action: std.echo output="Echo"
              on-success:
                - task2
            task2:
              action: std.noop
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', {})
        self._await(lambda: self.is_execution_success(wf_ex.id))
        self.assertEqual(
            states.SUCCESS,
            self.engine.resume_workflow(wf_ex.id).state
        )
        self.assertRaises(
            exc.WorkflowException,
            self.engine.pause_workflow, wf_ex.id
        )
        self.assertEqual(
            states.SUCCESS,
            self.engine.stop_workflow(wf_ex.id, states.ERROR).state
        )
    def test_wrong_task_input(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo output="Echo"
              on-complete:
                - task2
            task2:
              description: Wrong task output should lead to workflow failure
              action: std.echo wrong_input="Hahaha"
        """
        wf_ex = self._run_workflow(wf_text)
        self.assertIn(
            'Invalid input',
            wf_ex.state_info
        )
        # Fix: was assertTrue(wf_ex.state, states.ERROR) which asserts nothing.
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_wrong_first_task_input(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo wrong_input="Ha-ha"
        """
        wf_ex = self._run_workflow(wf_text)
        self.assertIn("Invalid input", wf_ex.state_info)
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_wrong_action(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo output="Echo"
              on-complete:
                - task2
            task2:
              action: action.doesnt_exist
        """
        wf_ex = self._run_workflow(wf_text)
        # TODO(dzimine): Catch tasks caused error, and set them to ERROR:
        # TODO(dzimine): self.assertTrue(task_ex.state, states.ERROR)
        # Fix: was assertTrue(wf_ex.state, states.ERROR) which asserts nothing.
        self.assertEqual(states.ERROR, wf_ex.state)
        self.assertIn("Failed to find action", wf_ex.state_info)
    def test_wrong_action_first_task(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: wrong.task
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', None)
        self.assertIn(
            "Failed to find action [action_name=wrong.task]",
            wf_ex.state_info
        )
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_next_task_with_input_yaql_error(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo output="Echo"
              on-complete:
                - task2
            task2:
              action: std.echo output=<% wrong(yaql) %>
        """
        # Invoke workflow and assert workflow is in ERROR.
        wf_ex = self._run_workflow(wf_text)
        self.assertEqual(states.ERROR, wf_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf_ex.state_info)
        # Assert that there is only one task execution and it's SUCCESS.
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.SUCCESS, task_1_ex.state)
        # Assert that there is only one action execution and it's SUCCESS.
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.SUCCESS, task_1_action_exs[0].state)
    def test_async_next_task_with_input_yaql_error(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.async_noop
              on-complete:
                - task2
            task2:
              action: std.echo output=<% wrong(yaql) %>
        """
        # Invoke workflow and assert workflow, task,
        # and async action execution are RUNNING.
        wf_ex = self._run_workflow(wf_text, states.RUNNING)
        self.assertEqual(states.RUNNING, wf_ex.state)
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.RUNNING, task_1_ex.state)
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.RUNNING, task_1_action_exs[0].state)
        # Update async action execution result.
        result = wf_utils.Result(data='foobar')
        self.assertRaises(
            exc.YaqlEvaluationException,
            self.engine.on_action_complete,
            task_1_action_exs[0].id,
            result
        )
        # Assert that task1 is SUCCESS and workflow is ERROR.
        wf_ex = db_api.get_workflow_execution(wf_ex.id)
        self.assertEqual(states.ERROR, wf_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf_ex.state_info)
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.SUCCESS, task_1_ex.state)
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.SUCCESS, task_1_action_exs[0].state)
    def test_messed_yaql_in_first_task(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo output=<% wrong(yaql) %>
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', None)
        self.assertIn(
            "Can not evaluate YAQL expression: wrong(yaql)",
            wf_ex.state_info
        )
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_mismatched_yaql_in_first_task(self):
        wf_text = """
        version: '2.0'
        wf:
          input:
            - var
          tasks:
            task1:
              action: std.echo output=<% $.var + $.var2 %>
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', {'var': 2})
        self.assertIn("Can not evaluate YAQL expression", wf_ex.state_info)
        self.assertEqual(states.ERROR, wf_ex.state)
    def test_one_line_syntax_in_on_clauses(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.echo output=1
              on-success: task2
            task2:
              action: std.echo output=1
              on-complete: task3
            task3:
              action: std.fail
              on-error: task4
            task4:
              action: std.echo output=4
        """
        wf_service.create_workflows(wf_text)
        wf_ex = self.engine.start_workflow('wf', {})
        self._await(lambda: self.is_execution_success(wf_ex.id))
    def test_task_on_clause_has_yaql_error(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.noop
              on-success:
                - task2: <% wrong(yaql) %>
            task2:
              action: std.noop
        """
        # Invoke workflow and assert workflow is in ERROR.
        wf_ex = self._run_workflow(wf_text)
        self.assertEqual(states.ERROR, wf_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf_ex.state_info)
        # Assert that there is only one task execution and it's SUCCESS.
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.SUCCESS, task_1_ex.state)
        # Assert that there is only one action execution and it's SUCCESS.
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.SUCCESS, task_1_action_exs[0].state)
    def test_async_task_on_clause_has_yaql_error(self):
        wf_text = """
        version: '2.0'
        wf:
          type: direct
          tasks:
            task1:
              action: std.async_noop
              on-complete:
                - task2: <% wrong(yaql) %>
            task2:
              action: std.noop
        """
        # Invoke workflow and assert workflow, task,
        # and async action execution are RUNNING.
        wf_ex = self._run_workflow(wf_text, states.RUNNING)
        self.assertEqual(states.RUNNING, wf_ex.state)
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.RUNNING, task_1_ex.state)
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.RUNNING, task_1_action_exs[0].state)
        # Update async action execution result.
        result = wf_utils.Result(data='foobar')
        self.assertRaises(
            exc.YaqlEvaluationException,
            self.engine.on_action_complete,
            task_1_action_exs[0].id,
            result
        )
        # Assert that task1 is SUCCESS and workflow is ERROR.
        wf_ex = db_api.get_workflow_execution(wf_ex.id)
        self.assertEqual(states.ERROR, wf_ex.state)
        self.assertIn('Can not evaluate YAQL expression', wf_ex.state_info)
        self.assertEqual(1, len(wf_ex.task_executions))
        task_1_ex = self._assert_single_item(
            wf_ex.task_executions,
            name='task1'
        )
        self.assertEqual(states.SUCCESS, task_1_ex.state)
        task_1_action_exs = db_api.get_action_executions(
            task_execution_id=task_1_ex.id
        )
        self.assertEqual(1, len(task_1_action_exs))
        self.assertEqual(states.SUCCESS, task_1_action_exs[0].state)
    def test_inconsistent_task_names(self):
        wf_text = """
        version: '2.0'
        wf:
          tasks:
            task1:
              action: std.noop
              on-success: task3
            task2:
              action: std.noop
        """
        exception = self.assertRaises(
            exc.InvalidModelException,
            wf_service.create_workflows,
            wf_text
        )
        self.assertIn("Task 'task3' not found", exception.message)
| 26.968142
| 77
| 0.576557
|
acffcddf23bcdb5b1492e4bceca46516e6337018
| 715
|
py
|
Python
|
LeetCode/0125_Valid_Palindrome.py
|
Achyut-sudo/PythonAlgorithms
|
21fb6522510fde7a0877b19a8cedd4665938a4df
|
[
"MIT"
] | 144
|
2020-09-13T22:54:57.000Z
|
2022-02-24T21:54:25.000Z
|
LeetCode/0125_Valid_Palindrome.py
|
Achyut-sudo/PythonAlgorithms
|
21fb6522510fde7a0877b19a8cedd4665938a4df
|
[
"MIT"
] | 587
|
2020-05-06T18:55:07.000Z
|
2021-09-20T13:14:53.000Z
|
LeetCode/0125_Valid_Palindrome.py
|
Achyut-sudo/PythonAlgorithms
|
21fb6522510fde7a0877b19a8cedd4665938a4df
|
[
"MIT"
] | 523
|
2020-09-09T12:07:13.000Z
|
2022-02-24T21:54:31.000Z
|
class Solution:
    def isPalindrome(self, s: str) -> bool:
        """Return True if s reads the same forwards and backwards once every
        character outside ASCII [a-zA-Z0-9] is dropped and uppercase letters
        are folded to lowercase."""
        # Keep only ASCII letters/digits, lower-casing uppercase letters
        # via the same +32 code-point shift as the explicit-ord approach.
        cleaned = "".join(
            c if ('a' <= c <= 'z' or '0' <= c <= '9') else chr(ord(c) + 32)
            for c in s
            if ('a' <= c <= 'z' or '0' <= c <= '9' or 'A' <= c <= 'Z')
        )
        # A palindrome equals its own reverse.
        return cleaned == cleaned[::-1]
| 39.722222
| 91
| 0.573427
|
acffce25096c6d50a184ee47375e322a7eea59e5
| 3,810
|
py
|
Python
|
examples/self_supervised/datasets.py
|
sandutsar/catalyst
|
55a3a557cb9276149cf2f70381878d87264e71c2
|
[
"Apache-2.0"
] | 4
|
2019-12-14T07:27:09.000Z
|
2021-03-23T14:34:37.000Z
|
examples/self_supervised/datasets.py
|
sandutsar/catalyst
|
55a3a557cb9276149cf2f70381878d87264e71c2
|
[
"Apache-2.0"
] | null | null | null |
examples/self_supervised/datasets.py
|
sandutsar/catalyst
|
55a3a557cb9276149cf2f70381878d87264e71c2
|
[
"Apache-2.0"
] | null | null | null |
from PIL import Image
import torchvision
from torchvision.datasets import CIFAR10, CIFAR100, STL10
def _build_train_transform(mean, std):
    """Build the shared self-supervised training augmentation pipeline.

    Color jitter (applied with p=0.8), random grayscale (p=0.1) and a
    horizontal flip (p=0.5), followed by tensor conversion and per-channel
    normalization with the given mean/std. Factored out because the exact
    same pipeline was previously copy-pasted for every dataset, with only
    the Normalize constants differing.
    """
    return torchvision.transforms.Compose(
        [
            torchvision.transforms.RandomApply(
                [
                    torchvision.transforms.ColorJitter(
                        brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1
                    )
                ],
                p=0.8,
            ),
            torchvision.transforms.RandomGrayscale(p=0.1),
            # torchvision.transforms.RandomResizedCrop(
            #     64, scale=(0.2, 1.0), ratio=(0.75, (4 / 3)), interpolation=Image.BICUBIC,
            # ),
            torchvision.transforms.RandomHorizontalFlip(p=0.5),
            torchvision.transforms.ToTensor(),
            torchvision.transforms.Normalize(mean, std),
        ]
    )


# Deterministic validation pipeline shared by both CIFAR variants.
# NOTE(review): the CIFAR train pipelines normalize with different constants
# ((0.480, 0.448, 0.398)/(0.277, 0.269, 0.282)) than validation — this
# mismatch is preserved from the original; confirm it is intentional.
_CIFAR_VALID_TRANSFORM = torchvision.transforms.Compose(
    [
        torchvision.transforms.ToTensor(),
        torchvision.transforms.Normalize(
            [0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010]
        ),
    ]
)

# STL10 validation: upscale, center-crop to 64x64, then normalize.
_STL10_VALID_TRANSFORM = torchvision.transforms.Compose(
    [
        torchvision.transforms.Resize(70, interpolation=Image.BICUBIC),
        torchvision.transforms.CenterCrop(64),
        torchvision.transforms.ToTensor(),
        torchvision.transforms.Normalize((0.43, 0.42, 0.39), (0.27, 0.26, 0.27)),
    ]
)

# Per-dataset configuration: dataset class plus train/valid transforms.
datasets = {
    "CIFAR-10": {
        "dataset": CIFAR10,
        "train_transform": _build_train_transform(
            (0.480, 0.448, 0.398), (0.277, 0.269, 0.282)
        ),
        "valid_transform": _CIFAR_VALID_TRANSFORM,
    },
    "CIFAR-100": {
        "dataset": CIFAR100,
        "train_transform": _build_train_transform(
            (0.480, 0.448, 0.398), (0.277, 0.269, 0.282)
        ),
        "valid_transform": _CIFAR_VALID_TRANSFORM,
    },
    "STL10": {
        "dataset": STL10,
        "train_transform": _build_train_transform(
            (0.43, 0.42, 0.39), (0.27, 0.26, 0.27)
        ),
        "valid_transform": _STL10_VALID_TRANSFORM,
    },
}
| 38.877551
| 95
| 0.476378
|
acffcef306f1e12cd8700b886ae4f4f62eb8e831
| 27,576
|
py
|
Python
|
Games/hex/legacy/hex_search.py
|
morozig/muzero
|
9798cf9ac2ab46cf6da81827607b8fa8dafbeae4
|
[
"MIT"
] | null | null | null |
Games/hex/legacy/hex_search.py
|
morozig/muzero
|
9798cf9ac2ab46cf6da81827607b8fa8dafbeae4
|
[
"MIT"
] | null | null | null |
Games/hex/legacy/hex_search.py
|
morozig/muzero
|
9798cf9ac2ab46cf6da81827607b8fa8dafbeae4
|
[
"MIT"
] | null | null | null |
"""
This Python file defines the Classes for search engines that search for
moves for playing the game of hex. Currently Minimax by the
Negamax alpha-beta method is implemented along with Monte-Carlo Tree Search.
:version: FINAL
:date: 07-02-2020
:author: Joery de Vries
:edited by: Joery de Vries, Oliver Konig, Siyuan Dong
:bibliography:
1. Stuart Russell and Peter Norvig. 2009. Artificial Intelligence: A Modern
Approach (3rd. ed.). Prentice Hall Press, USA. http://aima.cs.berkeley.edu/
"""
from .hex_utils import available_moves, make_move, unmake_move, no_moves, emplace
import time
import numpy as np
class Searcher(object):
    """Base class providing a common shape for game-state search engines.

    Because a search may be time-bounded and interrupted mid-run, concrete
    searchers must continuously record their current best move in
    ``self.move``; ``get_move`` then always has a playable move to hand out.
    """
    def __init__(self):
        # No search has run yet, so there is no move to report.
        self.move = None
    def get_move(self):
        """Return the best move found so far (``None`` before any search)."""
        return self.move
class MinimaxSearcher(Searcher):
    """
    Derived class of Searcher which implements the Minimax algorithm with the
    Negamax alpha-beta method. This class implements:
     - Transposition table functionality
     - Iterative deepening
     - Fixed-depth search
     - Move-ordering
     - Budgeting of the heuristic evaluation function calls.
    """
    # Sentinel coordinate denoting "no move" (failed or terminal search).
    TERMINAL = (-1, -1)
    # Indices into the (heuristic value, move, evaluation depth) triples.
    _VALUE_IDX = 0
    _MOVE_IDX = 1
    _EVAL_DEPTH_IDX = 2
    def __init__(self, player, heuristic):
        """
        Initialize the search engine with a player perspective (ally) along with
        a heuristic scoring function for board states.
        :param player: int HexBoard player color.
        :param heuristic: HexHeuristic Derived class that implements a scoring function.
        :see: HexHeuristic in .hex_heuristics
        """
        super().__init__()
        self.evaluations = self.budget = 0
        self.itd_depth = 1  # Current iterative-deepening depth.
        self.player = player
        self.heuristic = heuristic
        self.move = self.TERMINAL
        self.hashseed = None  # Zobrist seeds; None implies hashing disabled.
        self.transpositions = [{}]  # One transposition table per ID depth.
    @property
    def use_hashing(self):
        """
        If the transposition table hashseed is not initialized it is implied
        that no hashing of states is used.
        :return: boolean True if it is implied that transpositions are used.
        """
        return self.hashseed is not None
    @property
    def _undefined_node(self):
        """
        Undefined node property. Search defaults to this node when resources run out.
        :return: (int, tuple, int) Heuristic value, Coordinate, and Search depth.
        """
        return -np.inf, self.TERMINAL, np.inf
    @property
    def hashtable(self):
        """
        Retrieve the transposition table at the CURRENT search iteration.
        With iterative deepening the algorithm uses transpositions of PREVIOUS
        iterations for move ordering, and not for retrieving values; this would
        cause the algorithm to get stuck at the first layer.
        :return: dict Transposition table at the current search iteration.
        """
        return self.transpositions[self.itd_depth - 1]
    def evaluate(self, hex_board, player):
        """
        Call the heuristic scoring function to evaluate the beneficiality of the
        current board-state for the given player.
        (Also increments the evaluation function call counter.)
        :param hex_board: HexBoard Class for game-logic.
        :param player: int HexBoard player color.
        :return: int Heuristic score of the provided board state for player.
        """
        self.evaluations += 1
        return self.heuristic.evaluate(hex_board, player)
    def initalize_transposition(self, size):
        """
        (Re-)Initialize the transposition table and generate a hashseed matching
        the board's size.
        The hashseed contains random integers in a matrix of dimensionality 2 x size^2.
        Every position (flattened into a 1-d array) for every player then has its own seed.
        NOTE: the misspelled method name is kept for backward compatibility with callers.
        :param size: int Size of the game-board.
        """
        self.transpositions = [dict()]  # Empty transposition table to remove earlier moves
        self.hashseed = np.random.randint(0, 2 ** 31, size * size * 2).reshape((2, size * size))
    def zobrist(self, hex_board):
        """
        Compute a hash-value for a given board state. The hashing is performed by consequently
        XOR-ing the hashseeds of each board-position -- which is deterministic, and usually unique
        for most board-states. I.e., zobrist: U -> Z maps a universe to distinct integers.
        :param hex_board: HexBoard Class for game-logic.
        :return: int Hashed value of the HexBoard.
        :references: https://en.wikipedia.org/wiki/Zobrist_hashing
        """
        board_hash = 0
        for pos, value in np.ndenumerate(hex_board.board):  # format= (row, col): value
            if not hex_board.is_empty(pos):
                # Select the indices of the hashseed corresponding to the current coordinate.
                type_index = 0 if hex_board.is_color(pos, self.player) else 1
                linear_hash_index = pos[0] * hex_board.size + pos[1]
                # Subsequently keep XOR-ing the hash-value with the selected hashseed.
                board_hash = np.bitwise_xor(board_hash, self.hashseed[type_index, linear_hash_index])
        return board_hash
    def order_moves(self, hex_board, moves, player):
        """
        Orders the available moves at the current state of the hex_board for the player
        based on available transposition table values (only works with iterative deepening).
        Ordering of moves will help the alpha-beta methodology omit more nodes during search.
        If no iterative deepening is used the moves are simply uniformly scrambled so that
        all moves are uniformly likely to be the i'th expanded node in the minimax search.
        As the available moves are initially ordered by their generating function, this
        shuffling prevents biasedness in the search.
        :param hex_board: HexBoard Class for game-logic.
        :param moves: list All available moves at the current state of the HexBoard.
        :param player: int HexBoard player color.
        :return: list An ordered list of moves.
        :see: negamax_alpha_beta
        """
        # The moves are initially shuffled to prevent bias.
        np.random.shuffle(moves)
        # Move ordering can only be done if there are transposition tables from previous iterations.
        # NOTE(review): this inspects the CURRENT iteration's table size; the comment above
        # suggests `len(self.transpositions) > 1` may have been intended -- confirm before changing.
        if not self.use_hashing or not len(self.hashtable) > 1:
            return moves  # No data to order moves on.
        move_scores = list()
        for move in moves:
            make_move(hex_board, move, player)  # --> To child Node
            hashed_value = self.zobrist(hex_board)  # Calculate hash of the child Node
            unmake_move(hex_board, move)  # --> Back to Parent Node
            # Get the most recent hashed score. (only applicable for iterative deepening)
            heuristic_values = [layer[hashed_value] for layer in self.transpositions if hashed_value in layer]
            move_scores.append(heuristic_values[-1] if len(heuristic_values) > 0 else 0)
        # Order DESCENDING by heuristic value such that the best moves are sorted at the front,
        # unexplored moves shuffled in the middle and detrimental moves at the very end.
        ordering = sorted(zip(move_scores, moves), key=lambda pair: pair[0], reverse=True)
        # Flattens the sorted list of (move_value, moves) list to
        # [(tuple of move_values), (tuple of moves)] and returns the now ordered (tuple of moves)
        return list(zip(*ordering))[1]
    def negamax_alpha_beta(self, hex_board, depth, alpha, beta, player):
        """
        Compute the heuristically evaluated Minimax optimal value-move-depth triple and
        return this triple to the previous (root) depth. Minimax is implemented
        using negamax along with alpha-beta pruning.
        Note that if at a root state multiple children are found with the highest score,
        the best node is then selected on value and the lowest evaluation depth. If both
        children have the same best heuristic value and evaluation depth, either one is
        equiprobable of being chosen.
        :param hex_board: HexBoard Class for game-logic.
        :param depth: int Current search depth.
        :param alpha: int Lower-bound on the heuristic value of a state.
        :param beta: int Upper-bound on the heuristic value of a state.
        :param player: int HexBoard player color.
        :return: (int, tuple, int) Minimax optimal heuristic value, Coordinate, and eval depth.
        :see: move_ordering
        :references: https://en.wikipedia.org/wiki/Minimax
        :references: https://en.wikipedia.org/wiki/Negamax
        :references: https://en.wikipedia.org/wiki/Alpha%E2%80%93beta_pruning
        """
        hash_value = 0
        heuristic_best = self._undefined_node
        if self.use_hashing:
            # Fetch transposition table key for the current position.
            hash_value = self.zobrist(hex_board)
        if hash_value in self.hashtable:
            # Node already encountered and should NOT be expanded.
            # Note that for iterative deepening only moves of the CURRENT
            # search depth can be found in the hashtable. Nodes encountered
            # at previous depths (= older age) are used for move ordering.
            return self.hashtable[hash_value], self.TERMINAL, depth
        elif self.evaluations > self.budget:
            # If no more function calls are available, return a default value.
            return self._undefined_node
        elif depth == 0 or hex_board.game_over or no_moves(hex_board):
            # Terminal Node or Depth Limit reached.
            heuristic_best = (self.evaluate(hex_board, player), self.TERMINAL, depth)
        else:
            # Node should be expanded.
            moves = available_moves(hex_board)
            moves = self.order_moves(hex_board, moves, player)
            for move in moves:
                # --> To child Node
                make_move(hex_board, move, player)
                # Expand into child Node
                recursive_value, _, eval_depth = self.negamax_alpha_beta(
                    hex_board, depth - 1, -beta, -alpha, hex_board.get_opposite_color(player))
                # Negate value due to players switching, format: (minmax-value, move)
                recursive_best = (-recursive_value, move, eval_depth)
                # --> Back to Parent Node
                unmake_move(hex_board, move)
                # Perform Minimax selection and alpha-beta pruning.
                # Select best child on value first, on evaluation depth second (shallow preferred)
                heuristic_best = max(heuristic_best, recursive_best,
                                     key=lambda x: (x[self._VALUE_IDX], x[self._EVAL_DEPTH_IDX]))
                alpha = max(alpha, heuristic_best[self._VALUE_IDX])
                if alpha >= beta:
                    break
        # Store calculated heuristic value into transposition table (if used)
        if self.use_hashing:
            self.hashtable[hash_value] = heuristic_best[self._VALUE_IDX]
        # Returns (value, (col, row), evaluation_depth)
        return heuristic_best
    def search(self, hex_board, depth, budget=None):
        """
        Perform fixed-depth minimax search. Puts an upper-bound on the budget
        if no budget is provided to prevent infinite computation time.
        :param hex_board: HexBoard Class for game-logic.
        :param depth: int The depth to search in the game-tree.
        :param budget: int Maximum amount of heuristic function evaluations
        :see: negamax_alpha_beta
        """
        import math  # Local import: np.math was removed in NumPy 2.0; math is stdlib.
        # Calculate the maximum amount of nodes that can be searched at the given
        # depth: n_empty! / (n_empty - depth)! ordered move sequences.
        n_empty = len(available_moves(hex_board))
        max_nodes = math.factorial(n_empty) // math.factorial(max(0, n_empty - depth))
        # Clip the budget to prevent infinite computation time.
        self.budget = min(10 ** 9, max_nodes) if not budget else min(budget, max_nodes)
        # Perform the search with fixed depth.
        _, self.move, _ = self.negamax_alpha_beta(
            hex_board, depth, alpha=-np.inf, beta=np.inf, player=self.player)
        if self.move == self.TERMINAL:  # If search failed, choose a random move.
            # FIX: np.random.choice requires a 1-d array and raises ValueError on a
            # list of coordinate tuples; sample by index instead.
            fallback_moves = available_moves(hex_board)
            self.move = fallback_moves[np.random.randint(len(fallback_moves))]
    def iterative_deepening(self, hex_board, budget, depth=1, steps=1):
        """
        Search for a move using Minimax on the given HexBoard with increasing depth.
        This allows the algorithm to retain the best move from a previous depth such that
        the search can be safely terminated if stuck in higher depths.
        :param hex_board: HexBoard Class for game-logic.
        :param budget: int Maximum amount of heuristic function evaluations.
        :param depth: int Starting depth for the alpha-beta search.
        :param steps: int The increase to make for search depth at each iteration.
        :see: negamax_alpha_beta
        :references: https://en.wikipedia.org/wiki/Iterative_deepening_depth-first_search
        """
        # Made a class variable to keep track of actual depth during search -- e.g., for
        # transposition table age.
        self.itd_depth = depth
        # For irregular depths (depth > 1) pad the list of TTs with empty tables so that
        # the class can fetch the current iteration's TT using self.itd_depth.
        self.transpositions.extend(dict() for _ in range(depth - 1))
        # In late game scenarios search-depth may exceed the max possible depth as
        # there are less positions to be searched. Bounding depth precludes redundant search.
        max_depth = len(available_moves(hex_board))
        best_node = self._undefined_node
        self.budget = budget
        while self.evaluations < budget and self.itd_depth <= max_depth:
            new_node = self.negamax_alpha_beta(
                hex_board, depth=self.itd_depth, alpha=-np.inf, beta=np.inf, player=self.player)
            # Don't keep the most recent move if the search failed or had to back-track prematurely.
            if new_node[self._MOVE_IDX] != self.TERMINAL and self.evaluations < budget:
                if new_node[self._VALUE_IDX] >= best_node[self._VALUE_IDX]:
                    best_node = new_node
                self.move = best_node[self._MOVE_IDX]
            # Debugging output (kept to preserve observable behavior):
            print("ID depth-evaluations:", self.itd_depth, self.evaluations,
                  "update:", new_node[self._MOVE_IDX] != self.TERMINAL and self.evaluations < budget)
            print("Found:", new_node[self._MOVE_IDX], new_node[self._VALUE_IDX])
            print("Keep:", self.move, best_node[self._VALUE_IDX])
            self.itd_depth += steps
            # Add a new empty transposition table for the next depths. The transposition tables
            # from previous layers are now solely used for move ordering.
            self.transpositions.extend(dict() for _ in range(steps))
class MCTSSearcher(Searcher):
    """
    Derived class of Searcher which implements the MCTS algorithm.
    This implementation additionally provides the option to memorize
    the subtree of earlier MCTS searches, so that a new search can
    exploit previously accumulated node statistics; which may improve
    empirical performance.
    """
    class Node(object):
        """
        Wrapper class for the Children of a boardstate that stores
        the statistics requisite for the MCTS algorithm.
        The class stores its children, its visit count, win count, etc.
        Additionally the class implements the methods required
        for Node selection, expansion, and updating.
        """
        def __init__(self, boardstate, player):
            """
            Initializes the wrapper class Node for the given boardstate.
            :param boardstate: HexBoard Class for game-logic.
            :param player: int HexBoard player color currently to move.
            """
            self.state = boardstate
            # Root nodes keep parentNode = None; move is the coordinate that led here.
            self.parentNode = self.move = None
            self.visits = self.wins = 0
            self.untriedMoves = available_moves(boardstate)
            self.childNodes = list()
            self.player = player
        def select(self, c_p, n_i):
            """
            Performs the child Selection step of the MCTS algorithm.
            Select a Node from the list self.childNodes based on its UCT-value.
            :param c_p: float Exploration parameter for the UCT formula.
            :param n_i: int The amount of visits to the parent node of node.
            :return: Node Childnode of this class with the highest UCT value.
            :see: MCTSSearcher._uct
            """
            return max(self.childNodes, key=lambda node: MCTSSearcher._uct(node, c_p, n_i))
        def expand(self, move, state):
            """
            Performs the Child Expansion step of the MCTS algorithm.
            Create a Node class from the given move and board-state. This function
            creates a Childnode for state (state is a child-state of 'self'). The child
            receives this class as its parent, so that the parent can be traced back later
            during backtracking. The child receives the move variable to memorize which move
            caused the state of the current class to reach the argument 'state'.
            Additionally the statistics of this class's children is updated.
            :param move: tuple Coordinates on the Hexboard = state that lead to this state.
            :param state: HexBoard Class for game-logic in the state after playing 'move'.
            :return: Node The newly created childnode of the current class.
            """
            child = MCTSSearcher.Node(state, state.get_opposite_color(self.player))
            child.move = move
            child.parentNode = self
            self.childNodes.append(child)
            self.untriedMoves.remove(move)
            return child
        def update(self, result):
            """
            Update the statistics of this class based on the result of a rollout/ random playout.
            The visit count is updated, and if the MCTSSearcher's player has won, the win count is
            also incremented.
            :param result: bool True if MCTSSearcher.player has won, otherwise False.
            """
            self.visits += 1
            if result:
                self.wins += 1
    def __init__(self, player, memorized_tree=None):
        """
        Initialize the MCTS searcher with a player perspective to score
        the explored nodes on. Additionally a subtree from a previous search
        can be provided to exploit the accumulated statistics from multiple searches.
        :param player: int HexBoard player color.
        :param memorized_tree: Node MCTS tree structure expanded from a previous search.
        """
        super().__init__()
        self.player = player
        self.memorized_tree = memorized_tree
        if self.memorized_tree:
            self.memorized_tree.parentNode = None  # Set the memorized tree to a Root node.
    @staticmethod
    def _uct(node, c_p, n_i):
        """
        Computes the upper confidence bound 1 applied to trees equation for a Node.
        :param node: Node Class for wrapping the children of the HexBoard state/ storing statistics for MCTS.
        :param c_p: float Exploration parameter for the UCT formula.
        :param n_i: int The amount of visits to the parent node of node.
        :return: UCT value
        :see: Node
        """
        return node.wins / node.visits + c_p * np.sqrt(np.log(n_i + 1) / node.visits)
    @staticmethod
    def _hex_playout_fill(hex_board, player):
        """
        Perform a rollout for the MCTS algorithm by completely filling the HexBoard
        uniformly at random with either players.
        All empty positions on the board are collected, scrambled, and uniformly
        divided to the player and its adversary. If the amount of moves modulo 2 = 1,
        the player will have an additional move. The board is filled with the
        divided moves and afterwards the winner is checked. This is possible
        due to Hex being a deterministic game. If one player wins, the other
        automatically loses. After determining the result, the board is emptied
        to its argument state 'hex_board', and the result is returned.
        This filling mimics random-play, however is a magnitude more efficient than
        normal random play. Seeing as the board doesn't need to check who won
        after every move but only once after the game is finished. We found a
        10-fold speed-up over normal random-play using this method.
        :param hex_board: HexBoard Class for game-logic
        :param player: int HexBoard player color. Current player to move.
        :return: bool True if player wins after the random game.
        """
        # Generate all possible moves and permute them to
        # efficiently select moves uniformly at random.
        move_set = available_moves(hex_board)
        np.random.shuffle(move_set)
        # Evenly split the moves among the two players.
        # Array split always gives the first element the residual
        # element if the length of the move list is uneven.
        player_moves, adversary_moves = np.array_split(move_set, 2)
        adversary = hex_board.get_opposite_color(player)
        # Fill the entire board with the permuted possible moves.
        # (any() is used purely to drive the side-effecting generator.)
        any(emplace(hex_board, tuple(move), player) for move in player_moves)
        any(emplace(hex_board, tuple(move), adversary) for move in adversary_moves)
        # Get the result of the random playout. If False --> adversary won.
        player_won = hex_board.check_win(player)
        # Reset the board to its original state.
        any(unmake_move(hex_board, tuple(move)) for move in player_moves)
        any(unmake_move(hex_board, tuple(move)) for move in adversary_moves)
        return player_won  # And return the result...
    @staticmethod
    def _backtrack(state, node, result):
        """
        Perform the backtracking step of the MCTS algorithm.
        "Climb" from the child 'node' back to the rootnode while updating the visited
        nodes with the requisite statistics provided by 'result'. The board-state
        is simultaneously returned to the root-state.
        :param state: HexBoard Class for game-logic in the state of the expansion of 'node'.
        :param node: Node Class for wrapping the children of the HexBoard state/ storing statistics for MCTS.
        :param result: bool True if 'self.player' won, otherwise False.
        :return: Node Returns the node returned to its rootstate with updated statistics.
        """
        while node.parentNode:  # Only the rootstate has parentNode = None.
            node.update(result)
            unmake_move(state, node.move)
            node = node.parentNode
        return node
    @staticmethod
    def find_next_subtree(node, new_state):
        """
        Given a subtree Node and a HexBoard state that is exactly ONE move ahead of node,
        find the subtree of node belonging to state. This function is used for MCTS tree
        memorization without requiring external memorization of previously made moves.
        :param node: Node Class for wrapping the children of the HexBoard state/ storing statistics for MCTS.
        :param new_state: HexBoard Class for game-logic that is one game-state ahead of the tree in Node.
        :return: Node The child subtree of 'node' matching 'new_state', or None if it cannot be found.
        """
        node_moves = [move for move in node.untriedMoves] + [child.move for child in node.childNodes]
        moves = available_moves(new_state)
        # Get the difference in moves (i.e., the previous move).
        difference = set(node_moves) - set(moves)
        if not difference:
            return None
        # Find the MCTS subtree belonging to the previous move
        move_difference = difference.pop()
        subtree = [child for child in node.childNodes if child.move == move_difference]
        # If the child exists, subtree has only one element.
        return subtree[0] if subtree else None
    def search(self, hex_board, exploration=1.0, budget=1_000, monitor=False):
        """
        Performs the main procedure of the MCTS algorithm.
        :param hex_board: HexBoard Class for game-logic.
        :param exploration: float The exploration parameter for the expansion procedure (C_p).
        :param budget: int The amount of MCTS simulations to perform.
        :param monitor: bool Whether to print out intermediary progress and statistics.
        """
        # Initialize the rootstate with the subtree of a previous search or generate a new tree.
        node = self.memorized_tree if self.memorized_tree else MCTSSearcher.Node(hex_board, self.player)
        runtime = 0
        if monitor:
            runtime = time.time()
        # Budget != N. Budget is the amount of simulations to perform during this call of
        # the function. While N is the total amount of simulations. If the rootnode is
        # a subtree of a previous search, the total amount of simulations is the budget
        # plus the amount of visits to the current rootnode.
        for sim_i in range(node.visits + 1, node.visits + budget + 1):
            if monitor:
                if sim_i % 1_000 == 0:
                    print("At iteration {} with ~{:.4f} seconds per iteration".format(
                        sim_i, (time.time() - runtime) / sim_i))
            to_move = self.player
            # Selection
            while not node.untriedMoves and node.childNodes:
                node = node.select(exploration, node.visits)
                make_move(hex_board, node.move, to_move)
                to_move = hex_board.get_opposite_color(to_move)
            # Expand
            if node.untriedMoves:
                move = node.untriedMoves[np.random.randint(len(node.untriedMoves))]
                make_move(hex_board, move, to_move)
                to_move = hex_board.get_opposite_color(to_move)
                node = node.expand(move, hex_board)
            # Playout
            # If to_move is the adversary then the result should be loss.
            # If to_move is the player then the result should be win.
            result = MCTSSearcher._hex_playout_fill(hex_board, to_move)
            # Backpropagate
            # Uses result = True if self.player won.
            node = MCTSSearcher._backtrack(hex_board, node, (to_move == self.player) == result)
        # Update the currently best move after every iteration and the tree.
        self.memorized_tree = max(node.childNodes, key=lambda n: n.visits)
        self.move = self.memorized_tree.move
        if monitor:
            runtime = time.time() - runtime
            print("{} iterations done in {:.4f} seconds with {:.4f} seconds per iteration".format(
                budget, runtime, runtime / budget))
            print("Best move: {} with a visit count of {} and a winrate of {:.3f}".format(
                self.move, self.memorized_tree.visits, self.memorized_tree.wins / self.memorized_tree.visits))
| 47.219178
| 110
| 0.654917
|
acffcf3ab7906fb4a01c178c6dfe620790c5220f
| 3,048
|
py
|
Python
|
lib/node_modules/@stdlib/math/special/abs/benchmark/python/numpy/benchmark.ndarray_nd_singleton_dims_float64.py
|
mhmdaminraeisi/stdlib
|
776f925d702132007f6f3ea02574999bb482a81d
|
[
"Apache-2.0"
] | 3,428
|
2016-07-14T13:48:46.000Z
|
2022-03-31T22:32:13.000Z
|
lib/node_modules/@stdlib/math/special/abs/benchmark/python/numpy/benchmark.ndarray_nd_singleton_dims_float64.py
|
mhmdaminraeisi/stdlib
|
776f925d702132007f6f3ea02574999bb482a81d
|
[
"Apache-2.0"
] | 435
|
2016-04-07T18:12:45.000Z
|
2022-03-22T15:43:17.000Z
|
lib/node_modules/@stdlib/math/special/abs/benchmark/python/numpy/benchmark.ndarray_nd_singleton_dims_float64.py
|
sthagen/stdlib
|
042b6215818db0e2a784e72c7e054167dcefcd2a
|
[
"BSL-1.0"
] | 188
|
2016-11-29T22:58:11.000Z
|
2022-03-17T06:46:43.000Z
|
#!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2021 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark numpy.fabs."""
from __future__ import print_function, division
import timeit
# Benchmark name suffix base used when printing TAP benchmark identifiers.
NAME = "abs"
# Number of times each individual benchmark is repeated.
REPEATS = 3
# Base iteration count; halved repeatedly as the array length grows.
ITERATIONS = 1000000
# Largest and smallest powers of ten for the array-length sweep (10**MIN..10**MAX).
MAX = 6
MIN = 1
COUNT = [0] # use a list to allow modification within nested scopes
def print_version():
    """Print the TAP protocol version header."""
    version_line = "TAP version 13"
    print(version_line)
def print_summary(total, passing):
    """Print the benchmark summary.

    # Arguments

    * `total`: total number of tests
    * `passing`: number of passing tests

    """
    # Assemble the TAP trailer lines, then emit them in order.
    summary_lines = [
        "#",
        "1.." + str(total),  # TAP plan
        "# total " + str(total),
        "# pass " + str(passing),
        "#",
        "# ok",
    ]
    for line in summary_lines:
        print(line)
def print_results(iterations, elapsed):
    """Print benchmark results.

    # Arguments

    * `iterations`: number of iterations
    * `elapsed`: elapsed time (in seconds)

    # Examples

    ``` python
    python> print_results(100000, 0.131009101868)
    ```
    """
    # Iterations per second.
    per_second = iterations / elapsed
    print(" ---")
    print(" iterations: " + str(iterations))
    print(" elapsed: " + str(elapsed))
    print(" rate: " + str(per_second))
    print(" ...")
def benchmark(name, setup, stmt, iterations):
    """Run the benchmark and print benchmark results.

    # Arguments

    * `name`: benchmark name (suffix)
    * `setup`: benchmark setup
    * `stmt`: statement to benchmark
    * `iterations`: number of iterations

    # Examples

    ``` python
    python> benchmark("::random", "from random import random;", "y = random()", 1000000)
    ```
    """
    timer = timeit.Timer(stmt, setup=setup)
    # Repeat the timing run a fixed number of times, emitting TAP output per run.
    for _ in range(REPEATS):
        print("# python::numpy::" + NAME + name)
        COUNT[0] += 1
        elapsed = timer.timeit(number=iterations)
        print_results(iterations, elapsed)
        print("ok " + str(COUNT[0]) + " benchmark finished")
def main():
    """Run the benchmarks."""
    print_version()
    iters = ITERATIONS
    # Sweep array lengths 10**MIN .. 10**MAX, shrinking the iteration count
    # as the arrays grow so total runtime stays bounded.
    for p in range(MIN, MAX + 1):
        n = 10**p
        name = ":contiguous=false,ndims=3,singleton_dims=2,dtype=float64,len=" + str(n)
        setup = "import numpy as np;"
        setup += "x = -1.0*np.ndarray(shape=(" + str(n) + ", 1, 1), strides=(2, 1, 1), buffer=np.zeros((" + str(n*2) + ")), dtype='float64');"
        stmt = "y = np.fabs(x)"
        benchmark(name, setup, stmt, iters)
        iters //= 4
    print_summary(COUNT[0], COUNT[0])
if __name__ == "__main__":
main()
| 24.190476
| 134
| 0.612533
|
acffcf53619e26669504e49deff51b0379e2f5fb
| 2,311
|
py
|
Python
|
supervisor/medusa/unix_user_handler.py
|
LexMachinaInc/supervisor
|
65221682eb2590b2de504586f5736d55327e81d6
|
[
"ZPL-2.1"
] | 365
|
2015-01-26T13:56:42.000Z
|
2022-03-28T06:36:31.000Z
|
supervisor/medusa/unix_user_handler.py
|
LexMachinaInc/supervisor
|
65221682eb2590b2de504586f5736d55327e81d6
|
[
"ZPL-2.1"
] | 3
|
2015-12-29T07:44:24.000Z
|
2021-03-18T06:13:07.000Z
|
supervisor/medusa/unix_user_handler.py
|
LexMachinaInc/supervisor
|
65221682eb2590b2de504586f5736d55327e81d6
|
[
"ZPL-2.1"
] | 135
|
2015-01-31T00:46:51.000Z
|
2022-03-03T06:31:09.000Z
|
# -*- Mode: Python -*-
#
# Author: Sam Rushing <rushing@nightmare.com>
# Copyright 1996, 1997 by Sam Rushing
# All Rights Reserved.
#
RCS_ID = '$Id: unix_user_handler.py,v 1.4 2002/11/25 00:09:23 akuchling Exp $'
# support for `~user/public_html'.
import re
import string
import default_handler
import filesys
import os
import pwd
# Re-export of default_handler's header-extraction helper.
get_header = default_handler.get_header
# Matches URIs of the form `/~user/rest`: group(1) is the user name,
# group(2) the (possibly empty) remainder of the path.
user_dir = re.compile ('/~([^/]+)(.*)')
class unix_user_handler (default_handler.default_handler):
    """Serve `~user/public_html` style URIs.

    Maps `/~user/...` requests onto a per-user export directory located
    via the system password database, delegating the actual file serving
    to default_handler with a per-user filesystem object.
    """

    def __init__ (self, public_html = 'public_html'):
        # Name of the exported directory relative to each user's home.
        self.public_html = public_html
        default_handler.default_handler.__init__ (self, None)

    # cache userdir-filesystem objects, keyed by user name (class-level,
    # shared by all instances)
    fs_cache = {}

    def match (self, request):
        # Handle only URIs that are entirely of the form /~user[/rest].
        m = user_dir.match (request.uri)
        return m and (m.end() == len (request.uri))

    def handle_request (self, request):
        """Resolve the user's export directory and delegate to default_handler.

        Responds 301 for a bare `/~user` (redirect to `/~user/`), and 404
        when the user or their public_html directory does not exist.
        """
        # get the user name
        m = user_dir.match (request.uri)
        user = m.group(1)
        rest = m.group(2)

        # special hack to catch those lazy URL typers
        if not rest:
            request['Location'] = '/~%s/' % user
            request.error (301)
            return

        # have we already built a userdir fs for this user?
        # [FIX] dict.has_key() is Python-2-only (removed in Python 3);
        # the `in` operator is equivalent and works on both versions.
        if user in self.fs_cache:
            fs = self.fs_cache[user]
        else:
            # no, well then, let's build one.
            # first, find out where the user directory is
            try:
                info = pwd.getpwnam (user)
            except KeyError:
                request.error (404)
                return
            # info[5] is pw_dir, the user's home directory.
            ud = info[5] + '/' + self.public_html
            if os.path.isdir (ud):
                fs = filesys.os_filesystem (ud)
                self.fs_cache[user] = fs
            else:
                request.error (404)
                return

        # fake out default_handler
        self.filesystem = fs

        # massage the request URI
        request.uri = '/' + rest

        return default_handler.default_handler.handle_request (self, request)

    def __repr__ (self):
        return '<Unix User Directory Handler at %08x [~user/%s, %d filesystems loaded]>' % (
                id(self),
                self.public_html,
                len(self.fs_cache)
                )
| 29.253165
| 92
| 0.555171
|
acffcf85618d88dcd14cebdbab945427d9360c49
| 1,274
|
py
|
Python
|
python-{{cookiecutter.service_name}}client/{{cookiecutter.service_name}}client/v1/shell.py
|
shu-mutou/cookiecutter-python-os_serviceclient
|
9439a988d639b03c745a077a8acf53a907818768
|
[
"Apache-1.1"
] | null | null | null |
python-{{cookiecutter.service_name}}client/{{cookiecutter.service_name}}client/v1/shell.py
|
shu-mutou/cookiecutter-python-os_serviceclient
|
9439a988d639b03c745a077a8acf53a907818768
|
[
"Apache-1.1"
] | null | null | null |
python-{{cookiecutter.service_name}}client/{{cookiecutter.service_name}}client/v1/shell.py
|
shu-mutou/cookiecutter-python-os_serviceclient
|
9439a988d639b03c745a077a8acf53a907818768
|
[
"Apache-1.1"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from {{cookiecutter.service_name}}client.common import utils
from {{cookiecutter.service_name}}client.v1 import sample_shell
# Modules whose command definitions are registered onto the CLI parser;
# extend this list to expose additional version-specific subcommands.
COMMAND_MODULES = [
    sample_shell,
]
def enhance_parser(parser, subparsers, cmd_mapper):
    """Enhance parser with API version specific options.

    Take a basic (nonversioned) parser and enhance it with
    commands and options specific for this version of API.

    :param parser: top level parser
    :param subparsers: top level parser's subparsers collection where
        subcommands will go
    :param cmd_mapper: mapping populated with the commands that get defined
    """
    for module in COMMAND_MODULES:
        utils.define_commands_from_module(subparsers, module, cmd_mapper)
| 39.8125
| 78
| 0.72135
|
acffd213125eacbbda5d0671ec33dcec06fc6982
| 14,583
|
py
|
Python
|
neutron/api/rpc/handlers/dhcp_rpc.py
|
ljzjohnson/neutron
|
d78664321482c15981a09642985a540195e754e3
|
[
"Apache-2.0"
] | 1
|
2018-07-04T07:59:31.000Z
|
2018-07-04T07:59:31.000Z
|
neutron/api/rpc/handlers/dhcp_rpc.py
|
ljzjohnson/neutron
|
d78664321482c15981a09642985a540195e754e3
|
[
"Apache-2.0"
] | null | null | null |
neutron/api/rpc/handlers/dhcp_rpc.py
|
ljzjohnson/neutron
|
d78664321482c15981a09642985a540195e754e3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import itertools
import operator
from neutron_lib.api.definitions import portbindings
from neutron_lib.api import extensions
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib import exceptions
from neutron_lib.plugins import directory
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import excutils
from neutron._i18n import _
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.db import api as db_api
from neutron.db import provisioning_blocks
from neutron.extensions import segment as segment_ext
from neutron.plugins.common import utils as p_utils
from neutron.quota import resource_registry
# Module-level logger, standard oslo.log ``getLogger(__name__)`` pattern.
LOG = logging.getLogger(__name__)
class DhcpRpcCallback(object):
    """DHCP agent RPC callback in plugin implementations.
    This class implements the server side of an rpc interface. The client
    side of this interface can be found in
    neutron.agent.dhcp.agent.DhcpPluginApi. For more information about
    changing rpc interfaces, see doc/source/contributor/internals/rpc_api.rst.
    """
    # API version history:
    #     1.0 - Initial version.
    #     1.1 - Added get_active_networks_info, create_dhcp_port,
    #           and update_dhcp_port methods.
    #     1.2 - Removed get_dhcp_port. When removing a method (Making a
    #           backwards incompatible change) you would normally bump the
    #           major version. However, since the method was unused in the
    #           RPC client for many releases, it should be OK to bump the
    #           minor release instead and claim RPC compatibility with the
    #           last few client versions.
    #     1.3 - Removed release_port_fixed_ip. It's not used by reference DHCP
    #           agent since Juno, so similar rationale for not bumping the
    #           major version as above applies here too.
    #     1.4 - Removed update_lease_expiration. It's not used by reference
    #           DHCP agent since Juno, so similar rationale for not bumping the
    #           major version as above applies here too.
    #     1.5 - Added dhcp_ready_on_ports.
    #     1.6 - Removed get_active_networks. It's not used by reference
    #           DHCP agent since Havana, so similar rationale for not bumping
    #           the major version as above applies here too.
    # RPC endpoint target: pins the namespace and the highest version
    # (1.6, see history above) this server implements.
    target = oslo_messaging.Target(
        namespace=n_const.RPC_NAMESPACE_DHCP_PLUGIN,
        version='1.6')
    def _get_active_networks(self, context, **kwargs):
        """Retrieve and return a list of the active networks.

        When the DHCP agent scheduler extension is available, networks are
        taken from the agents scheduled to ``host`` (optionally
        auto-scheduling first); otherwise all admin-state-up networks are
        returned.
        """
        host = kwargs.get('host')
        plugin = directory.get_plugin()
        if extensions.is_extension_supported(
                plugin, constants.DHCP_AGENT_SCHEDULER_EXT_ALIAS):
            if cfg.CONF.network_auto_schedule:
                plugin.auto_schedule_networks(context, host)
            nets = plugin.list_active_networks_on_active_dhcp_agent(
                context, host)
        else:
            filters = dict(admin_state_up=[True])
            nets = plugin.get_networks(context, filters=filters)
        return nets
    def _port_action(self, plugin, context, port, action):
        """Perform port operations taking care of concurrency issues."""
        try:
            if action == 'create_port':
                return p_utils.create_port(plugin, context, port)
            elif action == 'update_port':
                return plugin.update_port(context, port['id'], port)
            else:
                msg = _('Unrecognized action')
                raise exceptions.Invalid(message=msg)
        # These exceptions are expected under concurrent deletion of the
        # network/subnet; they are logged (not re-raised) unless the subnet
        # still exists, in which case the failure is genuine.
        except (db_exc.DBReferenceError,
                exceptions.NetworkNotFound,
                exceptions.SubnetNotFound,
                exceptions.InvalidInput,
                exceptions.IpAddressGenerationFailure) as e:
            with excutils.save_and_reraise_exception(reraise=False) as ctxt:
                if isinstance(e, exceptions.IpAddressGenerationFailure):
                    # Check if the subnet still exists and if it does not,
                    # this is the reason why the ip address generation failed.
                    # In any other unlikely event re-raise
                    try:
                        subnet_id = port['port']['fixed_ips'][0]['subnet_id']
                        plugin.get_subnet(context, subnet_id)
                    except exceptions.SubnetNotFound:
                        pass
                    else:
                        ctxt.reraise = True
                if ctxt.reraise:
                    net_id = port['port']['network_id']
                    LOG.warning("Action %(action)s for network %(net_id)s "
                                "could not complete successfully: "
                                "%(reason)s",
                                {"action": action,
                                 "net_id": net_id,
                                 'reason': e})
    def _group_by_network_id(self, res):
        """Group a list of dicts carrying 'network_id' into {net_id: [items]}."""
        grouped = {}
        keyfunc = operator.itemgetter('network_id')
        # groupby requires its input sorted by the same key.
        for net_id, values in itertools.groupby(sorted(res, key=keyfunc),
                                                keyfunc):
            grouped[net_id] = list(values)
        return grouped
    def get_active_networks_info(self, context, **kwargs):
        """Returns all the networks/subnets/ports in system."""
        host = kwargs.get('host')
        LOG.debug('get_active_networks_info from %s', host)
        networks = self._get_active_networks(context, **kwargs)
        plugin = directory.get_plugin()
        filters = {'network_id': [network['id'] for network in networks]}
        ports = plugin.get_ports(context, filters=filters)
        # default is to filter subnets based on 'enable_dhcp' flag
        if kwargs.get('enable_dhcp_filter', True):
            filters['enable_dhcp'] = [True]
        # NOTE(kevinbenton): we sort these because the agent builds tags
        # based on position in the list and has to restart the process if
        # the order changes.
        subnets = sorted(plugin.get_subnets(context, filters=filters),
                         key=operator.itemgetter('id'))
        # Handle the possibility that the dhcp agent(s) only has connectivity
        # inside a segment. If the segment service plugin is loaded and
        # there are active dhcp enabled subnets, then filter out the subnets
        # that are not on the host's segment.
        seg_plug = directory.get_plugin(
            segment_ext.SegmentPluginBase.get_plugin_type())
        seg_subnets = [subnet for subnet in subnets
                       if subnet.get('segment_id')]
        nonlocal_subnets = []
        if seg_plug and seg_subnets:
            host_segment_ids = seg_plug.get_segments_by_hosts(context, [host])
            # Gather the ids of all the subnets that are on a segment that
            # this host touches
            seg_subnet_ids = {subnet['id'] for subnet in seg_subnets
                              if subnet['segment_id'] in host_segment_ids}
            # Gather the ids of all the networks that are routed
            routed_net_ids = {seg_subnet['network_id']
                              for seg_subnet in seg_subnets}
            # Remove the subnets with segments that are not in the same
            # segments as the host. Do this only for the networks that are
            # routed because we want non-routed networks to work as
            # before.
            nonlocal_subnets = [subnet for subnet in seg_subnets
                                if subnet['id'] not in seg_subnet_ids]
            subnets = [subnet for subnet in subnets
                       if subnet['network_id'] not in routed_net_ids or
                       subnet['id'] in seg_subnet_ids]
        grouped_subnets = self._group_by_network_id(subnets)
        grouped_nonlocal_subnets = self._group_by_network_id(nonlocal_subnets)
        grouped_ports = self._group_by_network_id(ports)
        # Annotate each network dict in place with its grouped children.
        for network in networks:
            network['subnets'] = grouped_subnets.get(network['id'], [])
            network['non_local_subnets'] = (
                grouped_nonlocal_subnets.get(network['id'], []))
            network['ports'] = grouped_ports.get(network['id'], [])
        return networks
    def get_network_info(self, context, **kwargs):
        """Retrieve and return extended information about a network.

        Returns None (implicitly) if the network was deleted concurrently.
        """
        network_id = kwargs.get('network_id')
        host = kwargs.get('host')
        LOG.debug('Network %(network_id)s requested from '
                  '%(host)s', {'network_id': network_id,
                               'host': host})
        plugin = directory.get_plugin()
        try:
            network = plugin.get_network(context, network_id)
        except exceptions.NetworkNotFound:
            LOG.debug("Network %s could not be found, it might have "
                      "been deleted concurrently.", network_id)
            return
        filters = dict(network_id=[network_id])
        subnets = plugin.get_subnets(context, filters=filters)
        seg_plug = directory.get_plugin(
            segment_ext.SegmentPluginBase.get_plugin_type())
        nonlocal_subnets = []
        if seg_plug and subnets:
            seg_subnets = [subnet for subnet in subnets
                           if subnet.get('segment_id')]
            # If there are no subnets with segments, then this is not a routed
            # network and no filtering should take place.
            if seg_subnets:
                segment_ids = seg_plug.get_segments_by_hosts(context, [host])
                # There might be something to do if no segment_ids exist that
                # are mapped to this host. However, it seems that if this
                # host is not mapped to any segments and this is a routed
                # network, then this host shouldn't have even been scheduled
                # to.
                nonlocal_subnets = [subnet for subnet in seg_subnets
                                    if subnet['segment_id'] not in segment_ids]
                subnets = [subnet for subnet in seg_subnets
                           if subnet['segment_id'] in segment_ids]
        # NOTE(kevinbenton): we sort these because the agent builds tags
        # based on position in the list and has to restart the process if
        # the order changes.
        network['subnets'] = sorted(subnets, key=operator.itemgetter('id'))
        network['non_local_subnets'] = sorted(nonlocal_subnets,
                                              key=operator.itemgetter('id'))
        network['ports'] = plugin.get_ports(context, filters=filters)
        return network
    @db_api.retry_db_errors
    def release_dhcp_port(self, context, **kwargs):
        """Release the port currently being used by a DHCP agent."""
        host = kwargs.get('host')
        network_id = kwargs.get('network_id')
        device_id = kwargs.get('device_id')
        LOG.debug('DHCP port deletion for %(network_id)s request from '
                  '%(host)s',
                  {'network_id': network_id, 'host': host})
        plugin = directory.get_plugin()
        plugin.delete_ports_by_device_id(context, device_id, network_id)
    @oslo_messaging.expected_exceptions(exceptions.IpAddressGenerationFailure)
    @db_api.retry_db_errors
    @resource_registry.mark_resources_dirty
    def create_dhcp_port(self, context, **kwargs):
        """Create and return dhcp port information.
        If an expected failure occurs, a None port is returned.
        """
        host = kwargs.get('host')
        # Note(pbondar): Create deep copy of port to prevent operating
        # on changed dict if RetryRequest is raised
        port = copy.deepcopy(kwargs.get('port'))
        LOG.debug('Create dhcp port %(port)s '
                  'from %(host)s.',
                  {'port': port,
                   'host': host})
        port['port']['device_owner'] = constants.DEVICE_OWNER_DHCP
        port['port'][portbindings.HOST_ID] = host
        if 'mac_address' not in port['port']:
            port['port']['mac_address'] = constants.ATTR_NOT_SPECIFIED
        plugin = directory.get_plugin()
        return self._port_action(plugin, context, port, 'create_port')
    @oslo_messaging.expected_exceptions(exceptions.IpAddressGenerationFailure)
    @db_api.retry_db_errors
    def update_dhcp_port(self, context, **kwargs):
        """Update the dhcp port.

        Rejects the update (DhcpPortInUse) when the port is owned by another
        device; returns None if the port vanished concurrently.
        """
        host = kwargs.get('host')
        port = kwargs.get('port')
        port['id'] = kwargs.get('port_id')
        port['port'][portbindings.HOST_ID] = host
        plugin = directory.get_plugin()
        try:
            old_port = plugin.get_port(context, port['id'])
            # Only the reserved DHCP port or this host's own DHCP agent
            # device may be updated by the agent.
            if (old_port['device_id'] !=
                    constants.DEVICE_ID_RESERVED_DHCP_PORT and
                    old_port['device_id'] !=
                    utils.get_dhcp_agent_device_id(port['port']['network_id'],
                                                   host)):
                raise n_exc.DhcpPortInUse(port_id=port['id'])
            LOG.debug('Update dhcp port %(port)s '
                      'from %(host)s.',
                      {'port': port,
                       'host': host})
            return self._port_action(plugin, context, port, 'update_port')
        except exceptions.PortNotFound:
            LOG.debug('Host %(host)s tried to update port '
                      '%(port_id)s which no longer exists.',
                      {'host': host, 'port_id': port['id']})
            return None
    @db_api.retry_db_errors
    def dhcp_ready_on_ports(self, context, port_ids):
        """Mark the DHCP provisioning component complete for the given ports."""
        for port_id in port_ids:
            provisioning_blocks.provisioning_complete(
                context, port_id, resources.PORT,
                provisioning_blocks.DHCP_ENTITY)
| 47.041935
| 79
| 0.614894
|
acffd2188bb840eaa79109d2f3efec7f8d54de4c
| 806
|
py
|
Python
|
meiduo_mall/apps/meiduo_admin/utils.py
|
zhengcongreal/meiduo_project
|
5899a344859a0c1455e45ed4fb699d2ace08ba18
|
[
"Apache-2.0"
] | null | null | null |
meiduo_mall/apps/meiduo_admin/utils.py
|
zhengcongreal/meiduo_project
|
5899a344859a0c1455e45ed4fb699d2ace08ba18
|
[
"Apache-2.0"
] | 1
|
2020-11-29T13:33:12.000Z
|
2020-11-29T13:33:12.000Z
|
meiduo_mall/apps/meiduo_admin/utils.py
|
zhengcongreal/meiduo_project
|
5899a344859a0c1455e45ed4fb699d2ace08ba18
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
def jwt_response_payload_handler(token, user=None, request=None):
    """Build the custom payload returned after a successful JWT login.

    Returns the token together with the authenticated user's id and
    username so the admin frontend knows who is logged in.
    """
    payload = {
        'token': token,
        'id': user.id,
        'username': user.username,
    }
    return payload
# Custom pagination class
class PageNum(PageNumberPagination):
    """Paginator returning results under project-specific response keys."""
    page_size = 1  # backend-enforced default number of items per page
    page_size_query_param = 'pagesize'  # query param clients use to override page size
    max_page_size = 10  # upper bound for a client-requested page size
    # Override the paginated response so data is returned under the field
    # names the admin frontend expects.
    def get_paginated_response(self, data):
        return Response({
            'count': self.page.paginator.count,  # total number of items
            'lists': data,  # serialized page of results
            'page' : self.page.number,  # current page number
            'pages' : self.page.paginator.num_pages,  # total number of pages
            'pagesize':self.page_size  # backend-specified page capacity
        })
| 26
| 65
| 0.629032
|
acffd29e88fdfd30ee9b47106c6cbbb80b6555dd
| 2,530
|
py
|
Python
|
gwcs/tags/geometry_models.py
|
jdavies-st/gwcs
|
14a8c0b83603a4590a1fecb155467e6029fdc3e9
|
[
"BSD-3-Clause"
] | 31
|
2015-04-21T21:40:38.000Z
|
2022-03-30T20:03:08.000Z
|
gwcs/tags/geometry_models.py
|
jdavies-st/gwcs
|
14a8c0b83603a4590a1fecb155467e6029fdc3e9
|
[
"BSD-3-Clause"
] | 331
|
2015-01-23T17:57:55.000Z
|
2022-03-11T22:54:43.000Z
|
gwcs/tags/geometry_models.py
|
jdavies-st/gwcs
|
14a8c0b83603a4590a1fecb155467e6029fdc3e9
|
[
"BSD-3-Clause"
] | 38
|
2015-01-13T21:22:54.000Z
|
2021-09-01T01:52:19.000Z
|
"""
ASDF tags for geometry related models.
"""
from asdf import yamlutil
from ..gwcs_types import GWCSTransformType
from .. geometry import (ToDirectionCosines, FromDirectionCosines,
SphericalToCartesian, CartesianToSpherical)
__all__ = ['DirectionCosinesType', 'SphericalCartesianType']
class DirectionCosinesType(GWCSTransformType):
    """ASDF (de)serialization for the direction-cosine transforms."""
    name = "direction_cosines"
    types = [ToDirectionCosines, FromDirectionCosines]
    version = "1.1.0"

    @classmethod
    def from_tree_transform(cls, node, ctx):
        """Instantiate the model named by ``node['transform_type']``."""
        kind = node['transform_type']
        if kind == 'to_direction_cosines':
            return ToDirectionCosines()
        if kind == 'from_direction_cosines':
            return FromDirectionCosines()
        raise TypeError(f"Unknown model_type {kind}")

    @classmethod
    def to_tree_transform(cls, model, ctx):
        """Serialize *model* into a tagged tree node."""
        if isinstance(model, FromDirectionCosines):
            kind = 'from_direction_cosines'
        elif isinstance(model, ToDirectionCosines):
            kind = 'to_direction_cosines'
        else:
            raise TypeError(f"Model of type {model.__class__} is not supported.")
        return yamlutil.custom_tree_to_tagged_tree({'transform_type': kind}, ctx)
class SphericalCartesianType(GWCSTransformType):
    """ASDF (de)serialization for spherical <-> Cartesian transforms."""
    name = "spherical_cartesian"
    types = [SphericalToCartesian, CartesianToSpherical]
    version = "1.1.0"

    @classmethod
    def from_tree_transform(cls, node, ctx):
        """Rebuild the transform, restoring its longitude wrap setting."""
        kind = node['transform_type']
        wrap_lon_at = node['wrap_lon_at']
        factories = {
            'spherical_to_cartesian': SphericalToCartesian,
            'cartesian_to_spherical': CartesianToSpherical,
        }
        factory = factories.get(kind)
        if factory is None:
            raise TypeError(f"Unknown model_type {kind}")
        return factory(wrap_lon_at=wrap_lon_at)

    @classmethod
    def to_tree_transform(cls, model, ctx):
        """Serialize *model*, including its ``wrap_lon_at`` parameter."""
        if isinstance(model, SphericalToCartesian):
            kind = 'spherical_to_cartesian'
        elif isinstance(model, CartesianToSpherical):
            kind = 'cartesian_to_spherical'
        else:
            raise TypeError(f"Model of type {model.__class__} is not supported.")
        node = {'transform_type': kind, 'wrap_lon_at': model.wrap_lon_at}
        return yamlutil.custom_tree_to_tagged_tree(node, ctx)
| 36.142857
| 81
| 0.682609
|
acffd2c374da912fc80e45f92459f6bd75a25d9d
| 12,243
|
py
|
Python
|
testscripts/RDKB/component/RBUS/TS_RBUS_Property_GetName.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/RBUS/TS_RBUS_Property_GetName.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/RBUS/TS_RBUS_Property_GetName.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2020 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version='1.0' encoding='utf-8'?>
<xml>
<id></id>
<!-- Do not edit id. This will be auto filled while exporting. If you are adding a new script keep the id empty -->
<version>6</version>
<!-- Do not edit version. This will be auto incremented while updating. If you are adding a new script you can keep the vresion as 1 -->
<name>TS_RBUS_Property_GetName</name>
<!-- If you are adding a new script you can specify the script name. Script Name should be unique same as this file name with out .py extension -->
<primitive_test_id> </primitive_test_id>
<!-- Do not change primitive_test_id if you are editing an existing script. -->
<primitive_test_name>RBUS_PropertyCommands</primitive_test_name>
<!-- -->
<primitive_test_version>1</primitive_test_version>
<!-- -->
<status>FREE</status>
<!-- -->
<synopsis>To Validate the RBUS 2.0 API rbusProperty_GetName</synopsis>
<!-- -->
<groups_id />
<!-- -->
<execution_time>15</execution_time>
<!-- -->
<long_duration>false</long_duration>
<!-- -->
<advanced_script>false</advanced_script>
<!-- execution_time is the time out time for test execution -->
<remarks></remarks>
<!-- Reason for skipping the tests if marked to skip -->
<skip>false</skip>
<!-- -->
<box_types>
<box_type>Broadband</box_type>
<!-- -->
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
<!-- -->
</rdk_versions>
<test_cases>
<test_case_id>TC_RBUS_52</test_case_id>
<test_objective>To Validate the RBUS 2.0 API rbusProperty_GetName</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1. Ccsp Components should be in a running state of DUT
2. TDK Agent should be in running state or invoke it through StartTdk.sh script
3. The DUT should be in RBUS mode</pre_requisite>
<api_or_interface_used>rbusProperty_GetName</api_or_interface_used>
<input_parameters>N/A</input_parameters>
<automation_approch>1. Open the RBUS connection using rbus_open API
2. Initiate the RBUS Property using rbusProperty_Init API and return status should be success
3. Get the RBUS Property Name using rbusProperty_GetName API and return status should be success
4. Compare the get value with initial value, both value should be same
5. Release the RBUS Property using rbusProperty_Release API and return status should be success
6. Close the RBUS connection using rbus_close API</automation_approch>
<expected_output>Should be able to get the property name using rbusProperty_GetName API </expected_output>
<priority>High</priority>
<test_stub_interface>rbus</test_stub_interface>
<test_script>TS_RBUS_Property_GetName</test_script>
<skipped>No</skipped>
<release_version>M84</release_version>
<remarks>None</remarks>
</test_cases>
<script_tags />
</xml>
'''
# NOTE(review): Python 2 script (print statements) driven by the TDK test
# harness. `<ipaddress>` / `<port>` below are template placeholders that the
# harness substitutes before execution (see the comment at their assignment),
# so this file is not directly runnable as-is.
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("rbus","1");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_RBUS_Property_GetName');
#Get the result of connection with test component and DUT
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS]  :  %s" %loadmodulestatus ;
if "SUCCESS" in loadmodulestatus.upper() :
    obj.setLoadModuleStatus("SUCCESS");
    prop_name = "Device.rbusPropertyTest1"
    # Step 1: open the RBUS connection; all later steps nest inside this.
    print "\n**********  Start of RBUS Open ****************"
    tdkTestObj = obj.createTestStep('RBUS_Open');
    expectedresult = "SUCCESS";
    tdkTestObj.executeTestCase(expectedresult);
    actualresult = tdkTestObj.getResult();
    details = tdkTestObj.getResultDetails();
    print "RBUS Open Detail is ",details
    if expectedresult in actualresult:
        #Set the result status of execution
        tdkTestObj.setResultStatus("SUCCESS");
        print "TEST STEP 1: Open the RBUS connection";
        print "EXPECTED RESULT 1: rbus_open Should be success";
        print "ACTUAL RESULT 1: rbus_open was success";
        #Get the result of execution
        print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        print "RBUS status is %s" %details;
        # Step 2: initialize the property under test with prop_name.
        print "\n**********  Start of RBUS Property Init (Prop1)****************"
        print "Initialize the Prop1 with Property Name ",prop_name
        tdkTestObj = obj.createTestStep('RBUS_PropertyCommands');
        tdkTestObj.addParameter("operation","rbusProperty_Init");
        tdkTestObj.addParameter("prop_count",1);
        tdkTestObj.addParameter("property_name",prop_name);
        expectedresult = "SUCCESS";
        tdkTestObj.executeTestCase(expectedresult);
        actualresult = tdkTestObj.getResult();
        details = tdkTestObj.getResultDetails();
        if expectedresult in actualresult:
            #Set the result status of execution
            tdkTestObj.setResultStatus("SUCCESS");
            print "TEST STEP 2: Validate rbusProperty_Init function";
            print "EXPECTED RESULT 2: rbusProperty_Init should be success";
            print "ACTUAL RESULT 2: rbusProperty_Init was Success";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
            # Step 3: read the name back via rbusProperty_GetName and
            # compare it with the name used at Init time.
            print "\n**********  Start of RBUS Property Get Name ****************"
            tdkTestObj = obj.createTestStep('RBUS_PropertyCommands');
            tdkTestObj.addParameter("operation","rbusProperty_GetName");
            tdkTestObj.addParameter("prop_count",1);
            tdkTestObj.addParameter("property_name","dummy");
            expectedresult = "SUCCESS";
            tdkTestObj.executeTestCase(expectedresult);
            actualresult = tdkTestObj.getResult();
            name = tdkTestObj.getResultDetails();
            print "rbusProperty_GetName Value for prop1 is ", name
            if expectedresult in actualresult:
                #Set the result status of execution
                tdkTestObj.setResultStatus("SUCCESS");
                print "TEST STEP 3: Validate rbusProperty_GetName function";
                print "EXPECTED RESULT 3: rbusProperty_GetName should be success";
                print "ACTUAL RESULT 3: rbusProperty_GetName was Success";
                #Get the result of execution
                print "[TEST EXECUTION RESULT] : %s" %actualresult ;
                if name == prop_name:
                    print "\n ***** Initial Property Name (%s) and value retrieved from Get Name (%s) is Matching ***** \n" %(prop_name,name)
                    tdkTestObj.setResultStatus("SUCCESS");
                else:
                    print "\n ***** Initial Property Name (%s) and value retrieved from Get Name (%s) is NOT Matching ***** \n" %(prop_name,name)
                    tdkTestObj.setResultStatus("FAILURE");
            else:
                #Set the result status of execution
                tdkTestObj.setResultStatus("FAILURE");
                print "TEST STEP 3: Validate rbusProperty_GetName function";
                print "EXPECTED RESULT 3: rbusProperty_GetName should be success";
                print "ACTUAL RESULT 3: rbusProperty_GetName was Failed";
                #Get the result of execution
                print "[TEST EXECUTION RESULT] : %s" %actualresult ;
            print "**********  End of RBUS Property Get Name ****************"
        else:
            #Set the result status of execution
            tdkTestObj.setResultStatus("FAILURE");
            print "TEST STEP 2: Validate rbusProperty_Init function";
            print "EXPECTED RESULT 2: rbusProperty_Init should be success";
            print "ACTUAL RESULT 2: rbusProperty_Init was Failed";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        print "**********  End of RBUS Property Init (Prop1) ****************"
        # Step 4: release the property unconditionally (runs even when
        # step 2 failed) so no resources leak on the DUT.
        print "\n**********  Start of RBUS Property Release(Prop1) ****************"
        #Release the property, even step 2 was failed
        tdkTestObj = obj.createTestStep('RBUS_PropertyCommands');
        tdkTestObj.addParameter("operation","rbusProperty_Release");
        tdkTestObj.addParameter("prop_count",1);
        tdkTestObj.addParameter("property_name","Device.rbusPropertyTest1");
        expectedresult = "SUCCESS";
        tdkTestObj.executeTestCase(expectedresult);
        actualresult = tdkTestObj.getResult();
        details = tdkTestObj.getResultDetails();
        if expectedresult in actualresult:
            #Set the result status of execution
            tdkTestObj.setResultStatus("SUCCESS");
            print "TEST STEP 4: Initiate rbusProperty_Release function";
            print "EXPECTED RESULT 4: rbusProperty_Release should be success";
            print "ACTUAL RESULT 4: rbusProperty_Release was Success";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        else:
            #Set the result status of execution
            tdkTestObj.setResultStatus("FAILURE");
            print "TEST STEP 4: Initiate rbusProperty_Release function";
            print "EXPECTED RESULT 4: rbusProperty_Release should be success";
            print "ACTUAL RESULT 4: rbusProperty_Release was Failed";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        print "**********  End of RBUS Property Release (Prop1) ****************\n"
        # Step 5: close the RBUS connection opened in step 1.
        print "**********  Start of RBUS Close ****************"
        tdkTestObj = obj.createTestStep('RBUS_Close');
        expectedresult = "SUCCESS";
        tdkTestObj.executeTestCase(expectedresult);
        actualresult = tdkTestObj.getResult();
        details = tdkTestObj.getResultDetails();
        print "RBUS close Detail is ",details
        if expectedresult in actualresult:
            #Set the result status of execution
            tdkTestObj.setResultStatus("SUCCESS");
            print "TEST STEP 5: Close the RBUS connection";
            print "EXPECTED RESULT 5: rbus_close should be success";
            print "ACTUAL RESULT 5: rbus_close was success";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        else:
            #Set the result status of execution
            tdkTestObj.setResultStatus("FAILURE");
            print "TEST STEP 5: Close the RBUS connection";
            print "EXPECTED RESULT 5: rbus_close should be success";
            print "ACTUAL RESULT 5: rbus_close was Failed";
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : %s" %actualresult ;
        print "**********  End of RBUS Close ****************"
    else:
        #Set the result status of execution
        tdkTestObj.setResultStatus("FAILURE");
        print "TEST STEP 1: Open the RBUS connection";
        print "EXPECTED RESULT 1: rbus_open Should be success";
        print "ACTUAL RESULT 1: rbus_open was Failed";
        #Get the result of execution
        print "[TEST EXECUTION RESULT] : %s" %actualresult ;
    print "**********  End of RBUS Open ****************\n"
    obj.unloadModule("rbus");
else:
    print "Failed to load the module";
    obj.setLoadModuleStatus("FAILURE");
    print "Module loading failed";
| 47.824219
| 149
| 0.647064
|
acffd2e9b2e35eb3750e11ca27d9edee652b970b
| 1,636
|
py
|
Python
|
src/htsql/tweak/meta/command.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 15
|
2020-02-11T11:24:34.000Z
|
2022-03-03T20:46:34.000Z
|
src/htsql/tweak/meta/command.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 1
|
2020-02-13T14:08:34.000Z
|
2020-02-13T14:16:04.000Z
|
src/htsql/tweak/meta/command.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 2
|
2020-02-13T14:10:06.000Z
|
2021-02-25T04:36:05.000Z
|
#
# Copyright (c) 2006-2013, Prometheus Research, LLC
#
from ...core.context import context
from ...core.cache import once
from ...core.adapter import adapt, call
from ...core.error import Error
from ...core.cmd.command import Command
from ...core.cmd.act import Act, Action, RenderAction, act
from ...core.cmd.summon import Summon, recognize
import weakref
@once
def get_slave_app():
    """Create (at most once) the slave HTSQL app serving the meta schema.

    Only a weak reference to the master application is handed to the slave,
    so the cached slave never keeps the master alive on its own.
    """
    from htsql import HTSQL
    master_ref = weakref.ref(context.app)
    return HTSQL(None, {'tweak.meta.slave': {'master': master_ref}})
class MetaCmd(Command):
    """Command wrapper carrying an inner command to run on the meta database."""
    def __init__(self, command):
        # The command recognized by the slave (meta) application.
        self.command = command
class SummonMeta(Summon):
    """Summon the ``/meta()`` command; its argument is parsed by the slave app."""
    call('meta')

    def __call__(self):
        # `/meta()` accepts exactly one argument: the query to run
        # against the meta database.
        if len(self.arguments) != 1:
            raise Error("Expected 1 argument")
        syntax, = self.arguments
        with get_slave_app():
            inner = recognize(syntax)
        return MetaCmd(inner)
class ActMeta(Act):
    """Execute a MetaCmd by delegating the inner command to the slave app."""
    adapt(MetaCmd, Action)
    @classmethod
    def __matches__(component, dispatch_key):
        # Opt out of RenderAction dispatch so rendering is handled elsewhere;
        # all other actions fall through to the regular adapter matching.
        command_type, action_type = dispatch_key
        if isinstance(action_type, RenderAction):
            return False
        return super(ActMeta, component).__matches__(dispatch_key)
    def __call__(self):
        # Capture the master's permissions before switching applications,
        # then intersect them with the slave's own permissions so the slave
        # can never grant more access than the master allows.
        can_read = context.env.can_read
        can_write = context.env.can_write
        slave_app = get_slave_app()
        with slave_app:
            with context.env(can_read=context.env.can_read and can_read,
                             can_write=context.env.can_write and can_write):
                return act(self.command.command, self.action)
| 25.169231
| 76
| 0.656479
|
acffd31eda474e4fd0210dfc7d4c69c79ec3d610
| 4,477
|
py
|
Python
|
pysswords/__main__.py
|
chtiprog/pysswords
|
1afb8de12662094b79669c541aee2726cda0e9c8
|
[
"MIT"
] | null | null | null |
pysswords/__main__.py
|
chtiprog/pysswords
|
1afb8de12662094b79669c541aee2726cda0e9c8
|
[
"MIT"
] | null | null | null |
pysswords/__main__.py
|
chtiprog/pysswords
|
1afb8de12662094b79669c541aee2726cda0e9c8
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import argparse
import logging
import os
from pkg_resources import get_distribution
from .cli import CLI
from .db import (
CredentialExistsError,
CredentialNotFoundError,
DatabaseExistsError
)
from .utils import which
__project__ = 'pysswords'
# Resolve the installed distribution's version so --version stays in sync
# with the packaging metadata.
__version__ = get_distribution('pysswords').version
def default_db():
    """Return the default database path: ``.pysswords`` in the user's home."""
    home = os.path.expanduser("~")
    return os.path.join(home, ".pysswords")
def parse_args(cli_args=None):
    """Build the Pysswords argument parser and parse *cli_args*.

    :param cli_args: list of argument strings, or None to use sys.argv.
    :returns: the parsed argparse.Namespace.

    Note: registration order below is deliberate — it determines the
    ordering of options in the generated --help output.
    """
    parser = argparse.ArgumentParser(prog="Pysswords")
    group_db = parser.add_argument_group("Database options")
    group_db.add_argument("-I", "--init", action="store_true",
                          help="create a new Pysswords database")
    # default_db() is evaluated once, at parse time, not at import time.
    group_db.add_argument("-D", "--database", default=default_db(),
                          help="specify path to database")
    group_db.add_argument("--export", dest="exportdb", metavar="DATABASE_FILE",
                          help="export encrypted Pysswords database")
    # dest is required here: "import" is a Python keyword, so args.import
    # would not be accessible.
    group_db.add_argument("--import", dest="importdb", metavar="DATABASE_FILE",
                          help="import encrypted Pysswords database")
    group_db.add_argument("--clean", action="store_true",
                          help="delete database, cleaning all files")
    group_cred = parser.add_argument_group("Credential options")
    group_cred.add_argument("-a", "--add", action="store_true",
                            help="add new credential")
    group_cred.add_argument("-g", "--get", metavar="FULLNAME",
                            help="get credentials by name")
    group_cred.add_argument("-u", "--update", metavar="FULLNAME",
                            help="update credentials")
    group_cred.add_argument("-r", "--remove", metavar="FULLNAME",
                            help="remove credentials")
    group_cred.add_argument("-c", "--clipboard", metavar="FULLNAME",
                            help="copy credential password to clipboard")
    group_cred.add_argument("-s", "--search",
                            help="search credentials. [regex supported]")
    group_cred.add_argument("-P", "--show-password", action="store_true",
                            help="show credentials passwords as plain text")
    group_cred.add_argument("-R", "--random", action="store_true",
                            help="randomly generate a password for credential")
    group_runtime = parser.add_argument_group("Default options")
    group_runtime.add_argument("--version", action="version",
                               version="Pysswords {}".format(__version__),
                               help="Print version")
    group_runtime.add_argument("--verbose", "-v", action="store_true",
                               help="Print verbose output")
    args = parser.parse_args(cli_args)
    return args
def main(cli_args=None):
    """Entry point: dispatch to the CLI action selected by the arguments.

    :param cli_args: list of argument strings, or None to use sys.argv.

    Note: the elif chain below defines precedence when several flags are
    passed at once (export > import > clean > add > ...); its order must
    be preserved.
    """
    # GPG is a hard runtime dependency; bail out early if it is missing.
    if not which("gpg"):
        logging.error("GPG not installed: https://gnupg.org/download")
        exit(1)
    args = parse_args(cli_args)
    if args.verbose:
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
    try:
        interface = CLI(
            database_path=args.database,
            show_password=args.show_password,
            init=args.init,
            randompass=args.random
        )
        if args.exportdb:
            interface.exportdb(args.exportdb)
        elif args.importdb:
            interface.importdb(args.importdb)
        elif args.clean:
            interface.clean_database()
        elif args.add:
            interface.add_credential()
        elif args.clipboard:
            interface.copy_to_clipboard(fullname=args.clipboard)
        elif args.get:
            interface.get_credentials(fullname=args.get)
        elif args.search:
            interface.search_credentials(query=args.search)
        elif args.update:
            interface.update_credentials(fullname=args.update)
        elif args.remove:
            interface.remove_credentials(fullname=args.remove)
        else:
            # No action flag: show the whole database.
            interface.show()
    # Expected domain errors are reported, not re-raised, so the CLI
    # exits cleanly with a message instead of a traceback.
    except CredentialExistsError as e:
        logging.error("Credential '{}' exists".format(e))
    except CredentialNotFoundError as e:
        logging.error("Credential '{}' not found".format(e))
    except DatabaseExistsError as e:
        logging.error(str(e))
    except ValueError as e:
        logging.error(str(e))
    except KeyboardInterrupt:
        logging.info("Keyboard interrupt")
if __name__ == "__main__":
main()
| 37
| 79
| 0.616931
|
acffd333f163c3f294934544851b45fa5529d5a2
| 475
|
py
|
Python
|
src/octopus/core/enums/rendernode.py
|
smaragden/OpenRenderManagement
|
cf3ab356f96969d7952b60417b48e941955e435c
|
[
"BSD-3-Clause"
] | 35
|
2015-02-23T23:13:13.000Z
|
2021-01-03T05:56:39.000Z
|
src/octopus/core/enums/rendernode.py
|
smaragden/OpenRenderManagement
|
cf3ab356f96969d7952b60417b48e941955e435c
|
[
"BSD-3-Clause"
] | 15
|
2015-01-12T12:58:29.000Z
|
2016-03-30T13:10:19.000Z
|
src/octopus/core/enums/rendernode.py
|
mikrosimage/OpenRenderManagement
|
6f9237a86cb8e4b206313f9c22424c8002fd5e4d
|
[
"BSD-3-Clause"
] | 20
|
2015-03-18T06:57:13.000Z
|
2020-07-01T15:09:36.000Z
|
# Render-node life-cycle states. The chained assignment binds both the
# individual constants (by unpacking) and RN_STATUS itself to range(7),
# so RN_UNKNOWN == 0 through RN_FINISHING == 6.
(RN_UNKNOWN,
 RN_BOOTING,
 RN_PAUSED,
 RN_IDLE,
 RN_ASSIGNED,
 RN_WORKING,
 RN_FINISHING) = RN_STATUS = range(7)

# Human-readable names and one-letter abbreviations, indexed by state value.
RN_STATUS_NAMES = ('Unknown', 'Booting', 'Paused', 'Idle', 'Assigned', 'Working', 'Finishing')
RN_STATUS_SHORT_NAMES = ("U", "B", "P", "I", "A", "W", "F")
| 23.75
| 94
| 0.528421
|
acffd3bf45456880cbc36bd51890753b3ff69b4b
| 508
|
py
|
Python
|
src/user_data/models.py
|
Ahmedelmonady/ASURT_Backend_Django_Task
|
83162ca774462176621fac638c41b537684527cc
|
[
"bzip2-1.0.6"
] | null | null | null |
src/user_data/models.py
|
Ahmedelmonady/ASURT_Backend_Django_Task
|
83162ca774462176621fac638c41b537684527cc
|
[
"bzip2-1.0.6"
] | null | null | null |
src/user_data/models.py
|
Ahmedelmonady/ASURT_Backend_Django_Task
|
83162ca774462176621fac638c41b537684527cc
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.db import models
# Create your models here.
class User(models.Model):
    """A person record with contact details and optional demographics."""

    # Stored value -> human-readable label for the Gender field.
    GENDER_CHOICES = (
        ('M', 'Male'),
        ('F', 'Female'),
    )

    # NOTE(review): field names are capitalized, against Django convention;
    # kept as-is because renaming would change the DB schema and all queries.
    Name = models.CharField(max_length=100, blank=False)
    Email = models.EmailField(blank=False)
    # Required on forms (blank=False) but nullable at the database level.
    Phone = models.CharField(max_length=15, blank=False, null=True)
    Age = models.IntegerField(blank=True, null=True )
    Gender = models.CharField(max_length=1,choices=GENDER_CHOICES, blank=True)

    def get_details_url(self):
        # URL of this user's detail page, keyed by primary key.
        return f"/user/details/{self.id}/"
| 31.75
| 75
| 0.706693
|
acffd433942cf5f8f7eb6417c6dffad6c62a5198
| 3,713
|
py
|
Python
|
caffe2onnx/src/OPs/Conv.py
|
kumardesappan/caffe2onnx
|
b7e73feed3bbc5ddbdf25b87af93a2bae596055d
|
[
"BSD-3-Clause"
] | null | null | null |
caffe2onnx/src/OPs/Conv.py
|
kumardesappan/caffe2onnx
|
b7e73feed3bbc5ddbdf25b87af93a2bae596055d
|
[
"BSD-3-Clause"
] | null | null | null |
caffe2onnx/src/OPs/Conv.py
|
kumardesappan/caffe2onnx
|
b7e73feed3bbc5ddbdf25b87af93a2bae596055d
|
[
"BSD-3-Clause"
] | 1
|
2022-01-20T05:18:29.000Z
|
2022-01-20T05:18:29.000Z
|
import numpy as np
import caffe2onnx.src.c2oObject as Node
##---------------------------------------------------Conv-------------------------------------------------------##
# Get hyperparameters
def getConvAttri(layer):
    """Extract ONNX Conv attributes from a Caffe convolution layer.

    Args:
        layer: a Caffe layer message with a ``convolution_param`` field.

    Returns:
        dict with the ONNX Conv attribute keys: ``dilations``, ``group``,
        ``kernel_shape``, ``pads``, ``strides``.
    """
    conv = layer.convolution_param

    # Dilation: Caffe stores a (possibly empty) repeated field; ONNX wants
    # one value per spatial axis.
    dilations = [1, 1]
    if conv.dilation != []:
        d = conv.dilation[0]
        dilations = [d, d]

    # Padding: explicit `pad` wins; otherwise fall back to pad_h / pad_w.
    pads = [0, 0, 0, 0]
    if conv.pad != []:
        pads = np.array([conv.pad] * 4).flatten().tolist()
    elif conv.pad_h != 0 or conv.pad_w != 0:
        pads = [conv.pad_h, conv.pad_w, conv.pad_h, conv.pad_w]

    # Strides default to 1 per axis.
    strides = [1, 1]
    if conv.stride != []:
        strides = np.array([conv.stride] * 2).flatten().tolist()

    # Kernel shape: check the empty case BEFORE expanding (the original
    # computed the expansion first and then discarded it when empty).
    if conv.kernel_size == []:
        kernel_shape = [conv.kernel_h, conv.kernel_w]
    else:
        kernel_shape = np.array([conv.kernel_size] * 2).flatten().tolist()

    # Renamed from `dict` to avoid shadowing the builtin.
    attrs = {
        "dilations": dilations,
        "group": conv.group,
        "kernel_shape": kernel_shape,
        "pads": pads,
        "strides": strides,
    }
    return attrs
# Calculate the output dimension
def getConvOutShape(input_shape, layer, dict):
    """Compute the Conv output shape ``[[N, C_out, H_out, W_out]]``.

    Args:
        input_shape: list whose first element is the NCHW input shape.
        layer: Caffe layer message (reads ``convolution_param``).
        dict: hyperparameter dict from getConvAttri (name kept for callers).
    """
    dilations = dict["dilations"]
    kernel_shape = dict["kernel_shape"]
    pads = dict["pads"]
    strides = dict["strides"]
    kernel_num = layer.convolution_param.num_output
    no_explicit_pad = layer.convolution_param.pad == []

    # N_out = (I - K + pad_begin + pad_end - (K - 1) * (dilation - 1)) / S + 1
    h = (input_shape[0][2] - kernel_shape[0] + pads[0] + pads[2]
         - (kernel_shape[0] - 1) * (dilations[0] - 1)) / strides[0] + 1
    if h > int(h) and no_explicit_pad:
        # Non-integer height with no explicit pad: round up (pad bottom/right).
        output_shape_h = int(h) + 1
        # NOTE(review): this reassignment feeds pads[1] + pads[3] into the
        # width computation below — preserved exactly as in the original.
        pads = [0, 0, 1, 1]
    else:
        output_shape_h = int(h)

    w = (input_shape[0][3] - kernel_shape[1] + pads[1] + pads[3]
         - (kernel_shape[1] - 1) * (dilations[1] - 1)) / strides[1] + 1
    if w > int(w) and no_explicit_pad:
        output_shape_w = int(w) + 1
        pads = [0, 0, 1, 1]
    else:
        output_shape_w = int(w)

    return [[input_shape[0][0], kernel_num, output_shape_h, output_shape_w]]
# Build node
def createConv(layer, nodename, inname, outname, input_shape):
    """Build a c2oNode for a Caffe Conv layer.

    Derives the ONNX attributes and output shape, then wraps everything in
    a node object.
    """
    attrs = getConvAttri(layer)
    out_shape = getConvOutShape(input_shape, layer, attrs)
    return Node.c2oNode(layer, nodename, "Conv", inname, outname,
                        input_shape, out_shape, attrs)
| 45.839506
| 176
| 0.652572
|
acffd516d219eb7d61bb8ea15a948f1580939bb9
| 1,490
|
py
|
Python
|
homeassistant/components/config/device_registry.py
|
phispi/home-assistant
|
dcf1e67d5d7536fcfdf336002a5107a4e9ac00c0
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/config/device_registry.py
|
phispi/home-assistant
|
dcf1e67d5d7536fcfdf336002a5107a4e9ac00c0
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/config/device_registry.py
|
phispi/home-assistant
|
dcf1e67d5d7536fcfdf336002a5107a4e9ac00c0
|
[
"Apache-2.0"
] | null | null | null |
"""HTTP views to interact with the device registry."""
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.helpers.device_registry import async_get_registry
from homeassistant.components import websocket_api
DEPENDENCIES = ['websocket_api']

# Websocket command type used to request the full device list.
WS_TYPE_LIST = 'config/device_registry/list'
# Command schema: only the mandatory ``type`` key is accepted.
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
    vol.Required('type'): WS_TYPE_LIST,
})
async def async_setup(hass):
    """Enable the Entity Registry views."""
    websocket = hass.components.websocket_api
    websocket.async_register_command(
        WS_TYPE_LIST,
        websocket_list_devices,
        SCHEMA_WS_LIST,
    )
    return True
@callback
def websocket_list_devices(hass, connection, msg):
    """Handle list devices command.

    Async friendly.
    """
    async def retrieve_entities():
        """Get devices from registry."""
        registry = await async_get_registry(hass)
        devices = [
            {
                'config_entries': list(entry.config_entries),
                'connections': list(entry.connections),
                'manufacturer': entry.manufacturer,
                'model': entry.model,
                'name': entry.name,
                'sw_version': entry.sw_version,
                'id': entry.id,
                'hub_device_id': entry.hub_device_id,
            }
            for entry in registry.devices.values()
        ]
        connection.send_message(
            websocket_api.result_message(msg['id'], devices))

    hass.async_create_task(retrieve_entities())
| 31.041667
| 68
| 0.667114
|
acffd532fa21753121e1df56e4dfd971424f3131
| 44,286
|
py
|
Python
|
cvat/apps/dataset_manager/formats/cvat.py
|
ACHultman/cvat
|
01eaf362aa7e03f5623e80cb12ad0b9a429ae588
|
[
"Intel",
"MIT"
] | 4,197
|
2018-06-30T05:47:50.000Z
|
2020-09-08T07:34:22.000Z
|
cvat/apps/dataset_manager/formats/cvat.py
|
ACHultman/cvat
|
01eaf362aa7e03f5623e80cb12ad0b9a429ae588
|
[
"Intel",
"MIT"
] | 1,653
|
2018-07-04T00:10:44.000Z
|
2020-09-08T09:01:58.000Z
|
cvat/apps/dataset_manager/formats/cvat.py
|
ACHultman/cvat
|
01eaf362aa7e03f5623e80cb12ad0b9a429ae588
|
[
"Intel",
"MIT"
] | 1,253
|
2018-06-30T05:47:58.000Z
|
2020-09-08T02:19:52.000Z
|
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
from io import BufferedWriter
import os
import os.path as osp
from glob import glob
from typing import Callable
import zipfile
from collections import OrderedDict
from tempfile import TemporaryDirectory
from defusedxml import ElementTree
from datumaro.components.dataset import Dataset, DatasetItem
from datumaro.components.extractor import Importer, Extractor, DEFAULT_SUBSET_NAME
from datumaro.components.annotation import (
AnnotationType, Bbox, Points, Polygon, PolyLine, Label, LabelCategories,
)
from datumaro.util.image import Image
from cvat.apps.dataset_manager.bindings import TaskData, match_dm_item, ProjectData, get_defaulted_subset, import_dm_annotations
from cvat.apps.dataset_manager.util import make_zip_archive
from cvat.apps.engine.frame_provider import FrameProvider
from .registry import exporter, importer, dm_env
class CvatPath:
    """Layout constants for a CVAT dataset directory."""

    # Sub-directory holding the images next to the annotation XML.
    IMAGES_DIR = 'images'

    # File extensions recognized as image media.
    MEDIA_EXTS = ('.jpg', '.jpeg', '.png')

    # Attribute names CVAT handles natively rather than as custom attributes.
    BUILTIN_ATTRS = {'occluded', 'outside', 'keyframe', 'track_id'}
class CvatExtractor(Extractor):
    """Datumaro extractor that reads a CVAT XML annotation file (plus an
    optional images directory next to it) into DatasetItems."""

    _SUPPORTED_SHAPES = ('box', 'polygon', 'polyline', 'points')

    def __init__(self, path, subsets=None):
        # `path` must point at the annotation XML file itself.
        assert osp.isfile(path), path
        rootpath = osp.dirname(path)
        images_dir = ''
        if osp.isdir(osp.join(rootpath, CvatPath.IMAGES_DIR)):
            images_dir = osp.join(rootpath, CvatPath.IMAGES_DIR)
        self._images_dir = images_dir
        self._path = path

        if not subsets:
            subsets = self._get_subsets_from_anno(path)
        self._subsets = subsets
        super().__init__(subsets=self._subsets)

        # Merge filesystem images with parsed annotations keyed by
        # (subset, frame) — see _load_items.
        image_items = self._parse_images(images_dir, self._subsets)
        items, categories = self._parse(path)
        self._items = list(self._load_items(items, image_items).values())
        self._categories = categories

    def categories(self):
        return self._categories

    def __iter__(self):
        yield from self._items

    def __len__(self):
        return len(self._items)

    def get(self, _id, subset=DEFAULT_SUBSET_NAME):
        assert subset in self._subsets, '{} not in {}'.format(subset, ', '.join(self._subsets))
        return super().get(_id, subset)

    @staticmethod
    def _get_subsets_from_anno(path):
        """Return subset names from the <subsets> element, streaming the XML
        and stopping at the end of <meta> so the file isn't fully parsed."""
        context = ElementTree.iterparse(path, events=("start", "end"))
        context = iter(context)
        for ev, el in context:
            if ev == 'start':
                if el.tag == 'subsets':
                    if el.text is not None:
                        subsets = el.text.split('\n')
                        return subsets
            if ev == 'end':
                if el.tag == 'meta':
                    # No <subsets> element seen inside <meta>.
                    return [DEFAULT_SUBSET_NAME]
                el.clear()
        return [DEFAULT_SUBSET_NAME]

    @staticmethod
    def _parse_images(image_dir, subsets):
        """Index image files on disk as DatasetItems keyed by (subset, stem)."""
        items = OrderedDict()

        def parse_image_dir(image_dir, subset):
            for file in sorted(glob(image_dir), key=osp.basename):
                name, ext = osp.splitext(osp.basename(file))
                if ext.lower() in CvatPath.MEDIA_EXTS:
                    items[(subset, name)] = DatasetItem(id=name, annotations=[],
                        image=Image(path=file), subset=subset or DEFAULT_SUBSET_NAME,
                    )

        # Flat layout (no per-subset folders) vs. one folder per subset.
        if subsets == [DEFAULT_SUBSET_NAME] and not osp.isdir(osp.join(image_dir, DEFAULT_SUBSET_NAME)):
            parse_image_dir(osp.join(image_dir, '*.*'), None)
        else:
            for subset in subsets:
                parse_image_dir(osp.join(image_dir, subset, '*.*'), subset)
        return items

    @classmethod
    def _parse(cls, path):
        """Stream-parse the annotation body into (items, categories).

        Maintains small state dicts (track / image / shape / tag) that are
        filled on 'start' events and flushed into `items` on 'end' events.
        """
        context = ElementTree.iterparse(path, events=("start", "end"))
        context = iter(context)

        categories, tasks_info, attribute_types = cls._parse_meta(context)
        items = OrderedDict()

        track = None
        shape = None
        tag = None
        attributes = None
        image = None
        subset = None
        for ev, el in context:
            if ev == 'start':
                if el.tag == 'track':
                    # Frame size comes from the owning task when task_id is
                    # present, otherwise from the single known task.
                    frame_size = tasks_info[int(el.attrib.get('task_id'))]['frame_size'] if el.attrib.get('task_id') else tuple(tasks_info.values())[0]['frame_size']
                    track = {
                        'id': el.attrib['id'],
                        'label': el.attrib.get('label'),
                        'group': int(el.attrib.get('group_id', 0)),
                        'height': frame_size[0],
                        'width': frame_size[1],
                    }
                    subset = el.attrib.get('subset')
                elif el.tag == 'image':
                    image = {
                        'name': el.attrib.get('name'),
                        'frame': el.attrib['id'],
                        'width': el.attrib.get('width'),
                        'height': el.attrib.get('height'),
                    }
                    subset = el.attrib.get('subset')
                elif el.tag in cls._SUPPORTED_SHAPES and (track or image):
                    attributes = {}
                    shape = {
                        'type': None,
                        'attributes': attributes,
                    }
                    if track:
                        shape.update(track)
                        shape['track_id'] = int(track['id'])
                    if image:
                        shape.update(image)
                elif el.tag == 'tag' and image:
                    attributes = {}
                    tag = {
                        'frame': image['frame'],
                        'attributes': attributes,
                        'group': int(el.attrib.get('group_id', 0)),
                        'label': el.attrib['label'],
                    }
                    subset = el.attrib.get('subset')
            elif ev == 'end':
                if el.tag == 'attribute' and attributes is not None:
                    # Coerce attribute text: booleans first, then numerics
                    # for non-text attribute types, else keep the string.
                    attr_value = el.text or ''
                    attr_type = attribute_types.get(el.attrib['name'])
                    if el.text in ['true', 'false']:
                        attr_value = attr_value == 'true'
                    elif attr_type is not None and attr_type != 'text':
                        try:
                            attr_value = float(attr_value)
                        except ValueError:
                            pass
                    attributes[el.attrib['name']] = attr_value
                elif el.tag in cls._SUPPORTED_SHAPES:
                    if track is not None:
                        shape['frame'] = el.attrib['frame']
                        shape['outside'] = (el.attrib.get('outside') == '1')
                        shape['keyframe'] = (el.attrib.get('keyframe') == '1')
                    if image is not None:
                        shape['label'] = el.attrib.get('label')
                        shape['group'] = int(el.attrib.get('group_id', 0))

                    shape['type'] = el.tag
                    shape['occluded'] = (el.attrib.get('occluded') == '1')
                    shape['z_order'] = int(el.attrib.get('z_order', 0))

                    if el.tag == 'box':
                        shape['points'] = list(map(float, [
                            el.attrib['xtl'], el.attrib['ytl'],
                            el.attrib['xbr'], el.attrib['ybr'],
                        ]))
                    else:
                        # "x1,y1;x2,y2;..." -> flat [x1, y1, x2, y2, ...]
                        shape['points'] = []
                        for pair in el.attrib['points'].split(';'):
                            shape['points'].extend(map(float, pair.split(',')))

                    frame_desc = items.get((subset, shape['frame']), {'annotations': []})
                    frame_desc['annotations'].append(
                        cls._parse_shape_ann(shape, categories))
                    items[(subset, shape['frame'])] = frame_desc
                    shape = None

                elif el.tag == 'tag':
                    frame_desc = items.get((subset, tag['frame']), {'annotations': []})
                    frame_desc['annotations'].append(
                        cls._parse_tag_ann(tag, categories))
                    items[(subset, tag['frame'])] = frame_desc
                    tag = None
                elif el.tag == 'track':
                    track = None
                elif el.tag == 'image':
                    frame_desc = items.get((subset, image['frame']), {'annotations': []})
                    frame_desc.update({
                        'name': image.get('name'),
                        'height': image.get('height'),
                        'width': image.get('width'),
                        'subset': subset,
                    })
                    items[(subset, image['frame'])] = frame_desc
                    image = None
                el.clear()

        return items, categories

    @staticmethod
    def _parse_meta(context):
        """Parse the <meta> header; returns (categories, tasks_info,
        attribute_types). Consumes the iterparse context up to the first
        annotation element (or end of <meta>)."""
        ev, el = next(context)
        if not (ev == 'start' and el.tag == 'annotations'):
            raise Exception("Unexpected token ")

        categories = {}
        tasks_info = {}
        frame_size = [None, None]
        task_id = None
        mode = None
        labels = OrderedDict()
        label = None

        # Recursive descent parser
        el = None
        states = ['annotations']

        def accepted(expected_state, tag, next_state=None):
            # Push a state when `tag` opens while in `expected_state`.
            state = states[-1]
            if state == expected_state and el is not None and el.tag == tag:
                if not next_state:
                    next_state = tag
                states.append(next_state)
                return True
            return False

        def consumed(expected_state, tag):
            # Pop the state when `tag` closes while in `expected_state`.
            state = states[-1]
            if state == expected_state and el is not None and el.tag == tag:
                states.pop()
                return True
            return False

        for ev, el in context:
            if ev == 'start':
                if accepted('annotations', 'meta'): pass
                elif accepted('meta', 'task'): pass
                elif accepted('meta', 'project'): pass
                elif accepted('project', 'tasks'): pass
                elif accepted('tasks', 'task'): pass
                elif accepted('task', 'id', next_state='task_id'): pass
                elif accepted('task', 'segment'): pass
                elif accepted('task', 'mode'): pass
                elif accepted('task', 'original_size'): pass
                elif accepted('original_size', 'height', next_state='frame_height'): pass
                elif accepted('original_size', 'width', next_state='frame_width'): pass
                elif accepted('task', 'labels'): pass
                elif accepted('project', 'labels'): pass
                elif accepted('labels', 'label'):
                    label = { 'name': None, 'attributes': [] }
                elif accepted('label', 'name', next_state='label_name'): pass
                elif accepted('label', 'attributes'): pass
                elif accepted('attributes', 'attribute'): pass
                elif accepted('attribute', 'name', next_state='attr_name'): pass
                elif accepted('attribute', 'input_type', next_state='attr_type'): pass
                elif accepted('annotations', 'image') or \
                     accepted('annotations', 'track') or \
                     accepted('annotations', 'tag'):
                    # First annotation reached — meta section is over.
                    break
                else:
                    pass
            elif ev == 'end':
                if consumed('meta', 'meta'):
                    break
                elif consumed('project', 'project'): pass
                elif consumed('tasks', 'tasks'): pass
                elif consumed('task', 'task'):
                    tasks_info[task_id] = {
                        'frame_size': frame_size,
                        'mode': mode,
                    }
                    frame_size = [None, None]
                    mode = None
                elif consumed('task_id', 'id'):
                    task_id = int(el.text)
                elif consumed('segment', 'segment'): pass
                elif consumed('mode', 'mode'):
                    mode = el.text
                elif consumed('original_size', 'original_size'): pass
                elif consumed('frame_height', 'height'):
                    frame_size[0] = int(el.text)
                elif consumed('frame_width', 'width'):
                    frame_size[1] = int(el.text)
                elif consumed('label_name', 'name'):
                    label['name'] = el.text
                elif consumed('attr_name', 'name'):
                    label['attributes'].append({'name': el.text})
                elif consumed('attr_type', 'input_type'):
                    label['attributes'][-1]['input_type'] = el.text
                elif consumed('attribute', 'attribute'): pass
                elif consumed('attributes', 'attributes'): pass
                elif consumed('label', 'label'):
                    labels[label['name']] = label['attributes']
                    label = None
                elif consumed('labels', 'labels'): pass
                else:
                    pass

        assert len(states) == 1 and states[0] == 'annotations', \
            "Expected 'meta' section in the annotation file, path: %s" % states

        # Track-related builtin attributes only apply to interpolation tasks.
        common_attrs = ['occluded']
        if 'interpolation' in map(lambda t: t['mode'], tasks_info.values()):
            common_attrs.append('keyframe')
            common_attrs.append('outside')
            common_attrs.append('track_id')
        label_cat = LabelCategories(attributes=common_attrs)
        attribute_types = {}
        for label, attrs in labels.items():
            attr_names = {v['name'] for v in attrs}
            label_cat.add(label, attributes=attr_names)
            for attr in attrs:
                attribute_types[attr['name']] = attr['input_type']

        categories[AnnotationType.label] = label_cat
        return categories, tasks_info, attribute_types

    @classmethod
    def _parse_shape_ann(cls, ann, categories):
        """Convert one parsed shape dict into a datumaro annotation object."""
        ann_id = ann.get('id', 0)
        ann_type = ann['type']

        attributes = ann.get('attributes') or {}
        if 'occluded' in categories[AnnotationType.label].attributes:
            attributes['occluded'] = ann.get('occluded', False)
        if 'outside' in ann:
            attributes['outside'] = ann['outside']
        if 'keyframe' in ann:
            attributes['keyframe'] = ann['keyframe']
        if 'track_id' in ann:
            attributes['track_id'] = ann['track_id']

        group = ann.get('group')
        label = ann.get('label')
        label_id = categories[AnnotationType.label].find(label)[0]
        z_order = ann.get('z_order', 0)
        points = ann.get('points', [])

        if ann_type == 'polyline':
            return PolyLine(points, label=label_id, z_order=z_order,
                id=ann_id, attributes=attributes, group=group)
        elif ann_type == 'polygon':
            return Polygon(points, label=label_id, z_order=z_order,
                id=ann_id, attributes=attributes, group=group)
        elif ann_type == 'points':
            return Points(points, label=label_id, z_order=z_order,
                id=ann_id, attributes=attributes, group=group)
        elif ann_type == 'box':
            # CVAT stores corners; Bbox wants origin + width/height.
            x, y = points[0], points[1]
            w, h = points[2] - x, points[3] - y
            return Bbox(x, y, w, h, label=label_id, z_order=z_order,
                id=ann_id, attributes=attributes, group=group)
        else:
            raise NotImplementedError("Unknown annotation type '%s'" % ann_type)

    @classmethod
    def _parse_tag_ann(cls, ann, categories):
        """Convert one parsed tag dict into a Label annotation."""
        label = ann.get('label')
        label_id = categories[AnnotationType.label].find(label)[0]
        group = ann.get('group')
        attributes = ann.get('attributes')
        return Label(label_id, attributes=attributes, group=group)

    def _load_items(self, parsed, image_items):
        """Merge parsed annotations into the filesystem image index, keyed
        by (subset, file stem); returns the updated index."""
        for (subset, frame_id), item_desc in parsed.items():
            name = item_desc.get('name', 'frame_%06d.PNG' % int(frame_id))
            image = osp.join(self._images_dir, subset, name) if subset else osp.join(self._images_dir, name)
            image_size = (item_desc.get('height'), item_desc.get('width'))
            if all(image_size):
                image = Image(path=image, size=tuple(map(int, image_size)))

            di = image_items.get((subset, osp.splitext(name)[0]), DatasetItem(
                id=name, annotations=[],
            ))
            di.subset = subset or DEFAULT_SUBSET_NAME
            di.annotations = item_desc.get('annotations')
            di.attributes = {'frame': int(frame_id)}
            # Keep the on-disk Image when the XML didn't provide a size.
            di.image = image if isinstance(image, Image) else di.image
            image_items[(subset, osp.splitext(name)[0])] = di
        return image_items
# Make the CVAT extractor discoverable through the shared datumaro environment.
dm_env.extractors.register('cvat', CvatExtractor)

class CvatImporter(Importer):
    """Importer that locates CVAT XML annotation files under a dataset root."""
    @classmethod
    def find_sources(cls, path):
        # Any '.xml' file below `path` is treated as a 'cvat' source.
        return cls._find_sources_recursive(path, '.xml', 'cvat')

dm_env.importers.register('cvat', CvatImporter)
def pairwise(iterable):
    """Yield consecutive non-overlapping pairs: s -> (s0, s1), (s2, s3), ...

    A trailing unpaired element is dropped.
    """
    it = iter(iterable)
    return zip(it, it)
def create_xml_dumper(file_object):
    """Return an XmlAnnotationWriter that streams CVAT XML to `file_object`."""
    from xml.sax.saxutils import XMLGenerator

    class XmlAnnotationWriter:
        """SAX-based writer that tracks indentation depth while emitting the
        CVAT annotation XML structure element by element."""

        def __init__(self, file):
            self.version = "1.1"  # CVAT XML format version
            self.file = file
            self.xmlgen = XMLGenerator(self.file, 'utf-8')
            self._level = 0  # current indentation depth

        def _indent(self, newline = True):
            if newline:
                self.xmlgen.ignorableWhitespace("\n")
            self.xmlgen.ignorableWhitespace(" " * self._level)

        def _add_version(self):
            self._indent()
            self.xmlgen.startElement("version", {})
            self.xmlgen.characters(self.version)
            self.xmlgen.endElement("version")

        def open_document(self):
            self.xmlgen.startDocument()

        def open_root(self):
            self.xmlgen.startElement("annotations", {})
            self._level += 1
            self._add_version()

        def _add_meta(self, meta):
            # Recursively serialize nested dicts/lists as nested elements.
            self._level += 1
            for k, v in meta.items():
                if isinstance(v, OrderedDict):
                    self._indent()
                    self.xmlgen.startElement(k, {})
                    self._add_meta(v)
                    self._indent()
                    self.xmlgen.endElement(k)
                elif isinstance(v, list):
                    self._indent()
                    self.xmlgen.startElement(k, {})
                    for tup in v:
                        self._add_meta(OrderedDict([tup]))
                    self._indent()
                    self.xmlgen.endElement(k)
                else:
                    self._indent()
                    self.xmlgen.startElement(k, {})
                    self.xmlgen.characters(v)
                    self.xmlgen.endElement(k)
            self._level -= 1

        def add_meta(self, meta):
            self._indent()
            self.xmlgen.startElement("meta", {})
            self._add_meta(meta)
            self._indent()
            self.xmlgen.endElement("meta")

        def open_track(self, track):
            self._indent()
            self.xmlgen.startElement("track", track)
            self._level += 1

        def open_image(self, image):
            self._indent()
            self.xmlgen.startElement("image", image)
            self._level += 1

        def open_box(self, box):
            self._indent()
            self.xmlgen.startElement("box", box)
            self._level += 1

        def open_ellipse(self, ellipse):
            self._indent()
            self.xmlgen.startElement("ellipse", ellipse)
            self._level += 1

        def open_polygon(self, polygon):
            self._indent()
            self.xmlgen.startElement("polygon", polygon)
            self._level += 1

        def open_polyline(self, polyline):
            self._indent()
            self.xmlgen.startElement("polyline", polyline)
            self._level += 1

        def open_points(self, points):
            self._indent()
            self.xmlgen.startElement("points", points)
            self._level += 1

        def open_cuboid(self, cuboid):
            self._indent()
            self.xmlgen.startElement("cuboid", cuboid)
            self._level += 1

        def open_tag(self, tag):
            self._indent()
            self.xmlgen.startElement("tag", tag)
            self._level += 1

        def add_attribute(self, attribute):
            self._indent()
            self.xmlgen.startElement("attribute", {"name": attribute["name"]})
            self.xmlgen.characters(attribute["value"])
            self.xmlgen.endElement("attribute")

        def close_box(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("box")

        def close_ellipse(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("ellipse")

        def close_polygon(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("polygon")

        def close_polyline(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("polyline")

        def close_points(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("points")

        def close_cuboid(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("cuboid")

        def close_tag(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("tag")

        def close_image(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("image")

        def close_track(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("track")

        def close_root(self):
            self._level -= 1
            self._indent()
            self.xmlgen.endElement("annotations")
            self._indent()

        def close_document(self):
            self.xmlgen.endDocument()

    return XmlAnnotationWriter(file_object)
def dump_as_cvat_annotation(dumper, annotations):
    """Write annotations in CVAT-for-images layout: one <image> element per
    frame, with each shape/tag serialized inside it."""
    dumper.open_root()
    dumper.add_meta(annotations.meta)

    for frame_annotation in annotations.group_by_frame(include_empty=True):
        frame_id = frame_annotation.frame
        image_attrs = OrderedDict([
            ("id", str(frame_id)),
            ("name", frame_annotation.name),
        ])
        if isinstance(annotations, ProjectData):
            # Project exports also record which task/subset a frame came from.
            image_attrs.update(OrderedDict([
                ("subset", frame_annotation.subset),
                ("task_id", str(frame_annotation.task_id)),
            ]))
        image_attrs.update(OrderedDict([
            ("width", str(frame_annotation.width)),
            ("height", str(frame_annotation.height))
        ]))
        dumper.open_image(image_attrs)

        for shape in frame_annotation.labeled_shapes:
            dump_data = OrderedDict([
                ("label", shape.label),
                ("occluded", str(int(shape.occluded))),
                ("source", shape.source),
            ])

            if shape.type == "rectangle":
                dump_data.update(OrderedDict([
                    ("xtl", "{:.2f}".format(shape.points[0])),
                    ("ytl", "{:.2f}".format(shape.points[1])),
                    ("xbr", "{:.2f}".format(shape.points[2])),
                    ("ybr", "{:.2f}".format(shape.points[3]))
                ]))
                if shape.rotation:
                    dump_data.update(OrderedDict([
                        ("rotation", "{:.2f}".format(shape.rotation))
                    ]))
            elif shape.type == "ellipse":
                # NOTE(review): ry is computed as points[1] - points[3];
                # preserved as-is (sign convention set by the stored points).
                dump_data.update(OrderedDict([
                    ("cx", "{:.2f}".format(shape.points[0])),
                    ("cy", "{:.2f}".format(shape.points[1])),
                    ("rx", "{:.2f}".format(shape.points[2] - shape.points[0])),
                    ("ry", "{:.2f}".format(shape.points[1] - shape.points[3]))
                ]))
                if shape.rotation:
                    dump_data.update(OrderedDict([
                        ("rotation", "{:.2f}".format(shape.rotation))
                    ]))
            elif shape.type == "cuboid":
                dump_data.update(OrderedDict([
                    ("xtl1", "{:.2f}".format(shape.points[0])),
                    ("ytl1", "{:.2f}".format(shape.points[1])),
                    ("xbl1", "{:.2f}".format(shape.points[2])),
                    ("ybl1", "{:.2f}".format(shape.points[3])),
                    ("xtr1", "{:.2f}".format(shape.points[4])),
                    ("ytr1", "{:.2f}".format(shape.points[5])),
                    ("xbr1", "{:.2f}".format(shape.points[6])),
                    ("ybr1", "{:.2f}".format(shape.points[7])),
                    ("xtl2", "{:.2f}".format(shape.points[8])),
                    ("ytl2", "{:.2f}".format(shape.points[9])),
                    ("xbl2", "{:.2f}".format(shape.points[10])),
                    ("ybl2", "{:.2f}".format(shape.points[11])),
                    ("xtr2", "{:.2f}".format(shape.points[12])),
                    ("ytr2", "{:.2f}".format(shape.points[13])),
                    ("xbr2", "{:.2f}".format(shape.points[14])),
                    ("ybr2", "{:.2f}".format(shape.points[15]))
                ]))
            else:
                # Polygons / polylines / points: "x1,y1;x2,y2;..." string.
                dump_data.update(OrderedDict([
                    ("points", ';'.join((
                        ','.join((
                            "{:.2f}".format(x),
                            "{:.2f}".format(y)
                        )) for x, y in pairwise(shape.points))
                    )),
                ]))

            dump_data['z_order'] = str(shape.z_order)
            if shape.group:
                dump_data['group_id'] = str(shape.group)

            if shape.type == "rectangle":
                dumper.open_box(dump_data)
            elif shape.type == "ellipse":
                dumper.open_ellipse(dump_data)
            elif shape.type == "polygon":
                dumper.open_polygon(dump_data)
            elif shape.type == "polyline":
                dumper.open_polyline(dump_data)
            elif shape.type == "points":
                dumper.open_points(dump_data)
            elif shape.type == "cuboid":
                dumper.open_cuboid(dump_data)
            else:
                raise NotImplementedError("unknown shape type")

            for attr in shape.attributes:
                dumper.add_attribute(OrderedDict([
                    ("name", attr.name),
                    ("value", attr.value)
                ]))

            if shape.type == "rectangle":
                dumper.close_box()
            elif shape.type == "ellipse":
                dumper.close_ellipse()
            elif shape.type == "polygon":
                dumper.close_polygon()
            elif shape.type == "polyline":
                dumper.close_polyline()
            elif shape.type == "points":
                dumper.close_points()
            elif shape.type == "cuboid":
                dumper.close_cuboid()
            else:
                raise NotImplementedError("unknown shape type")

        for tag in frame_annotation.tags:
            tag_data = OrderedDict([
                ("label", tag.label),
                ("source", tag.source),
            ])
            if tag.group:
                tag_data["group_id"] = str(tag.group)
            dumper.open_tag(tag_data)

            for attr in tag.attributes:
                dumper.add_attribute(OrderedDict([
                    ("name", attr.name),
                    ("value", attr.value)
                ]))

            dumper.close_tag()

        dumper.close_image()
    dumper.close_root()
def dump_as_cvat_interpolation(dumper, annotations):
    """Write annotations in CVAT-for-video layout: one <track> element per
    track; standalone shapes are wrapped into single-shape tracks."""
    dumper.open_root()
    dumper.add_meta(annotations.meta)

    def dump_track(idx, track):
        # Serialize one track and all of its keyframe shapes.
        track_id = idx
        dump_data = OrderedDict([
            ("id", str(track_id)),
            ("label", track.label),
            ("source", track.source),
        ])

        if hasattr(track, 'task_id'):
            # Project export: resolve the owning task for subset info.
            task, = filter(lambda task: task.id == track.task_id, annotations.tasks)
            dump_data.update(OrderedDict([
                ('task_id', str(track.task_id)),
                ('subset', get_defaulted_subset(task.subset, annotations.subsets)),
            ]))

        if track.group:
            dump_data['group_id'] = str(track.group)
        dumper.open_track(dump_data)

        for shape in track.shapes:
            dump_data = OrderedDict([
                ("frame", str(shape.frame)),
                ("outside", str(int(shape.outside))),
                ("occluded", str(int(shape.occluded))),
                ("keyframe", str(int(shape.keyframe))),
            ])

            if shape.type == "rectangle":
                dump_data.update(OrderedDict([
                    ("xtl", "{:.2f}".format(shape.points[0])),
                    ("ytl", "{:.2f}".format(shape.points[1])),
                    ("xbr", "{:.2f}".format(shape.points[2])),
                    ("ybr", "{:.2f}".format(shape.points[3])),
                ]))
                if shape.rotation:
                    dump_data.update(OrderedDict([
                        ("rotation", "{:.2f}".format(shape.rotation))
                    ]))
            elif shape.type == "ellipse":
                # NOTE(review): ry uses points[1] - points[3]; preserved as-is.
                dump_data.update(OrderedDict([
                    ("cx", "{:.2f}".format(shape.points[0])),
                    ("cy", "{:.2f}".format(shape.points[1])),
                    ("rx", "{:.2f}".format(shape.points[2] - shape.points[0])),
                    ("ry", "{:.2f}".format(shape.points[1] - shape.points[3]))
                ]))
                if shape.rotation:
                    dump_data.update(OrderedDict([
                        ("rotation", "{:.2f}".format(shape.rotation))
                    ]))
            elif shape.type == "cuboid":
                dump_data.update(OrderedDict([
                    ("xtl1", "{:.2f}".format(shape.points[0])),
                    ("ytl1", "{:.2f}".format(shape.points[1])),
                    ("xbl1", "{:.2f}".format(shape.points[2])),
                    ("ybl1", "{:.2f}".format(shape.points[3])),
                    ("xtr1", "{:.2f}".format(shape.points[4])),
                    ("ytr1", "{:.2f}".format(shape.points[5])),
                    ("xbr1", "{:.2f}".format(shape.points[6])),
                    ("ybr1", "{:.2f}".format(shape.points[7])),
                    ("xtl2", "{:.2f}".format(shape.points[8])),
                    ("ytl2", "{:.2f}".format(shape.points[9])),
                    ("xbl2", "{:.2f}".format(shape.points[10])),
                    ("ybl2", "{:.2f}".format(shape.points[11])),
                    ("xtr2", "{:.2f}".format(shape.points[12])),
                    ("ytr2", "{:.2f}".format(shape.points[13])),
                    ("xbr2", "{:.2f}".format(shape.points[14])),
                    ("ybr2", "{:.2f}".format(shape.points[15]))
                ]))
            else:
                dump_data.update(OrderedDict([
                    ("points", ';'.join(['{:.2f},{:.2f}'.format(x, y)
                        for x,y in pairwise(shape.points)]))
                ]))

            dump_data["z_order"] = str(shape.z_order)

            if shape.type == "rectangle":
                dumper.open_box(dump_data)
            elif shape.type == "ellipse":
                dumper.open_ellipse(dump_data)
            elif shape.type == "polygon":
                dumper.open_polygon(dump_data)
            elif shape.type == "polyline":
                dumper.open_polyline(dump_data)
            elif shape.type == "points":
                dumper.open_points(dump_data)
            elif shape.type == "cuboid":
                dumper.open_cuboid(dump_data)
            else:
                raise NotImplementedError("unknown shape type")

            for attr in shape.attributes:
                dumper.add_attribute(OrderedDict([
                    ("name", attr.name),
                    ("value", attr.value)
                ]))

            if shape.type == "rectangle":
                dumper.close_box()
            elif shape.type == "ellipse":
                dumper.close_ellipse()
            elif shape.type == "polygon":
                dumper.close_polygon()
            elif shape.type == "polyline":
                dumper.close_polyline()
            elif shape.type == "points":
                dumper.close_points()
            elif shape.type == "cuboid":
                dumper.close_cuboid()
            else:
                raise NotImplementedError("unknown shape type")
        dumper.close_track()

    counter = 0
    for track in annotations.tracks:
        dump_track(counter, track)
        counter += 1

    # Standalone shapes become one-shape tracks (plus a closing "outside"
    # keyframe on the next frame when it does not pass the stop frame).
    for shape in annotations.shapes:
        frame_step = annotations.frame_step if isinstance(annotations, TaskData) else annotations.frame_step[shape.task_id]
        if isinstance(annotations, TaskData):
            stop_frame = int(annotations.meta['task']['stop_frame'])
        else:
            task_meta = list(filter(lambda task: int(task[1]['id']) == shape.task_id, annotations.meta['project']['tasks']))[0][1]
            stop_frame = int(task_meta['stop_frame'])
        track = {
            'label': shape.label,
            'group': shape.group,
            'source': shape.source,
            'shapes': [annotations.TrackedShape(
                type=shape.type,
                points=shape.points,
                rotation=shape.rotation,
                occluded=shape.occluded,
                outside=False,
                keyframe=True,
                z_order=shape.z_order,
                frame=shape.frame,
                attributes=shape.attributes,
            )] +
            ( # add a finishing frame if it does not hop over the last frame
                [annotations.TrackedShape(
                    type=shape.type,
                    points=shape.points,
                    rotation=shape.rotation,
                    occluded=shape.occluded,
                    outside=True,
                    keyframe=True,
                    z_order=shape.z_order,
                    frame=shape.frame + frame_step,
                    attributes=shape.attributes,
                )] if shape.frame + frame_step < \
                    stop_frame \
                else []
            ),
        }
        if isinstance(annotations, ProjectData): track['task_id'] = shape.task_id
        dump_track(counter, annotations.Track(**track))
        counter += 1

    dumper.close_root()
def load_anno(file_object, annotations):
    """Stream-parse a CVAT 1.1 XML annotation file into *annotations*.

    Uses ``ElementTree.iterparse`` so arbitrarily large files can be read
    without building the full DOM; each element is cleared once consumed.
    Shapes may appear inside a <track> (tracked shapes) or inside an
    <image> (per-frame labeled shapes); <tag> elements are only honored
    inside an <image>.
    """
    supported_shapes = ('box', 'ellipse', 'polygon', 'polyline', 'points', 'cuboid')
    context = ElementTree.iterparse(file_object, events=("start", "end"))
    context = iter(context)
    # Consume the root element's "start" event before the main loop.
    next(context)
    # Parser state: currently open <track>, shape dict, <tag> dict, whether
    # an <image> is open, and the attribute list of the innermost open
    # shape/tag (attributes is rebound on each shape/tag "start").
    track = None
    shape = None
    tag = None
    image_is_opened = False
    attributes = None
    for ev, el in context:
        if ev == 'start':
            if el.tag == 'track':
                track = annotations.Track(
                    label=el.attrib['label'],
                    group=int(el.attrib.get('group_id', 0)),
                    source=el.attrib.get('source', 'manual'),
                    shapes=[],
                )
            elif el.tag == 'image':
                image_is_opened = True
                # Resolve the image back to the task's absolute frame number.
                frame_id = annotations.abs_frame_id(match_dm_item(
                    DatasetItem(id=osp.splitext(el.attrib['name'])[0],
                                attributes={'frame': el.attrib['id']},
                                image=el.attrib['name']
                    ),
                    task_data=annotations
                ))
            elif el.tag in supported_shapes and (track is not None or image_is_opened):
                attributes = []
                shape = {
                    'attributes': attributes,
                    'points': [],
                }
            elif el.tag == 'tag' and image_is_opened:
                attributes = []
                tag = {
                    'frame': frame_id,
                    'label': el.attrib['label'],
                    'group': int(el.attrib.get('group_id', 0)),
                    'attributes': attributes,
                    'source': str(el.attrib.get('source', 'manual'))
                }
        elif ev == 'end':
            if el.tag == 'attribute' and attributes is not None:
                # <attribute> children accumulate onto the enclosing shape/tag.
                attributes.append(annotations.Attribute(
                    name=el.attrib['name'],
                    value=el.text or "",
                ))
            if el.tag in supported_shapes:
                if track is not None:
                    # Tracked shapes carry frame/outside/keyframe themselves.
                    shape['frame'] = el.attrib['frame']
                    shape['outside'] = el.attrib['outside'] == "1"
                    shape['keyframe'] = el.attrib['keyframe'] == "1"
                else:
                    # Image-level shapes inherit the enclosing image's frame id.
                    shape['frame'] = frame_id
                    shape['label'] = el.attrib['label']
                    shape['group'] = int(el.attrib.get('group_id', 0))
                    shape['source'] = str(el.attrib.get('source', 'manual'))
                shape['type'] = 'rectangle' if el.tag == 'box' else el.tag
                shape['occluded'] = el.attrib['occluded'] == '1'
                shape['z_order'] = int(el.attrib.get('z_order', 0))
                shape['rotation'] = float(el.attrib.get('rotation', 0))
                if el.tag == 'box':
                    # NOTE(review): box/ellipse/cuboid coordinates are kept as the
                    # raw attribute strings while polygon-like points are floats;
                    # presumably downstream constructors coerce them — confirm.
                    shape['points'].append(el.attrib['xtl'])
                    shape['points'].append(el.attrib['ytl'])
                    shape['points'].append(el.attrib['xbr'])
                    shape['points'].append(el.attrib['ybr'])
                elif el.tag == 'ellipse':
                    # Ellipse is stored as center plus the point (cx+rx, cy-ry).
                    shape['points'].append(el.attrib['cx'])
                    shape['points'].append(el.attrib['cy'])
                    shape['points'].append("{:.2f}".format(float(el.attrib['cx']) + float(el.attrib['rx'])))
                    shape['points'].append("{:.2f}".format(float(el.attrib['cy']) - float(el.attrib['ry'])))
                elif el.tag == 'cuboid':
                    # Eight cuboid corners flattened into 16 coordinates.
                    shape['points'].append(el.attrib['xtl1'])
                    shape['points'].append(el.attrib['ytl1'])
                    shape['points'].append(el.attrib['xbl1'])
                    shape['points'].append(el.attrib['ybl1'])
                    shape['points'].append(el.attrib['xtr1'])
                    shape['points'].append(el.attrib['ytr1'])
                    shape['points'].append(el.attrib['xbr1'])
                    shape['points'].append(el.attrib['ybr1'])
                    shape['points'].append(el.attrib['xtl2'])
                    shape['points'].append(el.attrib['ytl2'])
                    shape['points'].append(el.attrib['xbl2'])
                    shape['points'].append(el.attrib['ybl2'])
                    shape['points'].append(el.attrib['xtr2'])
                    shape['points'].append(el.attrib['ytr2'])
                    shape['points'].append(el.attrib['xbr2'])
                    shape['points'].append(el.attrib['ybr2'])
                else:
                    # polygon/polyline/points: "x1,y1;x2,y2;..." attribute string.
                    for pair in el.attrib['points'].split(';'):
                        shape['points'].extend(map(float, pair.split(',')))
                if track is not None:
                    # Only keyframes are stored on the track.
                    if shape["keyframe"]:
                        track.shapes.append(annotations.TrackedShape(**shape))
                else:
                    annotations.add_shape(annotations.LabeledShape(**shape))
                shape = None
            elif el.tag == 'track':
                annotations.add_track(track)
                track = None
            elif el.tag == 'image':
                image_is_opened = False
            elif el.tag == 'tag':
                annotations.add_tag(annotations.Tag(**tag))
                tag = None
            # Release the parsed element to keep memory bounded.
            el.clear()
def dump_task_anno(dst_file, task_data, callback):
    """Serialize one task's annotations as XML into *dst_file*.

    Opens the XML document, delegates the body to *callback*, then
    closes the document.
    """
    xml_writer = create_xml_dumper(dst_file)
    xml_writer.open_document()
    callback(xml_writer, task_data)
    xml_writer.close_document()
def dump_project_anno(dst_file: BufferedWriter, project_data: ProjectData, callback: Callable):
    """Serialize a whole project's annotations as XML into *dst_file*.

    Opens the XML document, delegates the body to *callback*, then
    closes the document.
    """
    xml_writer = create_xml_dumper(dst_file)
    xml_writer.open_document()
    callback(xml_writer, project_data)
    xml_writer.close_document()
def dump_media_files(task_data: TaskData, img_dir: str, project_data: ProjectData = None):
    """Write every original-quality frame of *task_data* under *img_dir*.

    When *project_data* is given, frame paths are looked up in the
    project-level frame table keyed by (task id, frame index); otherwise
    the task's own frame table is used.
    """
    # Interpolation-mode tasks store video frames; append the video frame
    # extension to each exported file name.
    ext = FrameProvider.VIDEO_FRAME_EXT \
        if task_data.meta['task']['mode'] == 'interpolation' else ''
    provider = FrameProvider(task_data.db_task.data)
    frame_stream = provider.get_frames(
        provider.Quality.ORIGINAL,
        provider.Type.BUFFER)
    for idx, (frame_buffer, _) in enumerate(frame_stream):
        if project_data is None:
            frame_name = task_data.frame_info[idx]['path']
        else:
            frame_name = project_data.frame_info[(task_data.db_task.id, idx)]['path']
        target_path = osp.join(img_dir, frame_name + ext)
        os.makedirs(osp.dirname(target_path), exist_ok=True)
        with open(target_path, 'wb') as out:
            out.write(frame_buffer.getvalue())
def _export_task(dst_file, task_data, anno_callback, save_images=False):
    """Export a single task as a ZIP archive written to *dst_file*.

    The archive contains annotations.xml (produced by *anno_callback*)
    and, when *save_images* is true, an images/ directory with the
    task's media.
    """
    with TemporaryDirectory() as staging_dir:
        anno_path = osp.join(staging_dir, 'annotations.xml')
        with open(anno_path, 'wb') as anno_file:
            dump_task_anno(anno_file, task_data, anno_callback)
        if save_images:
            dump_media_files(task_data, osp.join(staging_dir, 'images'))
        make_zip_archive(staging_dir, dst_file)
def _export_project(dst_file: str, project_data: ProjectData, anno_callback: Callable, save_images: bool=False):
    """Export a whole project as a ZIP archive written to *dst_file*.

    The archive contains annotations.xml (produced by *anno_callback*)
    and, when *save_images* is true, one images/<subset>/ directory per
    task, grouped by the task's (defaulted) subset name.
    """
    with TemporaryDirectory() as staging_dir:
        anno_path = osp.join(staging_dir, 'annotations.xml')
        with open(anno_path, 'wb') as anno_file:
            dump_project_anno(anno_file, project_data, anno_callback)
        if save_images:
            for task_data in project_data.task_data:
                subset = get_defaulted_subset(task_data.db_task.subset, project_data.subsets)
                subset_dir = osp.join(staging_dir, 'images', subset)
                os.makedirs(subset_dir, exist_ok=True)
                dump_media_files(task_data, subset_dir, project_data)
        make_zip_archive(staging_dir, dst_file)
@exporter(name='CVAT for video', ext='ZIP', version='1.1')
def _export_video(dst_file, instance_data, save_images=False):
    """Export *instance_data* in the "CVAT for video" (interpolation) layout.

    Dispatches to the project- or task-level exporter depending on the
    type of *instance_data*.
    """
    export = _export_project if isinstance(instance_data, ProjectData) else _export_task
    export(dst_file, instance_data,
        anno_callback=dump_as_cvat_interpolation, save_images=save_images)
@exporter(name='CVAT for images', ext='ZIP', version='1.1')
def _export_images(dst_file, instance_data, save_images=False):
    """Export *instance_data* in the "CVAT for images" (annotation) layout.

    Dispatches to the project- or task-level exporter depending on the
    type of *instance_data*.
    """
    export = _export_project if isinstance(instance_data, ProjectData) else _export_task
    export(dst_file, instance_data,
        anno_callback=dump_as_cvat_annotation, save_images=save_images)
@importer(name='CVAT', ext='XML, ZIP', version='1.1')
def _import(src_file, instance_data, load_data_callback=None):
    """Import CVAT annotations from *src_file* (bare XML or ZIP archive).

    A plain XML file is parsed directly. A ZIP is extracted to a temp
    directory first: projects go through the Datumaro dataset importer,
    tasks load every contained *.xml file individually.
    """
    archive = zipfile.is_zipfile(src_file)
    # is_zipfile reads from the stream; rewind before any further parsing.
    src_file.seek(0)
    if not archive:
        load_anno(src_file, instance_data)
        return
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        if isinstance(instance_data, ProjectData):
            dataset = Dataset.import_from(tmp_dir, 'cvat', env=dm_env)
            if load_data_callback is not None:
                load_data_callback(dataset, instance_data)
            import_dm_annotations(dataset, instance_data)
        else:
            for xml_path in glob(osp.join(tmp_dir, '**', '*.xml'), recursive=True):
                load_anno(xml_path, instance_data)
| 40.077828
| 165
| 0.509552
|
acffd5d933b90f15df52b21e61c32fd7b09dddb4
| 2,033
|
py
|
Python
|
backend/users/views.py
|
ryabant/foodgram
|
2e6483eee9968fe64caf9502e6154a62e205edb6
|
[
"BSD-2-Clause"
] | null | null | null |
backend/users/views.py
|
ryabant/foodgram
|
2e6483eee9968fe64caf9502e6154a62e205edb6
|
[
"BSD-2-Clause"
] | null | null | null |
backend/users/views.py
|
ryabant/foodgram
|
2e6483eee9968fe64caf9502e6154a62e205edb6
|
[
"BSD-2-Clause"
] | null | null | null |
from djoser.views import UserViewSet
from django.contrib.auth import get_user_model
from rest_framework.decorators import action
from rest_framework.generics import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework import status
from recipes.serializers import FollowAuthorSerializer
from .models import FollowAuthor
# Resolve the active user model (honors a custom AUTH_USER_MODEL).
User = get_user_model()
class CustomUserViewSet(UserViewSet):
    """Djoser user viewset extended with author-subscription endpoints."""

    @action(detail=False, methods=["get"], permission_classes=[IsAuthenticated])
    def subscriptions(self, request):
        """Return the authors the current user follows, paginated when configured."""
        followed_ids = FollowAuthor.objects.filter(user=request.user).values_list("author")
        authors = User.objects.filter(pk__in=followed_ids)
        page = self.paginate_queryset(authors)
        if page is not None:
            page_serializer = FollowAuthorSerializer(
                page, many=True, context={"request": request}
            )
            return self.get_paginated_response(page_serializer.data)
        serializer = FollowAuthorSerializer(
            authors, many=True, context={"request": request}
        )
        return Response(serializer.data)

    @action(
        detail=True, methods=["get", "delete"], permission_classes=[IsAuthenticated]
    )
    def subscribe(self, request, id=None):
        """GET: follow the author with pk *id*; DELETE: unfollow.

        Following yourself is rejected with 400; re-following is a no-op
        thanks to get_or_create.
        """
        author = get_object_or_404(User, id=id)
        if request.method != "GET":
            # DELETE branch: 404 if there is no existing subscription.
            follow = get_object_or_404(
                FollowAuthor, author=author, user=request.user
            )
            follow.delete()
            return Response(status=status.HTTP_204_NO_CONTENT)
        if request.user == author:
            payload = {"errors": "Нельзя подписаться на самого себя"}
            return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
        FollowAuthor.objects.get_or_create(user=request.user, author=author)
        serializer = FollowAuthorSerializer(author, context={"request": request})
        return Response(serializer.data)
| 41.489796
| 85
| 0.689621
|
acffd61616c0c5e49abe559f936ff96833e1c707
| 1,011
|
py
|
Python
|
HW6/submission/code/main.py
|
ardaduz/math-cgv
|
bc89c0ce9beca9a9f02ca23bcf4a9116be187882
|
[
"MIT"
] | null | null | null |
HW6/submission/code/main.py
|
ardaduz/math-cgv
|
bc89c0ce9beca9a9f02ca23bcf4a9116be187882
|
[
"MIT"
] | null | null | null |
HW6/submission/code/main.py
|
ardaduz/math-cgv
|
bc89c0ce9beca9a9f02ca23bcf4a9116be187882
|
[
"MIT"
] | 1
|
2021-02-14T10:41:17.000Z
|
2021-02-14T10:41:17.000Z
|
from datetime import datetime
import glob
import numpy as np
import os
from PIL import Image
import skimage
import tensorflow as tf
from tensorflow import keras
from model import Model
from trainer import Trainer
from dataset import Dataset
from config import Config
def main():
    """Build the dataset splits, model, and trainer, then run training."""
    # Load the three splits from disk; only the training split gets
    # training-time behavior (e.g. augmentation) enabled.
    train_split = Dataset('../dataset/train', is_training_set=True)
    val_split = Dataset('../dataset/val')
    test_split = Dataset('../dataset/test')
    # Wrap each split in a batched tf.data pipeline.
    batch_size = Config.batch_size
    for split in (train_split, val_split, test_split):
        split.create_tf_dataset(batch_size)
    # 128x128 RGB input images.
    model = Model([128, 128, 3])
    trainer = Trainer(model, train_split, val_split, test_split)
    trainer.train(n_epochs=Config.n_epochs)
# Script entry point: run training only when executed directly.
if __name__ == '__main__':
    main()
| 24.071429
| 70
| 0.738872
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.