hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
655a2db3fa6c2ead3f03ffdd5feda111f37b2963 | 885 | py | Python | Mobley_logP/pythonScripts/makeDataFile.py | MobleyLab/SAMPL5_logD_PredictionAnalysis | c7675a8f183a465bee89599a6df9e360476ef868 | [
"MIT"
] | 1 | 2018-03-21T16:48:41.000Z | 2018-03-21T16:48:41.000Z | Mobley_logP/pythonScripts/makeDataFile.py | MobleyLab/SAMPL5_logD_PredictionAnalysis | c7675a8f183a465bee89599a6df9e360476ef868 | [
"MIT"
] | null | null | null | Mobley_logP/pythonScripts/makeDataFile.py | MobleyLab/SAMPL5_logD_PredictionAnalysis | c7675a8f183a465bee89599a6df9e360476ef868 | [
"MIT"
] | null | null | null | # Written by Caitlin C Bannan
# Mobley Group, UC Irvine
# January/February 2016 I don't remember
# Prints calculated logD and associated solvation free energies to text file. It is the same script I used to make our SAMPL5 submission, solvation free energies were add later when discussing results with other participants
import pickle as p
a = p.load(open('dictionary_allResults.p','rb'))
tF = open('SAMPL5_withFreeEnergy.txt','w')
rms = 1.4
print >> tF, "# SAMPL ID, logD, stat unc., model unc., dF wat, ddF wat, dF cyc, ddF cyc"
dataKeys = sorted([k for k in a.keys()])
for num in dataKeys:
logD = a[num]['LogD_calc'][0]
dlogD = a[num]['LogD_calc'][1]
wat = a[num]['water']
cyc = a[num]['cyclohexane']
data = "%s, %.2f, %.2f, 1.4, %.1f, %.1f, %.1f, %.1f" % (num, logD, dlogD, -wat[0], wat[1], -cyc[0], cyc[1])
print data
print >>tF, data
tF.close()
| 36.875 | 224 | 0.658757 |
7ae8b377b8a9d82c890d09b97597c94927dfe100 | 481 | py | Python | morepath_static/tests/test_morepath_static.py | morepath/morepath_static | 77f0496d6d1119c91ebebdd683b0fd443d789596 | [
"BSD-3-Clause"
] | null | null | null | morepath_static/tests/test_morepath_static.py | morepath/morepath_static | 77f0496d6d1119c91ebebdd683b0fd443d789596 | [
"BSD-3-Clause"
] | 4 | 2020-09-07T13:35:53.000Z | 2021-04-18T14:32:31.000Z | morepath_static/tests/test_morepath_static.py | morepath/morepath_static | 77f0496d6d1119c91ebebdd683b0fd443d789596 | [
"BSD-3-Clause"
] | 2 | 2016-07-05T06:26:56.000Z | 2020-09-30T16:17:28.000Z | from webtest import TestApp as Client
from .. import App
from ..__main__ import run
def test_static_assets():
    """The index page must load and every script it references must resolve."""
    client = Client(App())
    response = client.get("/")
    script_tags = response.html.select("script")
    assert len(script_tags) == 2
    # Fetch each script URL so a broken static-asset path fails the test.
    for tag in script_tags:
        client.get(tag["src"])
def test_run(monkeypatch):
    """Running the entry point must hand exactly one App to morepath.run."""
    import morepath

    captured = []
    # Intercept morepath.run and record the app it would have served.
    monkeypatch.setattr(morepath, "run", captured.append)
    run()
    [captured_app] = captured
    assert isinstance(captured_app, App)
| 17.178571 | 75 | 0.627859 |
6a74fc6b55779064da01a4a98c72939870076699 | 3,802 | py | Python | basestation/flopper.py | jakeh12/hackisu2018-flopper | 46d1ea47546f36c0573e2a662b1bf717794c431c | [
"MIT"
] | 1 | 2018-10-14T18:19:50.000Z | 2018-10-14T18:19:50.000Z | basestation/flopper.py | jakeh12/hackisu2018-flopper | 46d1ea47546f36c0573e2a662b1bf717794c431c | [
"MIT"
] | null | null | null | basestation/flopper.py | jakeh12/hackisu2018-flopper | 46d1ea47546f36c0573e2a662b1bf717794c431c | [
"MIT"
] | null | null | null | from header_chunk import HeaderChunk
from track_chunk import TrackChunk
from helpers import HexArrayToDecimal
from drive_system import DriveSystem
import time


def _dispatch_note(drive_system, note):
    """Send one note event to the floppy drives.

    ``note[2] == 1`` starts the note on a free drive; any other value
    releases the drive currently playing it.  (The tuple layout is
    presumably (pitch, tick, on/off) -- confirm against TrackChunk.)
    """
    if note[2] == 1:
        drive_num = drive_system.find_available_drive()
        drive_system.lock_drive(drive_num, note)
    else:
        drive_num = drive_system.find_playing_drive(note)
        drive_system.unlock_drive(drive_num, note)


with open('/Users/jhladik/Downloads/test.mid', 'rb') as midiFile:
    # Read the entire MIDI file into a list of byte values.
    fileContent = list(midiFile.read())
    # Parse the header chunk (always the first 14 bytes of the file).
    header_chunk = HeaderChunk(HexArrayToDecimal(fileContent[0:14]))
    # Print header chunk
    print("Header:")
    print("Type: " + str(header_chunk.type))
    print("Length: " + str(header_chunk.length))
    print("Format: " + str(header_chunk.data.format))
    print("# Tracks: " + str(header_chunk.data.tracks))
    print("Division Format: " + str(header_chunk.data.division_format))
    if header_chunk.data.division_format == 1:
        print("-frames/second: " + str(header_chunk.data.frames_per_second))
        print("ticks/frame: " + str(header_chunk.data.ticks_per_frame))
    else:
        print("ticks per quarter note: " + str(header_chunk.data.ticks_per_quarter_note))
    # Walk the file collecting track chunks: each chunk stores its payload
    # length in bytes 4-7, so the next chunk starts length + 8 bytes later
    # (4-byte tag + 4-byte length + payload).
    track_chunks = list()
    pointer = 14
    while len(track_chunks) < header_chunk.data.tracks:
        next_track_size = HexArrayToDecimal(fileContent[(pointer + 4):(pointer + 8)])
        track_chunks.append(TrackChunk(fileContent[pointer:(pointer + next_track_size + 8)]))
        pointer += next_track_size + 8
    print("Track chunks found: " + str(len(track_chunks)))
    # Merge note events from every track and pick up the last tempo seen.
    song_tempo = 0
    all_notes = list()
    for track_chunk in track_chunks:
        print("Track length: " + str(track_chunk.length))
        track_notes = track_chunk.data.notes
        # Tracks with fewer than 5 events are skipped as metadata-only.
        if len(track_notes) >= 5:
            all_notes += track_notes
        temp_tempo = track_chunk.data.tempo
        if temp_tempo != 0:
            song_tempo = temp_tempo
    # HACK: the tempo parsed from the file is deliberately overridden here.
    song_tempo = 200000
    print("Tempo (us/qn): " + str(song_tempo))
    # Sort the merged events chronologically by tick (note[1]).  list.sort
    # is stable, so simultaneous events keep the same relative order the
    # original hand-written bubble sort produced, in O(n log n) instead of
    # O(n^2).
    all_notes.sort(key=lambda event: event[1])
    current_time = 0
    drive_system = DriveSystem()
    startTime = time.time()
    for note in all_notes:
        if note[1] > current_time:
            # Wait until this event's timestamp before dispatching it.
            delta_time = note[1] - current_time
            # NOTE(review): song_tempo is in microseconds per quarter note,
            # so converting ticks to seconds would normally divide by
            # 1,000,000; the 100,000 divisor plays 10x faster -- confirm
            # this is intentional.
            sleep_time = ((song_tempo / header_chunk.data.ticks_per_quarter_note) * delta_time) / float(100000)
            print("Sleeping: " + str(sleep_time) + " seconds")
            time.sleep(sleep_time)
            current_time += delta_time
        _dispatch_note(drive_system, note)
    endTime = time.time()
    print("Total time: " + str(endTime - startTime))
| 36.912621 | 111 | 0.613887 |
fc7a44c80e51887c9363a64965d34f0f37f898ca | 3,453 | py | Python | src/worker/utils/flashcard.py | dailyideas/mobile-flashcards | ea9e95557471b54abd2dbec4d23c9a87ffc848cd | [
"MIT"
] | null | null | null | src/worker/utils/flashcard.py | dailyideas/mobile-flashcards | ea9e95557471b54abd2dbec4d23c9a87ffc848cd | [
"MIT"
] | null | null | null | src/worker/utils/flashcard.py | dailyideas/mobile-flashcards | ea9e95557471b54abd2dbec4d23c9a87ffc848cd | [
"MIT"
] | null | null | null | from __future__ import annotations
import datetime, logging, os, pathlib, sys, time
import random
from dataclasses import dataclass, InitVar
from os import path
from typing import ClassVar
#### #### #### #### ####
#### Global constants ####
#### #### #### #### ####
SCRIPT_NAME = path.basename(__file__).split(".")[0]
SCRIPT_DIRECTORY = path.dirname(path.abspath(__file__) )
ROOT_DIRECTORY = pathlib.Path(SCRIPT_DIRECTORY).parent.absolute()
#### #### #### #### ####
#### Global variables ####
#### #### #### #### ####
#### Logging
log = logging.getLogger(name=SCRIPT_NAME)
#### #### #### #### ####
#### Global Setups ####
#### #### #### #### ####
#### Paths
sys.path.insert(1, str(ROOT_DIRECTORY) )
#### #### #### #### ####
#### Class ####
#### #### #### #### ####
@dataclass
class Flashcard:
    """A single flashcard entry whose priority is clamped to a fixed range."""
    ## Dictionary keys used when (de)serialising a card via ToDict/FromDict
    ID_TAG:ClassVar[str] = "_id"
    KEY_TAG:ClassVar[str] = "key"
    VALUE_TAG:ClassVar[str] = "value"
    REMARKS_TAG:ClassVar[str] = "remarks"
    PRIORITY_TAG:ClassVar[str] = "priority"
    INSERTED_TIME_TAG:ClassVar[str] = "inserted"
    MODIFIED_TIME_TAG:ClassVar[str] = "modified"
    ## Inclusive bounds for the priority value
    LOWEST_PRIORITY:ClassVar[int] = 0
    HIGHEST_PRIORITY:ClassVar[int] = 99
    Id:int = -1
    Key:str = ""
    Value:str = ""
    Remarks:str = ""
    InsertedTime:int = 0 ## Unix timestamp
    ModifiedTime:int = 0 ## Unix timestamp
    priority:InitVar[int] = HIGHEST_PRIORITY

    def __post_init__(self, priority:int):
        ## Route the init-only value through the Priority setter so that it
        ## gets clamped like any later assignment would be
        self._priority = 0
        self.Priority = priority

    @property
    def Priority(self):
        return self._priority

    @Priority.setter
    def Priority(self, value:int):
        ## Clamp the incoming value into [LOWEST_PRIORITY, HIGHEST_PRIORITY]
        klass = type(self)
        if value < klass.LOWEST_PRIORITY:
            value = klass.LOWEST_PRIORITY
        elif value > klass.HIGHEST_PRIORITY:
            value = klass.HIGHEST_PRIORITY
        self._priority = value

    def ToDict(self) -> dict:
        """Serialise this card into a plain dict keyed by the *_TAG names."""
        klass = type(self)
        result = {}
        result[klass.ID_TAG] = self.Id
        result[klass.KEY_TAG] = self.Key
        result[klass.VALUE_TAG] = self.Value
        result[klass.REMARKS_TAG] = self.Remarks
        result[klass.PRIORITY_TAG] = self.Priority
        result[klass.INSERTED_TIME_TAG] = self.InsertedTime
        result[klass.MODIFIED_TIME_TAG] = self.ModifiedTime
        return result

    @classmethod
    def FromDict(cls, data:dict) -> Flashcard:
        """Build a Flashcard from a dict produced by ToDict.

        Returns None (after logging) when data is not a dict or when any
        required tag is missing.
        """
        if not isinstance(data, dict):
            log.error(f"{cls.__name__}.FromDict aborted. Reason: data is not a dict object")
            return None
        required_tags = (cls.ID_TAG, cls.KEY_TAG, cls.VALUE_TAG,
            cls.REMARKS_TAG, cls.PRIORITY_TAG, cls.INSERTED_TIME_TAG,
            cls.MODIFIED_TIME_TAG)
        for tag in required_tags:
            if tag not in data:
                log.error(f"{cls.__name__}.FromDict aborted. Reason: Tag \"{tag}\" not found in data")
                return None
        return Flashcard(
            Id=int(data[cls.ID_TAG] ),
            Key=data[cls.KEY_TAG],
            Value=data[cls.VALUE_TAG],
            Remarks=data[cls.REMARKS_TAG],
            InsertedTime=int(data[cls.INSERTED_TIME_TAG] ),
            ModifiedTime=int(data[cls.MODIFIED_TIME_TAG] ),
            priority=int(data[cls.PRIORITY_TAG] )
        )

    @classmethod
    def GetRandomPriorityValue(cls):
        ## randint includes both endpoints, matching the inclusive bounds
        return random.randint(cls.LOWEST_PRIORITY, cls.HIGHEST_PRIORITY)
| 29.262712 | 102 | 0.573414 |
2b56a081e1eabde51def5b0ff9c64ec1024e4fc8 | 5,945 | py | Python | py3canvas/tests/external_tools.py | tylerclair/py3canvas | 7485d458606b65200f0ffa5bbe597a9d0bee189f | [
"MIT"
] | null | null | null | py3canvas/tests/external_tools.py | tylerclair/py3canvas | 7485d458606b65200f0ffa5bbe597a9d0bee189f | [
"MIT"
] | null | null | null | py3canvas/tests/external_tools.py | tylerclair/py3canvas | 7485d458606b65200f0ffa5bbe597a9d0bee189f | [
"MIT"
] | null | null | null | """ExternalTools API Tests for Version 1.0.
This is a testing template for the generated ExternalToolsAPI Class.
"""
import unittest
import requests
import secrets
from py3canvas.apis.external_tools import ExternalToolsAPI
class TestExternalToolsAPI(unittest.TestCase):
    """Tests for the ExternalToolsAPI."""
    # NOTE: this is a generated integration-test template.  Every
    # "None # Change me!!" below is a placeholder that must be filled with a
    # real course/account/group/tool id from the target Canvas instance
    # before these tests can run.  POST/PUT tests are intentionally left as
    # `pass` because they would mutate the instance.
    # NOTE(review): "secrets" here appears to be a local configuration
    # module exposing instance_address/access_token, not the stdlib secrets
    # module -- confirm against the project layout.
    def setUp(self):
        self.client = ExternalToolsAPI(secrets.instance_address, secrets.access_token)
    def test_list_external_tools_courses(self):
        """Integration test for the ExternalToolsAPI.list_external_tools_courses method."""
        course_id = None # Change me!!
        r = self.client.list_external_tools_courses(
            course_id, include_parents=None, search_term=None, selectable=None
        )
    def test_list_external_tools_accounts(self):
        """Integration test for the ExternalToolsAPI.list_external_tools_accounts method."""
        account_id = None # Change me!!
        r = self.client.list_external_tools_accounts(
            account_id, include_parents=None, search_term=None, selectable=None
        )
    def test_list_external_tools_groups(self):
        """Integration test for the ExternalToolsAPI.list_external_tools_groups method."""
        group_id = None # Change me!!
        r = self.client.list_external_tools_groups(
            group_id, include_parents=None, search_term=None, selectable=None
        )
    def test_get_sessionless_launch_url_for_external_tool_courses(self):
        """Integration test for the ExternalToolsAPI.get_sessionless_launch_url_for_external_tool_courses method."""
        course_id = None # Change me!!
        r = self.client.get_sessionless_launch_url_for_external_tool_courses(
            course_id,
            assignment_id=None,
            id=None,
            launch_type=None,
            module_item_id=None,
            url=None,
        )
    def test_get_sessionless_launch_url_for_external_tool_accounts(self):
        """Integration test for the ExternalToolsAPI.get_sessionless_launch_url_for_external_tool_accounts method."""
        account_id = None # Change me!!
        r = self.client.get_sessionless_launch_url_for_external_tool_accounts(
            account_id,
            assignment_id=None,
            id=None,
            launch_type=None,
            module_item_id=None,
            url=None,
        )
    def test_get_single_external_tool_courses(self):
        """Integration test for the ExternalToolsAPI.get_single_external_tool_courses method."""
        course_id = None # Change me!!
        external_tool_id = None # Change me!!
        r = self.client.get_single_external_tool_courses(course_id, external_tool_id)
    def test_get_single_external_tool_accounts(self):
        """Integration test for the ExternalToolsAPI.get_single_external_tool_accounts method."""
        account_id = None # Change me!!
        external_tool_id = None # Change me!!
        r = self.client.get_single_external_tool_accounts(account_id, external_tool_id)
    def test_create_external_tool_courses(self):
        """Integration test for the ExternalToolsAPI.create_external_tool_courses method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass
    def test_create_external_tool_accounts(self):
        """Integration test for the ExternalToolsAPI.create_external_tool_accounts method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass
    def test_edit_external_tool_courses(self):
        """Integration test for the ExternalToolsAPI.edit_external_tool_courses method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass
    def test_edit_external_tool_accounts(self):
        """Integration test for the ExternalToolsAPI.edit_external_tool_accounts method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass
    def test_delete_external_tool_courses(self):
        """Integration test for the ExternalToolsAPI.delete_external_tool_courses method."""
        course_id = None # Change me!!
        external_tool_id = None # Change me!!
        r = self.client.delete_external_tool_courses(course_id, external_tool_id)
    def test_delete_external_tool_accounts(self):
        """Integration test for the ExternalToolsAPI.delete_external_tool_accounts method."""
        account_id = None # Change me!!
        external_tool_id = None # Change me!!
        r = self.client.delete_external_tool_accounts(account_id, external_tool_id)
    def test_add_tool_to_rce_favorites(self):
        """Integration test for the ExternalToolsAPI.add_tool_to_rce_favorites method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass
    def test_remove_tool_from_rce_favorites(self):
        """Integration test for the ExternalToolsAPI.remove_tool_from_rce_favorites method."""
        account_id = None # Change me!!
        id = None # Change me!!
        r = self.client.remove_tool_from_rce_favorites(account_id, id)
    def test_get_visible_course_navigation_tools(self):
        """Integration test for the ExternalToolsAPI.get_visible_course_navigation_tools method."""
        context_codes = None # Change me!!
        r = self.client.get_visible_course_navigation_tools(context_codes)
    def test_get_visible_course_navigation_tools_for_single_course(self):
        """Integration test for the ExternalToolsAPI.get_visible_course_navigation_tools_for_single_course method."""
        course_id = None # Change me!!
        r = self.client.get_visible_course_navigation_tools_for_single_course(course_id)
| 43.07971 | 126 | 0.721447 |
0ae38deeb22769dd4c74616c77ab46ebb5718512 | 3,152 | py | Python | camp/codecs/graphviz.py | pedrovelho/camp | 98105c9054b8db3377cb6a06e7b5451b97c6c285 | [
"MIT"
] | null | null | null | camp/codecs/graphviz.py | pedrovelho/camp | 98105c9054b8db3377cb6a06e7b5451b97c6c285 | [
"MIT"
] | null | null | null | camp/codecs/graphviz.py | pedrovelho/camp | 98105c9054b8db3377cb6a06e7b5451b97c6c285 | [
"MIT"
] | 1 | 2019-02-05T08:49:41.000Z | 2019-02-05T08:49:41.000Z | #
# CAMP
#
# Copyright (C) 2017, 2018 SINTEF Digital
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
#
from camp.codecs.commons import Codec
class Graphviz(Codec):
    """Serialise a CAMP configuration as a Graphviz (DOT) digraph.

    Each stack becomes a 'cluster' subgraph holding one record-shaped node
    per component instance; feature/service provider relationships are
    rendered as directed edges.
    """

    def __init__(self, indentation=2):
        # Current nesting depth (in indentation units) of the emitted text.
        self._indentation_level = 0
        # Number of spaces per indentation unit.
        self._indentation_width = indentation
        # Output stream; set by save_configuration.
        self._stream = None

    def save_configuration(self, configuration, stream):
        """Write 'configuration' to 'stream' in DOT syntax."""
        self._stream = stream
        self._write("digraph Config {")
        self._indent()
        self._declare_nodes_options()
        for index, each_stack in enumerate(configuration.stacks, 1):
            self._declare_cluster(each_stack, index)
        # Service edges are emitted at the top level (they may cross clusters).
        for any_instance in configuration.instances:
            if any_instance.service_providers:
                for each_provider in any_instance.service_providers:
                    self._declare_egde(any_instance, each_provider)
        self._dedent()
        self._write("}")

    def _write(self, text):
        # Emit one line at the current indentation level.
        if not self._stream:
            raise AssertionError("Cannot write, no stream is defined.")
        self._stream.write(" " * self._indentation_level * self._indentation_width)
        self._stream.write(text)
        self._stream.write("\n")

    def _indent(self):
        self._indentation_level += 1

    def _declare_nodes_options(self):
        # Default appearance for every node declared afterwards.
        self._write("node [shape=\"record\","
                    "style=\"filled\","
                    "fillcolor=\"white\"];")

    def _declare_cluster(self, stack, index):
        """Emit one cluster subgraph for a stack and its internal edges."""
        self._write("subgraph cluster_%d {" % index)
        self._indent()
        self._declare_container_options(index)
        for each_instance in stack:
            self._declare_node(each_instance)
        for each_instance in stack:
            if each_instance.feature_provider:
                self._declare_egde(each_instance, each_instance.feature_provider)
        self._dedent()
        self._write("}")

    def _declare_container_options(self, index):
        self._write("label=\"container %d\";" % index)
        self._write("style=\"filled\";")
        self._write("color=\"lightgrey\";")

    def _declare_node(self, instance):
        if instance.configuration:
            # "\\l" is Graphviz's left-justified line break inside a record
            # label.  It must be spelled with an escaped backslash: the bare
            # "\l" used previously is an invalid escape sequence in Python
            # (DeprecationWarning today, SyntaxError in a future version);
            # it only produced the right two characters by accident.
            options = "\\l".join(["%s=%s" % (k.name, v) for k, v in instance.configuration])
            self._write(
                "%s [label=\"{%s|%s}\"];" % (
                    self._escape(instance.name),
                    instance.definition.name,
                    options))
        else:
            self._write(
                "%s [label=\"%s\"];" % (
                    self._escape(instance.name),
                    instance.definition.name))

    def _declare_egde(self, source, target):
        # NOTE(review): the method name keeps the original typo ("egde") so
        # any external caller/subclass keeps working.
        self._write("%s -> %s;" % (self._escape(source.name),
                                   self._escape(target.name)))

    def _dedent(self):
        if self._indentation_level == 0:
            raise AssertionError("Invalid dedent operation!")
        self._indentation_level -= 1

    @staticmethod
    def _escape(text):
        # DOT identifiers cannot contain "-"; other special characters are
        # assumed absent from instance names.
        return text.replace("-", "_")
| 27.172414 | 89 | 0.587246 |
ab4538d5d9786d69450483ac7a5faba0bad2cf3a | 3,416 | py | Python | detect_cycle.py | UPstartDeveloper/Problem_Solving_Practice | bd61333b3b056e82a94297e02bc05a17552e3496 | [
"MIT"
] | null | null | null | detect_cycle.py | UPstartDeveloper/Problem_Solving_Practice | bd61333b3b056e82a94297e02bc05a17552e3496 | [
"MIT"
] | null | null | null | detect_cycle.py | UPstartDeveloper/Problem_Solving_Practice | bd61333b3b056e82a94297e02bc05a17552e3496 | [
"MIT"
] | null | null | null | # Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def detect_cycle(self, head: ListNode):
"""
detect a cycle in a linked list, return the 0-index
position of the repeated node
- -1 if no cycle occurs
are there duplicate elements in the linked list nodes?
Assume:
- there can not be duplicates
- there will be at least one node in the list
Stepping Stone Problems (help anchor my mind in the problem, what we need to do):
- detecting a cycle in a linked list with NO Duplicates -
keep track of items in a Python set(), and an index
- return the index if a duplicate appears, or -1
- detecting a cycle in a linked list with duplicates
- find index of the tail - len(list) = tail_index + 1 - O(n)
- use two pointers - while loop O(n)
- 1 moves faster than the other
- if they overlap at any point, that determines if a cycle is found
- how to track the index?
- return the index of p1 % len(list)
[3,2,0,-4]
p1_index |. p2_index
1 0
3 1
5 2
7 3
9 4
what's the runtime of the while loop?
n = 10,000 --> pretty long
- detecting a cycle in a linked list with duplicates, constant space
"""
# A: calculate the index of the tail
node = head
tail_index = -1
node_items = set()
while node.val not in node_items:
node_items.add(node.val)
node = node.next
tail_index += 1
print(f"Tail index: {tail_index}, set: {node_items}")
LIST_LENGTH = tail_index + 1
# B: init two pointers to traverse the list
faster, slower = head.next, head
faster_index, slower_index = 1, 0
# C: see if the pointers overlap
while faster is not None and faster.next is not None and faster != slower:
# move the pointers up
faster = faster.next.next
slower = slower.next
faster_index += 2
slower_index += 1
print(
f"Faster: {faster_index, faster.val}, Slower: {slower_index, slower.val}"
)
# D: see if a cycle detected
if faster == slower:
return faster_index % LIST_LENGTH
else:
return -1
"""
ll = [3,2,0,-4]
node = None
tail_index = 3
LIST_LENGTH = 4
slower | faster | slower_index | faster_index
3 2 0 1
2 -4 1 3
0 0 2 5
"""
if __name__ == "__main__":
    # Build the example list 3 -> 2 -> 0 -> -4 with a cycle back to index 1.
    values = [3, 2, 0, -4]
    nodes = [ListNode(v) for v in values]
    for current, following in zip(nodes, nodes[1:]):
        current.next = following
    nodes[-1].next = nodes[1]
    # Detect the cycle and print its position.
    print(Solution().detect_cycle(nodes[0]))
| 32.846154 | 91 | 0.492681 |
576216dab94c1680fc81a5ad044ac58da271032c | 12,249 | py | Python | workflow/migrations/0053_auto_20191001_2329.py | mercycorps/toladata | 4d5f9b45905a81af9981b586690e020d5b3bfc60 | [
"Apache-2.0"
] | null | null | null | workflow/migrations/0053_auto_20191001_2329.py | mercycorps/toladata | 4d5f9b45905a81af9981b586690e020d5b3bfc60 | [
"Apache-2.0"
] | 268 | 2020-03-31T15:46:59.000Z | 2022-03-31T18:01:08.000Z | workflow/migrations/0053_auto_20191001_2329.py | Falliatcom-sa/falliatcom | 39fb926de072c296ed32d50cccfb8003ca870739 | [
"Apache-2.0"
] | 1 | 2021-01-05T01:58:24.000Z | 2021-01-05T01:58:24.000Z | # Generated by Django 2.2.5 on 2019-10-02 06:29
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('workflow', '0052_delete_tolasites'),
]
operations = [
migrations.RemoveField(
model_name='adminlevelthree',
name='district',
),
migrations.RemoveField(
model_name='approvalauthority',
name='approval_user',
),
migrations.RemoveField(
model_name='approvalauthority',
name='country',
),
migrations.RemoveField(
model_name='contact',
name='country',
),
migrations.RemoveField(
model_name='district',
name='province',
),
migrations.DeleteModel(
name='FormGuidance',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='approval_submitted_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='approved_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='checked_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='estimated_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='finance_reviewed_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='history_user',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='me_reviewed_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='office',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='program',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='project_type',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='reviewed_by',
),
migrations.RemoveField(
model_name='historicalprojectagreement',
name='sector',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='approval_submitted_by',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='approved_by',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='checked_by',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='estimated_by',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='history_user',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='office',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='program',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='project_agreement',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='project_type',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='reviewed_by',
),
migrations.RemoveField(
model_name='historicalprojectcomplete',
name='sector',
),
migrations.RemoveField(
model_name='office',
name='province',
),
migrations.RemoveField(
model_name='projectagreement',
name='approval_submitted_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='approved_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='capacity',
),
migrations.RemoveField(
model_name='projectagreement',
name='checked_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='estimated_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='evaluate',
),
migrations.RemoveField(
model_name='projectagreement',
name='finance_reviewed_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='me_reviewed_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='office',
),
migrations.RemoveField(
model_name='projectagreement',
name='program',
),
migrations.RemoveField(
model_name='projectagreement',
name='project_type',
),
migrations.RemoveField(
model_name='projectagreement',
name='reviewed_by',
),
migrations.RemoveField(
model_name='projectagreement',
name='sector',
),
migrations.RemoveField(
model_name='projectagreement',
name='site',
),
migrations.RemoveField(
model_name='projectagreement',
name='stakeholder',
),
migrations.RemoveField(
model_name='projectcomplete',
name='approval_submitted_by',
),
migrations.RemoveField(
model_name='projectcomplete',
name='approved_by',
),
migrations.RemoveField(
model_name='projectcomplete',
name='checked_by',
),
migrations.RemoveField(
model_name='projectcomplete',
name='estimated_by',
),
migrations.RemoveField(
model_name='projectcomplete',
name='office',
),
migrations.RemoveField(
model_name='projectcomplete',
name='program',
),
migrations.RemoveField(
model_name='projectcomplete',
name='project_agreement',
),
migrations.RemoveField(
model_name='projectcomplete',
name='project_type',
),
migrations.RemoveField(
model_name='projectcomplete',
name='reviewed_by',
),
migrations.RemoveField(
model_name='projectcomplete',
name='sector',
),
migrations.RemoveField(
model_name='projectcomplete',
name='site',
),
migrations.RemoveField(
model_name='projectcomplete',
name='stakeholder',
),
migrations.RemoveField(
model_name='province',
name='country',
),
migrations.RemoveField(
model_name='stakeholder',
name='approved_by',
),
migrations.RemoveField(
model_name='stakeholder',
name='contact',
),
migrations.RemoveField(
model_name='stakeholder',
name='country',
),
migrations.RemoveField(
model_name='stakeholder',
name='filled_by',
),
migrations.RemoveField(
model_name='stakeholder',
name='formal_relationship_document',
),
migrations.RemoveField(
model_name='stakeholder',
name='sectors',
),
migrations.RemoveField(
model_name='stakeholder',
name='type',
),
migrations.RemoveField(
model_name='stakeholder',
name='vetting_document',
),
migrations.RemoveField(
model_name='village',
name='admin_3',
),
migrations.RemoveField(
model_name='village',
name='district',
),
migrations.AlterModelOptions(
name='checklist',
options={},
),
migrations.RemoveField(
model_name='benchmarks',
name='agreement',
),
migrations.RemoveField(
model_name='benchmarks',
name='complete',
),
migrations.RemoveField(
model_name='budget',
name='agreement',
),
migrations.RemoveField(
model_name='budget',
name='complete',
),
migrations.RemoveField(
model_name='checklist',
name='agreement',
),
migrations.RemoveField(
model_name='documentation',
name='project',
),
migrations.RemoveField(
model_name='documentation',
name='template',
),
migrations.RemoveField(
model_name='historicalbudget',
name='agreement',
),
migrations.RemoveField(
model_name='historicalbudget',
name='complete',
),
migrations.RemoveField(
model_name='historicalsiteprofile',
name='admin_level_three',
),
migrations.RemoveField(
model_name='historicalsiteprofile',
name='district',
),
migrations.RemoveField(
model_name='historicalsiteprofile',
name='office',
),
migrations.RemoveField(
model_name='historicalsiteprofile',
name='province',
),
migrations.RemoveField(
model_name='historicalsiteprofile',
name='village',
),
migrations.RemoveField(
model_name='monitor',
name='agreement',
),
migrations.RemoveField(
model_name='monitor',
name='complete',
),
migrations.RemoveField(
model_name='program',
name='fund_code',
),
migrations.RemoveField(
model_name='siteprofile',
name='admin_level_three',
),
migrations.RemoveField(
model_name='siteprofile',
name='district',
),
migrations.RemoveField(
model_name='siteprofile',
name='office',
),
migrations.RemoveField(
model_name='siteprofile',
name='province',
),
migrations.RemoveField(
model_name='siteprofile',
name='village',
),
migrations.DeleteModel(
name='AdminLevelThree',
),
migrations.DeleteModel(
name='ApprovalAuthority',
),
migrations.DeleteModel(
name='Capacity',
),
migrations.DeleteModel(
name='Contact',
),
migrations.DeleteModel(
name='District',
),
migrations.DeleteModel(
name='Evaluate',
),
migrations.DeleteModel(
name='FundCode',
),
migrations.DeleteModel(
name='HistoricalProjectAgreement',
),
migrations.DeleteModel(
name='HistoricalProjectComplete',
),
migrations.DeleteModel(
name='Office',
),
migrations.DeleteModel(
name='ProjectAgreement',
),
migrations.DeleteModel(
name='ProjectComplete',
),
migrations.DeleteModel(
name='ProjectType',
),
migrations.DeleteModel(
name='Province',
),
migrations.DeleteModel(
name='Stakeholder',
),
migrations.DeleteModel(
name='StakeholderType',
),
migrations.DeleteModel(
name='Template',
),
migrations.DeleteModel(
name='Village',
),
]
| 28.419954 | 52 | 0.521675 |
64c6d6095b43ec8de63830ed72f230d3d2c38cc7 | 785 | py | Python | quantum_systems/__init__.py | Benedicte/quantum-systems | 31e6f78dceb03f5d01092f4008fbab38516a0623 | [
"MIT"
] | null | null | null | quantum_systems/__init__.py | Benedicte/quantum-systems | 31e6f78dceb03f5d01092f4008fbab38516a0623 | [
"MIT"
] | null | null | null | quantum_systems/__init__.py | Benedicte/quantum-systems | 31e6f78dceb03f5d01092f4008fbab38516a0623 | [
"MIT"
] | null | null | null | from .system import QuantumSystem
from .general_orbital_system import GeneralOrbitalSystem
from .spatial_orbital_system import SpatialOrbitalSystem
from .quest_system import QuestSystem
from .basis_set import BasisSet
from .custom_system import (
# construct_psi4_system,
# construct_pyscf_system,
construct_pyscf_system_ao,
construct_pyscf_system_rhf,
construct_quest_system,
construct_quest_system_rhf,
)
from .random_basis import RandomBasisSet
from quantum_systems.sinc_dvr.one_dim.sinc_dvr import ODSincDVR
from quantum_systems.quantum_dots.one_dim.one_dim_qd import ODQD
from quantum_systems.quantum_dots.two_dim.two_dim_ho import (
TwoDimensionalHarmonicOscillator,
TwoDimensionalDoubleWell,
TwoDimSmoothDoubleWell,
TwoDimHarmonicOscB,
)
| 34.130435 | 64 | 0.83949 |
fee1fcaa33925dc39c10ce02a49691251c095af2 | 3,817 | py | Python | timetrackerctl/rofi/main.py | theCapypara/timetrackerctl | 791954bfb57d400b7e99ff49ce6ef3313646f51b | [
"MIT"
] | null | null | null | timetrackerctl/rofi/main.py | theCapypara/timetrackerctl | 791954bfb57d400b7e99ff49ce6ef3313646f51b | [
"MIT"
] | null | null | null | timetrackerctl/rofi/main.py | theCapypara/timetrackerctl | 791954bfb57d400b7e99ff49ce6ef3313646f51b | [
"MIT"
] | null | null | null | import sys
import traceback
from datetime import datetime
from typing import Callable, Tuple, List, Dict, Optional
from rofi import Rofi
from zenipy import zenipy
from timetrackerctl.config import Config
from timetrackerctl.mngmnt.jira_tempo_manager import JiraTempoManager
from timetrackerctl.mngmnt.ticket_manager import TicketManager
from timetrackerctl.model.ticket import Ticket
from timetrackerctl.storage import Storage
from timetrackerctl.util.timeutil import format_timeframe
r = Rofi(rofi_args=['-i', '-columns', '1'])
SEP1 = '------'
SEP2 = '------ '
SEPS = [SEP1, SEP2]
BACK = '< Back'
def start_quick(tm: TicketManager):
    """Show the quick-list tickets in a rofi menu and start the chosen one."""
    entries = {}
    for ticket in tm.quick_list():
        source_tag = f' [#{ticket.source.name}]'
        if ticket.msg:
            label = f'{ticket.key} - {ticket.title}: {ticket.msg}' + source_tag
        else:
            label = f'{ticket.key} - {ticket.title}' + source_tag
        entries[label] = ticket
    # Separator and back rows carry no ticket payload.
    entries[SEP2] = None
    entries[BACK] = None
    choice = r.text_entry('Select', options=entries.keys())
    _try_quick_start(tm, entries, choice)
def start_saved(tm: TicketManager):
    """Offer saved ([#S]) and recent ([#R]) tickets in a rofi menu."""
    entries = {f'{t.msg}: {t.key} - {t.title} [#S]': t for t in tm.saved()}
    # Visual divider between the saved and recent sections.
    entries[SEP1] = None
    entries.update({f'{t.msg}: {t.key} - {t.title} [#R]': t for t in tm.recent()})
    entries[SEP2] = None
    entries[BACK] = None
    choice = r.text_entry('Select', options=entries.keys())
    _try_quick_start(tm, entries, choice)
def start_search(tm: TicketManager, searched: str = None):
    """Run a full ticket search and present the hits in a rofi menu.

    A single hit is started immediately without showing the menu.
    """
    hits = tm.search(searched) if searched is not None else []
    if len(hits) == 1:
        return _start_concrete(tm, hits[0])
    entries = {f'{hit.key} - {hit.title}': hit for hit in hits}
    entries[SEP2] = None
    entries[BACK] = None
    prompt = 'Ctrl+Enter: Search for current input'
    if searched:
        prompt = f'> {searched}\n' + prompt
    choice = r.text_entry('Search', message=prompt, options=entries.keys())
    _try_quick_start(tm, entries, choice)
def _try_quick_start(tm: TicketManager, options: Dict[str, Optional[Ticket]], selected: str):
    """Dispatch a rofi selection: start the ticket, go back, or search for free text."""
    if selected is None or selected in SEPS:
        # Dismissed menu or a separator row: nothing to do.
        return
    if selected == BACK:
        return run()
    if selected in options:
        ticket = options[selected]
        assert isinstance(ticket, Ticket)
        return _start_concrete(tm, ticket)
    # Free-typed text that matches no menu entry becomes a search query.
    return start_search(tm, selected)
def _start_concrete(tm: TicketManager, t: Ticket):
    """Begin tracking ticket *t*. Not implemented yet."""
    raise NotImplementedError()
def dismiss(tm: TicketManager):
    """Dismiss the current tracking session. Not implemented yet."""
    raise NotImplementedError()
def finish(tm: TicketManager):
    """Finish the current tracking session. Not implemented yet."""
    raise NotImplementedError()
def track(tm: TicketManager):
    """Track time manually. Not implemented yet."""
    raise NotImplementedError()
def open(tm: TicketManager):
    """Invoke JiraTempoManager(...).show_tempo().

    NOTE: this name shadows the builtin open(); ACTION_MAP below relies on it.
    """
    JiraTempoManager(Config()).show_tempo()
# Menu label -> handler. List order defines the rofi menu order and the
# index returned by r.select() in run(), so entries must not be reordered.
ACTION_MAP: List[Tuple[str, Callable[[TicketManager], None]]] = [
    ('Start: Quick', start_quick),
    ('Start: Recent & Saved Tasks', start_saved),
    ('Start: Full Search', start_search),
    ('Dismiss', dismiss),
    ('Finish', finish),
    ('Track Manually', track),
    ('Open Tempo', open)
]
def run():
    """Show the top-level rofi menu and invoke the selected action."""
    tm = TicketManager(Config(), Storage())
    status = None
    if tm.open():
        current = tm.current()
        elapsed = format_timeframe(datetime.now() - current.start)
        status = f'Current ({elapsed}): {current.key} - {current.title}'
        if current.msg:
            status = f'{status}: {current.msg}'
    labels = [label for label, _ in ACTION_MAP]
    index, key = r.select('timetrackerctl >', labels, message=status)
    if key != 0:
        # Any non-zero key code means the menu was cancelled.
        return
    ACTION_MAP[index][1](tm)
def handle_inquiry(tm: TicketManager):
    """Handle a tracking inquiry. Not implemented yet."""
    raise NotImplementedError()
if __name__ == '__main__':
    try:
        run()
    except Exception:
        # Only catch Exception (not a bare except) so Ctrl-C / SystemExit
        # still terminate normally instead of popping an error dialog.
        # Angle brackets are replaced — presumably because zenity treats the
        # text as markup; TODO confirm.
        zenipy.error(title='Error!', text=''.join(traceback.format_exception(*sys.exc_info())).replace('<', '(').replace('>', ')'))
| 28.916667 | 131 | 0.651821 |
270f765caebd02ac86f679424d70feb7c1206c1b | 5,825 | py | Python | pandas/tests/util/test_assert_index_equal.py | LauraCollard/pandas | b1c3a9031569334cafc4e8d45d35408421f7dea4 | [
"BSD-3-Clause"
] | 5 | 2019-07-26T15:22:41.000Z | 2021-09-28T09:22:17.000Z | pandas/tests/util/test_assert_index_equal.py | LauraCollard/pandas | b1c3a9031569334cafc4e8d45d35408421f7dea4 | [
"BSD-3-Clause"
] | 16 | 2021-03-19T09:44:52.000Z | 2022-03-12T00:22:14.000Z | pandas/tests/util/test_assert_index_equal.py | LauraCollard/pandas | b1c3a9031569334cafc4e8d45d35408421f7dea4 | [
"BSD-3-Clause"
] | 9 | 2020-02-05T10:24:12.000Z | 2020-02-10T13:08:50.000Z | import numpy as np
import pytest
from pandas import Categorical, Index, MultiIndex, NaT
from pandas.util.testing import assert_index_equal
# Comparing a flat Index against a MultiIndex must fail with a message that
# reports the differing level counts and both operands' reprs.
def test_index_equal_levels_mismatch():
    msg = """Index are different
Index levels are different
\\[left\\]:  1, Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: 2, MultiIndex\\(\\[\\('A', 1\\),
            \\('A', 2\\),
            \\('B', 3\\),
            \\('B', 4\\)\\],
           \\)"""
    idx1 = Index([1, 2, 3])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, exact=False)
# A single differing value in one MultiIndex level is reported per-level with
# its percentage; check_exact is a pytest fixture supplying True and False.
def test_index_equal_values_mismatch(check_exact):
    msg = """MultiIndex level \\[1\\] are different
MultiIndex level \\[1\\] values are different \\(25\\.0 %\\)
\\[left\\]:  Int64Index\\(\\[2, 2, 3, 4\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""
    idx1 = MultiIndex.from_tuples([("A", 2), ("A", 2), ("B", 3), ("B", 4)])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, check_exact=check_exact)
# Indexes of different lengths must fail regardless of check_exact.
def test_index_equal_length_mismatch(check_exact):
    msg = """Index are different
Index length are different
\\[left\\]:  3, Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: 4, Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""
    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 3, 4])
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, check_exact=check_exact)
# With exact=True an Int64Index and a Float64Index are different classes
# even when their values compare equal.
def test_index_equal_class_mismatch(check_exact):
    msg = """Index are different
Index classes are different
\\[left\\]:  Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: Float64Index\\(\\[1\\.0, 2\\.0, 3\\.0\\], dtype='float64'\\)"""
    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 3.0])
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, exact=True, check_exact=check_exact)
# Values that differ only by ~1e-10 pass unless check_exact=True.
def test_index_equal_values_close(check_exact):
    idx1 = Index([1, 2, 3.0])
    idx2 = Index([1, 2, 3.0000000001])
    if check_exact:
        msg = """Index are different
Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Float64Index\\(\\[1.0, 2.0, 3.0], dtype='float64'\\)
\\[right\\]: Float64Index\\(\\[1.0, 2.0, 3.0000000001\\], dtype='float64'\\)"""
        with pytest.raises(AssertionError, match=msg):
            assert_index_equal(idx1, idx2, check_exact=check_exact)
    else:
        assert_index_equal(idx1, idx2, check_exact=check_exact)
# A ~1e-4 difference passes only when exact checking is off AND the
# less-precise comparison is requested.
def test_index_equal_values_less_close(check_exact, check_less_precise):
    idx1 = Index([1, 2, 3.0])
    idx2 = Index([1, 2, 3.0001])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)
    if check_exact or not check_less_precise:
        msg = """Index are different
Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Float64Index\\(\\[1.0, 2.0, 3.0], dtype='float64'\\)
\\[right\\]: Float64Index\\(\\[1.0, 2.0, 3.0001\\], dtype='float64'\\)"""
        with pytest.raises(AssertionError, match=msg):
            assert_index_equal(idx1, idx2, **kwargs)
    else:
        assert_index_equal(idx1, idx2, **kwargs)
# An integer value difference always fails, whatever the precision flags.
def test_index_equal_values_too_far(check_exact, check_less_precise):
    idx1 = Index([1, 2, 3])
    idx2 = Index([1, 2, 4])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)
    msg = """Index are different
Index values are different \\(33\\.33333 %\\)
\\[left\\]:  Int64Index\\(\\[1, 2, 3\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 4\\], dtype='int64'\\)"""
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, **kwargs)
# MultiIndex level value differences always fail, whatever the precision flags.
def test_index_equal_level_values_mismatch(check_exact, check_less_precise):
    idx1 = MultiIndex.from_tuples([("A", 2), ("A", 2), ("B", 3), ("B", 4)])
    idx2 = MultiIndex.from_tuples([("A", 1), ("A", 2), ("B", 3), ("B", 4)])
    kwargs = dict(check_exact=check_exact, check_less_precise=check_less_precise)
    msg = """MultiIndex level \\[1\\] are different
MultiIndex level \\[1\\] values are different \\(25\\.0 %\\)
\\[left\\]:  Int64Index\\(\\[2, 2, 3, 4\\], dtype='int64'\\)
\\[right\\]: Int64Index\\(\\[1, 2, 3, 4\\], dtype='int64'\\)"""
    with pytest.raises(AssertionError, match=msg):
        assert_index_equal(idx1, idx2, **kwargs)
# Name comparison: identical or identity-equal names (incl. NaN-is-NaN and
# NaT-is-NaT) pass; anything else fails, with quoting applied to "x".
@pytest.mark.parametrize(
    "name1,name2",
    [(None, "x"), ("x", "x"), (np.nan, np.nan), (NaT, NaT), (np.nan, NaT)],
)
def test_index_equal_names(name1, name2):
    msg = """Index are different
Attribute "names" are different
\\[left\\]:  \\[{name1}\\]
\\[right\\]: \\[{name2}\\]"""
    idx1 = Index([1, 2, 3], name=name1)
    idx2 = Index([1, 2, 3], name=name2)
    if name1 == name2 or name1 is name2:
        assert_index_equal(idx1, idx2)
    else:
        name1 = "'x'" if name1 == "x" else name1
        name2 = "'x'" if name2 == "x" else name2
        msg = msg.format(name1=name1, name2=name2)
        with pytest.raises(AssertionError, match=msg):
            assert_index_equal(idx1, idx2)
# Differing category sets only fail when check_categorical is enabled.
def test_index_equal_category_mismatch(check_categorical):
    msg = """Index are different
Attribute "dtype" are different
\\[left\\]:  CategoricalDtype\\(categories=\\['a', 'b'\\], ordered=False\\)
\\[right\\]: CategoricalDtype\\(categories=\\['a', 'b', 'c'\\], \
ordered=False\\)"""
    idx1 = Index(Categorical(["a", "b"]))
    idx2 = Index(Categorical(["a", "b"], categories=["a", "b", "c"]))
    if check_categorical:
        with pytest.raises(AssertionError, match=msg):
            assert_index_equal(idx1, idx2, check_categorical=check_categorical)
    else:
        assert_index_equal(idx1, idx2, check_categorical=check_categorical)
| 33.477011 | 81 | 0.61133 |
1ba6225c934a1dd0dba2505f2d9e1fd539e8a752 | 4,020 | py | Python | sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py | lmcarreiro/azure-sdk-for-python | 0bde943383725320eaaa1408fa6264fb0cd0febf | [
"MIT"
] | null | null | null | sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py | lmcarreiro/azure-sdk-for-python | 0bde943383725320eaaa1408fa6264fb0cd0febf | [
"MIT"
] | null | null | null | sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py | lmcarreiro/azure-sdk-for-python | 0bde943383725320eaaa1408fa6264fb0cd0febf | [
"MIT"
] | null | null | null | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import asyncio
import os
class QueueServiceSamplesAsync(object):
    """Async Azure Storage Queue service samples.

    The ``# [START ...]`` / ``# [END ...]`` marker comments delimit snippets
    extracted into the published documentation — do not move or rename them.
    """
    # Connection string is taken from the environment; None if unset.
    connection_string = os.getenv("CONNECTION_STRING")
    async def queue_service_properties(self):
        """Set and then read back the queue service properties (logging, metrics, CORS)."""
        # Instantiate the QueueServiceClient from a connection string
        from azure.storage.queue.aio import QueueServiceClient
        queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string)
        # [START async_set_queue_service_properties]
        # Create service properties
        from azure.storage.queue import QueueAnalyticsLogging, Metrics, CorsRule, RetentionPolicy
        # Create logging settings
        logging = QueueAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))
        # Create metrics for requests statistics
        hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))
        minute_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))
        # Create CORS rules
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
        allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
        cors_rule2 = CorsRule(
            allowed_origins,
            allowed_methods,
            max_age_in_seconds=max_age_in_seconds,
            exposed_headers=exposed_headers,
            allowed_headers=allowed_headers
        )
        cors = [cors_rule1, cors_rule2]
        # Set the service properties
        await queue_service.set_service_properties(logging, hour_metrics, minute_metrics, cors)
        # [END async_set_queue_service_properties]
        # [START async_get_queue_service_properties]
        properties = await queue_service.get_service_properties()
        # [END async_get_queue_service_properties]
    async def queues_in_account(self):
        """Create a queue, list queues (all, then by prefix), and delete the queue."""
        # Instantiate the QueueServiceClient from a connection string
        from azure.storage.queue.aio import QueueServiceClient
        queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string)
        # [START async_qsc_create_queue]
        await queue_service.create_queue("my_queue")
        # [END async_qsc_create_queue]
        try:
            # [START async_qsc_list_queues]
            # List all the queues in the service
            list_queues = queue_service.list_queues()
            async for queue in list_queues:
                print(queue)
            # List the queues in the service that start with the name "my_"
            list_my_queues = queue_service.list_queues(name_starts_with="my_")
            async for queue in list_my_queues:
                print(queue)
            # [END async_qsc_list_queues]
        finally:
            # [START async_qsc_delete_queue]
            await queue_service.delete_queue("my_queue")
            # [END async_qsc_delete_queue]
    async def get_queue_client(self):
        """Obtain a QueueClient for a specific queue from the service client."""
        # Instantiate the QueueServiceClient from a connection string
        from azure.storage.queue.aio import QueueServiceClient, QueueClient
        queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string)
        # [START async_get_queue_client]
        # Get the queue client to interact with a specific queue
        queue = queue_service.get_queue_client(queue_name="my_queue")
        # [END async_get_queue_client]
| 43.225806 | 131 | 0.66592 |
f4258ed993dbe7517e7a2e4f48c930511c2d3023 | 2,772 | py | Python | data processing/convertToJson.py | xiameng552180/SeqDynamics_V0 | 9a8e2f54aa09d56e74f5bb649e3e05a18a508d77 | [
"MIT"
] | null | null | null | data processing/convertToJson.py | xiameng552180/SeqDynamics_V0 | 9a8e2f54aa09d56e74f5bb649e3e05a18a508d77 | [
"MIT"
] | null | null | null | data processing/convertToJson.py | xiameng552180/SeqDynamics_V0 | 9a8e2f54aa09d56e74f5bb649e3e05a18a508d77 | [
"MIT"
] | null | null | null | import os
def read_file_as_str(file_path):
    """Return the entire contents of *file_path* as a string.

    Raises:
        TypeError: if *file_path* is not an existing regular file.
            (TypeError is kept for backward compatibility with existing
            callers, even though FileNotFoundError would be more idiomatic.)
    """
    if not os.path.isfile(file_path):
        raise TypeError(file_path + " does not exist")
    # Use a context manager so the file handle is closed deterministically
    # instead of being leaked until garbage collection.
    with open(file_path) as f:
        return f.read()
if __name__ == "__main__":
# content = read_file_as_str("rank.json")
# contentSplit = content.split("}{")
# result = "{\"data\":["
# for i in range(0, len(contentSplit)-1):
# result += contentSplit[i] + "},{"
# result += contentSplit[len(contentSplit)-1]
# result += "]}"
# f = open("finalrank.json", "w")
content0 = read_file_as_str("data10Month.json")
content1 = read_file_as_str("finalrank.json")
result = "{\"0\":" + content0 +", \"1\":" + content1 + "}"
f = open("data11.json", "w")
print(result, file = f)
| 86.625 | 2,052 | 0.164141 |
52f0e32b43a57bc86b63f6361b89c9ee00074271 | 542 | py | Python | python/idol/transformrules.py | epkaz93/idol | f378b7131bf2795d548382564158aec3268aa13e | [
"MIT"
] | null | null | null | python/idol/transformrules.py | epkaz93/idol | f378b7131bf2795d548382564158aec3268aa13e | [
"MIT"
] | null | null | null | python/idol/transformrules.py | epkaz93/idol | f378b7131bf2795d548382564158aec3268aa13e | [
"MIT"
] | null | null | null | from __future__ import annotations
import six
import abc
from idol.colour import Colour
import typing
if typing.TYPE_CHECKING:
from idol import Icon
@six.add_metaclass(abc.ABCMeta)
class TransformRuleBase(object):
    """Abstract base class for icon transform rules."""
    @abc.abstractmethod
    def execute(self, icon: Icon) -> Icon:
        """Apply this rule to *icon* and return the resulting Icon."""
        raise NotImplementedError()
class ColouriseTransformRule(TransformRuleBase):
    """Transform rule that recolours an icon with a fixed colour."""

    def __init__(self, colour: Colour):
        """Store the colour that execute() will apply."""
        self._colour = colour

    def execute(self, icon: Icon) -> Icon:
        """Return *icon* recoloured via Icon.coloured_icon()."""
        target = self._colour
        return icon.coloured_icon(target)
| 18.689655 | 48 | 0.728782 |
2db56b406a84982b442cc056b5376cb27b355d57 | 2,532 | py | Python | insights/util/canonical_facts.py | chrismeyersfsu/insights-core | cb5c1ec41f9c783af72cbca855b457851e57f4aa | [
"Apache-2.0"
] | null | null | null | insights/util/canonical_facts.py | chrismeyersfsu/insights-core | cb5c1ec41f9c783af72cbca855b457851e57f4aa | [
"Apache-2.0"
] | null | null | null | insights/util/canonical_facts.py | chrismeyersfsu/insights-core | cb5c1ec41f9c783af72cbca855b457851e57f4aa | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function
from insights import rule, make_metadata, run
from insights.specs import Specs
from insights.core import Parser
from insights.core.plugins import parser
@parser(Specs.ip_addresses)
class IPs(Parser):
    """
    Parser for the output of ``hostname -I``.

    The command prints every assigned IP address on one
    whitespace-separated line, for example::

        192.168.1.71 10.88.0.1 172.17.0.1 172.18.0.1 10.10.121.131 2600:1700:720:7e30:e4ef:e9d0:7ea1:c8a7

    ``self.data`` becomes the list of those address strings::

        [
            "192.168.1.71",
            "10.88.0.1",
            "172.17.0.1",
            "172.18.0.1",
            "10.10.121.131",
            "2600:1700:720:7e30:e4ef:e9d0:7ea1:c8a7"
        ]
    """
    def parse_content(self, content):
        # str.split() with no argument already ignores trailing whitespace.
        self.data = content[0].split()
@parser(Specs.subscription_manager_id)
class SubscriptionManagerID(Parser):
    """
    Parser for ``subscription-manager identity`` output.

    Typical output::

        system identity: 6655c27c-f561-4c99-a23f-f53e5a1ef311
        name: rhel7.localdomain
        org name: 1234567
        org ID: 1234567

    ``self.data`` holds the UUID from the first line, e.g.
    ``6655c27c-f561-4c99-a23f-f53e5a1ef311``.
    """
    def parse_content(self, content):
        # Everything after the last ':' on the first line is the identity.
        self.data = content[0].rpartition(":")[2].strip()
def _safe_parse(ds):
try:
return ds.content[0]
except Exception:
return None
def _filter_falsy(dict_):
return dict((k, v) for k, v in dict_.items() if v)
@rule(
    optional=[
        Specs.machine_id,
        Specs.etc_machine_id,
        Specs.bios_uuid,
        SubscriptionManagerID,
        IPs,
        Specs.hostname,
        Specs.mac_addresses,
    ]
)
def canonical_facts(
    insights_id, machine_id, bios_uuid, submanid, ips, fqdn, mac_addresses
):
    """Gather canonical identification facts and emit them as rule metadata.

    Falsy facts are dropped before the metadata response is built.
    """
    if mac_addresses:
        macs = [mac for mac in (_safe_parse(c) for c in mac_addresses) if mac]
    else:
        macs = []
    facts = {
        "insights_id": _safe_parse(insights_id),
        "machine_id": _safe_parse(machine_id),
        "bios_uuid": _safe_parse(bios_uuid),
        "subscription_manager_id": submanid.data if submanid else None,
        "ip_addresses": ips.data if ips else [],
        "mac_addresses": macs,
        "fqdn": _safe_parse(fqdn),
    }
    return make_metadata(**_filter_falsy(facts))
def get_canonical_facts(path=None):
    """Run the canonical_facts rule (optionally against an archive at *path*)
    and return its result dict without the internal "type" key."""
    broker = run(canonical_facts, root=path)
    result = broker[canonical_facts]
    result.pop("type")
    return result
if __name__ == "__main__":
import json
print(json.dumps(get_canonical_facts()))
| 23.444444 | 104 | 0.637836 |
77f2111f5ff173ed9511e818fe62f6e75c5ef2ee | 11,324 | py | Python | plugins/modules/kibana_alert.py | exp-hc/ansible-collection-elastic | 668f6e7be368e0307df0514c999f330be0ead070 | [
"Apache-2.0"
] | null | null | null | plugins/modules/kibana_alert.py | exp-hc/ansible-collection-elastic | 668f6e7be368e0307df0514c999f330be0ead070 | [
"Apache-2.0"
] | null | null | null | plugins/modules/kibana_alert.py | exp-hc/ansible-collection-elastic | 668f6e7be368e0307df0514c999f330be0ead070 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright 2021 Expedient
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
#from plugins.modules.ece_cluster import DOCUMENTATION
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: kibana_alert
short_description: Create or delete alerts in Kibana
version_added: 2.11.1
author: Mike Garuccio (@mgaruccio)
requirements:
- python3
description:
- "This module creates or deletes alerts in kibana"
- "currently supports threshold alerts"
options:
state:
description:
- setting whether alert should be created or deleted
choices: ['present', 'absent']
default: present
type: str
alert_name:
description:
- name of the alert to create
required: True
type: str
enabled:
description:
- whether to enable the alert when creating
default: True
type: bool
alert_type:
description:
- type of alert to create
choices:
- metrics_threshold
tags:
description:
- metadata tags to attach to the alert
type: str
check_every:
description:
- frequency to check the alert on
default: 1m
type: str
notify_on:
description:
- when to send the alert
default: status_change
choices:
- status_change
type: str
conditions:
description:
- dictionary defining which conditions to alert on
- only used for metrics threshold alerts.
- see examples for details
type: dict
filter:
description:
- kql filter to apply to the conditions
type: str
filter_query:
description:
- lucence query to apply to the conditions
- at this time both this and "filter" are required for proper functioning of the module
- easiest way to get this is to do a kibana_alert_facts on an existing alert with the correct config
- alternatively can view the request in the discover tab of kibana
alert_on_no_data:
description:
whether to alert if there is no data available in the check period
type: bool
group_by:
description:
- defines the "alert for every" field in the Kibana alert
- generally the sensible default is host.name
default: host.name
type: str
actions:
description:
- actions to run when alert conditions are triggered
type: dict
consumer:
description:
- name of the application that owns the alert
default: alerts
type: str
extends_documentation_fragment:
- expedient.elastic.elastic_auth_options.documentation
'''
try:
    from ansible_collections.expedient.elastic.plugins.module_utils.kibana import Kibana
except ImportError:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit during import
    # are not swallowed. Fallback for running outside an installed
    # collection (e.g. a local checkout): add the checkout's module_utils
    # directory to the import path and import Kibana from there.
    import sys
    import os
    util_path = f'{os.getcwd()}/plugins/module_utils'
    sys.path.append(util_path)
    from kibana import Kibana
from ansible.module_utils.basic import AnsibleModule
from json import dumps
# Module-parameter time unit -> Kibana single-letter time unit suffix.
time_unit_lookup = {
    'second': 's',
    'seconds': 's',
    'minute': 'm',
    'minutes': 'm',
    'hour': 'h',
    'hours': 'h',
    'day': 'd',
    'days': 'd',
}
# Module 'alert_type' -> Kibana alertTypeId.
alert_type_lookup = {
    'metrics_threshold': 'metrics.alert.threshold'
}
# Module action 'action_type' -> Kibana actionTypeId.
action_type_lookup = {
    'email': '.email',
    'index': '.index',
    'webhook': '.webhook'
}
# Module action 'run_when' -> Kibana action group id.
# Need to get warning thresholds added here too
action_group_lookup = {
    'alert': 'metrics.threshold.fired',
    'recovered': 'metrics.threshold.recovered'
}
# Action type -> name of the params field that carries the action body.
action_param_type_lookup = {
    'index': 'documents',
    'webhook': 'body'
}
# Condition 'state' -> Kibana comparator operator.
state_lookup = {
    'above': '>',
    'below': '<'
}
# Module 'notify_on' -> Kibana notifyWhen value.
notify_lookup = {
    'status_change': 'onActionGroupChange'
}
class KibanaAlert(Kibana):
    """Wrapper around the Kibana alerting API for a single named alert."""
    def __init__(self, module):
        """Cache the module parameters and look up any existing alert by name."""
        super().__init__(module)
        self.module = module
        self.alert_name = self.module.params.get('alert_name')
        self.alert_type = self.module.params.get('alert_type')
        self.enabled = self.module.params.get('enabled')
        self.tags = self.module.params.get('tags')
        ## 'check_every' is passed through unchanged as the schedule interval;
        ## split_time_string() below is available if the API ever needs it split.
        self.check_every = self.module.params.get('check_every')
        self.notify_when = self.module.params.get('notify_on')
        self.group_by = self.module.params.get('group_by')
        self.alert_on_no_data = self.module.params.get('alert_on_no_data')
        self.consumer = self.module.params.get('consumer')
        self.filter = self.module.params.get('filter')
        self.filter_query = self.module.params.get('filter_query')
        # Looked up once at construction; main() treats a falsy value as
        # "alert does not exist".
        self.alert = self.get_alert_by_name(self.alert_name)
    def split_time_string(self, time_string):
        """Split a string like '5m' into its numeric head and unit tail, e.g. ('5', 'm')."""
        tail = time_string.lstrip('0123456789')
        head = time_string[:len(time_string) - len(tail)]
        return head, tail
    # This will definitely need changes as the alert module's functionality
    # expands; currently it really only handles metrics thresholds.
    def format_conditions(self):
        """Translate the module's 'conditions' parameter into the Kibana 'criteria' payload."""
        conditions = self.module.params.get('conditions')
        formatted_conditions = []
        if self.alert_type == 'metrics_threshold':
            for condition in conditions:
                formatted_condition = {
                    'aggType': condition['when'],
                    'comparator': state_lookup[condition['state']],
                    # A threshold of exactly 0.0 is sent as the int 0;
                    # every other value keeps its float form.
                    'threshold': [condition['threshold']] if condition['threshold'] != 0.0 else [int(condition['threshold'])],
                    'timeSize': condition['time_period'],
                    'timeUnit': time_unit_lookup[condition['time_unit']],
                }
                if condition['field'] is not None:
                    formatted_condition['metric'] = condition['field']
                formatted_conditions.append(formatted_condition)
        return formatted_conditions
    def format_actions(self):
        """Translate the module's 'actions' parameter into the Kibana 'actions' payload."""
        actions = self.module.params.get('actions')
        formatted_actions = [{
            'actionTypeId': action_type_lookup[action['action_type']],
            'group': action_group_lookup[action['run_when']],
            'params': {
                # Plain text bodies are wrapped in a list; JSON bodies are serialized.
                action_param_type_lookup[action['action_type']]: [action['body']] if action['body'] else dumps(action['body_json'], indent=2)
            },
            'id': self.get_alert_connector_by_name(action['connector'])['id'] ## need to actually implement this
        } for action in actions]
        return formatted_actions
    def create_alert(self):
        """POST a new alert built from the module parameters and return the API response."""
        endpoint = 'alerts/alert'
        criteria = self.format_conditions()
        data = {
            'notifyWhen': notify_lookup[self.notify_when],
            'params': {
                'criteria': criteria,
                'alertOnNoData': self.alert_on_no_data,
                'sourceId': 'default' # entirely unclear what this does, but it appears to be a static value, so hard-coding for now
            },
            'consumer': self.consumer,
            'alertTypeId': alert_type_lookup[self.alert_type],
            'schedule': {
                'interval': self.check_every
            },
            'actions': self.format_actions(),
            'tags': self.tags,
            'name': self.alert_name,
            'enabled': self.enabled
        }
        if self.filter:
            # Per the module docs, both the KQL text and the pre-built query
            # structure are currently required for filtering to work.
            data['params']['filterQueryText'] = self.filter
            data['params']['filterQuery'] = self.filter_query
        if self.group_by:
            data['params']['groupBy'] = self.group_by
        result = self.send_api_request(endpoint, 'POST', data=data)
        return result
    def delete_alert(self):
        """DELETE the alert previously looked up by name in __init__."""
        endpoint = f'alerts/alert/{self.alert["id"]}'
        return self.send_api_request(endpoint, 'DELETE')
def main():
    """Ansible module entry point: create or delete a Kibana alert."""
    module_args=dict(
        host=dict(type='str', required=True),
        port=dict(type='int', default=9243),
        username=dict(type='str', required=True),
        password=dict(type='str', required=True, no_log=True),
        verify_ssl_cert=dict(type='bool', default=True),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        alert_name=dict(type='str', required=True),
        enabled=dict(type='bool', default=True),
        alert_type=dict(type='str', choices=['metrics_threshold']), #more types will be added as we gain the ability to support them
        tags=dict(type='list', elements='str', default=[]),
        check_every=dict(type='str', default='1m'),
        notify_on=dict(type='str', default='status_change', choices=['status_change']),
        conditions=dict(type='list', elements='dict', options=dict(
            # 'cardinality' added alongside the original misspelled
            # 'cardnality' (kept for backward compatibility).
            when=dict(type='str', required=True, choices=['max', 'min', 'avg', 'cardinality', 'cardnality', 'rate', 'count', 'sum', '95th_percentile', '99th_percentile']),
            field=dict(type='str', required=False),
            state=dict(type='str', required=True),
            threshold=dict(type='float', required=True),
            warning_threshold=dict(type='float', required=False), # placeholder not currently implemented
            time_period=dict(type='int', default=5),
            time_unit=dict(type='str', default='minute', choices=['second', 'seconds', 'minute', 'minutes', 'hour', 'hours', 'day', 'days']),
        )),
        filter=dict(type='str'),
        filter_query=dict(type='str'),
        alert_on_no_data=dict(type='bool', default=False),
        group_by=dict(type='list', elements='str', required=False),
        actions=dict(type='list', elements='dict', options=dict(
            action_type=dict(type='str', required=True, choices=['email', 'index', 'webhook']), #Only supporting these types for now, if we need more options later we can deal with them as-needed
            run_when=dict(type='str', default='alert', choices=['alert', 'warning', 'recovered']),
            connector=dict(type='str', required=True),
            body=dict(type='str', required=False),
            body_json=dict(type='dict', required=False)
        )),
        consumer=dict(type='str', default='alerts'), ## This seems to always be the default value at this time, just future-proofing
    )
    # https://docs.ansible.com/ansible/latest/dev_guide/developing_program_flow_modules.html#argument-spec-dependencies
    # FIX: the second rule previously referenced the nonexistent parameter
    # 'alert-type' (hyphen, not underscore) and used a bare string
    # ('conditions') instead of a tuple, so it never enforced anything.
    argument_dependencies = [
        ('state', 'present', ('enabled', 'alert_type', 'conditions', 'actions')),
        ('alert_type', 'metrics_threshold', ('conditions',))
    ]
    results = {'changed': False}
    module = AnsibleModule(argument_spec=module_args, required_if=argument_dependencies, supports_check_mode=True)
    state = module.params.get('state')
    kibana_alert = KibanaAlert(module)
    if state == 'present':
        if kibana_alert.alert:
            # Alert already exists: nothing to do (no update support yet).
            results['msg'] = f'alert named {kibana_alert.alert_name} exists'
            module.exit_json(**results)
        results['changed'] = True
        results['msg'] = f'alert named {module.params.get("alert_name")} will be created'
        if not module.check_mode:
            results['alert'] = kibana_alert.create_alert()
            results['msg'] = f'alert named {module.params.get("alert_name")} created'
        module.exit_json(**results)
    if state == 'absent':
        if not kibana_alert.alert:
            results['msg'] = f'alert named {kibana_alert.alert_name} does not exist'
            module.exit_json(**results)
        results['changed'] = True
        results['msg'] = f'alert named {module.params.get("alert_name")} will be deleted'
        if not module.check_mode:
            kibana_alert.delete_alert()
        module.exit_json(**results)
if __name__ == '__main__':
    main()
7dcee4add9d372dba77b8cacbf1af77b36109a14 | 403 | py | Python | template/pc/temp.py | mob5566/dotfiles | 4ebc6aa1b150640fb064f8826f50c0baba322506 | [
"MIT"
] | null | null | null | template/pc/temp.py | mob5566/dotfiles | 4ebc6aa1b150640fb064f8826f50c0baba322506 | [
"MIT"
] | null | null | null | template/pc/temp.py | mob5566/dotfiles | 4ebc6aa1b150640fb064f8826f50c0baba322506 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
' Author: Cheng-Shih Wong
' Email: mob5566@gmail.com
' Date:
'''
def main():
  """Solution entry point; fill in per-problem logic here."""
  pass
if __name__ == '__main__':
  import sys, os
  from time import time
  # If an existing file path is given on the command line, feed it to stdin
  # (opened in binary mode) so the solution can be run against an input file.
  if len(sys.argv)>1 and os.path.exists(sys.argv[1]):
    sys.stdin = open(sys.argv[1], 'rb')
  # Time the run and report on stderr so it doesn't mix with solution output.
  st = time()
  main()
  print('----- Run {:.6f} seconds. -----'.format(time()-st), file=sys.stderr)
| 21.210526 | 79 | 0.560794 |
85c8ef2bdd7eb83d596ee6c682be5f0479449e29 | 1,255 | py | Python | pjbbs/config.py | li765416060/li | 3a158218a2a0892577bde29f75f70cce6af11be5 | [
"Apache-2.0"
] | 1 | 2019-03-07T02:17:27.000Z | 2019-03-07T02:17:27.000Z | pjbbs/config.py | li765416060/li | 3a158218a2a0892577bde29f75f70cce6af11be5 | [
"Apache-2.0"
] | null | null | null | pjbbs/config.py | li765416060/li | 3a158218a2a0892577bde29f75f70cce6af11be5 | [
"Apache-2.0"
] | null | null | null | # 打开debug模式之后修改项目python代码不用重复启动
# ...but it must be turned off when deploying to production.
DEBUG=True
# When enabled, template changes take effect without restarting the server.
TEMPLATES_AUTO_RELOAD=True
# Database configuration
# Database connection settings
DB_USERNAME='root'
DB_PASSWORD="root"
DB_HOST="127.0.0.1"
DB_PORT="3306"
DB_NAME="bbs"
DB_URL="mysql+pymysql://%s:%s@%s:%s/%s?charset=utf8" % (DB_USERNAME,DB_PASSWORD,DB_HOST,DB_PORT,DB_NAME)
SQLALCHEMY_DATABASE_URI=DB_URL
SQLALCHEMY_COMMIT_ON_TEARDOWN=False # Whether to auto-commit pending database changes when each request ends
SQLALCHEMY_POOL_SIZE = 10 # Size of the database connection pool; defaults to the engine's default (usually 5)
SQLALCHEMY_MAX_OVERFLOW = 5 # Extra connections allowed beyond the pool size; these are discarded when returned, keeping the pool at its configured size
SQLALCHEMY_POOL_TIMEOUT = 10 # Connection pool timeout; default is 10
# Enable the next two while debugging; disable them in deployment
SQLALCHEMY_TRACK_MODIFICATIONS=False # If True (the default), Flask-SQLAlchemy tracks object modifications and sends signals; this costs extra memory, so disable it when not needed
SQLALCHEMY_ECHO=True # If True, SQLAlchemy logs every statement to standard output (stderr), which helps debugging; default is False
# session
SECRET_KEY='ada'
REMBERME = 'remberme'
LOGIN = 'login'
CURRENT_USER_ID='user_id'
CURRENT_USER = "current_user"
# flask-mail
MAIL_SERVER = 'smtp.qq.com'
MAIL_PROT = 587
MAIL_USE_TLS = True
MAIL_USE_SSL = False
MAIL_USERNAME = "765416060@qq.com"
MAIL_PASSWORD = "faulcvhlthtobefa" # SMTP authorization code, not the account login password
MAIL_DEFAULT_SENDER='765416060@qq.com' # default sender address
#MAIL_USE_TLS uses port 587
#MAIL_USE_SSL uses port 465
# QQ Mail does not support sending mail over unencrypted connections
dd092dd50a08d7fa738e659fa6962c2bde76d8eb | 5,353 | py | Python | hydroadjust/sampling.py | NiclasHjortkjaer/dhm-hydro-adjust | 30bb58a21c9d5a6d1f342e513a4a50ba24c6aa6a | [
"MIT"
] | null | null | null | hydroadjust/sampling.py | NiclasHjortkjaer/dhm-hydro-adjust | 30bb58a21c9d5a6d1f342e513a4a50ba24c6aa6a | [
"MIT"
] | null | null | null | hydroadjust/sampling.py | NiclasHjortkjaer/dhm-hydro-adjust | 30bb58a21c9d5a6d1f342e513a4a50ba24c6aa6a | [
"MIT"
] | 2 | 2022-02-18T12:58:49.000Z | 2022-02-18T13:18:39.000Z | from osgeo import gdal, ogr
from scipy.interpolate import RegularGridInterpolator
import numpy as np
from collections import namedtuple
# The ordering of window X and Y bounds is a mess in GDAL (compare e.g.
# gdal.Translate() and gdal.Warp()). Using this little structure and
# addressing the members by name should help limit confusion.
# Georeferenced window bounds. GDAL APIs disagree on X/Y ordering (compare
# gdal.Translate() and gdal.Warp()), so bounds are always addressed by field
# name to avoid confusion.
BoundingBox = namedtuple('BoundingBox', 'x_min x_max y_min y_max')
def get_raster_window(dataset, bbox):
    """
    Return a window of the input raster dataset, containing at least the
    provided bounding box.
    :param dataset: Source raster dataset
    :type dataset: GDAL Dataset object
    :param bbox: Window bound coordinates
    :type bbox: hydroadjust.sampling.BoundingBox object
    :returns: GDAL Dataset object for the requested window
    """
    input_geotransform = dataset.GetGeoTransform()
    # Only axis-aligned rasters are supported: a rotated geotransform would
    # invalidate the simple pixel/coordinate arithmetic below.
    if input_geotransform[2] != 0.0 or input_geotransform[4] != 0.0:
        raise ValueError("geotransforms with rotation are unsupported")
    input_offset_x = input_geotransform[0]
    input_offset_y = input_geotransform[3]
    input_pixelsize_x = input_geotransform[1]
    input_pixelsize_y = input_geotransform[5]
    # We want to find window coordinates that:
    # a) are aligned to the source raster pixels
    # b) contain the requested bounding box plus at least one pixel of "padding" on each side, to allow for small floating-point rounding errors in X/Y coordinates
    #
    # Recall that the pixel size in the geotransform is commonly negative, hence all the min/max calls.
    # Fractional column/row positions of the bbox corners in the source raster.
    raw_x_min_col_float = (bbox.x_min - input_offset_x) / input_pixelsize_x
    raw_x_max_col_float = (bbox.x_max - input_offset_x) / input_pixelsize_x
    raw_y_min_row_float = (bbox.y_min - input_offset_y) / input_pixelsize_y
    raw_y_max_row_float = (bbox.y_max - input_offset_y) / input_pixelsize_y
    # Snap outward to whole pixels and add one pixel of padding on each side.
    col_min = int(np.floor(min(raw_x_min_col_float, raw_x_max_col_float))) - 1
    col_max = int(np.ceil(max(raw_x_min_col_float, raw_x_max_col_float))) + 1
    row_min = int(np.floor(min(raw_y_min_row_float, raw_y_max_row_float))) - 1
    row_max = int(np.ceil(max(raw_y_min_row_float, raw_y_max_row_float))) + 1
    # Convert the padded pixel bounds back to georeferenced coordinates.
    x_col_min = input_offset_x + input_pixelsize_x * col_min
    x_col_max = input_offset_x + input_pixelsize_x * col_max
    y_row_min = input_offset_y + input_pixelsize_y * row_min
    y_row_max = input_offset_y + input_pixelsize_y * row_max
    # Padded, georeferenced window coordinates. The target window to use with gdal.Translate().
    padded_bbox = BoundingBox(
        x_min=min(x_col_min, x_col_max),
        x_max=max(x_col_min, x_col_max),
        y_min=min(y_row_min, y_row_max),
        y_max=max(y_row_min, y_row_max),
    )
    # Size in pixels of destination raster
    dest_num_cols = col_max - col_min
    dest_num_rows = row_max - row_min
    # projWin order is (ulx, uly, lrx, lry); nearest-neighbour resampling keeps
    # the source cell values untouched.
    translate_options = gdal.TranslateOptions(
        width=dest_num_cols,
        height=dest_num_rows,
        projWin=(padded_bbox.x_min, padded_bbox.y_max, padded_bbox.x_max, padded_bbox.y_min),
        resampleAlg=gdal.GRA_NearestNeighbour,
    )
    # gdal.Translate() needs a destination *name*, not just a Dataset to
    # write into. Create a temporary file in GDAL's virtual filesystem as a
    # stepping stone.
    window_dataset_name = "/vsimem/temp_window.tif"
    window_dataset = gdal.Translate(
        window_dataset_name,
        dataset,
        options=translate_options
    )
    return window_dataset
def get_raster_interpolator(dataset):
    """
    Return a scipy.interpolate.RegularGridInterpolator corresponding to a GDAL
    raster.

    The returned interpolator accepts georeferenced (x, y) points and returns
    the bilinearly interpolated band-1 value, with NaN outside the raster
    extent and at NODATA cells.

    :param dataset: Raster dataset in which to interpolate
    :type dataset: GDAL Dataset object
    :returns: RegularGridInterpolator accepting georeferenced X and Y input
    """
    geotransform = dataset.GetGeoTransform()
    band = dataset.GetRasterBand(1)
    nodata_value = band.GetNoDataValue()
    # Read as float so NODATA cells can be replaced with NaN below: assigning
    # np.nan into an integer-typed array would fail (the original code crashed
    # on integer rasters for this reason).
    z_grid = band.ReadAsArray().astype(float)
    num_rows, num_cols = z_grid.shape
    # Only axis-aligned rasters are supported.
    if geotransform[2] != 0.0 or geotransform[4] != 0.0:
        raise ValueError("geotransforms with rotation are unsupported")
    # X and Y values for the individual columns/rows of the raster. The 0.5 is
    # added in order to obtain the coordinates of the cell centers rather than
    # the corners.
    x_values = geotransform[0] + geotransform[1]*(0.5+np.arange(num_cols))
    y_values = geotransform[3] + geotransform[5]*(0.5+np.arange(num_rows))
    # RegularGridInterpolator requires the x and y arrays to be in strictly
    # increasing order; flip any axis whose pixel size is negative and
    # remember the step so the grid can be flipped to match.
    if geotransform[1] > 0.0:
        col_step = 1
    else:
        col_step = -1
        x_values = np.flip(x_values)
    if geotransform[5] > 0.0:
        row_step = 1
    else:
        row_step = -1
        y_values = np.flip(y_values)
    # NODATA values must be replaced with NaN for interpolation purposes.
    # GetNoDataValue() returns None when the band defines no NODATA value;
    # in that case there is nothing to mask out.
    if nodata_value is not None:
        z_grid[z_grid == nodata_value] = np.nan
    # The grid must be transposed to swap (row, col) coordinates into (x, y)
    # order.
    interpolator = RegularGridInterpolator(
        points=(x_values, y_values),
        values=z_grid[::row_step, ::col_step].transpose(),
        method='linear',
        bounds_error=False,
        fill_value=np.nan,
    )
    return interpolator
| 37.173611 | 163 | 0.698674 |
4791793987badd9746bb2a6c5f6d6e776824acbf | 20,052 | py | Python | applyHCPAcrosswalk.py | mobalt/RedCap2NDA_withCrosswalk | 8fa8c57f049cb3adb219e50f3474cd7532cee3ce | [
"MIT"
] | null | null | null | applyHCPAcrosswalk.py | mobalt/RedCap2NDA_withCrosswalk | 8fa8c57f049cb3adb219e50f3474cd7532cee3ce | [
"MIT"
] | 1 | 2020-02-27T20:57:54.000Z | 2020-02-27T22:17:00.000Z | applyHCPAcrosswalk.py | mobalt/RedCap2NDA_withCrosswalk | 8fa8c57f049cb3adb219e50f3474cd7532cee3ce | [
"MIT"
] | 1 | 2020-02-27T21:19:08.000Z | 2020-02-27T21:19:08.000Z |
#two structures, ndarsubjects01 and edinburgh_hand01 are created by specialty
# programs HCD_ndar_edinburgh.py
#each of these structures requires consideration of all HCPD-redcap databases
#and were used as guinea pig behavioral data structures
import os, datetime
import json
import sys
from multiprocessing.dummy import Pool
import pandas as pd
import pycurl
import numpy as np
import io
from io import BytesIO
import download.box
from download.box import LifespanBox
# --- Run-time configuration: Box cache location, credential/config files, ---
# --- and the crosswalk / NDA pedigree inputs used throughout this script. ---
box_temp='/home/petra/UbWinSharedSpace1/boxtemp'
box = LifespanBox(cache=box_temp)
#cache_space='/home/petra/UbWinSharedSpace1/scratch'
cache_space=box_temp
from boxsdk import JWTAuth, OAuth2, Client
snapshotdate = datetime.datetime.today().strftime('%m_%d_%Y')
#cur_dir = os.path.dirname(os.path.abspath(__file__))
#default_cache = "/data/intradb/tmp/box2nda_cache"
###REDCAP API tokens moved to configuration file
redcapconfigfile="/home/petra/UbWinSharedSpace1/ccf-nda-behavioral/PycharmToolbox/.boxApp/redcapconfig.csv"
#redcapconfigfile='/data/intradb/home/.boxApp/redcapconfig.csv'
boxconfigfile="/home/petra/UbWinSharedSpace1/ccf-nda-behavioral/PycharmToolbox/.boxApp/config.json"
crosswalkfile="/home/petra/UbWinSharedSpace1/redcap2nda_Lifespan2019/HCA_crosswalk_docs/HCPA_Crosswalk_concatenated_25Nov2019.csv"
#crosswalkfile="/home/petra/UbWinSharedSpace1/redcap2nda_Lifespan2019/crosswalk_docs/Crosswalk_test.csv"
crosswalk=pd.read_csv(crosswalkfile)
ndar_fields='UnrelatedHCAHCD_w_STG_Image_and_pseudo_GUID09_27_2019.csv'
ndar=pd.read_csv('/home/petra/UbWinSharedSpace1/redcap2nda_Lifespan2019/Dev_pedigrees/'+ndar_fields)
# Restrict the NDA pedigree to HCP-Aging (HCA) subjects only.
ndar=ndar.loc[ndar.subjectped.str.contains('HCA')]
pathout="/home/petra/UbWinSharedSpace1/redcap2nda_Lifespan2019/HCA_crosswalk_docs/prepped_structures"
#will import the crosswalk and sort by structures, then do for all structures the following
#get fieldlist for items in this structure (if redcap based and not special case like session drugs)
#get the data from redcap corresponding to this fieldlist (sincerely hope no merging required with Box data...
#needs to be called study data because code snpts in crosswalk refer to studydata datafram
#structures=crosswalk.groupby('NDA Structure').count().reset_index()[['NDA Structure']]
structures=crosswalk.drop_duplicates(subset='NDA Structure')[['HCP-A Source','dbase','NDA Structure','specialty_code']]
#normal redcap structures
normals=structures.loc[(structures['HCP-A Source'].str.contains('Redcap')==True) & (structures.specialty_code.isnull()==True)]
# NOTE(review): redcap2structure() is defined further down this file, so this
# loop only works if the file is executed section-by-section (e.g. in an
# interactive session); run top-to-bottom as a script it would raise NameError.
for structure in normals['NDA Structure']:
    elements=crosswalk.loc[crosswalk['NDA Structure']==structure][['HCP-A Element']]
    vars=list(elements['HCP-A Element'])
    study=crosswalk.loc[crosswalk['NDA Structure']==structure,'dbase'].values[0]
    redcap2structure(vars,crosswalk,pathstructuresout=pathout,studystr=study)
#ravlt - special case
# RAVLT scores live in a curated Box file rather than Redcap, so this block
# pulls the Box data, merges it with Redcap demographics, and writes ravlt01.
extras=getredcapfieldsjson(fieldlist=[],study='hcpa')
crosswalk_subset=crosswalk.loc[(crosswalk['HCP-A Source'].str.contains('Box'))
        & (crosswalk['NDA Structure']=='ravlt01')]
ravltid=crosswalk_subset.dbase.unique()[0]
ravlt=Box2dataframe(ravltid)
ravlt=ravlt.loc[ravlt.visit=='V1']
#execute any specialty codes
# NOTE(review): exec() runs code strings stored in the crosswalk spreadsheet;
# the spreadsheet must be treated as trusted input.
for index,row in crosswalk_subset.iterrows():
    if pd.isna(row['python first code for form request']):
        pass
    else:
        exec(row['python first code for form request'])
listvars=crosswalk_subset['HCP-A Element name in uploaded file'].tolist()
ravlt=ravlt[['subject']+listvars]
studydata=pd.merge(ravlt,extras,on='subject',how='right')
transformed=studydata.loc[studydata.flagged.isnull()==True].drop(columns='flagged')
#merge with required fields from ndar subjects
#--age and date need be recalcuated on fly from redcap data because possiblity of multiple dates per person'
ndarsub=ndar[['nda_guid','subjectped']].rename(
    columns={'nda_guid':'subjectkey','subjectped':'src_subject_id'}).copy()
dout=pd.merge(ndarsub,transformed,how='left',left_on='src_subject_id',right_on='subject').drop(columns={'subject','dob','site', 'study', 'subject_id'})
crosswalk_subset.reset_index(inplace=True)
# Structure name is split into root + 2-digit version (e.g. 'ravlt' + '01')
# because the NDA submission header requires them as separate fields.
strucroot=crosswalk_subset['NDA Structure'].str.strip().str[:-2][0]
strucnum=crosswalk_subset['NDA Structure'].str.strip().str[-2:][0]
#finalsubset - i.e. no withdraws
#subjectkey src_subject_id interview_age interview_date gender
filePath=os.path.join(pathout,'HCPA_'+strucroot+strucnum+'_'+snapshotdate+'.csv')
if os.path.exists(filePath):
    os.remove(filePath)
else:
    pass
    #print("Can not delete the file as it doesn't exists")
with open(filePath,'a') as f:
    f.write(strucroot+","+str(int(strucnum))+"\n")
    dout.to_csv(f,index=False)
#######################################
### special cases ###
#########################################
#medhx vars merge from hcpa and ssaga databases
# medh01 draws elements from two Redcap databases; the two pulls are merged
# on 'subject' and handed to redcap2structure() as a pre-built dataframe.
crosswalk_subset=crosswalk.loc[crosswalk['NDA Structure']=='medh01']
hcpalist=crosswalk_subset.loc[crosswalk_subset.dbase=='hcpa','HCP-A Element'].tolist()
hcpadata=getredcapfieldsjson(fieldlist=hcpalist, study='hcpa')
ssagalist=crosswalk_subset.loc[crosswalk_subset.dbase=='ssaga','HCP-A Element'].tolist()
ssagadata=getredcapfieldsjson(fieldlist=ssagalist, study='ssaga')
# Drop the ssaga copies of bookkeeping columns so the merge keeps the hcpa ones.
ssagahcpa=pd.merge(hcpadata,ssagadata.drop(columns=['flagged','study','interview_date']),on='subject',how='inner')
vars=hcpalist+ssagalist
redcap2structure(vars,crosswalk_subset,pathstructuresout=pathout,studystr='hcpa',dframe=ssagahcpa)
#caffeine nicotine and other drug sessions
#need six rows per person corresponding to 6 sessions
# drugscr01 is long-format: Redcap stores one wide row per subject with
# per-session suffixes (s1..s6 / drug1..drug6); this block reshapes that into
# six rows per subject, tagged with version_form = session number.
crosswalk_subset=crosswalk.loc[crosswalk['NDA Structure']=='drugscr01']
sessions=['1','2','3','4','5','6']
# Use the session-1 column names as the canonical output names for all sessions.
renamelist=crosswalk_subset.loc[(crosswalk_subset['HCP-A Element'].str.contains('s1')==True) |
        (crosswalk_subset['HCP-A Element'].str.contains('drug1')==True),'HCP-A Element'].tolist() + ['alc_breath1']
allsessions=pd.DataFrame()
for session in sessions:
    slist=crosswalk_subset.loc[(crosswalk_subset['HCP-A Element'].str.contains('s'+session)==True) |
        (crosswalk_subset['HCP-A Element'].str.contains('drug'+session)==True),'HCP-A Element'].tolist() + ['alc_breath'+session]
    allstudydata=pd.DataFrame()
    studies = crosswalk_subset.dbase.values[0]
    for pop in studies.split():
        studydata = pd.DataFrame()
        studydata = getredcapfieldsjson(fieldlist=slist, study=pop)
        allstudydata = pd.concat([allstudydata, studydata], axis=0, sort=True)
    # Collapse the Redcap checkbox pair (___1 = prior to visit, ___2 = during
    # visit) into a single human-readable string column, then drop the pair.
    allstudydata['caffeine_s' + session + '___1'] = allstudydata['caffeine_s' + session + '___1'].str.replace('1','Prior to visit')
    allstudydata['caffeine_s' + session + '___2'] = allstudydata['caffeine_s' + session + '___2'].str.replace('2','During visit')
    allstudydata['caffeine_s' + session]=allstudydata['caffeine_s'+session+'___1'] + ' ' + allstudydata['caffeine_s'+session+'___2']
    allstudydata['caffeine_s' + session]=allstudydata['caffeine_s' + session].str.replace('0','')
    allstudydata['nicotine_s' + session + '___1'] = allstudydata['nicotine_s' + session + '___1'].str.replace('1','Prior to visit')
    allstudydata['nicotine_s' + session + '___2'] = allstudydata['nicotine_s' + session + '___2'].str.replace('2','During visit')
    allstudydata['nicotine_s' + session]=allstudydata['nicotine_s'+session+'___1'] + ' ' + allstudydata['nicotine_s'+session+'___2']
    allstudydata['nicotine_s' + session]=allstudydata['nicotine_s' + session].str.replace('0','')
    allstudydata=allstudydata.drop(columns=['caffeine_s'+session+'___1','caffeine_s'+session+'___2','nicotine_s'+session+'___1','nicotine_s'+session+'___2']).copy()
    # Rename session-specific columns to the canonical session-1 names so the
    # six session frames stack cleanly.
    varmap = dict(zip(slist, renamelist))
    allstudydata=allstudydata.rename(columns=varmap)
    allstudydata['version_form']=session
    allsessions=pd.concat([allsessions,allstudydata],axis=0,sort=True)
lout=list(crosswalk_subset['HCP-A Element name in uploaded file'])
cleanedlist = [x for x in lout if str(x) != 'nan']
listout=['subject','flagged','interview_date','interview_age','gender']+cleanedlist #output new variables and subset to those not flagged for withdrawal.
transformed=allsessions[listout].loc[allsessions.flagged.isnull()==True].drop(columns={'flagged','interview_date','gender','interview_age'})
#merge with required fields from vars in intradb staging (guid, etc)
#not sure whether it makes sense to pull these in here or recalculate on fly from redcap.
#future issues: compare this approach (e.g. pull from the file above named 'ndar') vs. what happens in the applycrosswalk.py
#program for HCD, which regenerates on fly...will require some recodeing below to pull from redcap...
#might just be easier to pull once...but how will this affect visit numbers?
ndarsub=ndar[['nda_guid','subjectped','nda_gender','nda_interview_age','nda_interview_date']].rename(
    columns={'nda_guid':'subjectkey','subjectped':'src_subject_id','nda_gender':'gender',
             'nda_interview_date':'interview_date','nda_interview_age':'interview_age'}).copy()
dout=pd.merge(ndarsub,transformed,how='left',left_on='src_subject_id',right_on='subject').drop(columns='subject')
dout['interview_date'] = pd.to_datetime(dout['interview_date']).dt.strftime('%m/%d/%Y')
#now export
crosswalk_subset.reset_index(inplace=True)
strucroot=crosswalk_subset['NDA Structure'].str.strip().str[:-2][0]
strucnum=crosswalk_subset['NDA Structure'].str.strip().str[-2:][0]
#finalsubset - i.e. no withdraws
#subjectkey src_subject_id interview_age interview_date gender
filePath=os.path.join(pathout,'HCPA_'+strucroot+strucnum+'_'+snapshotdate+'.csv')
if os.path.exists(filePath):
    os.remove(filePath)
else:
    pass
    #print("Can not delete the file as it doesn't exists")
with open(filePath,'a') as f:
    f.write(strucroot+","+str(int(strucnum))+"\n")
    dout.to_csv(f,index=False)
######################################
#create the two penncnp stuctures
# NOTE(review): 'penn' is referenced below but never defined in this file --
# presumably it is created interactively (e.g. via Box2dataframe) before this
# section is run; confirm before executing as a script. 'crosswalk_subset' is
# also carried over from the previous section.
structuresbox=crosswalk_subset.drop_duplicates(subset='NDA Structure')[['HCP-A Source','dbase','NDA Structure','specialty_code']]
for structure in structuresbox['NDA Structure']:
    listvars=crosswalk_subset.loc[crosswalk_subset['NDA Structure']==structure,'HCP-A Element name in uploaded file'].tolist()
    pennstruct=penn[['subid']+listvars]
    studydata=pd.merge(pennstruct,extras,left_on='subid',right_on='subject',how='right')
    transformed=studydata.loc[studydata.flagged.isnull()==True].drop(columns='flagged')
    #merge with required fields from ndar subjects
    #--age and date need be recalcuated on fly from redcap data because possiblity of multiple dates per person'
    ndarsub=ndar[['nda_guid','subjectped']].rename(
        columns={'nda_guid':'subjectkey','subjectped':'src_subject_id'}).copy()
    dout=pd.merge(ndarsub,transformed,how='left',left_on='src_subject_id',right_on='subject').drop(columns={'subject','dob','site', 'study', 'subject_id'})
    crosswalk_boxsubset=crosswalk_subset.loc[crosswalk_subset['NDA Structure']==structure]
    crosswalk_boxsubset.reset_index(inplace=True)
    strucroot=crosswalk_boxsubset['NDA Structure'].str.strip().str[:-2][0]
    strucnum=crosswalk_boxsubset['NDA Structure'].str.strip().str[-2:][0]
    filePath=os.path.join(pathout,'HCPA_'+strucroot+strucnum+'_'+snapshotdate+'.csv')
    if os.path.exists(filePath):
        os.remove(filePath)
    else:
        pass
        #print("Can not delete the file as it doesn't exists")
    with open(filePath,'a') as f:
        f.write(strucroot+","+str(int(strucnum))+"\n")
        dout.to_csv(f,index=False)
###end penncnp
######################################
def Box2dataframe(curated_fileid_start):
    """Download the curated Box file with the given id and load it as a DataFrame.

    The file is fetched through the module-level ``box`` client into the
    module-level ``cache_space`` directory, then parsed with pandas using the
    ISO-8859-1 encoding (some curated files are not valid UTF-8).
    """
    file_obj = box.download_file(curated_fileid_start)
    local_path = os.path.join(cache_space, file_obj.get().name)
    frame = pd.read_csv(local_path, header=0, low_memory=False, encoding='ISO-8859-1')
    return frame
#use json format because otherwise commas in strings convert wrong in csv read
def getredcapfieldsjson(fieldlist, study='hcpa'): # , token=token[0],field=field[0],event=event[0]):
    """
    Downloads requested fields from Redcap databases specified by details in redcapconfig file
    Returns panda dataframe with fields 'study', 'Subject_ID, 'subject', and 'flagged', where 'Subject_ID' is the
    patient id in the database of interest (sometimes called subject_id, parent_id) as well as requested fields.
    subject is this same id stripped of underscores or flags like 'excluded' to make it easier to merge
    flagged contains the extra characters other than the id so you can keep track of who should NOT be uploaded to NDA
    or elsewwhere shared
    """
    auth = pd.read_csv(redcapconfigfile)
    studydata = pd.DataFrame()
    # Requested fields start at index 5; slots 0-4 are reserved below for the
    # id, interview date, sex, site, and dob fields from the config file.
    fieldlistlabel = ['fields[' + str(i) + ']' for i in range(5, len(fieldlist) + 5)]
    fieldrow = dict(zip(fieldlistlabel, fieldlist))
    d1 = {'token': auth.loc[auth.study == study, 'token'].values[0], 'content': 'record', 'format': 'json', 'type': 'flat',
          'fields[0]': auth.loc[auth.study == study, 'field'].values[0],
          'fields[1]': auth.loc[auth.study == study, 'interview_date'].values[0],
          'fields[2]': auth.loc[auth.study == study, 'sexatbirth'].values[0],
          'fields[3]': auth.loc[auth.study == study, 'sitenum'].values[0],
          'fields[4]': auth.loc[auth.study == study, 'dobvar'].values[0]}
    d2 = fieldrow
    d3 = {'events[0]': auth.loc[auth.study == study, 'event'].values[0], 'rawOrLabel': 'raw', 'rawOrLabelHeaders': 'raw',
          'exportCheckboxLabel': 'false',
          'exportSurveyFields': 'false', 'exportDataAccessGroups': 'false', 'returnFormat': 'json'}
    data = {**d1, **d2, **d3}
    # POST the request to the Redcap API with pycurl and capture the JSON reply.
    buf = BytesIO()
    ch = pycurl.Curl()
    ch.setopt(ch.URL, 'https://redcap.wustl.edu/redcap/srvrs/prod_v3_1_0_001/redcap/api/')
    ch.setopt(ch.HTTPPOST, list(data.items()))
    ch.setopt(ch.WRITEDATA, buf)
    ch.perform()
    ch.close()
    htmlString = buf.getvalue().decode('UTF-8')
    buf.close()
    d = json.loads(htmlString)
    #parent_ids = pd.DataFrame(htmlString.splitlines(), columns=['row'])
    #header = parent_ids.iloc[0]
    #headerv2 = header.str.replace(auth.loc[auth.study == study, 'interview_date'].values[0], 'interview_date')
    #headerv3 = headerv2.str.split(',')
    #parent_ids.drop([0], inplace=True)
    #pexpanded = pd.DataFrame(parent_ids.row.str.split(pat='\t').values.tolist(), columns=headerv3.values.tolist()[0])
    pexpanded=pd.DataFrame(d)
    # Drop rows whose subject-id field came back empty.
    pexpanded = pexpanded.loc[~(pexpanded[auth.loc[auth.study == study, 'field'].values[0]] == '')] ##
    # Split 'HCA1234_excluded'-style ids into the bare id ('subject') and the
    # trailing flag ('flagged') used to exclude withdrawn participants.
    new = pexpanded[auth.loc[auth.study == study, 'field'].values[0]].str.split("_", 1, expand=True)
    pexpanded['subject'] = new[0].str.strip()
    pexpanded['flagged'] = new[1].str.strip()
    pexpanded['study'] = study # auth.study[i]
    studydata = pd.concat([studydata, pexpanded], axis=0, sort=True)
    studydata=studydata.rename(columns={auth.loc[auth.study == study, 'interview_date'].values[0]:'interview_date'})
    # Convert age in years to age in months
    # note that dob is hardcoded var name here because all redcap databases use same variable name...sue me
    # interview date, which was originally v1_date for hcpa, has been renamed in line above, headerv2
    # NOTE(review): the bare 'except: pass' below silently skips the age
    # computation whenever any step fails (e.g. missing dob column).
    try:
        studydata['nb_months'] = (
            12 * (pd.to_datetime(studydata['interview_date']).dt.year - pd.to_datetime(studydata.dob).dt.year) +
            (pd.to_datetime(studydata['interview_date']).dt.month - pd.to_datetime(studydata.dob).dt.month) +
            (pd.to_datetime(studydata['interview_date']).dt.day - pd.to_datetime(studydata.dob).dt.day) / 31)
        studydatasub=studydata.loc[studydata.nb_months.isnull()].copy()
        studydatasuper = studydata.loc[~(studydata.nb_months.isnull())].copy()
        studydatasuper['nb_months'] = studydatasuper['nb_months'].apply(np.floor).astype(int)
        # NDA PHI rule: ages above 90 years (1080 months) are reported as 1200.
        studydatasuper['nb_monthsPHI'] = studydatasuper['nb_months']
        studydatasuper.loc[studydatasuper.nb_months > 1080, 'nb_monthsPHI'] = 1200
        studydata=pd.concat([studydatasub,studydatasuper],sort=True)
        studydata = studydata.drop(columns={'nb_months'}).rename(columns={'nb_monthsPHI': 'interview_age'})
    except:
        pass
    #convert gender to M/F string
    try:
        studydata.gender = studydata.gender.str.replace('1', 'M')
        studydata.gender = studydata.gender.str.replace('2', 'F')
    except:
        print(study+' has no variable named gender')
    return studydata
def redcap2structure(vars,crosswalk,pathstructuresout=pathout,studystr='hcpa',dframe=None):
    """
    Takes list of vars from the crosswalk, gets the data from Redcap, and puts into structure format after
    merging with NDAR requiredvars. Outputs a csv structure in NDA format to pathstructureout location

    vars: list of 'HCP-A Element' names to include in the structure
    crosswalk: crosswalk dataframe mapping HCP-A elements to NDA elements
    pathstructuresout: directory where the NDA-format csv is written
    studystr: Redcap database key (per redcapconfig) to pull from
    dframe: optional pre-built studydata frame; skips the Redcap pull
    """
    #varslim=[x for x in fieldlist if str(x) != 'nan']
    #varnan=[x for x in fieldlist if str(x) == 'nan']
    if dframe is not None:
        studydata=dframe
    else:
        studydata=getredcapfieldsjson(fieldlist=vars,study=studystr)
    #studydata=getredcapfieldsjson(fieldlist=vars,study=studystr)
    # ssaga pulls lack usable demographics; borrow age/date/gender from hcpa.
    if studystr=='ssaga':
        extras=getredcapfieldsjson(fieldlist=[],study='hcpa')
        studydata=pd.merge(studydata.drop(columns='interview_date'),extras[['interview_age','interview_date','subject','gender']],on='subject',how='left')
    #get the relevant rows of the crosswalk
    crosswalk_subset=pd.merge(crosswalk,pd.DataFrame(vars,columns=['HCP-A Element']),on='HCP-A Element',how='inner')[['NDA Structure', 'NDA Element', 'HCP-A Element', 'HCP-A Source',
        'CCF action applied (e.g. request from Form Request)',
        'HCP-A Element name in uploaded file',
        'python first code for form request']]
    #execute transformation codes stored in the crosswalk
    # NOTE(review): exec() runs code strings from the crosswalk spreadsheet;
    # the spreadsheet must be treated as trusted input.
    for index,row in crosswalk_subset.iterrows():
        if pd.isna(row['python first code for form request']):
            pass
        else:
            exec(row['python first code for form request'])
    #remove fields with empty values HCP-A Element name in uploaded file -- these are empty because NDA doesnt want them
    crosswalk_subset=crosswalk_subset.loc[crosswalk_subset['HCP-A Element name in uploaded file'].isnull()==False]
    listout=['subject','flagged','interview_date','interview_age','gender']+list(crosswalk_subset['HCP-A Element name in uploaded file'])
    #output new variables and subset to those not flagged for withdrawal.
    transformed=studydata[listout].loc[studydata.flagged.isnull()==True].drop(columns='flagged')
    #merge with required fields from ndar subjects
    #--age and date need be recalcuated on fly from redcap data because possiblity of multiple dates per person'
    ndarsub=ndar[['nda_guid','subjectped']].rename(
        columns={'nda_guid':'subjectkey','subjectped':'src_subject_id'}).copy()
    dout=pd.merge(ndarsub,transformed,how='left',left_on='src_subject_id',right_on='subject').drop(columns='subject')
    dout['interview_date'] = pd.to_datetime(dout['interview_date']).dt.strftime('%m/%d/%Y')
    #now export
    crosswalk_subset.reset_index(inplace=True)
    # NDA header wants structure root and 2-digit version as separate tokens.
    strucroot=crosswalk_subset['NDA Structure'].str.strip().str[:-2][0]
    strucnum=crosswalk_subset['NDA Structure'].str.strip().str[-2:][0]
    #finalsubset - i.e. no withdraws
    #subjectkey src_subject_id interview_age interview_date gender
    filePath=os.path.join(pathstructuresout,'HCPA_'+strucroot+strucnum+'_'+snapshotdate+'.csv')
    if os.path.exists(filePath):
        os.remove(filePath)
    else:
        pass
        #print("Can not delete the file as it doesn't exists")
    with open(filePath,'a') as f:
        f.write(strucroot+","+str(int(strucnum))+"\n")
        dout.to_csv(f,index=False)
| 57.291429 | 182 | 0.720876 |
0e7894f0820d257a241d1d8d0e7dc08819519908 | 843 | py | Python | DungeonsOfNoudar586/make_palette.py | libretro/dungeons-of-noudar | 40fe44c4a180d7daf3fe97eff4ddbfd2cd558c62 | [
"BSD-2-Clause"
] | 1 | 2020-12-25T01:06:43.000Z | 2020-12-25T01:06:43.000Z | Wizard3D/make_palette.py | TheFakeMontyOnTheRun/wizard-of-galicia-3d | 0255289758f782579aedf1055b3c790260fc65a6 | [
"BSD-2-Clause"
] | null | null | null | Wizard3D/make_palette.py | TheFakeMontyOnTheRun/wizard-of-galicia-3d | 0255289758f782579aedf1055b3c790260fc65a6 | [
"BSD-2-Clause"
] | 2 | 2020-08-17T16:13:01.000Z | 2020-08-27T19:38:26.000Z | import glob
from PIL import Image
from math import floor
palette = [[0,0,0]];
def transform( pixel ):
return [ 20 * ( pixel[ 0 ] / 20), 20 * ( pixel[ 1 ] / 20), 20 * ( pixel[ 2 ] / 20 ) ]
def add_to_palette( filename ):
imgFile = Image.open( filename )
img = imgFile.load()
for y in range( 0, imgFile.height ):
for x in range( 0, imgFile.width ):
pixel = img[ x, y ]
adjusted = transform( pixel )
if pixel[ 3 ] < 254:
adjusted = [ 255, 0, 255 ]
if palette.count( adjusted ) == 0:
palette.append( adjusted )
# Build the global palette from every PNG under res/, then print it sorted
# as tab-separated R G B rows. (Python 2 script: note the print statements.)
for filename in glob.glob('res/*.png'):
	add_to_palette( filename )
palette.sort()
print len( palette )
for pixel in palette:
	print str(pixel[ 0 ] ) + "\t" + str(pixel[ 1 ] ) + "\t" + str(pixel[ 2 ] )
12363ce750099de3e2dc844f72536925d7218fa7 | 5,474 | py | Python | goatools/semantic.py | Odinidoer/goatools | 475be141437b7d81dc7d03533c2b2c07974400c2 | [
"BSD-2-Clause"
] | 1 | 2019-02-07T03:35:09.000Z | 2019-02-07T03:35:09.000Z | goatools/semantic.py | Odinidoer/goatools | 475be141437b7d81dc7d03533c2b2c07974400c2 | [
"BSD-2-Clause"
] | null | null | null | goatools/semantic.py | Odinidoer/goatools | 475be141437b7d81dc7d03533c2b2c07974400c2 | [
"BSD-2-Clause"
] | 1 | 2019-02-12T13:45:42.000Z | 2019-02-12T13:45:42.000Z | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Compute semantic similarities between GO terms. Borrowed from book chapter from
Alex Warwick Vesztrocy and Christophe Dessimoz (thanks). For details, please
check out:
notebooks/semantic_similarity.ipynb
"""
import math
from collections import Counter
class TermCounts(object):
    """Annotation-derived occurrence counts of GO terms, grouped by namespace.

    Each annotated term is propagated to all of its ancestors in the DAG,
    but any given term is counted at most once per gene.
    """

    def __init__(self, godag, annots):
        """Tally term counts and per-namespace totals for *annots*."""
        self.godag = godag
        self._counts = Counter()
        self._aspect_counts = Counter()
        self._count_terms(godag, annots)

    def _count_terms(self, godag, annots):
        """Fill in the per-term counts and the per-namespace totals."""
        for gene_terms in annots.values():  # keyed by gene
            # Union of the gene's terms with all their ancestors, so a term
            # reachable through several annotations is counted once per gene.
            propagated = set(gene_terms)
            for goid in gene_terms:
                propagated |= godag[goid].get_all_parents()
            for goid in propagated:
                self._counts[goid] += 1
        # Aggregate the term counts by GO namespace (aspect).
        for goid, cnt in self._counts.items():
            self._aspect_counts[godag[goid].namespace] += cnt

    def get_count(self, go_id):
        """Return how often *go_id* was observed in the annotations."""
        return self._counts[go_id]

    def get_total_count(self, aspect):
        """Return the precomputed total count for the given namespace."""
        return self._aspect_counts[aspect]

    def get_term_freq(self, go_id):
        """Return the observed frequency of *go_id* within its namespace;
        0 when the namespace has no observations at all."""
        try:
            total = self.get_total_count(self.godag[go_id].namespace)
            return float(self.get_count(go_id)) / float(total)
        except ZeroDivisionError:
            return 0
def get_info_content(go_id, termcounts):
    """Return the information content of a GO term.

    IC is defined as -log(observed frequency); terms never observed
    (frequency 0) get an IC of 0.
    """
    freq = termcounts.get_term_freq(go_id)
    if not freq:
        return 0
    return -1.0 * math.log(freq)
def resnik_sim(go_id1, go_id2, godag, termcounts):
    """Compute Resnik's similarity: the information content of the deepest
    common ancestor of the two terms.

    Returns None when the terms live in different namespaces, since no
    common ancestor exists across aspects.
    """
    if godag[go_id1].namespace != godag[go_id2].namespace:
        return None
    msca = deepest_common_ancestor([go_id1, go_id2], godag)
    return get_info_content(msca, termcounts)
def lin_sim(goid1, goid2, godag, termcnts):
    '''
    Computes Lin's similarity measure.

    Lin (1998) defines the similarity as
        2 * IC(MSCA) / (IC(term1) + IC(term2)),
    which lies in [0, 1] for terms sharing a namespace.

    Fix: the original expression used ``-2 * sim_r``; since
    get_info_content() returns non-negative values (-log of a frequency),
    that sign made every similarity non-positive. Corrected to +2.

    Returns None when the terms are in different namespaces (resnik_sim is
    None), and 0.0 when both terms carry zero information content, avoiding
    a ZeroDivisionError.
    '''
    sim_r = resnik_sim(goid1, goid2, godag, termcnts)
    if sim_r is None:
        return None
    denom = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
    if denom == 0:
        # Both terms unobserved: no information to compare.
        return 0.0
    return (2 * sim_r) / denom
def common_parent_go_ids(goids, godag):
    """Return the set of GO ids that are ancestors of (or equal to) every
    id in *goids*."""
    # Seed the candidate set with the first term plus its full ancestor set.
    first = goids[0]
    candidates = godag[first].get_all_parents() | {first}
    # Intersect with each remaining term's ancestors (the term included).
    for goid in goids[1:]:
        candidates &= godag[goid].get_all_parents() | {goid}
    return candidates
def deepest_common_ancestor(goterms, godag):
    """Return the single deepest common ancestor of *goterms*.

    Assumes a unique most-specific common ancestor exists and selects the
    candidate at maximum depth in the DAG.
    """
    common = common_parent_go_ids(goterms, godag)
    return max(common, key=lambda goid: godag[goid].depth)
def min_branch_length(go_id1, go_id2, godag, branch_dist):
    """Return the minimum number of branches connecting two GO terms.

    Within one namespace this is the distance from each term down to their
    deepest common ancestor and back. Across namespaces the two roots are
    bridged with *branch_dist* extra branches; if *branch_dist* is None the
    cross-namespace distance is undefined and None is returned implicitly.
    """
    term1 = godag[go_id1]
    term2 = godag[go_id2]
    if term1.namespace != term2.namespace:
        if branch_dist is None:
            return None
        # Different aspects: path runs through both roots plus the bridge.
        return term1.depth + term2.depth + branch_dist
    # Same namespace: distance via the deepest common ancestor.
    dca_depth = godag[deepest_common_ancestor([go_id1, go_id2], godag)].depth
    return (term1.depth - dca_depth) + (term2.depth - dca_depth)
def semantic_distance(go_id1, go_id2, godag, branch_dist=None):
    '''
    Finds the semantic distance (minimum number of connecting branches)
    between two GO terms.
    '''
    # Thin wrapper: delegates directly to min_branch_length; returns None
    # when the terms cannot be connected (different namespaces, no bridge).
    return min_branch_length(go_id1, go_id2, godag, branch_dist)
def semantic_similarity(go_id1, go_id2, godag, branch_dist=None):
    '''
    Finds the semantic similarity (inverse of the semantic distance)
    between two GO terms.
    '''
    # NOTE(review): if both ids name the same term the distance is 0 and the
    # division below raises ZeroDivisionError; callers should not pass
    # identical terms. Returns None when the terms cannot be connected.
    dist = semantic_distance(go_id1, go_id2, godag, branch_dist)
    if dist is not None:
        return 1.0 / float(dist)
| 30.752809 | 97 | 0.637559 |
7c37ec44f558aae1a1f4a212ea40205c1ca93b0f | 19,384 | py | Python | maskrcnn_benchmark/engine/inference_aurora.py | niuchuangnn/aurora-maskrcnn | b17acd8c68f9ff3bb561d8f977893948a69c06e5 | [
"MIT"
] | 4 | 2020-02-13T16:02:33.000Z | 2021-03-26T01:12:14.000Z | maskrcnn_benchmark/engine/inference_aurora.py | niuchuangnn/aurora-maskrcnn | b17acd8c68f9ff3bb561d8f977893948a69c06e5 | [
"MIT"
] | 1 | 2021-03-01T12:48:37.000Z | 2021-03-04T03:11:24.000Z | maskrcnn_benchmark/engine/inference_aurora.py | niuchuangnn/aurora-maskrcnn | b17acd8c68f9ff3bb561d8f977893948a69c06e5 | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import datetime
import logging
import tempfile
import time
import os
from collections import OrderedDict
import torch
from tqdm import tqdm
from ..structures.bounding_box import BoxList
from ..utils.comm import is_main_process
from ..utils.comm import scatter_gather
from ..utils.comm import synchronize
from maskrcnn_benchmark.modeling.roi_heads.mask_head.inference import Masker
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou
import cv2
import math
from sklearn.decomposition import PCA
import numpy as np
from PIL import Image
def select_top_predictions(predictions, confidence_threshold=0.7):
scores = predictions.get_field("scores")
keep = torch.nonzero(scores > confidence_threshold).squeeze(1)
predictions = predictions[keep]
scores = predictions.get_field("scores")
_, idx = scores.sort(0, descending=True)
return predictions[idx]
def compute_prediction(model, image_list):
    """Run the detector on a batch and rescale its outputs to image size.

    Executes ``model`` under ``torch.no_grad()``, moves the raw BoxList
    predictions to the CPU, resizes each to the original image dimensions,
    and pastes predicted per-box masks back into full-image space.

    NOTE(review): every prediction is resized to ``image_sizes[0]``; this
    presumes all images in the batch share one size — confirm with callers.
    """
    with torch.no_grad():
        raw_predictions = model(image_list)
    cpu_predictions = [p.to("cpu") for p in raw_predictions]

    processed = []
    for prediction in cpu_predictions:
        # Map the prediction (a BoxList) back onto the original image size.
        height, width = image_list.image_sizes[0]
        prediction = prediction.resize((width, height))
        if prediction.has_field("mask"):
            # Paste the per-box mask crops into full-resolution masks,
            # positioned by their bounding boxes.
            masker = Masker(threshold=0.5, padding=1)
            pasted = masker(prediction.get_field("mask"), prediction)
            prediction.add_field("mask", pasted)
        processed.append(prediction)
    return processed
def compute_angle(model, image, thresh_bdry_number=150, max=True):
    """Estimate one dominant orientation angle (degrees) per image.

    Runs the detector, keeps confident detections, extracts mask contours
    with OpenCV, and fits PCA to contour points; the direction of the first
    principal axis gives the orientation angle.

    Args:
        model: detection model forwarded to ``compute_prediction``.
        image: batched image input fed to the model.
        thresh_bdry_number (int): minimum number of boundary points for a
            contour to contribute to the averaged-angle branch.
        max (bool): when True (default), always use only the single largest
            contour.  Shadows the ``max`` builtin, and being truthy makes
            the averaging branch below dead code by default.

    Returns:
        list of float angles (one per image), or None as soon as any image
        in the batch has no predicted masks at all.
    """
    predictions = compute_prediction(model, image)
    top_predictions = [select_top_predictions(prediction) for prediction in predictions]
    prediction_angles = []
    for top_prediction in top_predictions:
        masks = top_prediction.get_field("mask").numpy()
        if len(masks) <= 0:
            # No masks for this image: abort for the whole batch.
            return None
        angles = []
        max_contour = []
        max_contour_num = -1
        for mask in masks:
            # Shape the binary mask to HxWx1 as cv2.findContours expects.
            thresh = mask[0, :, :, None]
            # NOTE(review): 3-value unpacking matches the OpenCV 3.x API;
            # OpenCV 4.x returns only (contours, hierarchy) — confirm the
            # pinned cv2 version.
            _, contours, hierarchy = cv2.findContours(
                thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
            for contour in contours:
                contour_num = contour.shape[0]
                if contour_num > max_contour_num:
                    # Track the largest contour seen over all masks.
                    max_contour = np.squeeze(contour)
                    max_contour_num = contour_num
                if contour_num > thresh_bdry_number:
                    # Contour is large enough: record its principal-axis angle.
                    contour = np.squeeze(contour)
                    pca = PCA()
                    pca.fit(contour)
                    components = pca.components_
                    main_ax = components[0]
                    angle = math.atan(main_ax[1] / main_ax[0]) * (180.0 / math.pi)
                    angles.append(angle)
        if len(angles) <= 0 or max:
            # Use (or fall back to) the single largest contour's axis.
            pca = PCA()
            pca.fit(max_contour)
            components = pca.components_
            main_ax = components[0]
            angle_ave = math.atan(main_ax[1] / main_ax[0]) * (180.0 / math.pi)
        else:
            # Average the per-contour angles (reachable only when max=False).
            angle_ave = np.array(angles).mean()
        prediction_angles.append(angle_ave)
    return prediction_angles
def compute_on_dataset(model, data_loader, device, two_stage=True):
    """Run inference over a dataset, optionally with two-stage de-rotation.

    In two-stage mode, a first forward pass estimates each image's dominant
    orientation, the images are rotated upright, and a second pass produces
    the final detections; the negated rotation angle is stored on each
    output so downstream code can rotate masks back.

    Returns:
        dict mapping image id -> BoxList prediction (on CPU).
    """
    model.eval()
    results_dict = {}
    cpu_device = torch.device("cpu")
    for i, batch in tqdm(enumerate(data_loader)):
        images, targets, image_ids = batch
        images = images.to(device)
        with torch.no_grad():
            if two_stage:
                # First pass: estimate per-image orientation angles.
                angles = compute_angle(model, images)
                # Rotate on CPU, then move back to the compute device.
                # NOTE(review): relies on the batched image container
                # implementing .rotate(angles) — a fork-specific extension;
                # also assumes angles is not None here.
                images = images.to(cpu_device)
                images = images.rotate(angles)
                images = images.to(device)
            output = model(images)
            output = [o.to(cpu_device) for o in output]
            if two_stage:
                # Store the inverse angle so masks can be rotated back later.
                for a in range(len(angles)):
                    output[a].add_field('angle', -angles[a])
        results_dict.update(
            {img_id: result for img_id, result in zip(image_ids, output)}
        )
    return results_dict
def prepare_for_coco_detection(predictions, dataset):
    """Convert per-image BoxList predictions into COCO bbox result dicts.

    Each prediction is resized to the original image dimensions, converted
    to xywh boxes, and flattened into one dict per detection with the
    dataset's JSON category ids.
    """
    # assert isinstance(dataset, COCODataset)
    coco_results = []
    for image_id, prediction in enumerate(predictions):
        original_id = dataset.id_to_img_map[image_id]
        if len(prediction) == 0:
            continue

        # TODO replace with get_img_info?
        img_record = dataset.coco.imgs[original_id]
        prediction = prediction.resize((img_record["width"], img_record["height"]))
        prediction = prediction.convert("xywh")

        boxes = prediction.bbox.tolist()
        scores = prediction.get_field("scores").tolist()
        labels = prediction.get_field("labels").tolist()

        for box, score, label in zip(boxes, scores, labels):
            coco_results.append(
                {
                    "image_id": original_id,
                    "category_id": dataset.contiguous_category_id_to_json_id[label],
                    "bbox": box,
                    "score": score,
                }
            )
    return coco_results
def prepare_for_coco_segmentation(predictions, dataset):
    """Convert predictions into COCO RLE segmentation results.

    Pastes per-box mask crops into full images, and — in two-stage mode —
    rotates each mask back by the stored inverse angle and recomputes a
    tight xyxy bounding box from the rotated mask.

    Returns:
        (coco_results, boxes): the COCO-format result dicts and, for
        two-stage runs, one list of recomputed [xmin, ymin, xmax, ymax]
        boxes per image (empty list otherwise).
    """
    import pycocotools.mask as mask_util
    import numpy as np

    masker = Masker(threshold=0.5, padding=1)
    # assert isinstance(dataset, COCODataset)
    # Presence of the 'angle' field on the first prediction signals that
    # compute_on_dataset ran in two-stage (de-rotation) mode.
    two_stage = 'angle' in predictions[0].extra_fields.keys()
    coco_results = []
    boxes = []
    for image_id, prediction in tqdm(enumerate(predictions)):
        original_id = dataset.id_to_img_map[image_id]
        if len(prediction) == 0:
            continue

        # Size before resizing, used below as the rotation/resize target.
        width_ori, height_ori = prediction.size

        # TODO replace with get_img_info?
        image_width = dataset.coco.imgs[original_id]["width"]
        image_height = dataset.coco.imgs[original_id]["height"]
        prediction = prediction.resize((image_width, image_height))
        masks = prediction.get_field("mask")

        if two_stage:
            angle = prediction.get_field('angle')
        # t = time.time()
        # Paste per-box mask crops into full-image binary masks.
        masks = masker(masks, prediction)

        if two_stage:
            # Rotate every mask back to the original orientation and derive
            # a tight bounding box from the rotated mask.
            num_mask = masks.shape[0]
            boxes_i = []
            for m in range(num_mask):
                mask = masks[m, 0, :, :].numpy()
                mask = Image.fromarray(mask)
                mask = mask.rotate(angle)
                mask_re = mask.resize((width_ori, height_ori))
                mask = np.asarray(mask)
                masks[m, 0, :, :] = torch.from_numpy(mask)
                mask_re = np.asarray(mask_re)
                # NOTE(review): .min()/.max() raise on an all-zero rotated
                # mask (empty np.where result) — confirm masks are nonempty.
                idx_y, idx_x = np.where(mask_re==1)
                xmin = idx_x.min()
                xmax = idx_x.max()
                ymin = idx_y.min()
                ymax = idx_y.max()
                boxes_i.append([xmin, ymin, xmax, ymax])
            boxes.append(boxes_i)

        # logger.info('Time mask: {}'.format(time.time() - t))
        # prediction = prediction.convert('xywh')

        # boxes = prediction.bbox.tolist()
        scores = prediction.get_field("scores").tolist()
        labels = prediction.get_field("labels").tolist()

        # rles = prediction.get_field('mask')
        # Encode each full-image mask as COCO run-length encoding.
        rles = [
            mask_util.encode(np.array(mask[0, :, :, np.newaxis], order="F"))[0]
            for mask in masks
        ]
        for rle in rles:
            rle["counts"] = rle["counts"].decode("utf-8")

        mapped_labels = [dataset.contiguous_category_id_to_json_id[i] for i in labels]

        coco_results.extend(
            [
                {
                    "image_id": original_id,
                    "category_id": mapped_labels[k],
                    "segmentation": rle,
                    "score": scores[k],
                }
                for k, rle in enumerate(rles)
            ]
        )
    return coco_results, boxes
# inspired from Detectron
def evaluate_box_proposals(
    predictions, dataset, thresholds=None, area="all", limit=None
):
    """Evaluate detection proposal recall metrics. This function is a much
    faster alternative to the official COCO API recall evaluation code. However,
    it produces slightly different results.

    Greedily matches proposals to ground-truth boxes (each gt box gets its
    best remaining proposal), records per-gt best IoU, then computes recall
    at a sweep of IoU thresholds.

    Args:
        predictions: per-image BoxLists with an "objectness" field.
        dataset: COCO-style dataset providing annotations.
        thresholds: IoU thresholds; defaults to 0.5:0.05:0.95.
        area: one of the keys of ``areas`` below, restricting gt box sizes.
        limit: if set, evaluate only the top-``limit`` proposals per image.
    """
    # Record max overlap value for each gt box
    # Return vector of overlap values
    areas = {
        "all": 0,
        "small": 1,
        "medium": 2,
        "large": 3,
        "96-128": 4,
        "128-256": 5,
        "256-512": 6,
        "512-inf": 7,
    }
    area_ranges = [
        [0 ** 2, 1e5 ** 2],  # all
        [0 ** 2, 32 ** 2],  # small
        [32 ** 2, 96 ** 2],  # medium
        [96 ** 2, 1e5 ** 2],  # large
        [96 ** 2, 128 ** 2],  # 96-128
        [128 ** 2, 256 ** 2],  # 128-256
        [256 ** 2, 512 ** 2],  # 256-512
        [512 ** 2, 1e5 ** 2],
    ]  # 512-inf
    assert area in areas, "Unknown area range: {}".format(area)
    area_range = area_ranges[areas[area]]
    gt_overlaps = []
    num_pos = 0

    for image_id, prediction in enumerate(predictions):
        original_id = dataset.id_to_img_map[image_id]

        # TODO replace with get_img_info?
        image_width = dataset.coco.imgs[original_id]["width"]
        image_height = dataset.coco.imgs[original_id]["height"]
        prediction = prediction.resize((image_width, image_height))

        # sort predictions in descending order
        # TODO maybe remove this and make it explicit in the documentation
        inds = prediction.get_field("objectness").sort(descending=True)[1]
        prediction = prediction[inds]

        ann_ids = dataset.coco.getAnnIds(imgIds=original_id)
        anno = dataset.coco.loadAnns(ann_ids)
        # Crowd annotations are excluded from recall computation.
        gt_boxes = [obj["bbox"] for obj in anno if obj["iscrowd"] == 0]
        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
        gt_boxes = BoxList(gt_boxes, (image_width, image_height), mode="xywh").convert(
            "xyxy"
        )
        gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0])

        if len(gt_boxes) == 0:
            continue

        # Keep only ground-truth boxes inside the requested area band.
        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
        gt_boxes = gt_boxes[valid_gt_inds]

        num_pos += len(gt_boxes)

        if len(gt_boxes) == 0:
            continue

        if len(prediction) == 0:
            continue

        if limit is not None and len(prediction) > limit:
            prediction = prediction[:limit]

        overlaps = boxlist_iou(prediction, gt_boxes)

        _gt_overlaps = torch.zeros(len(gt_boxes))
        for j in range(min(len(prediction), len(gt_boxes))):
            # find which proposal box maximally covers each gt box
            # and get the iou amount of coverage for each gt box
            max_overlaps, argmax_overlaps = overlaps.max(dim=0)

            # find which gt box is 'best' covered (i.e. 'best' = most iou)
            gt_ovr, gt_ind = max_overlaps.max(dim=0)
            assert gt_ovr >= 0
            # find the proposal box that covers the best covered gt box
            box_ind = argmax_overlaps[gt_ind]
            # record the iou coverage of this gt box
            _gt_overlaps[j] = overlaps[box_ind, gt_ind]
            assert _gt_overlaps[j] == gt_ovr
            # mark the proposal box and the gt box as used
            overlaps[box_ind, :] = -1
            overlaps[:, gt_ind] = -1

        # append recorded iou coverage level
        gt_overlaps.append(_gt_overlaps)
    gt_overlaps = torch.cat(gt_overlaps, dim=0)
    gt_overlaps, _ = torch.sort(gt_overlaps)

    if thresholds is None:
        step = 0.05
        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
    recalls = torch.zeros_like(thresholds)
    # compute recall for each iou threshold
    for i, t in enumerate(thresholds):
        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
    # ar = 2 * np.trapz(recalls, thresholds)
    # Average recall is the mean over the IoU-threshold sweep.
    ar = recalls.mean()
    return {
        "ar": ar,
        "recalls": recalls,
        "thresholds": thresholds,
        "gt_overlaps": gt_overlaps,
        "num_pos": num_pos,
    }
def evaluate_predictions_on_coco(
    coco_gt, coco_results, json_result_file, iou_type="bbox"
):
    """Write results to JSON and score them with the official COCOeval.

    Returns the COCOeval object after evaluate/accumulate/summarize so the
    caller can read ``.stats``.
    """
    import json

    with open(json_result_file, "w") as f:
        json.dump(coco_results, f)

    from pycocotools.cocoeval import COCOeval

    coco_dt = coco_gt.loadRes(str(json_result_file))
    # coco_dt = coco_gt.loadRes(coco_results)

    evaluator = COCOeval(coco_gt, coco_dt, iou_type)
    evaluator.evaluate()
    evaluator.accumulate()
    evaluator.summarize()
    return evaluator
def _accumulate_predictions_from_multiple_gpus(predictions_per_gpu):
    """Gather per-GPU prediction dicts onto the main process as one list.

    Returns None on non-main processes; on the main process returns the
    predictions ordered by image index.
    """
    gathered = scatter_gather(predictions_per_gpu)
    if not is_main_process():
        return
    # merge the list of dicts
    merged = {}
    for partial in gathered:
        merged.update(partial)
    # convert a dict where the key is the index in a list
    image_ids = sorted(merged)
    if len(image_ids) != image_ids[-1] + 1:
        # A gap in the id sequence means some images were never evaluated.
        logger = logging.getLogger("maskrcnn_benchmark.inference")
        logger.warning(
            "Number of images that were gathered from multiple processes is not "
            "a contiguous set. Some images might be missing from the evaluation"
        )

    # convert to a list
    return [merged[i] for i in image_ids]
class COCOResults(object):
    """Tabulates COCOeval summary stats per evaluation type."""

    # Metric names in the exact order COCOeval.stats reports them,
    # per supported iou type.
    METRICS = {
        "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl"],
        "box_proposal": [
            "AR@100",
            "ARs@100",
            "ARm@100",
            "ARl@100",
            "AR@1000",
            "ARs@1000",
            "ARm@1000",
            "ARl@1000",
        ],
        "keypoint": ["AP", "AP50", "AP75", "APm", "APl"],
    }

    def __init__(self, *iou_types):
        """Initialise every requested metric to the sentinel value -1."""
        allowed_types = ("box_proposal", "bbox", "segm")
        assert all(iou_type in allowed_types for iou_type in iou_types)
        self.results = OrderedDict(
            (iou_type,
             OrderedDict((metric, -1) for metric in COCOResults.METRICS[iou_type]))
            for iou_type in iou_types
        )

    def update(self, coco_eval):
        """Copy the summary stats of a finished COCOeval into the table."""
        if coco_eval is None:
            return
        from pycocotools.cocoeval import COCOeval

        assert isinstance(coco_eval, COCOeval)
        stats = coco_eval.stats
        iou_type = coco_eval.params.iouType
        table = self.results[iou_type]
        for idx, metric in enumerate(COCOResults.METRICS[iou_type]):
            table[metric] = stats[idx]

    def __repr__(self):
        # TODO make it pretty
        return repr(self.results)
def check_expected_results(results, expected_results, sigma_tol):
    """Log PASS/FAIL for each metric against its expected mean +/- tolerance.

    ``expected_results`` is an iterable of (task, metric, (mean, std));
    a value passes when it lies strictly inside mean +/- sigma_tol * std.
    No-op when ``expected_results`` is empty.
    """
    if not expected_results:
        return

    logger = logging.getLogger("maskrcnn_benchmark.inference")
    for task, metric, (mean, std) in expected_results:
        actual_val = results.results[task][metric]
        lo = mean - sigma_tol * std
        hi = mean + sigma_tol * std
        msg = (
            "{} > {} sanity check (actual vs. expected): "
            "{:.3f} vs. mean={:.4f}, std={:.4}, range=({:.4f}, {:.4f})"
        ).format(task, metric, actual_val, mean, std, lo, hi)
        if lo < actual_val < hi:
            logger.info("PASS: " + msg)
        else:
            logger.error("FAIL: " + msg)
def inference_aurora(
    model,
    data_loader,
    iou_types=("bbox",),
    box_only=False,
    device="cuda",
    expected_results=(),
    expected_results_sigma_tol=4,
    output_folder=None,
    two_stage=True,
):
    """End-to-end evaluation driver for the aurora detection model.

    Runs inference over the dataset (optionally with two-stage de-rotation),
    gathers predictions across processes, converts them to COCO format,
    scores them with COCOeval, and optionally saves artifacts.

    Returns:
        (results, coco_results, predictions) on the main process, or None on
        worker processes / in box_only mode.
    """
    # convert to a torch.device for efficiency
    device = torch.device(device)
    # NOTE(review): torch.distributed.deprecated is the pre-1.0 distributed
    # API — confirm the pinned torch version still ships it.
    num_devices = (
        torch.distributed.deprecated.get_world_size()
        if torch.distributed.deprecated.is_initialized()
        else 1
    )
    logger = logging.getLogger("maskrcnn_benchmark.inference_aurora")
    dataset = data_loader.dataset
    logger.info("Start evaluation on {} images".format(len(dataset)))
    start_time = time.time()
    predictions = compute_on_dataset(model, data_loader, device, two_stage=two_stage)
    # wait for all processes to complete before measuring the time
    synchronize()
    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=total_time))
    logger.info(
        "Total inference time: {} ({} s / img per device, on {} devices)".format(
            total_time_str, total_time * num_devices / len(dataset), num_devices
        )
    )

    predictions = _accumulate_predictions_from_multiple_gpus(predictions)
    if not is_main_process():
        # Worker processes return None; only rank 0 evaluates.
        return

    if output_folder:
        torch.save(predictions, os.path.join(output_folder, "predictions.pth"))

    if box_only:
        # Proposal-only evaluation: report average recall at several
        # proposal limits and gt area bands, then exit early.
        logger.info("Evaluating bbox proposals")
        areas = {"all": "", "small": "s", "medium": "m", "large": "l"}
        res = COCOResults("box_proposal")
        for limit in [100, 1000]:
            for area, suffix in areas.items():
                stats = evaluate_box_proposals(
                    predictions, dataset, area=area, limit=limit
                )
                key = "AR{}@{:d}".format(suffix, limit)
                res.results["box_proposal"][key] = stats["ar"].item()
        logger.info(res)
        check_expected_results(res, expected_results, expected_results_sigma_tol)
        if output_folder:
            torch.save(res, os.path.join(output_folder, "box_proposals.pth"))
        return

    logger.info("Preparing results for COCO format")
    coco_results = {}
    if "segm" in iou_types:
        logger.info("Preparing segm results")
        coco_results["segm"], boxes = prepare_for_coco_segmentation(predictions, dataset)
        if two_stage:
            # Replace each prediction's boxes with the tight boxes recomputed
            # from the de-rotated masks, so bbox evaluation matches them.
            num_predictions = len(predictions)
            assert len(boxes) == num_predictions
            for p in range(num_predictions):
                boxes_p = torch.from_numpy(np.array(boxes[p]).astype(np.float32))
                predictions[p].bbox = boxes_p

    if "bbox" in iou_types:
        logger.info("Preparing bbox results")
        coco_results["bbox"] = prepare_for_coco_detection(predictions, dataset)

    results = COCOResults(*iou_types)
    logger.info("Evaluating predictions")
    for iou_type in iou_types:
        # The temp file is only used when no output folder is given; with an
        # output folder the JSON is written next to the other artifacts.
        with tempfile.NamedTemporaryFile() as f:
            file_path = f.name
            if output_folder:
                file_path = os.path.join(output_folder, iou_type + ".json")
            res = evaluate_predictions_on_coco(
                dataset.coco, coco_results[iou_type], file_path, iou_type
            )
            results.update(res)
    logger.info(results)
    check_expected_results(results, expected_results, expected_results_sigma_tol)
    if output_folder:
        torch.save(results, os.path.join(output_folder, "coco_results.pth"))
    return results, coco_results, predictions
| 34.066784 | 89 | 0.600547 |
d6388d08fecfd158d3274088288cb459c0068d72 | 2,191 | py | Python | meb/MEBLibSlidingWindow.py | vibhatha/PSGDSVMPY | 69ed88f5db8d9a250ee944f44b88e54351f8696f | [
"Apache-2.0"
] | null | null | null | meb/MEBLibSlidingWindow.py | vibhatha/PSGDSVMPY | 69ed88f5db8d9a250ee944f44b88e54351f8696f | [
"Apache-2.0"
] | null | null | null | meb/MEBLibSlidingWindow.py | vibhatha/PSGDSVMPY | 69ed88f5db8d9a250ee944f44b88e54351f8696f | [
"Apache-2.0"
] | null | null | null | import sys
import os
HOME=os.environ['HOME']
sys.path.insert(1,HOME+'/github/StreamingSVM')
from operations import LoadLibsvm
import numpy as np
from matplotlib import pyplot as plt
X = np.array([[1,1],[2,1],[3,1],[4,1],[1,5],[2,6],[3,7],[4,5]])
y = np.array([1,1,1,1,-1,-1,-1,-1])
X_test = np.array([[1,1.25],[2.1,1.15],[3.1,1.45],[4.23,1.21],[1.3,5.25],[2.11,6.24],[3.3,7.24],[4.212,5.78]])
#plt.scatter(X[:,0],X[:,1])
#plt.show()
dataset = 'ijcnn1'
training_filepath = '/home/vibhatha/data/svm/'+dataset+'/training.csv'
testing_filepath = '/home/vibhatha/data/svm/'+dataset+'/testing.csv'
n_features = 300
split = False
training_loader = LoadLibsvm.LoadLibSVM(filename=training_filepath, n_features=n_features)
x_training = []
y_training = []
x_testing = []
y_testing = []
if split == True:
x_all, y_all = training_loader.load_all_data()
ratio = 0.8
size = len(x_all)
split_index = int(size * ratio)
x_training = x_all[:split_index]
x_testing = x_all[split_index:]
y_training = y_all[:split_index]
y_testing = y_all[split_index:]
else :
training_loader = LoadLibsvm.LoadLibSVM(filename=training_filepath, n_features=n_features)
testing_loader = LoadLibsvm.LoadLibSVM(filename=testing_filepath, n_features=n_features)
x_training, y_training = training_loader.load_all_data()
x_testing, y_testing = testing_loader.load_all_data()
print(x_training.shape)
X = x_training
y = y_training
M = 1
R = 0
e2 = 1
w = y[0] * X[0]
C = 1
i=0
L=200
while (i < len(X)-L):
for j in range(i, i + L):
d = np.sqrt(np.linalg.norm(w-y[j]*X[j]) + e2 + 1/C)
if d >= R:
w = w + 0.5 * (1 - R/d) * (y[j] * X[j] - w)
R = R + 0.5 * (d-R)
e2 = e2 * (1- (0.5) * (1- (R/d)))**2 + (0.5 * (1 - (R/d)))**2
M = M + 1
i = i + 1
print("R : ", R)
print("W : ", w)
print("M : ", M)
#plt.scatter(X[:,0],X[:,1])
#plt.show()
labels = []
for x in x_testing:
label = np.sign(np.dot(w.T, x))
labels.append(label)
y_pred = np.array(labels)
print(labels)
print(y_testing)
correct = (y_pred == y_testing).sum()
total = len(y_pred)
acc = float(correct) / float(total) * 100.0
print("Acc : ", acc)
| 26.719512 | 110 | 0.617526 |
3ac9a2e0f44fca9da47e4ad0d8e4457a4cfdb12c | 6,377 | py | Python | train.py | ssaz5/pytorch-retinanet | 6963919387d31f8c7657bec671f3dbcb22556e32 | [
"Apache-2.0"
] | null | null | null | train.py | ssaz5/pytorch-retinanet | 6963919387d31f8c7657bec671f3dbcb22556e32 | [
"Apache-2.0"
] | null | null | null | train.py | ssaz5/pytorch-retinanet | 6963919387d31f8c7657bec671f3dbcb22556e32 | [
"Apache-2.0"
] | null | null | null | import time
import os
import copy
import argparse
import pdb
import collections
import sys
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.autograd import Variable
from torchvision import datasets, models, transforms
import torchvision
import model
from anchors import Anchors
import losses
from dataloader import CocoDataset, CSVDataset, collater, Resizer, AspectRatioBasedSampler, Augmenter, UnNormalizer, Normalizer
from torch.utils.data import Dataset, DataLoader
import coco_eval
import csv_eval
# assert torch.__version__.split('.')[1] == '4'
print('CUDA available: {}'.format(torch.cuda.is_available()))
def main(args=None):
    """Train a RetinaNet detector on a COCO- or CSV-format dataset.

    Parses CLI arguments, builds the dataset/dataloader pair, constructs a
    ResNet-backboned RetinaNet, and runs the training loop with per-epoch
    evaluation, LR scheduling on the mean epoch loss, and checkpointing.
    """
    parser = argparse.ArgumentParser(description='Simple training script for training a RetinaNet network.')

    parser.add_argument('--dataset', help='Dataset type, must be one of csv or coco.')
    parser.add_argument('--coco_path', help='Path to COCO directory')
    parser.add_argument('--csv_train', help='Path to file containing training annotations (see readme)')
    parser.add_argument('--csv_classes', help='Path to file containing class list (see readme)')
    parser.add_argument('--csv_val', help='Path to file containing validation annotations (optional, see readme)')

    parser.add_argument('--depth', help='Resnet depth, must be one of 18, 34, 50, 101, 152', type=int, default=50)
    parser.add_argument('--epochs', help='Number of epochs', type=int, default=100)

    parser = parser.parse_args(args)

    # Create the data loaders
    if parser.dataset == 'coco':

        if parser.coco_path is None:
            raise ValueError('Must provide --coco_path when training on COCO,')

        dataset_train = CocoDataset(parser.coco_path, set_name='train2017', transform=transforms.Compose([Normalizer(), Augmenter(), Resizer()]))
        dataset_val = CocoDataset(parser.coco_path, set_name='val2017', transform=transforms.Compose([Normalizer(), Resizer()]))

    elif parser.dataset == 'csv':

        # NOTE(review): these two error messages say "on COCO" but guard the
        # CSV path — the strings look copy-pasted from the branch above.
        if parser.csv_train is None:
            raise ValueError('Must provide --csv_train when training on COCO,')

        if parser.csv_classes is None:
            raise ValueError('Must provide --csv_classes when training on COCO,')

        dataset_train = CSVDataset(train_file=parser.csv_train, class_list=parser.csv_classes, transform=transforms.Compose([Normalizer(), Augmenter(), Resizer()]))

        if parser.csv_val is None:
            dataset_val = None
            print('No validation annotations provided.')
        else:
            dataset_val = CSVDataset(train_file=parser.csv_val, class_list=parser.csv_classes, transform=transforms.Compose([Normalizer(), Resizer()]))

    else:
        raise ValueError('Dataset type not understood (must be csv or coco), exiting.')

    # Batch by aspect ratio so padded image tensors waste less memory.
    sampler = AspectRatioBasedSampler(dataset_train, batch_size=2, drop_last=False)
    dataloader_train = DataLoader(dataset_train, num_workers=3, collate_fn=collater, batch_sampler=sampler)

    if dataset_val is not None:
        sampler_val = AspectRatioBasedSampler(dataset_val, batch_size=1, drop_last=False)
        dataloader_val = DataLoader(dataset_val, num_workers=3, collate_fn=collater, batch_sampler=sampler_val)

    # Create the model with an ImageNet-pretrained ResNet backbone.
    if parser.depth == 18:
        retinanet = model.resnet18(num_classes=dataset_train.num_classes(), pretrained=True)
    elif parser.depth == 34:
        retinanet = model.resnet34(num_classes=dataset_train.num_classes(), pretrained=True)
    elif parser.depth == 50:
        retinanet = model.resnet50(num_classes=dataset_train.num_classes(), pretrained=True)
    elif parser.depth == 101:
        retinanet = model.resnet101(num_classes=dataset_train.num_classes(), pretrained=True)
    elif parser.depth == 152:
        retinanet = model.resnet152(num_classes=dataset_train.num_classes(), pretrained=True)
    else:
        raise ValueError('Unsupported model depth, must be one of 18, 34, 50, 101, 152')

    use_gpu = True

    if use_gpu:
        retinanet = retinanet.cuda()

    retinanet = torch.nn.DataParallel(retinanet).cuda()

    retinanet.training = True

    optimizer = optim.Adam(retinanet.parameters(), lr=1e-5)

    # Reduce the learning rate when the mean epoch loss plateaus.
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=3, verbose=True)

    # Rolling window of recent iteration losses for progress reporting.
    loss_hist = collections.deque(maxlen=500)

    retinanet.train()
    # Keep BatchNorm statistics frozen (pretrained backbone).
    retinanet.module.freeze_bn()

    print('Num training images: {}'.format(len(dataset_train)))

    for epoch_num in range(parser.epochs):

        retinanet.train()
        retinanet.module.freeze_bn()

        epoch_loss = []

        for iter_num, data in enumerate(dataloader_train):
            # NOTE(review): the broad try/except below swallows any batch
            # failure (e.g. CUDA OOM) and continues training.
            try:
                optimizer.zero_grad()

                classification_loss, regression_loss = retinanet([data['img'].cuda().float(), data['annot']])

                classification_loss = classification_loss.mean()
                regression_loss = regression_loss.mean()

                loss = classification_loss + regression_loss

                if bool(loss == 0):
                    continue

                loss.backward()

                # Clip gradients to stabilise early training.
                torch.nn.utils.clip_grad_norm_(retinanet.parameters(), 0.1)

                optimizer.step()

                loss_hist.append(float(loss))

                epoch_loss.append(float(loss))

                print('Epoch: {} | Iteration: {} | Classification loss: {:1.5f} | Regression loss: {:1.5f} | Running loss: {:1.5f}'.format(epoch_num, iter_num, float(classification_loss), float(regression_loss), np.mean(loss_hist)))

                del classification_loss
                del regression_loss
            except Exception as e:
                print(e)
                continue

        if parser.dataset == 'coco':

            print('Evaluating dataset')

            coco_eval.evaluate_coco(dataset_val, retinanet)

        elif parser.dataset == 'csv' and parser.csv_val is not None:

            print('Evaluating dataset')

            # NOTE(review): mAP is computed but never used afterwards.
            mAP = csv_eval.evaluate(dataset_val, retinanet)

        # Step on the mean loss of the finished epoch.
        scheduler.step(np.mean(epoch_loss))

        torch.save(retinanet.module, '{}_retinanet_{}.pt'.format(parser.dataset, epoch_num))

    retinanet.eval()

    # NOTE(review): the .format(epoch_num) call is a no-op — the string has
    # no placeholder.
    torch.save(retinanet, 'model_final.pt'.format(epoch_num))

if __name__ == '__main__':
    main()
| 35.427778 | 232 | 0.674455 |
f3c0eea06fadf5f88784664c3998118ace7dd653 | 11,987 | py | Python | tencentcloud/tbm/v20180129/tbm_client.py | PlasticMem/tencentcloud-sdk-python | 666db85623d51d640a165907a19aef5fba53b38d | [
"Apache-2.0"
] | 465 | 2018-04-27T09:54:59.000Z | 2022-03-29T02:18:01.000Z | tencentcloud/tbm/v20180129/tbm_client.py | PlasticMem/tencentcloud-sdk-python | 666db85623d51d640a165907a19aef5fba53b38d | [
"Apache-2.0"
] | 91 | 2018-04-27T09:48:11.000Z | 2022-03-12T08:04:04.000Z | tencentcloud/tbm/v20180129/tbm_client.py | PlasticMem/tencentcloud-sdk-python | 666db85623d51d640a165907a19aef5fba53b38d | [
"Apache-2.0"
] | 232 | 2018-05-02T08:02:46.000Z | 2022-03-30T08:02:48.000Z | # -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.tbm.v20180129 import models
class TbmClient(AbstractClient):
_apiVersion = '2018-01-29'
_endpoint = 'tbm.tencentcloudapi.com'
_service = 'tbm'
def DescribeBrandCommentCount(self, request):
"""通过分析用户在评价品牌时用词的正负面情绪评分,返回品牌好评与差评评价条数,按天输出结果。
:param request: Request instance for DescribeBrandCommentCount.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandCommentCountRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandCommentCountResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandCommentCount", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandCommentCountResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandExposure(self, request):
"""监测品牌关键词命中文章标题或全文的文章篇数,按天输出数据。
:param request: Request instance for DescribeBrandExposure.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandExposureRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandExposureResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandExposure", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandExposureResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandMediaReport(self, request):
"""监测品牌关键词出现在媒体网站(新闻媒体、网络门户、政府网站、微信公众号、天天快报等)发布资讯标题和正文中的报道数。按天输出结果。
:param request: Request instance for DescribeBrandMediaReport.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandMediaReportRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandMediaReportResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandMediaReport", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandMediaReportResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandNegComments(self, request):
"""通过分析用户在评价品牌时用词的正负面情绪评分,返回品牌热门差评观点列表。
:param request: Request instance for DescribeBrandNegComments.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandNegCommentsRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandNegCommentsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandNegComments", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandNegCommentsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandPosComments(self, request):
"""通过分析用户在评价品牌时用词的正负面情绪评分,返回品牌热门好评观点列表。
:param request: Request instance for DescribeBrandPosComments.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandPosCommentsRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandPosCommentsResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandPosComments", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandPosCommentsResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandSocialOpinion(self, request):
"""检测品牌关键词出现在微博、QQ兴趣部落、论坛、博客等个人公开贡献资讯中的内容,每天聚合近30天热度最高的观点列表。
:param request: Request instance for DescribeBrandSocialOpinion.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandSocialOpinionRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandSocialOpinionResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandSocialOpinion", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandSocialOpinionResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeBrandSocialReport(self, request):
"""监测品牌关键词出现在微博、QQ兴趣部落、论坛、博客等个人公开贡献资讯中的条数。按天输出数据结果。
:param request: Request instance for DescribeBrandSocialReport.
:type request: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandSocialReportRequest`
:rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeBrandSocialReportResponse`
"""
try:
params = request._serialize()
body = self.call("DescribeBrandSocialReport", params)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeBrandSocialReportResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeIndustryNews(self, request):
    """Monitor customer-defined industry keywords across media sites
    (news media, portals, government sites, WeChat official accounts,
    Tiantian Kuaibao, etc.): returns the number of reports mentioning
    the keywords in titles/bodies, plus the article list, source
    channel, author and publish time.

    :param request: Request instance for DescribeIndustryNews.
    :type request: :class:`tencentcloud.tbm.v20180129.models.DescribeIndustryNewsRequest`
    :rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeIndustryNewsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeIndustryNews", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            # Success: deserialize the payload into the response model
            model = models.DescribeIndustryNewsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            # API-level error: surface code/message/request id to the caller
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # BUG FIX: `.message` does not exist on Python 3 exceptions;
            # wrap using the exception's type name and str() instead.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeUserPortrait(self, request):
    """Analyse users who publicly interacted with the brand's media
    (e.g. commented on brand news or voiced opinions on public social
    channels) and return distributions of portrait attributes such as
    gender, age, region, favourite celebrities and favourite shows.

    :param request: Request instance for DescribeUserPortrait.
    :type request: :class:`tencentcloud.tbm.v20180129.models.DescribeUserPortraitRequest`
    :rtype: :class:`tencentcloud.tbm.v20180129.models.DescribeUserPortraitResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeUserPortrait", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            # Success: deserialize the payload into the response model
            model = models.DescribeUserPortraitResponse()
            model._deserialize(response["Response"])
            return model
        else:
            # API-level error: surface code/message/request id to the caller
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # BUG FIX: `.message` does not exist on Python 3 exceptions;
            # wrap using the exception's type name and str() instead.
            raise TencentCloudSDKException(type(e).__name__, str(e))
bf21047df5516b43c4bb231a6c9dfd482ad66329 | 16,070 | py | Python | pyinfra/api/connectors/ssh.py | blarghmatey/pyinfra | b8287618d66a4e00963c88a3ef191c94e8320f70 | [
"MIT"
] | null | null | null | pyinfra/api/connectors/ssh.py | blarghmatey/pyinfra | b8287618d66a4e00963c88a3ef191c94e8320f70 | [
"MIT"
] | null | null | null | pyinfra/api/connectors/ssh.py | blarghmatey/pyinfra | b8287618d66a4e00963c88a3ef191c94e8320f70 | [
"MIT"
] | null | null | null | from __future__ import print_function, unicode_literals
from distutils.spawn import find_executable
from getpass import getpass
from os import path
from socket import (
error as socket_error,
gaierror,
)
import click
import six
from paramiko import (
AuthenticationException,
DSSKey,
ECDSAKey,
Ed25519Key,
MissingHostKeyPolicy,
PasswordRequiredException,
RSAKey,
SFTPClient,
SSHException,
)
import pyinfra
from pyinfra import logger
from pyinfra.api.exceptions import ConnectError, PyinfraError
from pyinfra.api.util import get_file_io, memoize
from .sshuserclient import SSHClient
from .util import (
get_sudo_password,
make_unix_command,
read_buffers_into_queue,
run_local_process,
split_combined_output,
write_stdin,
)
# Connector capability flag: this module can execute commands on hosts.
# NOTE(review): presumably consumed by pyinfra's connector discovery to
# distinguish execution connectors from inventory-only ones — confirm usage.
EXECUTION_CONNECTOR = True
def make_names_data(hostname):
    """Yield the (name, data, groups) tuple for an ``@ssh/`` target host."""
    name = '@ssh/{0}'.format(hostname)
    data = {'ssh_hostname': hostname}
    yield name, data, []
def _raise_connect_error(host, message, data):
    """Raise a ``ConnectError`` formatted as ``"<message> (<data>)"``."""
    raise ConnectError('{0} ({1})'.format(message, data))
def _load_private_key_file(filename, key_filename, key_password):
    """Try to parse *filename* as each supported paramiko key type.

    Returns the first key object that parses successfully. Encrypted keys
    are retried with ``key_password`` (prompting for one in CLI mode when
    none is set); if no key class accepts the file, the last parse error
    (or a generic "Invalid key" error) is raised.
    """
    # Fallback error, used if no key class can parse the file at all
    exception = PyinfraError('Invalid key: {0}'.format(filename))

    for key_cls in (RSAKey, DSSKey, ECDSAKey, Ed25519Key):
        try:
            # First attempt: assume the key is not password protected
            return key_cls.from_private_key_file(
                filename=filename,
            )

        except PasswordRequiredException:
            if not key_password:
                # If password is not provided, but we're in CLI mode, ask for it. I'm not a
                # huge fan of having CLI specific code in here, but it doesn't really fit
                # anywhere else without duplicating lots of key related code into cli.py.
                if pyinfra.is_cli:
                    key_password = getpass(
                        'Enter password for private key: {0}: '.format(
                            key_filename,
                        ),
                    )

                # API mode and no password? We can't continue!
                else:
                    raise PyinfraError(
                        'Private key file ({0}) is encrypted, set ssh_key_password to '
                        'use this key'.format(key_filename),
                    )

            # Now, try opening the key with the password
            try:
                return key_cls.from_private_key_file(
                    filename=filename,
                    password=key_password,
                )
            except SSHException:
                # The key *is* this type (it asked for a password), so a
                # parse failure here means the password must be wrong
                raise PyinfraError(
                    'Incorrect password for private key: {0}'.format(
                        key_filename,
                    ),
                )

        except SSHException as e:  # key does not match key_cls type
            # Remember the most recent parse error and try the next key class
            exception = e

    # Every key class rejected the file
    raise exception
def _get_private_key(state, key_filename, key_password):
    """Locate, load, cache and return the private key for *key_filename*.

    Keys are cached on ``state.private_keys`` so repeated connections with
    the same key file only parse it (and prompt for a password) once. Any
    matching OpenSSH certificate file next to the key is loaded onto it.
    """
    # Cache hit: key was already loaded for a previous host
    if key_filename in state.private_keys:
        return state.private_keys[key_filename]

    ssh_key_filenames = [
        # Global from executed directory
        path.expanduser(key_filename),
    ]

    # Relative to the deploy
    if state.deploy_dir:
        ssh_key_filenames.append(
            path.join(state.deploy_dir, key_filename),
        )

    key_file_exists = False

    for filename in ssh_key_filenames:
        if not path.isfile(filename):
            continue

        key_file_exists = True

        try:
            key = _load_private_key_file(filename, key_filename, key_password)
            break
        except SSHException:
            # Not parseable at this location; try the next candidate path
            pass

    # No break, so no key found
    else:
        if not key_file_exists:
            raise PyinfraError('No such private key file: {0}'.format(key_filename))

        # TODO: upgrade min paramiko version to 2.7 and remove this (pyinfra v2)
        extra_info = ''
        from pkg_resources import get_distribution, parse_version
        if get_distribution('paramiko').parsed_version < parse_version('2.7'):
            extra_info = (
                '\n Paramiko versions under 2.7 do not support the latest OpenSSH key formats,'
                ' upgrading may fix this error.'
                '\n For more information, see this issue: '
                'https://github.com/Fizzadar/pyinfra/issues/548'
            )

        raise PyinfraError('Invalid private key file: {0}{1}'.format(key_filename, extra_info))

    # Load any certificate, names from OpenSSH:
    # https://github.com/openssh/openssh-portable/blob/049297de975b92adcc2db77e3fb7046c0e3c695d/ssh-keygen.c#L2453  # noqa: E501
    for certificate_filename in (
        '{0}-cert.pub'.format(key_filename),
        '{0}.pub'.format(key_filename),
    ):
        if path.isfile(certificate_filename):
            key.load_certificate(certificate_filename)

    # Cache for subsequent hosts using the same key file
    state.private_keys[key_filename] = key
    return key
def _make_paramiko_kwargs(state, host):
    """Assemble the keyword arguments for paramiko's ``SSHClient.connect``.

    Prefers explicit password auth, then an explicit private key, and
    finally falls back to SSH agents / user keys.
    """
    kwargs = {
        'allow_agent': False,
        'look_for_keys': False,
        'hostname': host.data.ssh_hostname or host.name,
    }

    # Optional connection settings - only passed through when truthy
    optional_settings = (
        ('username', host.data.ssh_user),
        ('port', int(host.data.ssh_port or 0)),
        ('timeout', state.config.CONNECT_TIMEOUT),
    )
    kwargs.update({
        setting: setting_value
        for setting, setting_value in optional_settings
        if setting_value
    })

    if host.data.ssh_password:
        # Password auth (boo!)
        kwargs['password'] = host.data.ssh_password
    elif host.data.ssh_key:
        # Key auth!
        kwargs['pkey'] = _get_private_key(
            state,
            key_filename=host.data.ssh_key,
            key_password=host.data.ssh_key_password,
        )
    else:
        # No key or password, so let's have paramiko look for SSH agents and user keys
        kwargs['allow_agent'] = True
        kwargs['look_for_keys'] = True

    return kwargs
def connect(state, host):
    '''
    Connect to a single host. Returns the SSH client if successful. Stateless
    by design so can be run in parallel. All low-level connection failures
    are re-raised as ``ConnectError`` via ``_raise_connect_error``.
    '''

    kwargs = _make_paramiko_kwargs(state, host)
    logger.debug('Connecting to: {0} ({1})'.format(host.name, kwargs))

    # Hostname is passed to connect positionally, so remove it from the kwargs
    hostname = kwargs.pop('hostname')

    try:
        # Create new client & connect to the host
        client = SSHClient()
        client.set_missing_host_key_policy(MissingHostKeyPolicy())
        client.connect(hostname, **kwargs)
        return client

    except AuthenticationException:
        # Build a human readable summary of the auth settings that failed
        auth_kwargs = {}

        for key, value in kwargs.items():
            if key in ('username', 'password'):
                auth_kwargs[key] = value
                continue

            if key == 'pkey' and value:
                # Show the key filename rather than the parsed key object
                auth_kwargs['key'] = host.data.ssh_key

        auth_args = ', '.join(
            '{0}={1}'.format(key, value)
            for key, value in auth_kwargs.items()
        )

        _raise_connect_error(host, 'Authentication error', auth_args)

    except SSHException as e:
        _raise_connect_error(host, 'SSH error', e)

    except gaierror:
        _raise_connect_error(host, 'Could not resolve hostname', hostname)

    except socket_error as e:
        _raise_connect_error(host, 'Could not connect', e)

    except EOFError as e:
        _raise_connect_error(host, 'EOF error', e)
def run_shell_command(
    state, host, command,
    get_pty=False,
    timeout=None,
    stdin=None,
    success_exit_codes=None,
    print_output=False,
    print_input=False,
    return_combined_output=False,
    use_sudo_password=False,
    **command_kwargs
):
    '''
    Execute a command on the specified host.

    Args:
        state (``pyinfra.api.State`` obj): state object for this command
        host (``pyinfra.api.Host`` obj): the target host
        command (string): actual command to execute
        get_pty (boolean): whether to get a PTY before executing the command
        timeout (int): timeout for this command to complete before erroring
        stdin: data written to the command's stdin, if given
        success_exit_codes (list): exit codes to treat as success (default: 0 only)
        print_output (boolean): print the command output as it is read
        print_input (boolean): echo the command before executing it
        return_combined_output (boolean): return one interleaved output list
            instead of separate stdout/stderr lists
        use_sudo_password: fetch/use a sudo password for this command
        command_kwargs: passed through to ``make_unix_command`` (sudo,
            sudo_user, su_user, env, ...)

    Returns:
        tuple: (status, stdout, stderr) where status is a boolean (exit code
        in the accepted codes) and stdout/stderr are lists of output lines;
        or (status, combined_output) when ``return_combined_output`` is set.
    '''

    if use_sudo_password:
        # Resolve the sudo password (may itself run commands/upload an askpass
        # helper) and pass it down to the command wrapper
        command_kwargs['use_sudo_password'] = get_sudo_password(
            state, host, use_sudo_password,
            run_shell_command=run_shell_command,
            put_file=put_file,
        )

    # Wrap the command (sudo/su/env/etc) and extract the raw shell string
    command = make_unix_command(command, **command_kwargs)
    actual_command = command.get_raw_value()

    # NOTE: the command *object* (not the raw value) is logged/echoed below
    logger.debug('Running command on {0}: (pty={1}) {2}'.format(
        host.name, get_pty, command,
    ))

    if print_input:
        click.echo('{0}>>> {1}'.format(host.print_prefix, command), err=True)

    # Run it! Get stdout, stderr & the underlying channel
    stdin_buffer, stdout_buffer, stderr_buffer = host.connection.exec_command(
        actual_command,
        get_pty=get_pty,
    )

    if stdin:
        write_stdin(stdin, stdin_buffer)

    # Read both buffers (optionally printing as we go) into one ordered queue
    combined_output = read_buffers_into_queue(
        stdout_buffer,
        stderr_buffer,
        timeout=timeout,
        print_output=print_output,
        print_prefix=host.print_prefix,
    )

    logger.debug('Waiting for exit status...')
    exit_status = stdout_buffer.channel.recv_exit_status()
    logger.debug('Command exit status: {0}'.format(exit_status))

    # Success is exit status 0, unless custom success codes were provided
    if success_exit_codes:
        status = exit_status in success_exit_codes
    else:
        status = exit_status == 0

    if return_combined_output:
        return status, combined_output

    stdout, stderr = split_combined_output(combined_output)
    return status, stdout, stderr
@memoize
def _get_sftp_connection(host):
    # Memoized: one SFTP session per host, reusing the existing SSH transport
    transport = host.connection.get_transport()

    try:
        return SFTPClient.from_transport(transport)
    except SSHException as e:
        # Most commonly the server has the SFTP subsystem disabled; chain the
        # original exception for debugging (six: py2/py3 compatible raise-from)
        six.raise_from(ConnectError((
            'Unable to establish SFTP connection. Check that the SFTP subsystem '
            'for the SSH service at {0} is enabled.'
        ).format(host)), e)
def _get_file(host, remote_filename, filename_or_io):
    """Stream a remote file into a local file/IO object over SFTP."""
    with get_file_io(filename_or_io, 'wb') as local_file:
        sftp_client = _get_sftp_connection(host)
        sftp_client.getfo(remote_filename, local_file)
def get_file(
    state, host, remote_filename, filename_or_io,
    sudo=False, sudo_user=None, su_user=None,
    print_output=False, print_input=False,
    **command_kwargs
):
    '''
    Download a file from the remote host using SFTP. Supports download files
    with sudo by copying to a temporary directory with read permissions,
    downloading and then removing the copy. Returns True on success, False
    when any of the sudo copy/remove commands fail.
    '''

    if sudo or su_user:
        # Get temp file location
        temp_file = state.get_temp_filename(remote_filename)

        # Copy the file to the tempfile location and make the *copy* readable
        # so the (unprivileged) SSH user can SFTP it down.
        # BUG FIX: `chmod +r` previously targeted `{0}` (the original remote
        # file) instead of `{1}` (the temp copy), so root-only files remained
        # unreadable to the SFTP user and the source file's permissions were
        # modified as a side effect.
        command = 'cp {0} {1} && chmod +r {1}'.format(remote_filename, temp_file)
        copy_status, _, stderr = run_shell_command(
            state, host, command,
            sudo=sudo, sudo_user=sudo_user, su_user=su_user,
            print_output=print_output,
            print_input=print_input,
            **command_kwargs
        )

        if copy_status is False:
            logger.error('File download copy temp error: {0}'.format('\n'.join(stderr)))
            return False

        try:
            _get_file(host, temp_file, filename_or_io)

        # Ensure that, even if we encounter an error, we (attempt to) remove the
        # temporary copy of the file.
        finally:
            remove_status, _, stderr = run_shell_command(
                state, host, 'rm -f {0}'.format(temp_file),
                sudo=sudo, sudo_user=sudo_user, su_user=su_user,
                print_output=print_output,
                print_input=print_input,
                **command_kwargs
            )

        if remove_status is False:
            logger.error('File download remove temp error: {0}'.format('\n'.join(stderr)))
            return False

    else:
        # No sudo/su: SFTP the file down directly
        _get_file(host, remote_filename, filename_or_io)

    if print_output:
        click.echo(
            '{0}file downloaded: {1}'.format(host.print_prefix, remote_filename),
            err=True,
        )

    return True
def _put_file(host, filename_or_io, remote_location):
    """Stream a local file/IO object up to a remote path over SFTP."""
    with get_file_io(filename_or_io) as local_file:
        sftp_client = _get_sftp_connection(host)
        sftp_client.putfo(local_file, remote_location)
def put_file(
    state, host, filename_or_io, remote_filename,
    sudo=False, sudo_user=None, su_user=None,
    print_output=False, print_input=False,
    **command_kwargs
):
    '''
    Upload file-ios to the specified host using SFTP. Supports uploading files
    with sudo by uploading to a temporary directory then moving & chowning.
    Returns True on success, False when the sudo move/chown command fails.
    '''

    # sudo/su are a little more complicated, as you can only sftp with the SSH
    # user connected, so upload to tmp and copy/chown w/sudo and/or su_user
    if sudo or su_user:
        # Get temp file location
        temp_file = state.get_temp_filename(remote_filename)
        _put_file(host, filename_or_io, temp_file)

        # Execute run_shell_command w/sudo and/or su_user
        command = 'mv {0} {1}'.format(temp_file, remote_filename)

        # Move it to the su_user if present
        if su_user:
            command = '{0} && chown {1} {2}'.format(command, su_user, remote_filename)

        # Otherwise any sudo_user
        elif sudo_user:
            command = '{0} && chown {1} {2}'.format(command, sudo_user, remote_filename)

        status, _, stderr = run_shell_command(
            state, host, command,
            sudo=sudo, sudo_user=sudo_user, su_user=su_user,
            print_output=print_output,
            print_input=print_input,
            **command_kwargs
        )

        if status is False:
            logger.error('File upload error: {0}'.format('\n'.join(stderr)))
            return False

    # No sudo and no su_user, so just upload it!
    else:
        _put_file(host, filename_or_io, remote_filename)

    if print_output:
        click.echo(
            '{0}file uploaded: {1}'.format(host.print_prefix, remote_filename),
            err=True,
        )

    return True
def check_can_rsync(host):
    """Raise ``NotImplementedError`` when rsync cannot be used for this host."""
    unsupported_auth = (
        (host.data.ssh_key_password,
            'Rsync does not currently work with SSH keys needing passwords.'),
        (host.data.ssh_password,
            'Rsync does not currently work with SSH passwords.'),
    )
    for blocker, reason in unsupported_auth:
        if blocker:
            raise NotImplementedError(reason)

    # Finally, rsync must actually exist locally
    if not find_executable('rsync'):
        raise NotImplementedError('The `rsync` binary is not available on this system.')
def rsync(
    state, host, src, dest, flags,
    print_output=False, print_input=False,
    sudo=False,
    sudo_user=None,
    **ignored_kwargs
):
    # Runs a *local* rsync process pushing src to the remote host over SSH.
    # Raises IOError (with stderr) on rsync failure; returns True on success.
    hostname = host.data.ssh_hostname or host.name

    user = ''
    if host.data.ssh_user:
        user = '{0}@'.format(host.data.ssh_user)

    # Extra flags for the ssh transport (port/identity file)
    ssh_flags = []

    port = host.data.ssh_port
    if port:
        ssh_flags.append('-p {0}'.format(port))

    ssh_key = host.data.ssh_key
    if ssh_key:
        ssh_flags.append('-i {0}'.format(ssh_key))

    # Wrap the remote-side rsync binary with sudo (and sudo user) as needed
    remote_rsync_command = 'rsync'
    if sudo:
        remote_rsync_command = 'sudo rsync'
        if sudo_user:
            remote_rsync_command = 'sudo -u {0} rsync'.format(sudo_user)

    rsync_command = (
        'rsync {rsync_flags} '
        "--rsh 'ssh -o BatchMode=yes -o StrictHostKeyChecking=no {ssh_flags}' "
        "--rsync-path '{remote_rsync_command}' "
        '{src} {user}{hostname}:{dest}'
    ).format(
        rsync_flags=' '.join(flags),
        ssh_flags=' '.join(ssh_flags),
        remote_rsync_command=remote_rsync_command,
        user=user, hostname=hostname,
        src=src, dest=dest,
    )

    if print_input:
        click.echo('{0}>>> {1}'.format(host.print_prefix, rsync_command), err=True)

    return_code, combined_output = run_local_process(
        rsync_command,
        print_output=print_output,
        print_prefix=host.print_prefix,
    )

    status = return_code == 0

    if not status:
        _, stderr = split_combined_output(combined_output)
        raise IOError('\n'.join(stderr))

    return True
| 30.263653 | 128 | 0.628002 |
bc3af7951a21714669f0665803e37c96ec531a8b | 3,703 | py | Python | squeezeDet/src/nets/squeezeDet.py | Walter1218/Self_Driving_Car_ND | 526a9583a2bc616cb19cdfc7921b5e1c0f9711bd | [
"MIT"
] | 2 | 2017-05-25T01:26:41.000Z | 2019-08-16T13:38:57.000Z | squeezeDet/src/nets/squeezeDet.py | Walter1218/Self_Driving_Car_ND | 526a9583a2bc616cb19cdfc7921b5e1c0f9711bd | [
"MIT"
] | null | null | null | squeezeDet/src/nets/squeezeDet.py | Walter1218/Self_Driving_Car_ND | 526a9583a2bc616cb19cdfc7921b5e1c0f9711bd | [
"MIT"
] | 1 | 2019-03-15T02:19:49.000Z | 2019-03-15T02:19:49.000Z | # Author: Bichen Wu (bichen@berkeley.edu) 08/25/2016
"""SqueezeDet model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import joblib
from utils import util
from easydict import EasyDict as edict
import numpy as np
import tensorflow as tf
from nn_skeleton import ModelSkeleton
class SqueezeDet(ModelSkeleton):
    """SqueezeDet detection model: a SqueezeNet-style backbone of "fire"
    modules followed by a convolutional detection head (conv12).
    """

    def __init__(self, mc, gpu_id):
        # mc: model configuration; gpu_id: device to place the whole graph on
        with tf.device('/gpu:{}'.format(gpu_id)):
            ModelSkeleton.__init__(self, mc)

            # Build the complete graph: forward pass, prediction
            # interpretation, loss, train op and visualization summaries
            self._add_forward_graph()
            self._add_interpretation_graph()
            self._add_loss_graph()
            self._add_train_graph()
            self._add_viz_graph()

    def _add_forward_graph(self):
        """NN architecture."""

        mc = self.mc
        if mc.LOAD_PRETRAINED_MODEL:
            assert tf.gfile.Exists(mc.PRETRAINED_MODEL_PATH), \
                'Cannot find pretrained model at the given path:' \
                ' {}'.format(mc.PRETRAINED_MODEL_PATH)
            self.caffemodel_weight = joblib.load(mc.PRETRAINED_MODEL_PATH)

        # conv1 is frozen (freeze=True): its parameters are not trained
        conv1 = self._conv_layer(
            'conv1', self.image_input, filters=64, size=3, stride=2,
            padding='VALID', freeze=True)
        pool1 = self._pooling_layer(
            'pool1', conv1, size=3, stride=2, padding='VALID')

        fire2 = self._fire_layer(
            'fire2', pool1, s1x1=16, e1x1=64, e3x3=64, freeze=False)
        fire3 = self._fire_layer(
            'fire3', fire2, s1x1=16, e1x1=64, e3x3=64, freeze=False)
        pool3 = self._pooling_layer(
            'pool3', fire3, size=3, stride=2, padding='VALID')

        fire4 = self._fire_layer(
            'fire4', pool3, s1x1=32, e1x1=128, e3x3=128, freeze=False)
        fire5 = self._fire_layer(
            'fire5', fire4, s1x1=32, e1x1=128, e3x3=128, freeze=False)
        pool5 = self._pooling_layer(
            'pool5', fire5, size=3, stride=2, padding='VALID')

        fire6 = self._fire_layer(
            'fire6', pool5, s1x1=48, e1x1=192, e3x3=192, freeze=False)
        fire7 = self._fire_layer(
            'fire7', fire6, s1x1=48, e1x1=192, e3x3=192, freeze=False)
        fire8 = self._fire_layer(
            'fire8', fire7, s1x1=64, e1x1=256, e3x3=256, freeze=False)
        fire9 = self._fire_layer(
            'fire9', fire8, s1x1=64, e1x1=256, e3x3=256, freeze=False)

        # Two extra fire modules that are not trained before
        fire10 = self._fire_layer(
            'fire10', fire9, s1x1=96, e1x1=384, e3x3=384, freeze=False)
        fire11 = self._fire_layer(
            'fire11', fire10, s1x1=96, e1x1=384, e3x3=384, freeze=False)
        dropout11 = tf.nn.dropout(fire11, self.keep_prob, name='drop11')

        # Detection head: per anchor, CLASSES class scores + 1 confidence
        # score + 4 bounding-box values
        num_output = mc.ANCHOR_PER_GRID * (mc.CLASSES + 1 + 4)
        self.preds = self._conv_layer(
            'conv12', dropout11, filters=num_output, size=3, stride=1,
            padding='SAME', xavier=False, relu=False, stddev=0.0001)

    def _fire_layer(self, layer_name, inputs, s1x1, e1x1, e3x3, freeze=False):
        """Fire layer constructor.

        Args:
            layer_name: layer name
            inputs: input tensor
            s1x1: number of 1x1 filters in squeeze layer.
            e1x1: number of 1x1 filters in expand layer.
            e3x3: number of 3x3 filters in expand layer.
            freeze: if true, do not train parameters in this layer.
        Returns:
            fire layer operation.
        """

        # Squeeze (1x1) then expand (parallel 1x1 and 3x3) convolutions
        sq1x1 = self._conv_layer(
            layer_name+'/squeeze1x1', inputs, filters=s1x1, size=1, stride=1,
            padding='SAME', freeze=freeze)
        ex1x1 = self._conv_layer(
            layer_name+'/expand1x1', sq1x1, filters=e1x1, size=1, stride=1,
            padding='SAME', freeze=freeze)
        ex3x3 = self._conv_layer(
            layer_name+'/expand3x3', sq1x1, filters=e3x3, size=3, stride=1,
            padding='SAME', freeze=freeze)

        # Concatenate the expand outputs along the channel axis (axis 3)
        return tf.concat([ex1x1, ex3x3], 3, name=layer_name+'/concat')
| 34.933962 | 76 | 0.664866 |
5184a574c6b2ae85ee48d3894f091535a9f94b9b | 3,141 | py | Python | exp-visual7w/exp_test_visual7w_baseline.py | ronghanghu/cmn | 85644ad56f8f62d04a5e8636ad3efe9ef7b34705 | [
"MIT"
] | 72 | 2017-04-12T17:07:36.000Z | 2021-06-18T08:20:47.000Z | exp-visual7w/exp_test_visual7w_baseline.py | ronghanghu/cmn | 85644ad56f8f62d04a5e8636ad3efe9ef7b34705 | [
"MIT"
] | 8 | 2017-07-06T04:24:04.000Z | 2020-09-17T10:29:44.000Z | exp-visual7w/exp_test_visual7w_baseline.py | ronghanghu/cmn | 85644ad56f8f62d04a5e8636ad3efe9ef7b34705 | [
"MIT"
] | 21 | 2017-04-19T07:38:09.000Z | 2021-02-28T13:39:22.000Z | from __future__ import absolute_import, division, print_function
import sys
import os; os.environ['CUDA_VISIBLE_DEVICES'] = '0'  # using GPU 0

import tensorflow as tf
import numpy as np
import skimage.io
import skimage.transform

from models import visual7w_baseline_model, spatial_feat, fastrcnn_vgg_net
from util.visual7w_baseline_train.visual7w_baseline_data_reader import DataReader
from util import loss, eval_tools, text_processing

################################################################################
# Parameters
################################################################################

# Model Params
T = 20  # token sequence length fed as the first dim of text_seq_batch
num_vocab = 72704
embed_dim = 300
lstm_dim = 1000

# Data Params
imdb_file = './exp-visual7w/data/imdb/imdb_tst.npy'
vocab_file = './word_embedding/vocabulary_72700.txt'
im_mean = visual7w_baseline_model.fastrcnn_vgg_net.channel_mean

# Snapshot Params
snapshot_file = './downloaded_models/visual7w_baseline_iter_200000.tfmodel'

################################################################################
# Network
################################################################################

# Placeholders: one image per batch, boxes/spatial features (N x 5) and the
# text token sequences (T x N)
im_batch = tf.placeholder(tf.float32, [1, None, None, 3])
bbox_batch = tf.placeholder(tf.float32, [None, 5])
spatial_batch = tf.placeholder(tf.float32, [None, 5])
text_seq_batch = tf.placeholder(tf.int32, [T, None])

# Dropout is disabled: this script only evaluates a trained snapshot
scores = visual7w_baseline_model.visual7w_baseline_net(im_batch, bbox_batch, spatial_batch,
    text_seq_batch, num_vocab, embed_dim, lstm_dim, vgg_dropout=False, lstm_dropout=False)

# Each question has 4 candidate answers; reshape to (num_questions, 4)
num_choices = 4
scores = tf.reshape(scores, [-1, num_choices])

################################################################################
# Initialize parameters and load data
################################################################################

# Load data (no shuffling: deterministic evaluation over the test imdb)
reader = DataReader(imdb_file, vocab_file, im_mean, shuffle=False)

# Start Session
sess = tf.Session(config=tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True)))

# Snapshot saver
snapshot_saver = tf.train.Saver()
snapshot_saver.restore(sess, snapshot_file)

################################################################################
# Optimization loop
################################################################################

num_correct = 0
num_total = 0

# Run evaluation over every batch in the test set
for n_iter in range(reader.num_batch):
    batch = reader.read_batch()

    # Forward pass only (no training in this script)
    scores_val = sess.run(scores,
        feed_dict={
            im_batch: batch['im_batch'],
            bbox_batch: batch['bbox_batch'],
            spatial_batch: batch['spatial_batch'],
            text_seq_batch: batch['text_seq_batch']
        })

    # Predicted answer = highest-scoring choice; accumulate running accuracy
    predicts = np.argmax(scores_val, axis=1)
    labels = batch['label_batch']
    num_correct += np.sum(predicts == labels)
    num_total += len(labels)
    print('\titer = %d, accuracy (avg) = %f' % (n_iter, num_correct / num_total))

print('On the following imdb:', imdb_file)
print('Using the following snapshot:', snapshot_file)
print('final accuracy: %f (= %d / %d)' % (num_correct / num_total, num_correct, num_total))
| 34.516484 | 91 | 0.583572 |
0d11fe319391aef19291f1aa0d53255fe904ca9a | 32,706 | py | Python | affiliate/model/mysql_report.py | gods-view/AdclickIO | ccb73867e568aac5f40bd5890149626ce0be1897 | [
"BSD-2-Clause"
] | null | null | null | affiliate/model/mysql_report.py | gods-view/AdclickIO | ccb73867e568aac5f40bd5890149626ce0be1897 | [
"BSD-2-Clause"
] | null | null | null | affiliate/model/mysql_report.py | gods-view/AdclickIO | ccb73867e568aac5f40bd5890149626ce0be1897 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
from peewee import *
from affiliate.model.config import mysql_report
import time
# Shared peewee connection to the report database, configured from
# affiliate.model.config.mysql_report (name/host/port/user/passwd).
reportDb = MySQLDatabase(mysql_report['name'],
                         host=mysql_report['host'],
                         port=int(mysql_report['port']),
                         user=mysql_report['user'],
                         passwd=mysql_report['passwd']
                         )
class BaseModel(Model):
    """A base model that will use our MySQL database"""
    class Meta:
        # Every subclass shares the reportDb connection defined above
        database = reportDb
class AdStatisLog0(BaseModel):
    """Denormalized ad-statistics rows, table ``adstatis_new_0``.

    NOTE(review): all columns — including counters like Visits/Clicks — are
    CharFields, preserved from the original schema. The AdStatisLog* classes
    are identical apart from db_table; presumably rows are sharded across
    the adstatis_new_* tables — confirm the sharding key before changing.
    """
    # Campaign/flow/lander/offer identity & names
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    # Visitor attributes
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    # Metrics
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    # Custom tracking variables
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_0"
        # BUG FIX: peewee expects index definitions under the `indexes` Meta
        # attribute as ((columns_tuple, unique), ...). The original
        # `index = (('KeysMD5', True))` bound a plain tuple to an unused
        # name, so the intended unique index on KeysMD5 was never created.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog1(BaseModel):
    """Denormalized ad-statistics rows, table ``adstatis_new_1``.

    NOTE(review): identical column set to the other AdStatisLog* classes;
    presumably rows are sharded across the adstatis_new_* tables — confirm
    the sharding key before changing.
    """
    # Campaign/flow/lander/offer identity & names
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    # Visitor attributes
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    # Metrics
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    # Custom tracking variables
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_1"
        # BUG FIX: peewee expects index definitions under the `indexes` Meta
        # attribute as ((columns_tuple, unique), ...); the original
        # `index = (('KeysMD5', True))` was silently ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog2(BaseModel):
    """Denormalized ad-statistics rows, table ``adstatis_new_2``.

    NOTE(review): identical column set to the other AdStatisLog* classes;
    presumably rows are sharded across the adstatis_new_* tables — confirm
    the sharding key before changing.
    """
    # Campaign/flow/lander/offer identity & names
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    # Visitor attributes
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    # Metrics
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    # Custom tracking variables
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_2"
        # BUG FIX: peewee expects index definitions under the `indexes` Meta
        # attribute as ((columns_tuple, unique), ...); the original
        # `index = (('KeysMD5', True))` was silently ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog3(BaseModel):
    """Ad-statistics log, shard 3 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    # Ownership / campaign hierarchy.
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    # NOTE(review): "AffilliateNetworkName" is misspelled, but renaming it
    # would change the database column name -- kept as-is.
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    # Visitor attributes.
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    # Metric columns stored as strings -- presumably for raw log ingestion;
    # TODO confirm whether these should be numeric field types.
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    # Dedup key and tracking identifiers.
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_3"
        # BUG FIX: peewee reads ``indexes`` -- a tuple of ((columns...),
        # unique) pairs.  The former ``index = (('KeysMD5', True))`` was an
        # unrecognized Meta attribute, so the unique index was never created.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog4(BaseModel):
    """Ad-statistics log, shard 4 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_4"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog5(BaseModel):
    """Ad-statistics log, shard 5 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_5"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog6(BaseModel):
    """Ad-statistics log, shard 6 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_6"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog7(BaseModel):
    """Ad-statistics log, shard 7 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_7"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog8(BaseModel):
    """Ad-statistics log, shard 8 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_8"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class AdStatisLog9(BaseModel):
    """Ad-statistics log, shard 9 of the ``adstatis_new_*`` table family.

    All shards share one schema; rows are deduplicated through a unique
    index on ``KeysMD5``.
    """
    UserID = CharField(null=False, default=0)
    CampaignID = CharField(null=False, default=0)
    CampaignName = CharField(max_length=256, null=True, default='')
    FlowID = CharField(null=True, default=0)
    FlowName = CharField(max_length=256, null=True, default='')
    LanderID = CharField(null=True, default=0)
    LanderName = CharField(max_length=256, null=True, default='')
    OfferID = CharField(null=True, default=0)
    OfferName = CharField(max_length=256, null=True, default='')
    OfferUrl = CharField(max_length=256, null=True, default='')
    OfferCountry = CharField(max_length=256, null=True, default='')
    AffiliateNetworkID = CharField(null=True, default=0)
    AffilliateNetworkName = CharField(max_length=256, null=True, default='')
    TrafficSourceID = CharField(null=True, default=0)
    TrafficSourceName = CharField(max_length=256, null=True, default='')
    Language = CharField(max_length=256, null=True, default='')
    Model = CharField(max_length=256, null=True, default='')
    Country = CharField(max_length=256, null=True, default='')
    City = CharField(max_length=256, null=True, default='')
    Region = CharField(max_length=256, null=True, default='')
    ISP = CharField(max_length=256, null=True, default='')
    MobileCarrier = CharField(max_length=256, null=True, default='')
    Domain = CharField(max_length=256, null=True, default='')
    DeviceType = CharField(max_length=256, null=True, default='')
    Brand = CharField(max_length=256, null=True, default='')
    OS = CharField(max_length=256, null=True, default='')
    OSVersion = CharField(max_length=256, null=True, default='')
    Browser = CharField(max_length=256, null=True, default='')
    BrowserVersion = CharField(max_length=256, null=True, default='')
    ConnectionType = CharField(max_length=256, null=True, default='')
    Timestamp = CharField(null=True, default=0)
    Visits = CharField(null=True, default=0)
    Clicks = CharField(null=True, default=0)
    Conversions = CharField(null=True, default=0)
    Cost = CharField(null=True, default=0)
    Revenue = CharField(null=True, default=0)
    Impressions = CharField(null=True, default=0)
    KeysMD5 = CharField(max_length=256, null=True, default='')
    Ip = CharField(max_length=256, null=True, default='')
    V1 = CharField(max_length=256, null=True, default='')
    V2 = CharField(max_length=256, null=True, default='')
    V3 = CharField(max_length=256, null=True, default='')
    V4 = CharField(max_length=256, null=True, default='')
    V5 = CharField(max_length=256, null=True, default='')
    V6 = CharField(max_length=256, null=True, default='')
    V7 = CharField(max_length=256, null=True, default='')
    V8 = CharField(max_length=256, null=True, default='')
    V9 = CharField(max_length=256, null=True, default='')
    V10 = CharField(max_length=256, null=True, default='')
    tsCampaignId = CharField(max_length=256, null=True, default='')
    tsWebsiteId = CharField(max_length=256, null=True, default='')
    ClickId = CharField(max_length=256, null=True, default='')

    class Meta:
        db_table = "adstatis_new_9"
        # BUG FIX: peewee reads ``indexes`` (tuple of ((columns...), unique)
        # pairs); the former ``index = (('KeysMD5', True))`` was ignored.
        indexes = (
            (('KeysMD5',), True),
        )
class TotalInfo(BaseModel):
    """Per-user aggregate bookkeeping row backed by the ``total_info`` table."""
    # NOTE(review): declaring ``id`` as a plain IntegerField replaces peewee's
    # implicit auto-incrementing primary key -- confirm that is intended.
    id = IntegerField(null=False)
    # Owning user's identifier.
    userid = IntegerField(null=False)

    class Meta:
        db_table = "total_info"
# Open the report-database connection as a module-level side effect so the
# models above are usable immediately after import.
# NOTE(review): import-time connects complicate testing; consider an explicit
# init function instead.
reportDb.connect()
# Example usage, kept for reference:
# a = Country.update(name='ccc').where(Country.id == 1).execute()
# pass
| 52.3296 | 76 | 0.694429 |
a61c248a1b8971396e8dd37200ef83143f941736 | 972 | py | Python | statsmodels/tests/test_package.py | o-P-o/statsmodels | ee9f5c0bd7ee7f646bdbaf31fbc295e5a0ab02f8 | [
"BSD-3-Clause"
] | 1 | 2020-06-18T07:38:11.000Z | 2020-06-18T07:38:11.000Z | statsmodels/tests/test_package.py | o-P-o/statsmodels | ee9f5c0bd7ee7f646bdbaf31fbc295e5a0ab02f8 | [
"BSD-3-Clause"
] | null | null | null | statsmodels/tests/test_package.py | o-P-o/statsmodels | ee9f5c0bd7ee7f646bdbaf31fbc295e5a0ab02f8 | [
"BSD-3-Clause"
] | null | null | null | import subprocess
import sys
import pytest
from statsmodels.compat.scipy import SCIPY_11
def test_lazy_imports():
    """Importing ``statsmodels.api`` must not pull in ``matplotlib.pyplot``.

    The check runs in a fresh interpreter so modules already imported by the
    test process cannot mask the lazy-import behaviour.
    """
    # The child process asserts that no matplotlib.pyplot module is registered
    # in sys.modules after importing the statsmodels API.
    cmd = ("import statsmodels.api as sm; "
           "import sys; "
           "mods = [x for x in sys.modules if 'matplotlib.pyplot' in x]; "
           "assert not mods, mods")
    # BUG FIX: pass an argv list with shell=False instead of concatenating a
    # shell string -- the old form broke when sys.executable contained spaces
    # (e.g. "C:\Program Files\...") and needlessly invoked a shell.
    p = subprocess.Popen([sys.executable, '-c', cmd], close_fds=True)
    p.wait()
    rc = p.returncode
    assert rc == 0
def test_docstring_optimization_compat():
# GH#5235 check that importing with stripped docstrings does not raise
cmd = sys.executable + ' -OO -c "import statsmodels.api as sm"'
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out = p.communicate()
rc = p.returncode
assert rc == 0, out
| 31.354839 | 79 | 0.651235 |
13ac80afd76b9a697663b3f26daab02359439e9e | 4,865 | py | Python | tests_unit/models_tests/test_functional_stochastic.py | p1ndsvin/uncertainty-wizard | 92a7a9bcb411e512cf6ad54e7ba226a3c66d3583 | [
"MIT"
] | 33 | 2020-12-21T20:21:21.000Z | 2022-03-25T17:51:36.000Z | tests_unit/models_tests/test_functional_stochastic.py | swb19/uncertainty-wizard | 5ba9bfc6ee967eb5f226abbedb6f9d5452b3cfea | [
"MIT"
] | 83 | 2020-12-18T18:18:28.000Z | 2022-03-28T21:17:29.000Z | tests_unit/models_tests/test_functional_stochastic.py | swb19/uncertainty-wizard | 5ba9bfc6ee967eb5f226abbedb6f9d5452b3cfea | [
"MIT"
] | 5 | 2021-02-13T13:27:48.000Z | 2021-12-25T16:45:19.000Z | from unittest import TestCase
import numpy as np
import tensorflow as tf
import uncertainty_wizard as uwiz
from uncertainty_wizard.internal_utils import UncertaintyWizardWarning
from uncertainty_wizard.models import StochasticFunctional
from uncertainty_wizard.models.stochastic_utils.layers import UwizBernoulliDropout
from uncertainty_wizard.quantifiers import StandardDeviation
class FunctionalStochasticTest(TestCase):
    """Tests for ``StochasticFunctional`` models built with the functional API."""

    @staticmethod
    def _dummy_model():
        # Minimal stochastic model: a single Bernoulli-dropout layer over the
        # input, so sampled predictions are random while plain predict is not.
        stochastic_mode = uwiz.models.StochasticMode()
        x = tf.keras.layers.Input(shape=1000)
        output = UwizBernoulliDropout(rate=0.5, stochastic_mode=stochastic_mode)(x)
        return StochasticFunctional(
            inputs=x, outputs=output, stochastic_mode=stochastic_mode
        )

    def test_predict_is_deterministic(self):
        # With sampling disabled, dropout acts as identity: all-ones input
        # must come back unchanged.
        model = self._dummy_model()
        y = model.predict(x=np.ones((10, 1000)))
        self.assertTrue(np.all(y == 1))

    def test_sampled_predict_is_not_deterministic(self):
        model = self._dummy_model()
        self._assert_random_samples(model)

    def test_sampled_turning_sampling_on_and_off_iteratively(self):
        model = self._dummy_model()
        self._test_randomized_on_off(model)

    def _test_randomized_on_off(self, model):
        # Alternate sampled and plain predictions to verify the stochastic
        # mode toggles cleanly in both directions.
        for _ in range(2):
            self._assert_random_samples(model)
            y = model.predict(x=np.ones((10, 1000)))
            self.assertTrue(np.all(y == 1))

    def _assert_random_samples(self, model):
        # Sampled predictions must show dropout randomness: outputs differ
        # from the input and the per-sample standard deviation is non-zero.
        y, std = model.predict_quantified(
            x=np.ones((10, 1000)), quantifier=StandardDeviation(), sample_size=20
        )
        self.assertFalse(np.all(y == 1), y)
        self.assertFalse(np.all(std == 0), std)

    def test_warns_on_compile_if_not_stochastic(self):
        # A plain keras Dropout layer ignores the stochastic mode, so
        # compiling such a model should emit an UncertaintyWizardWarning.
        stochastic_mode = uwiz.models.StochasticMode()
        x = tf.keras.layers.Input(shape=1000)
        output = tf.keras.layers.Dropout(rate=0.5)(x)
        model = StochasticFunctional(
            inputs=x, outputs=output, stochastic_mode=stochastic_mode
        )
        with self.assertWarns(UncertaintyWizardWarning):
            model.compile(loss="mse")

    def test_save_and_load_model(self):
        stochastic = self._dummy_model()
        # Model can currently (as of tf 2.1) only be saved if build, fit or predict was called
        stochastic.predict(np.ones((10, 1000)))
        stochastic.save("/tmp/test_save_and_load_model_stochastic")
        del stochastic
        stochastic_loaded = uwiz.models.load_model(
            "/tmp/test_save_and_load_model_stochastic"
        )
        # The reloaded model must retain a working, toggleable stochastic mode.
        self._test_randomized_on_off(stochastic_loaded)

    def test_weights_and_stochastic_mode_on_clone_from_keras(self):
        # Prepare a model with dropout to be used to create a StochasticModel
        inputs = tf.keras.layers.Input(1000)
        x = tf.keras.layers.Dense(
            1000, kernel_initializer="random_normal", bias_initializer="zeros"
        )(inputs)
        x = tf.keras.layers.Dropout(0.5)(x)
        x = tf.keras.layers.Dense(
            10, kernel_initializer="random_normal", bias_initializer="zeros"
        )(x)
        x = tf.keras.layers.Dense(10, activation=tf.keras.activations.relu)(x)
        keras_model = tf.keras.Model(inputs=inputs, outputs=x)
        keras_model.compile(loss="mse", optimizer="adam", metrics=["mse"])
        keras_model.fit(
            np.ones((20, 1000), dtype=float), np.zeros((20, 10)), batch_size=1, epochs=1
        )
        # Call the model under test
        uwiz_model = uwiz.models.stochastic_from_keras(keras_model)
        # Demo input for tests
        inp_data = np.ones((10, 1000), dtype=float)
        # Assert that both models make the same predictions
        keras_prediction = keras_model.predict(inp_data)
        uwiz_prediction = uwiz_model.predict(inp_data)
        np.testing.assert_array_equal(keras_prediction, uwiz_prediction)
        # Test that stochastic mode is working on cloned model
        self._assert_random_samples(uwiz_model)

    def test_randomness_error_on_clone_from_keras(self):
        # A deterministic keras model (no dropout) cannot be made stochastic;
        # cloning must fail unless determinism is explicitly expected.
        inputs = tf.keras.layers.Input(10)
        x = tf.keras.layers.Dense(
            10, kernel_initializer="random_normal", bias_initializer="zeros"
        )(inputs)
        x = tf.keras.layers.Dense(10, activation=tf.keras.activations.relu)(x)
        keras_model = tf.keras.Model(inputs=inputs, outputs=x)
        keras_model.compile(loss="mse", optimizer="adam", metrics=["mse"])
        keras_model.fit(
            np.ones((20, 10), dtype=float), np.zeros((20, 10)), batch_size=1, epochs=1
        )
        # make sure no validation error is thrown when determinism is expected
        _ = uwiz.models.stochastic_from_keras(keras_model, expect_determinism=True)
        self.assertRaises(
            ValueError, lambda: uwiz.models.stochastic_from_keras(keras_model)
        )
9e6a4aab5a9025820b1cdcfbb0bb98026d2297e1 | 16,766 | py | Python | sympy/concrete/summations.py | Timeroot/sympy | f95bf4bbc548d326f4643d22faec32aca7880187 | [
"BSD-3-Clause"
] | null | null | null | sympy/concrete/summations.py | Timeroot/sympy | f95bf4bbc548d326f4643d22faec32aca7880187 | [
"BSD-3-Clause"
] | null | null | null | sympy/concrete/summations.py | Timeroot/sympy | f95bf4bbc548d326f4643d22faec32aca7880187 | [
"BSD-3-Clause"
] | null | null | null | from sympy.core import (Expr, S, C, sympify, Wild, Dummy, Derivative, Symbol, Add)
from sympy.functions.elementary.piecewise import piecewise_fold
from sympy.concrete.gosper import gosper_sum
from sympy.polys import apart, PolynomialError
from sympy.solvers import solve
def _free_symbols(function, limits):
"""Helper function to return the symbols that appear in a sum-like object
once it is evaluated.
"""
isyms = function.free_symbols
for xab in limits:
# take out the target symbol
if xab[0] in isyms:
isyms.remove(xab[0])
# add in the new symbols
for i in xab[1:]:
isyms.update(i.free_symbols)
return isyms
class Sum(Expr):
"""Represents unevaluated summation."""
def __new__(cls, function, *symbols, **assumptions):
    """Construct a Sum of ``function`` over ``(symbol, lower, upper)`` limits."""
    from sympy.integrals.integrals import _process_limits
    # Any embedded piecewise functions need to be brought out to the
    # top level so that integration can go into piecewise mode at the
    # earliest possible moment.
    function = piecewise_fold(sympify(function))

    if function is S.NaN:
        return S.NaN

    if not symbols:
        raise ValueError("Summation variables must be given")

    # ``sign`` compensates for limits that were supplied in reversed order.
    limits, sign = _process_limits(*symbols)

    # Only limits with lower and upper bounds are supported; the indefinite Sum
    # is not supported
    if any(len(l) != 3 or None in l for l in limits):
        raise ValueError('Sum requires values for lower and upper bounds.')

    obj = Expr.__new__(cls, **assumptions)
    # args layout is (sign*function, limit1, limit2, ...); properties below
    # rely on exactly this ordering.
    arglist = [sign*function]
    arglist.extend(limits)
    obj._args = tuple(arglist)

    return obj
@property
def function(self):
    """The summand: first element of ``_args`` (sign already folded in)."""
    return self._args[0]
@property
def limits(self):
    """Tuple of ``(symbol, lower, upper)`` limit triples."""
    return self._args[1:]
@property
def variables(self):
    """List the bound summation variables, one per limit.

    >>> from sympy import Sum
    >>> from sympy.abc import x, i
    >>> Sum(x**i, (i, 1, 3)).variables
    [i]
    """
    bound = []
    for limit in self.limits:
        bound.append(limit[0])
    return bound
@property
def free_symbols(self):
    """Symbols that remain once the summation is carried out.

    Handy for deciding whether a sum depends on a particular symbol.

    >>> from sympy import Sum
    >>> from sympy.abc import x, y
    >>> Sum(x, (x, y, 1)).free_symbols
    set([y])
    """
    # A zero summand contributes no symbols regardless of the limits.
    return set() if self.function.is_zero else _free_symbols(self.function, self.limits)
@property
def is_zero(self):
    """A Sum is only zero if its function is zero or if all terms
    cancel out. This only answers whether the summand is zero."""
    return self.function.is_zero
@property
def is_number(self):
    """
    Return True if the Sum will result in a number, else False.

    sympy considers anything that will result in a number to have
    is_number == True.

    >>> from sympy import log
    >>> log(2).is_number
    True

    Sums are a special case since they contain symbols that can
    be replaced with numbers. Whether the integral can be done or not is
    another issue. But answering whether the final result is a number is
    not difficult.

    >>> from sympy import Sum
    >>> from sympy.abc import x, y
    >>> Sum(x, (y, 1, x)).is_number
    False
    >>> Sum(1, (y, 1, x)).is_number
    False
    >>> Sum(0, (y, 1, x)).is_number
    True
    >>> Sum(x, (y, 1, 2)).is_number
    False
    >>> Sum(x, (y, 1, 1)).is_number
    False
    >>> Sum(x, (x, 1, 2)).is_number
    True
    >>> Sum(x*y, (x, 1, 2), (y, 1, 3)).is_number
    True
    """
    # A zero summand is trivially a number; otherwise the result is a
    # number exactly when no free symbols survive evaluation.
    if self.function.is_zero:
        return True
    return not self.free_symbols
def doit(self, **hints):
    """Evaluate the summation limit-by-limit via ``eval_sum``.

    Returns ``self`` unevaluated if any limit cannot be summed; with the
    default ``deep=True`` hint, the evaluated result is itself ``doit``-ed.
    """
    #if not hints.get('sums', True):
    #    return self
    f = self.function
    for limit in self.limits:
        i, a, b = limit
        dif = b - a
        # Normalize a reversed (negative integer width) range before summing.
        # NOTE(review): the bounds are swapped without negating the result --
        # confirm this matches the intended reversed-sum convention.
        if dif.is_Integer and dif < 0:
            a, b = b, a

        f = eval_sum(f, (i, a, b))
        if f is None:
            # This limit could not be evaluated; keep the Sum symbolic.
            return self

    if hints.get('deep', True):
        return f.doit(**hints)
    else:
        return f
def _eval_summation(self, f, x):
    # Hook for subclasses; returning None means "cannot evaluate here".
    return
def _eval_derivative(self, x):
    """
    Differentiate wrt x as long as x is not in the free symbols of any of
    the upper or lower limits.

    Sum(a*b*x, (x, 1, a)) can be differentiated wrt x or b but not `a`
    since the value of the sum is discontinuous in `a`. In a case
    involving a limit variable, the unevaluated derivative is returned.

    Returns ``None`` when ``x`` appears in a limit bound (no simple
    derivative exists); raises ``NotImplementedError`` for malformed limits.
    """
    # diff already confirmed that x is in the free symbols of self, but we
    # don't want to differentiate wrt any free symbol in the upper or lower
    # limits
    # XXX remove this test for free_symbols when the default _eval_derivative is in
    if x not in self.free_symbols:
        return S.Zero

    # get limits and the function
    f, limits = self.function, list(self.limits)

    limit = limits.pop(-1)

    if limits:  # f is the argument to a Sum
        f = Sum(f, *limits)

    if len(limit) == 3:
        _, a, b = limit
        if x in a.free_symbols or x in b.free_symbols:
            return None
        df = Derivative(f, x, evaluate=True)
        rv = Sum(df, limit)
        # If differentiation eliminated the bound variable, the remaining
        # sum is a plain multiple and can be evaluated immediately.
        if limit[0] not in df.free_symbols:
            rv = rv.doit()
        return rv
    else:
        # BUG FIX: the exception was previously *returned* instead of raised,
        # silently handing callers an exception instance as the "derivative".
        raise NotImplementedError('Lower and upper bound expected.')
    def euler_maclaurin(self, m=0, n=0, eps=0, eval_integral=True):
        """
        Return an Euler-Maclaurin approximation of self, where m is the
        number of leading terms to sum directly and n is the number of
        terms in the tail.
        With m = n = 0, this is simply the corresponding integral
        plus a first-order endpoint correction.
        Returns (s, e) where s is the Euler-Maclaurin approximation
        and e is the estimated error (taken to be the magnitude of
        the first omitted term in the tail):
        >>> from sympy.abc import k, a, b
        >>> from sympy import Sum
        >>> Sum(1/k, (k, 2, 5)).doit().evalf()
        1.28333333333333
        >>> s, e = Sum(1/k, (k, 2, 5)).euler_maclaurin()
        >>> s
        -log(2) + 7/20 + log(5)
        >>> from sympy import sstr
        >>> print sstr((s.evalf(), e.evalf()), full_prec=True)
        (1.26629073187416, 0.0175000000000000)
        The endpoints may be symbolic:
        >>> s, e = Sum(1/k, (k, a, b)).euler_maclaurin()
        >>> s
        -log(a) + log(b) + 1/(2*b) + 1/(2*a)
        >>> e
        Abs(-1/(12*b**2) + 1/(12*a**2))
        If the function is a polynomial of degree at most 2n+1, the
        Euler-Maclaurin formula becomes exact (and e = 0 is returned):
        >>> Sum(k, (k, 2, b)).euler_maclaurin()
        (b**2/2 + b/2 - 1, 0)
        >>> Sum(k, (k, 2, b)).doit()
        b**2/2 + b/2 - 1
        With a nonzero eps specified, the summation is ended
        as soon as the remainder term is less than the epsilon.
        """
        m = int(m)
        n = int(n)
        f = self.function
        # Only single-index sums are supported here.
        assert len(self.limits) == 1
        i, a, b = self.limits[0]
        s = S.Zero
        if m:
            # Sum the first m terms explicitly, stopping early once a
            # term drops below eps (if an eps was given).
            for k in range(m):
                term = f.subs(i, a+k)
                if (eps and term and abs(term.evalf(3)) < eps):
                    return s, abs(term)
                s += term
            a += m
        x = Dummy('x')
        # The integral part of the Euler-Maclaurin formula.
        I = C.Integral(f.subs(i, x), (x, a, b))
        if eval_integral:
            I = I.doit()
        s += I
        def fpoint(expr):
            # Evaluate expr at both endpoints (0 at an infinite upper end).
            if b is S.Infinity:
                return expr.subs(i, a), 0
            return expr.subs(i, a), expr.subs(i, b)
        fa, fb = fpoint(f)
        # First-order endpoint correction (f(a) + f(b)) / 2.
        iterm = (fa + fb)/2
        g = f.diff(i)
        # Tail: Bernoulli-number correction terms, built from successive
        # odd-order derivatives of f (g advances by two orders each pass).
        for k in xrange(1, n+2):
            ga, gb = fpoint(g)
            term = C.bernoulli(2*k)/C.factorial(2*k)*(gb-ga)
            if (eps and term and abs(term.evalf(3)) < eps) or (k > n):
                break
            s += term
            g = g.diff(i, 2)
        return s + iterm, abs(term)
def _eval_subs(self, old, new):
if self == old:
return new
newlimits = []
for lim in self.limits:
if lim[0] == old:
return self
newlimits.append( (lim[0],lim[1].subs(old,new),lim[2].subs(old,new)) )
return Sum(self.args[0].subs(old, new), *newlimits)
def summation(f, *symbols, **kwargs):
    """
    Compute the summation of f with respect to symbols.
    The notation for symbols is the same as the one used in Integral:
    summation(f, (i, a, b)) computes the sum of f with respect to i from a to b,
    i.e.,
                                     b
                                   ____
                                   \   `
        summation(f, (i, a, b)) =   )    f
                                   /___,
                                   i = a
    If the sum cannot be computed, an unevaluated Sum object is returned.
    Repeated sums can be computed by introducing additional symbols tuples::
    >>> from sympy import summation, oo, symbols, log
    >>> i, n, m = symbols('i n m', integer=True)
    >>> summation(2*i - 1, (i, 1, n))
    n**2
    >>> summation(1/2**i, (i, 0, oo))
    2
    >>> summation(1/log(n)**n, (n, 2, oo))
    Sum(log(n)**(-n), (n, 2, oo))
    >>> summation(i, (i, 0, n), (n, 0, m))
    m**3/6 + m**2/2 + m/3
    >>> from sympy.abc import x
    >>> from sympy import factorial
    >>> summation(x**n/factorial(n), (n, 0, oo))
    exp(x)
    """
    unevaluated = Sum(f, *symbols, **kwargs)
    return unevaluated.doit(deep=False)
def telescopic_direct(L, R, n, limits):
    """Returns the direct summation of the terms of a telescopic sum

    L is the term with lower index
    R is the term with higher index
    n difference between the indexes of L and R

    For example:

    >>> from sympy.concrete.summations import telescopic_direct
    >>> from sympy.abc import k, a, b
    >>> telescopic_direct(1/k, -1/(k+2), 2, (k, a, b))
    -1/(b + 2) - 1/(b + 1) + 1/(a + 1) + 1/a
    """
    (i, a, b) = limits
    s = 0
    # range (rather than the Python-2-only xrange) keeps this portable;
    # the iteration is identical.
    for m in range(n):
        s += L.subs(i, a + m) + R.subs(i, b - m)
    return s
def telescopic(L, R, limits):
    '''Tries to perform the summation using the telescopic property.

    Looks for an integer shift s with L(i + s) == -R(i); if found, the
    sum collapses and is computed by telescopic_direct.  Returns None
    when no such (nonzero) shift can be established.
    '''
    (i, a, b) = limits
    if L.is_Add or R.is_Add:
        return None
    # We want to solve(L.subs(i, i + m) + R, m)
    # First we try a simple match since this does things that
    # solve doesn't do, e.g. solve(f(k+m)-f(k), m) fails
    k = Wild("k")
    sol = (-R).match(L.subs(i, i + k))
    s = None
    if sol and k in sol:
        s = sol[k]
        # Verify the candidate: the match result must actually be an
        # integer shift that reproduces -R.
        if not (s.is_Integer and L.subs(i,i + s) == -R):
            #sometimes match fail(f(x+2).match(-f(x+k))->{k: -2 - 2x}))
            s = None
    # But there are things that match doesn't do that solve
    # can do, e.g. determine that 1/(x + m) = 1/(1 - x) when m = 1
    if s is None:
        m = Dummy('m')
        try:
            sol = solve(L.subs(i, i + m) + R, m) or []
        except NotImplementedError:
            return None
        # Keep only genuine integer shifts that make the pair cancel.
        sol = [si for si in sol if si.is_Integer and
               (L.subs(i,i + si) + R).expand().is_zero]
        if len(sol) != 1:
            return None
        s = sol[0]
    if s < 0:
        return telescopic_direct(R, L, abs(s), (i, a, b))
    elif s > 0:
        return telescopic_direct(L, R, s, (i, a, b))
    # s == 0 falls through and implicitly returns None.
def eval_sum(f, limits):
    """Evaluate Sum(f, (i, a, b)), dispatching between direct and
    symbolic strategies; implicitly returns None if neither succeeds."""
    (i, a, b) = limits
    if f is S.Zero:
        return S.Zero
    if i not in f.free_symbols:
        # Constant with respect to the index: multiply by the term count.
        return f*(b - a + 1)
    if a == b:
        return f.subs(i, a)
    dif = b - a
    definite = dif.is_Integer
    # Doing it directly may be faster if there are very few terms.
    if definite and (dif < 100):
        return eval_sum_direct(f, (i, a, b))
    # Try to do it symbolically. Even when the number of terms is known,
    # this can save time when b-a is big.
    # We should try to transform to partial fractions
    value = eval_sum_symbolic(f.expand(), (i, a, b))
    if value is not None:
        return value
    # Do it directly
    if definite:
        return eval_sum_direct(f, (i, a, b))
def eval_sum_direct(expr, limits):
    """Sum ``expr`` term by term with ``i`` running from ``a`` to ``b``.

    The bounds must differ by a non-negative integer; each term is
    produced by substituting successive index values into ``expr``.
    """
    (i, a, b) = limits
    dif = b - a
    # range (rather than the Python-2-only xrange) keeps this portable.
    return Add(*[expr.subs(i, a + j) for j in range(dif + 1)])
def eval_sum_symbolic(f, limits):
    """Try to find a closed form for Sum(f, (i, a, b)).

    Strategies are attempted in order: linearity over Mul and Add,
    telescoping, Faulhaber's formula for integer powers (harmonic
    numbers for negative powers), geometric series, Gosper's algorithm,
    and finally hypergeometric summation.  Returns None on failure.
    """
    (i, a, b) = limits
    if not f.has(i):
        return f*(b-a+1)
    # Linearity
    if f.is_Mul:
        L, R = f.as_two_terms()
        if not L.has(i):
            sR = eval_sum_symbolic(R, (i, a, b))
            if sR: return L*sR
        if not R.has(i):
            sL = eval_sum_symbolic(L, (i, a, b))
            if sL: return R*sL
    try:
        f = apart(f, i) # see if it becomes an Add
    except PolynomialError:
        pass
    if f.is_Add:
        L, R = f.as_two_terms()
        lrsum = telescopic(L, R, (i, a, b))
        if lrsum:
            return lrsum
        # Otherwise sum the two halves independently.
        lsum = eval_sum_symbolic(L, (i, a, b))
        rsum = eval_sum_symbolic(R, (i, a, b))
        if None not in (lsum, rsum):
            return lsum + rsum
    # Polynomial terms with Faulhaber's formula
    n = Wild('n')
    result = f.match(i**n)
    if result is not None:
        n = result[n]
        if n.is_Integer:
            if n >= 0:
                return ((C.bernoulli(n+1, b+1) - C.bernoulli(n+1, a))/(n+1)).expand()
            elif a.is_Integer and a >= 1:
                if n == -1:
                    # Sum of reciprocals: difference of harmonic numbers.
                    return C.harmonic(b) - C.harmonic(a - 1)
                else:
                    # Generalized harmonic numbers of order |n|.
                    return C.harmonic(b, abs(n)) - C.harmonic(a - 1, abs(n))
    # Geometric terms
    c1 = C.Wild('c1', exclude=[i])
    c2 = C.Wild('c2', exclude=[i])
    c3 = C.Wild('c3', exclude=[i])
    e = f.match(c1**(c2*i+c3))
    if e is not None:
        c1 = c1.subs(e)
        c2 = c2.subs(e)
        c3 = c3.subs(e)
        # TODO: more general limit handling
        return c1**c3 * (c1**(a*c2) - c1**(c2+b*c2)) / (1 - c1**c2)
    r = gosper_sum(f, (i, a, b))
    if not r in (None, S.NaN):
        return r
    return eval_sum_hyper(f, (i, a, b))
def _eval_sum_hyper(f, i, a):
    """Sum f over i from a to oo using hypergeometric functions.

    Returns (result, convergence_condition), or None when the summand
    is not hypergeometric in i (or its term ratio is not a product of
    integer powers of linear factors).
    """
    from sympy.functions import hyper
    from sympy.simplify import hyperexpand, hypersimp, fraction
    from sympy.polys.polytools import Poly, factor
    # Normalize so the summation starts at 0.
    if a != 0:
        return _eval_sum_hyper(f.subs(i, i + a), i, 0)
    # A vanishing first term would break the term-ratio analysis; drop
    # it and shift the index.
    if f.subs(i, 0) == 0:
        return _eval_sum_hyper(f.subs(i, i + 1), i, 0)
    # hs is the simplified term ratio f(i+1)/f(i) if f is hypergeometric.
    hs = hypersimp(f, i)
    if hs is None:
        return None
    numer, denom = fraction(factor(hs))
    top, topl = numer.as_coeff_mul(i)
    bot, botl = denom.as_coeff_mul(i)
    ab = [top, bot]
    factors = [topl, botl]
    params = [[], []]
    # Each linear factor m*i + n of the ratio contributes a parameter
    # n/m (with its multiplicity) to the hypergeometric function.
    for k in range(2):
        for fac in factors[k]:
            mul = 1
            if fac.is_Pow:
                mul = fac.exp
                fac = fac.base
            if not mul.is_Integer:
                return None
            p = Poly(fac, i)
            if p.degree() != 1:
                return None
            m, n = p.all_coeffs()
            ab[k] *= m**mul
            params[k] += [n/m]*mul
    # Add "1" to numerator parameters, to account for implicit n! in
    # hypergeometric series.
    ap = params[0] + [1]
    bq = params[1]
    x = ab[0]/ab[1]
    h = hyper(ap, bq, x)
    return f.subs(i, 0)*hyperexpand(h), h.convergence_statement
def eval_sum_hyper(f, limits):
    """Try to evaluate Sum(f, (i, a, b)) via hypergeometric summation.

    Returns a Piecewise whose first branch holds under the convergence
    condition and whose fallback is the unevaluated Sum, or None if no
    closed form is found.

    Note: the original Python 2 tuple parameter ``(i, a, b)`` was
    replaced per PEP 3113 (tuple parameter unpacking was removed in
    Python 3); callers still pass the same ``(i, a, b)`` tuple.
    """
    from sympy.functions import Piecewise
    from sympy import oo, And
    (i, a, b) = limits
    if b != oo:
        if a == -oo:
            # Mirror the index to turn a lower-infinite sum into an
            # upper-infinite one.
            res = _eval_sum_hyper(f.subs(i, -i), i, -b)
            if res is not None:
                return Piecewise(res, (Sum(f, (i, a, b)), True))
        else:
            return None
    if a == -oo:
        # Split the doubly infinite sum at the origin.
        res1 = _eval_sum_hyper(f.subs(i, -i), i, 1)
        res2 = _eval_sum_hyper(f, i, 0)
        if res1 is None or res2 is None:
            return None
        res1, cond1 = res1
        res2, cond2 = res2
        cond = And(cond1, cond2)
        if cond is False:
            return None
        return Piecewise((res1 + res2, cond), (Sum(f, (i, a, b)), True))
    # Now b == oo, a != -oo
    res = _eval_sum_hyper(f, i, a)
    if res is not None:
        return Piecewise(res, (Sum(f, (i, a, b)), True))
| 30.046595 | 87 | 0.527377 |
802a54f38bc92215c9c4a55d77351d0e2407bdfa | 15,292 | py | Python | sasrl_env/common/env_pb2_grpc.py | sassoftware/sasrlenv | 2c8039276fdfe8071582f1e5053f9cfcb4a194e9 | [
"Apache-2.0"
] | 1 | 2021-04-23T15:10:58.000Z | 2021-04-23T15:10:58.000Z | sasrl_env/common/env_pb2_grpc.py | sassoftware/sasrlenv | 2c8039276fdfe8071582f1e5053f9cfcb4a194e9 | [
"Apache-2.0"
] | null | null | null | sasrl_env/common/env_pb2_grpc.py | sassoftware/sasrlenv | 2c8039276fdfe8071582f1e5053f9cfcb4a194e9 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import sasrl_env.common.env_pb2 as env__pb2
class EnvControlStub(object):
    """Client-side stub for the EnvControl service.

    Exposes Start (Empty -> ServerInfo) and Close (ServerInfo -> Empty)
    unary-unary RPCs; message types come from env_pb2.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Start = channel.unary_unary(
                '/EnvControl/Start',
                request_serializer=env__pb2.Empty.SerializeToString,
                response_deserializer=env__pb2.ServerInfo.FromString,
                )
        self.Close = channel.unary_unary(
                '/EnvControl/Close',
                request_serializer=env__pb2.ServerInfo.SerializeToString,
                response_deserializer=env__pb2.Empty.FromString,
                )
class EnvControlServicer(object):
    """Server-side interface for EnvControl; subclasses override the
    RPC methods.  Each default implementation reports UNIMPLEMENTED."""

    def Start(self, request, context):
        """EnvControl/Start RPC (Empty -> ServerInfo); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Close(self, request, context):
        """EnvControl/Close RPC (ServerInfo -> Empty); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_EnvControlServicer_to_server(servicer, server):
    """Register an EnvControlServicer's RPC handlers on a grpc server."""
    rpc_method_handlers = {
            'Start': grpc.unary_unary_rpc_method_handler(
                    servicer.Start,
                    request_deserializer=env__pb2.Empty.FromString,
                    response_serializer=env__pb2.ServerInfo.SerializeToString,
            ),
            'Close': grpc.unary_unary_rpc_method_handler(
                    servicer.Close,
                    request_deserializer=env__pb2.ServerInfo.FromString,
                    response_serializer=env__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'EnvControl', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
class EnvControl(object):
    """EnvControl service helpers for one-shot calls without a stub
    (grpc experimental API)."""

    @staticmethod
    def Start(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /EnvControl/Start on *target* (Empty -> ServerInfo)."""
        return grpc.experimental.unary_unary(request, target, '/EnvControl/Start',
            env__pb2.Empty.SerializeToString,
            env__pb2.ServerInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Close(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /EnvControl/Close on *target* (ServerInfo -> Empty)."""
        return grpc.experimental.unary_unary(request, target, '/EnvControl/Close',
            env__pb2.ServerInfo.SerializeToString,
            env__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class EnvStub(object):
    """Client-side stub for the Env service.

    Each attribute is a unary-unary callable; request/response message
    types (from env_pb2) are fixed by the serializers below.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Handshake = channel.unary_unary(
                '/Env/Handshake',
                request_serializer=env__pb2.Empty.SerializeToString,
                response_deserializer=env__pb2.MetaData.FromString,
                )
        self.Make = channel.unary_unary(
                '/Env/Make',
                request_serializer=env__pb2.Name.SerializeToString,
                response_deserializer=env__pb2.Info.FromString,
                )
        self.Reset = channel.unary_unary(
                '/Env/Reset',
                request_serializer=env__pb2.Empty.SerializeToString,
                response_deserializer=env__pb2.Observation.FromString,
                )
        self.Step = channel.unary_unary(
                '/Env/Step',
                request_serializer=env__pb2.Action.SerializeToString,
                response_deserializer=env__pb2.Transition.FromString,
                )
        self.Render = channel.unary_unary(
                '/Env/Render',
                request_serializer=env__pb2.RenderMode.SerializeToString,
                response_deserializer=env__pb2.RenderOut.FromString,
                )
        self.Seed = channel.unary_unary(
                '/Env/Seed',
                request_serializer=env__pb2.EnvSeed.SerializeToString,
                response_deserializer=env__pb2.Empty.FromString,
                )
        self.Sample = channel.unary_unary(
                '/Env/Sample',
                request_serializer=env__pb2.Empty.SerializeToString,
                response_deserializer=env__pb2.Action.FromString,
                )
        self.Close = channel.unary_unary(
                '/Env/Close',
                request_serializer=env__pb2.Empty.SerializeToString,
                response_deserializer=env__pb2.Empty.FromString,
                )
class EnvServicer(object):
    """Server-side interface for the Env service; subclasses override
    the RPC methods.  Each default implementation reports UNIMPLEMENTED."""

    def Handshake(self, request, context):
        """Env/Handshake RPC (Empty -> MetaData); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Make(self, request, context):
        """Env/Make RPC (Name -> Info); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Reset(self, request, context):
        """Env/Reset RPC (Empty -> Observation); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Step(self, request, context):
        """Env/Step RPC (Action -> Transition); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Render(self, request, context):
        """Env/Render RPC (RenderMode -> RenderOut); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Seed(self, request, context):
        """Env/Seed RPC (EnvSeed -> Empty); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Sample(self, request, context):
        """Env/Sample RPC (Empty -> Action); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Close(self, request, context):
        """Env/Close RPC (Empty -> Empty); placeholder."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_EnvServicer_to_server(servicer, server):
    """Register an EnvServicer's RPC handlers on a grpc server."""
    rpc_method_handlers = {
            'Handshake': grpc.unary_unary_rpc_method_handler(
                    servicer.Handshake,
                    request_deserializer=env__pb2.Empty.FromString,
                    response_serializer=env__pb2.MetaData.SerializeToString,
            ),
            'Make': grpc.unary_unary_rpc_method_handler(
                    servicer.Make,
                    request_deserializer=env__pb2.Name.FromString,
                    response_serializer=env__pb2.Info.SerializeToString,
            ),
            'Reset': grpc.unary_unary_rpc_method_handler(
                    servicer.Reset,
                    request_deserializer=env__pb2.Empty.FromString,
                    response_serializer=env__pb2.Observation.SerializeToString,
            ),
            'Step': grpc.unary_unary_rpc_method_handler(
                    servicer.Step,
                    request_deserializer=env__pb2.Action.FromString,
                    response_serializer=env__pb2.Transition.SerializeToString,
            ),
            'Render': grpc.unary_unary_rpc_method_handler(
                    servicer.Render,
                    request_deserializer=env__pb2.RenderMode.FromString,
                    response_serializer=env__pb2.RenderOut.SerializeToString,
            ),
            'Seed': grpc.unary_unary_rpc_method_handler(
                    servicer.Seed,
                    request_deserializer=env__pb2.EnvSeed.FromString,
                    response_serializer=env__pb2.Empty.SerializeToString,
            ),
            'Sample': grpc.unary_unary_rpc_method_handler(
                    servicer.Sample,
                    request_deserializer=env__pb2.Empty.FromString,
                    response_serializer=env__pb2.Action.SerializeToString,
            ),
            'Close': grpc.unary_unary_rpc_method_handler(
                    servicer.Close,
                    request_deserializer=env__pb2.Empty.FromString,
                    response_serializer=env__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Env', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
 # This class is part of an EXPERIMENTAL API.
class Env(object):
    """Env service helpers for one-shot calls without a stub
    (grpc experimental API)."""

    @staticmethod
    def Handshake(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Handshake on *target* (Empty -> MetaData)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Handshake',
            env__pb2.Empty.SerializeToString,
            env__pb2.MetaData.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Make(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Make on *target* (Name -> Info)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Make',
            env__pb2.Name.SerializeToString,
            env__pb2.Info.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Reset(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Reset on *target* (Empty -> Observation)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Reset',
            env__pb2.Empty.SerializeToString,
            env__pb2.Observation.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Step(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Step on *target* (Action -> Transition)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Step',
            env__pb2.Action.SerializeToString,
            env__pb2.Transition.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Render(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Render on *target* (RenderMode -> RenderOut)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Render',
            env__pb2.RenderMode.SerializeToString,
            env__pb2.RenderOut.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Seed(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Seed on *target* (EnvSeed -> Empty)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Seed',
            env__pb2.EnvSeed.SerializeToString,
            env__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Sample(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Sample on *target* (Empty -> Action)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Sample',
            env__pb2.Empty.SerializeToString,
            env__pb2.Action.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Close(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /Env/Close on *target* (Empty -> Empty)."""
        return grpc.experimental.unary_unary(request, target, '/Env/Close',
            env__pb2.Empty.SerializeToString,
            env__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 39.010204 | 87 | 0.618886 |
eb638b05e8cea2358e954ef428e833b5ea4b6224 | 807 | py | Python | pages/advisors_stratifi_pages/sign_in_page.py | DashkevichBy/Advisors.stratiFi.frontend | e59892e8c1ff0d15ce6ef520fe0b00355e1a6870 | [
"Apache-2.0"
] | null | null | null | pages/advisors_stratifi_pages/sign_in_page.py | DashkevichBy/Advisors.stratiFi.frontend | e59892e8c1ff0d15ce6ef520fe0b00355e1a6870 | [
"Apache-2.0"
] | null | null | null | pages/advisors_stratifi_pages/sign_in_page.py | DashkevichBy/Advisors.stratiFi.frontend | e59892e8c1ff0d15ce6ef520fe0b00355e1a6870 | [
"Apache-2.0"
] | null | null | null | from selenium.webdriver.common.by import By
from webium import Find
from abstract_base_page import AbstractBasePage
class SignInPage(AbstractBasePage):
    """Page object for the StratiFi advisors sign-in screen."""

    # Locators for the sign-in form elements.
    emailField = Find(by=By.XPATH, value='.//*[@name="email"]')
    passwordField = Find(by=By.XPATH, value='.//*[@name="password"]')
    signInButton = Find(by=By.XPATH, value='.//*[text()="Sign in"]')

    def __init__(self):
        # Base page is initialized with the sign-in URL.
        AbstractBasePage.__init__(self, "https://advisors.stratifi.com/")

    # ------------Clicks------------------------------
    def press_sign_in(self):
        """Click the sign-in button."""
        self.signInButton.click()

    # ------------Send Keys------------------------------
    def enter_login(self, login):
        """Type the given login into the email field."""
        self.emailField.send_keys(login)

    def enter_password(self, password):
        """Type the given password into the password field."""
        self.passwordField.send_keys(password)
| 26.9 | 77 | 0.604709 |
e4e6082ec1345a4222f83c29d5ac970de99d07f8 | 755 | py | Python | diagan-pkg/diagan/datasets/get_celeba_index_with_attr.py | lee-jinhee/self-diagnosing-gan | da87dd1ef10f2d630d6904ced63ae8805b5db356 | [
"Apache-2.0"
] | 16 | 2021-02-25T06:48:51.000Z | 2022-03-04T13:08:12.000Z | diagan-pkg/diagan/datasets/get_celeba_index_with_attr.py | lee-jinhee/self-diagnosing-gan | da87dd1ef10f2d630d6904ced63ae8805b5db356 | [
"Apache-2.0"
] | null | null | null | diagan-pkg/diagan/datasets/get_celeba_index_with_attr.py | lee-jinhee/self-diagnosing-gan | da87dd1ef10f2d630d6904ced63ae8805b5db356 | [
"Apache-2.0"
] | 4 | 2021-03-02T02:03:17.000Z | 2022-03-24T03:31:12.000Z | from functools import partial
import os
import pandas
def get_celeba_index_with_attr(root, attr_name):
    """Split CelebA image indices by a binary attribute.

    Reads ``<root>/celeba/list_attr_celeba.txt`` (whitespace-separated,
    attribute names on the second line, one image per following row) and
    returns two lists of row indices: images that have ``attr_name`` and
    images that do not.

    Raises:
        ValueError: if ``attr_name`` is not a column of the attribute file.
    """
    base_folder = "celeba"
    fn = partial(os.path.join, root, base_folder)
    attr = pandas.read_csv(fn("list_attr_celeba.txt"), delim_whitespace=True, header=1)
    attribute = (attr.values + 1) // 2  # map from {-1, 1} to {0, 1}
    attr_names = list(attr.columns)
    try:
        attr_num = attr_names.index(attr_name)
    except ValueError:
        # Only a missing column is expected here; a bare except would
        # also have masked unrelated errors.
        raise ValueError("Invalid attribute name {}.".format(attr_name))
    attr_index = []
    not_attr_index = []
    for i, row in enumerate(attribute):
        (attr_index if row[attr_num] else not_attr_index).append(i)
    return attr_index, not_attr_index
| 29.038462 | 87 | 0.654305 |
5aaf7483469f3c71a0047e0951c9ad318d028517 | 5,098 | py | Python | gennghttpxfun.py | acesso/nghttpx-http2-push | fcd9ddc094ee5b81bf5d1cc18ba6897f7a03f12e | [
"MIT"
] | null | null | null | gennghttpxfun.py | acesso/nghttpx-http2-push | fcd9ddc094ee5b81bf5d1cc18ba6897f7a03f12e | [
"MIT"
] | null | null | null | gennghttpxfun.py | acesso/nghttpx-http2-push | fcd9ddc094ee5b81bf5d1cc18ba6897f7a03f12e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from gentokenlookup import gentokenlookup
# Option names recognized by nghttpx; fed to gentokenlookup to generate
# the SHRPX_OPTID token-lookup table.
OPTIONS = [
    "private-key-file",
    "private-key-passwd-file",
    "certificate-file",
    "dh-param-file",
    "subcert",
    "backend",
    "frontend",
    "workers",
    "http2-max-concurrent-streams",
    "log-level",
    "daemon",
    "http2-proxy",
    "http2-bridge",
    "client-proxy",
    "add-x-forwarded-for",
    "strip-incoming-x-forwarded-for",
    "no-via",
    "frontend-http2-read-timeout",
    "frontend-read-timeout",
    "frontend-write-timeout",
    "backend-read-timeout",
    "backend-write-timeout",
    "stream-read-timeout",
    "stream-write-timeout",
    "accesslog-file",
    "accesslog-syslog",
    "accesslog-format",
    "errorlog-file",
    "errorlog-syslog",
    "backend-keep-alive-timeout",
    "frontend-http2-window-bits",
    "backend-http2-window-bits",
    "frontend-http2-connection-window-bits",
    "backend-http2-connection-window-bits",
    "frontend-no-tls",
    "backend-no-tls",
    "backend-tls-sni-field",
    "pid-file",
    "user",
    "syslog-facility",
    "backlog",
    "ciphers",
    "client",
    "insecure",
    "cacert",
    "backend-ipv4",
    "backend-ipv6",
    "backend-http-proxy-uri",
    "read-rate",
    "read-burst",
    "write-rate",
    "write-burst",
    "worker-read-rate",
    "worker-read-burst",
    "worker-write-rate",
    "worker-write-burst",
    "npn-list",
    "tls-proto-list",
    "verify-client",
    "verify-client-cacert",
    "client-private-key-file",
    "client-cert-file",
    "frontend-http2-dump-request-header",
    "frontend-http2-dump-response-header",
    "http2-no-cookie-crumbling",
    "frontend-frame-debug",
    "padding",
    "altsvc",
    "add-request-header",
    "add-response-header",
    "worker-frontend-connections",
    "no-location-rewrite",
    "no-host-rewrite",
    "backend-http1-connections-per-host",
    "backend-http1-connections-per-frontend",
    "listener-disable-timeout",
    "tls-ticket-key-file",
    "rlimit-nofile",
    "backend-request-buffer",
    "backend-response-buffer",
    "no-server-push",
    "backend-http2-connections-per-worker",
    "fetch-ocsp-response-file",
    "ocsp-update-interval",
    "no-ocsp",
    "include",
    "tls-ticket-key-cipher",
    "host-rewrite",
    "tls-session-cache-memcached",
    "tls-session-cache-memcached-tls",
    "tls-ticket-key-memcached",
    "tls-ticket-key-memcached-interval",
    "tls-ticket-key-memcached-max-retry",
    "tls-ticket-key-memcached-max-fail",
    "mruby-file",
    "accept-proxy-protocol",
    "conf",
    "fastopen",
    "tls-dyn-rec-warmup-threshold",
    "tls-dyn-rec-idle-timeout",
    "add-forwarded",
    "strip-incoming-forwarded",
    "forwarded-by",
    "forwarded-for",
    "response-header-field-buffer",
    "max-response-header-fields",
    "request-header-field-buffer",
    "max-request-header-fields",
    "header-field-buffer",
    "max-header-fields",
    "no-http2-cipher-black-list",
    "backend-http1-tls",
    "tls-session-cache-memcached-cert-file",
    "tls-session-cache-memcached-private-key-file",
    "tls-session-cache-memcached-address-family",
    "tls-ticket-key-memcached-tls",
    "tls-ticket-key-memcached-cert-file",
    "tls-ticket-key-memcached-private-key-file",
    "tls-ticket-key-memcached-address-family",
    "backend-address-family",
    "frontend-http2-max-concurrent-streams",
    "backend-http2-max-concurrent-streams",
    "backend-connections-per-frontend",
    "backend-tls",
    "backend-connections-per-host",
    "error-page",
    "no-kqueue",
    "frontend-http2-settings-timeout",
    "backend-http2-settings-timeout",
    "api-max-request-body",
    "backend-max-backoff",
    "server-name",
    "no-server-rewrite",
    "frontend-http2-optimize-write-buffer-size",
    "frontend-http2-optimize-window-size",
    "frontend-http2-window-size",
    "frontend-http2-connection-window-size",
    "backend-http2-window-size",
    "backend-http2-connection-window-size",
    "frontend-http2-encoder-dynamic-table-size",
    "frontend-http2-decoder-dynamic-table-size",
    "backend-http2-encoder-dynamic-table-size",
    "backend-http2-decoder-dynamic-table-size",
    "ecdh-curves",
    "tls-sct-dir",
    "backend-connect-timeout",
    "dns-cache-timeout",
    "dns-lookup-timeout",
    "dns-max-try",
    "frontend-keep-alive-timeout",
    "psk-secrets",
    "client-psk-secrets",
    "client-no-http2-cipher-black-list",
    "client-ciphers",
    "accesslog-write-early",
]

# Access-log variable names; fed to gentokenlookup to generate the
# SHRPX_LOGF lookup table (LogFragmentType values).
LOGVARS = [
    "remote_addr",
    "time_local",
    "time_iso8601",
    "request",
    "status",
    "body_bytes_sent",
    "remote_port",
    "server_port",
    "request_time",
    "pid",
    "alpn",
    "ssl_cipher",
    "ssl_protocol",
    "ssl_session_id",
    "ssl_session_reused",
    "backend_host",
    "backend_port",
]

if __name__ == '__main__':
    # Emit the token-lookup code for both tables (comparison is done
    # via util::strieq_l in the generated output).
    gentokenlookup(OPTIONS, 'SHRPX_OPTID', value_type='char', comp_fun='util::strieq_l')
    gentokenlookup(LOGVARS, 'SHRPX_LOGF', value_type='char', comp_fun='util::strieq_l', return_type='LogFragmentType', fail_value='SHRPX_LOGF_NONE')
| 27.408602 | 148 | 0.642409 |
210b27c3f2f6b257fc25092d7419e5aa1159f7ca | 212 | py | Python | python/grid_utils.py | trestoa/mazes | 71466bf7bb433399c55365b9e69226a5b8fd963c | [
"Unlicense"
] | null | null | null | python/grid_utils.py | trestoa/mazes | 71466bf7bb433399c55365b9e69226a5b8fd963c | [
"Unlicense"
] | null | null | null | python/grid_utils.py | trestoa/mazes | 71466bf7bb433399c55365b9e69226a5b8fd963c | [
"Unlicense"
def distance_cell_body(distances):
    """Return a cell-body formatter for grid rendering.

    The formatter centers a cell's distance in a three-character field;
    cells with an unknown distance (None) render as blanks.
    """
    def format_body(cell):
        d = distances[cell]
        if d is None:
            return '   '
        return '{:^3}'.format(d)
    return format_body
| 23.555556 | 50 | 0.575472 |
5abea5b62e6983654ee6d85a9721a38e1bfe3c9b | 1,582 | py | Python | authlete/conf/authlete_ini_configuration.py | authlete/authlete-python | 751514c525cd04a930373de78463a1fe71b6da60 | [
"Apache-2.0"
] | 5 | 2019-07-30T01:37:04.000Z | 2021-02-15T05:55:55.000Z | authlete/conf/authlete_ini_configuration.py | DestinyCall/authlete-python | 751514c525cd04a930373de78463a1fe71b6da60 | [
"Apache-2.0"
] | null | null | null | authlete/conf/authlete_ini_configuration.py | DestinyCall/authlete-python | 751514c525cd04a930373de78463a1fe71b6da60 | [
"Apache-2.0"
] | 1 | 2021-02-15T05:55:56.000Z | 2021-02-15T05:55:56.000Z | #
# Copyright (C) 2019 Authlete, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the
# License.
from configparser import ConfigParser
from authlete.conf.authlete_configuration import AuthleteConfiguration
class AuthleteIniConfiguration(AuthleteConfiguration):
    """AuthleteConfiguration populated from an INI file.

    Values are read from the ``[authlete]`` section when present,
    otherwise from the parser's defaults.
    """

    def __init__(self, file="authlete.ini"):
        parser = ConfigParser()
        parser.read(file)

        if parser.has_section('authlete'):
            section = parser['authlete']
        else:
            section = parser.defaults()

        super().__init__({
            'baseUrl':                 section.get('base_url'),
            'serviceOwnerApiKey':      section.get('service_owner.api_key'),
            'serviceOwnerApiSecret':   section.get('service_owner.api_secret'),
            'serviceOwnerAccessToken': section.get('service_owner.access_token'),
            'serviceApiKey':           section.get('service.api_key'),
            'serviceApiSecret':        section.get('service.api_secret'),
            'serviceAccessToken':      section.get('service.access_token')
        })
| 36.790698 | 81 | 0.67067 |
34b7d5dc779dc2f1d601578ed60be19eadf280e7 | 2,188 | py | Python | mmdet/models/dense_heads/posenc_yolo_head.py | clintonjwang/mmdetection | f0d1aebdc162ab7e3748d7ac050b523476639818 | [
"Apache-2.0"
] | null | null | null | mmdet/models/dense_heads/posenc_yolo_head.py | clintonjwang/mmdetection | f0d1aebdc162ab7e3748d7ac050b523476639818 | [
"Apache-2.0"
] | 1 | 2021-12-01T04:15:47.000Z | 2021-12-01T04:15:47.000Z | mmdet/models/dense_heads/posenc_yolo_head.py | clintonjwang/mmdetection | f0d1aebdc162ab7e3748d7ac050b523476639818 | [
"Apache-2.0"
] | null | null | null | from ..builder import HEADS
from .yolo_head import YOLOV3Head
from mmcv.cnn import ConvModule
import torch, einops
# Short aliases in the style of the PyTorch docs.
nn = torch.nn
F = torch.nn.functional
@HEADS.register_module()
class PositionalEncodingYOLOV3Head(YOLOV3Head):
    """YOLOv3 head that concatenates a fixed sinusoidal positional
    encoding, channel-wise, onto every feature level before the bridge
    convolutions.

    Args:
        num_frequencies (int): number of sine/cosine frequencies L; the
            encoding contributes 4*L extra channels (sin and cos of L
            frequencies for each of the x and y coordinates).
    """

    def __init__(self, num_frequencies, **kwargs):
        # Stash L before the base __init__ runs, since _init_layers
        # (invoked by the base constructor) reads self.L.
        self.L = num_frequencies
        super().__init__(**kwargs)

    def _init_layers(self):
        # Same layout as YOLOV3Head._init_layers, except each bridge
        # conv accepts self.L*4 extra input channels for the encoding.
        self.convs_bridge = nn.ModuleList()
        self.convs_pred = nn.ModuleList()
        for i in range(self.num_levels):
            conv_bridge = ConvModule(
                self.in_channels[i] + self.L*4,
                self.out_channels[i],
                3,
                padding=1,
                conv_cfg=self.conv_cfg,
                norm_cfg=self.norm_cfg,
                act_cfg=self.act_cfg)
            conv_pred = nn.Conv2d(self.out_channels[i],
                                  self.num_base_priors * self.num_attrib, 1)
            self.convs_bridge.append(conv_bridge)
            self.convs_pred.append(conv_pred)

    def forward(self, feats):
        """Run the head over all levels; returns a 1-tuple of per-level
        prediction maps."""
        assert len(feats) == self.num_levels
        pred_maps = []
        feats = self.add_positional_encoding(feats)
        for i in range(self.num_levels):
            x = feats[i]
            x = self.convs_bridge[i](x)
            pred_map = self.convs_pred[i](x)
            pred_maps.append(pred_map)
        return (tuple(pred_maps),)

    def add_positional_encoding(self, feats):
        """Concatenate a sinusoidal x/y positional encoding onto each level.

        The encoding is built at the first level's resolution and
        bilinearly resized for the remaining levels.
        """
        B,C,H,W = feats[0].shape
        device = feats[0].device
        # Frequencies pi, 2*pi, 4*pi, ..., 2^(L-1)*pi.
        # NOTE(review): torch.pi requires torch >= 1.10 -- confirm the
        # project's minimum torch version.
        freq = 2**torch.arange(self.L, dtype=torch.float32, device=device) * torch.pi
        # Normalized coordinate grids in [0, 1]; X/Y each (H, W).
        Y,X = torch.meshgrid(torch.linspace(0,1,H, device=device),torch.linspace(0,1,W, device=device), indexing='ij')
        XY = torch.stack([X,Y], dim=0)
        # (2, H, W, L) sin and cos stacks concatenated -> (4, H, W, L).
        enc = torch.cat([(XY[...,None]*freq).sin(), (XY[...,None]*freq).cos()], dim=0)
        # Fold frequencies into channels -> (B, 4L, H, W).
        enc = einops.rearrange(enc, 'Z H W L -> (Z L) H W').repeat(B,1,1,1)
        feats_w_enc = [torch.cat([feats[0], enc], dim=1)]
        for feat in feats[1:]:
            # Resize the encoding to each coarser level's spatial size.
            feats_w_enc.append(torch.cat([feat, F.interpolate(enc, size=feat.shape[-2:],
                align_corners=True, mode="bilinear")], dim=1))
        return feats_w_enc
4b268f0b3eaecf2f80e1a4786df8069425831440 | 253 | py | Python | tinkerforge_remote_switch/__init__.py | MoeweX/RemoteSwitchBricklet_MQTT | 23c4d96dc18c6a137e6257a2ad1974310f435078 | [
"MIT"
] | null | null | null | tinkerforge_remote_switch/__init__.py | MoeweX/RemoteSwitchBricklet_MQTT | 23c4d96dc18c6a137e6257a2ad1974310f435078 | [
"MIT"
] | null | null | null | tinkerforge_remote_switch/__init__.py | MoeweX/RemoteSwitchBricklet_MQTT | 23c4d96dc18c6a137e6257a2ad1974310f435078 | [
"MIT"
] | null | null | null | # coding=utf-8
from logging.config import dictConfig
import toml
from tinkerforge_remote_switch.Config import Config
# enable logging
dictConfig(toml.load("../resources/logging.toml"))
# load configuration
CONFIG = Config("../resources/config.toml")
| 21.083333 | 51 | 0.786561 |
a6e208e8938b3027a7afee45ef4e4d3668f46728 | 881 | py | Python | netbox_icinga/livestatus.py | izabela1337/netbox-icinga | 37ec689fe0bfc524c692a7777354c336c2a98cf2 | [
"BSD-3-Clause"
] | null | null | null | netbox_icinga/livestatus.py | izabela1337/netbox-icinga | 37ec689fe0bfc524c692a7777354c336c2a98cf2 | [
"BSD-3-Clause"
] | null | null | null | netbox_icinga/livestatus.py | izabela1337/netbox-icinga | 37ec689fe0bfc524c692a7777354c336c2a98cf2 | [
"BSD-3-Clause"
] | null | null | null | import json
import socket
# Connect/read timeout applied to the livestatus socket.
TIMEOUT = 3  # seconds
# Chunk size used when draining the livestatus response.
BUFFER_SIZE = 4096  # bytes
def hoststatus(hostname: str, livestatus_host: str, livestatus_port: int):
    """Fetch status for *hostname* from an Icinga livestatus endpoint.

    Sends a LQL ``GET hosts`` query filtered on ``host_name`` with JSON
    output, then zips the header row with the first data row.

    Returns a dict mapping column names to values, or ``None`` when the
    response is empty or contains no data row.

    Raises socket errors (timeout, refused connection, ...) unchanged.
    """
    query = (
        "GET hosts\n"
        + "Filter: host_name = %s\n" % hostname
        + "OutputFormat: json\n"
        + "\n"
    )
    chunks = []
    # create_connection resolves IPv4/IPv6 and applies the timeout to the
    # connect as well; the `with` block guarantees the socket is closed even
    # if sendall/recv raises (the original leaked the fd on any error).
    with socket.create_connection((livestatus_host, livestatus_port),
                                  timeout=TIMEOUT) as s:
        s.sendall(query.encode("utf-8"))
        # Half-close the write side so livestatus knows the query is done.
        s.shutdown(socket.SHUT_WR)
        while True:
            buf_data = s.recv(BUFFER_SIZE)
            if not buf_data:
                break
            chunks.append(buf_data)
    if not chunks:
        return None
    rows = json.loads(b"".join(chunks))
    if not rows or len(rows) <= 1:
        return None
    return dict(zip(rows[0], rows[1]))
| 23.184211 | 74 | 0.601589 |
299787d80432c80b9052ccbc398de764a531c601 | 4,486 | py | Python | sdks/python/http_client/v1/polyaxon_sdk/models/v1_hp_log_normal.py | rimon-safesitehq/polyaxon | c456d5bec00b36d75feabdccffa45b2be9a6346e | [
"Apache-2.0"
] | null | null | null | sdks/python/http_client/v1/polyaxon_sdk/models/v1_hp_log_normal.py | rimon-safesitehq/polyaxon | c456d5bec00b36d75feabdccffa45b2be9a6346e | [
"Apache-2.0"
] | null | null | null | sdks/python/http_client/v1/polyaxon_sdk/models/v1_hp_log_normal.py | rimon-safesitehq/polyaxon | c456d5bec00b36d75feabdccffa45b2be9a6346e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.8.3
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1HpLogNormal(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Hyper-parameter model for a log-normal sampling distribution.

    Hand-edited to drop the unnecessary ``six`` compatibility layer
    (the code base is Python 3); behavior is unchanged.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'kind': 'str',
        'value': 'object'
    }

    attribute_map = {
        'kind': 'kind',
        'value': 'value'
    }

    def __init__(self, kind='lognormal', value=None, local_vars_configuration=None):  # noqa: E501
        """V1HpLogNormal - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._kind = None
        self._value = None
        self.discriminator = None

        if kind is not None:
            self.kind = kind
        if value is not None:
            self.value = value

    @property
    def kind(self):
        """Gets the kind of this V1HpLogNormal.  # noqa: E501


        :return: The kind of this V1HpLogNormal.  # noqa: E501
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Sets the kind of this V1HpLogNormal.


        :param kind: The kind of this V1HpLogNormal.  # noqa: E501
        :type: str
        """
        self._kind = kind

    @property
    def value(self):
        """Gets the value of this V1HpLogNormal.  # noqa: E501


        :return: The value of this V1HpLogNormal.  # noqa: E501
        :rtype: object
        """
        return self._value

    @value.setter
    def value(self, value):
        """Sets the value of this V1HpLogNormal.


        :param value: The value of this V1HpLogNormal.  # noqa: E501
        :type: object
        """
        self._value = value

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recurse into nested models (anything exposing to_dict) one level
        # inside lists and dicts, mirroring the generated serialization.
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x
                                for x in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1HpLogNormal):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1HpLogNormal):
            return True

        return self.to_dict() != other.to_dict()
| 27.353659 | 98 | 0.588498 |
caacc66453ba664512c03e32c602d3a7ebc21721 | 9,468 | py | Python | vision_transform_codes/analysis_transforms/convolutional/ista_fista.py | spencerkent/vision-transform-codes | 63258ce698e436ee3ce29def75c89337759fb98b | [
"BSD-3-Clause"
] | null | null | null | vision_transform_codes/analysis_transforms/convolutional/ista_fista.py | spencerkent/vision-transform-codes | 63258ce698e436ee3ce29def75c89337759fb98b | [
"BSD-3-Clause"
] | null | null | null | vision_transform_codes/analysis_transforms/convolutional/ista_fista.py | spencerkent/vision-transform-codes | 63258ce698e436ee3ce29def75c89337759fb98b | [
"BSD-3-Clause"
] | null | null | null | """
Iterative Shrinkage/Thresholding for convolutional sparse inference
What I mean by convolutional is that the basis functions are convolved with
the sparse codes to produce an image. The basis functions will be much smaller
than the images. Implements both the vanilla and
accelerated variant (FISTA).
.. [1] Beck, A., & Teboulle, M. (2009). A fast iterative
shrinkage-thresholding algorithm for linear inverse problems.
SIAM Journal on Imaging Sciences, 2(1), 183–202.
"""
import torch
from utils.convolutions import code_dim_from_padded_img_dim
from utils.convolutions import create_mask
def run(images_padded, dictionary, kernel_stride, padding_dims,
        sparsity_weight, num_iters, variant='fista', initial_codes=None,
        early_stopping_epsilon=None, nonnegative_only=False,
        hard_threshold=False):
  """
  Runs steps of Iterative Shrinkage/Thresholding with a constant stepsize

  Computes ISTA/FISTA updates on samples in parallel. Written to
  minimize data copies, for speed. Ideally, one could stop computing updates
  on a sample whose code is no longer changing very much. However, the
  copying overhead this requires (to only update a newer, smaller set of
  samples) seems to overpower the savings. Further optimization may be
  possible.

  To deal with boundary effects when we do convolution with overlapping
  kernels, the images are already padded. The reconstruction error in this
  padded region is ignored via multiplication with a simple mask. Reconstructed
  images will include the padded region, but the user should strip away this
  when measuring reconstruction accuracy. This is a simple, clean, and
  effective way to deal with the boundary effects inherent in using
  convolutions.

  Parameters
  ----------
  images_padded : torch.Tensor(float32, size=(b, c, h, w))
      A batch of images that we want to find the CONVOLUTIONAL sparse code
      for. b is the number of images. c is the number of image channels, h is
      the (padded) height of the image, while w is the (padded) width.
  dictionary : torch.Tensor(float32, size=(s, c, kh, kw))
      The dictionary of basis functions which we can use to describe the
      images. s is the number of basis functions, the number of channels in the
      resultant code. c is the number of image channels and consequently the
      number of channels for each basis function. kh is the kernel height in
      pixels, while kw is the kernel width.
  kernel_stride : tuple(int, int)
      The stride of the kernels in the vertical direction is kernel_stride[0]
      whereas stride in the horizontal direction is kernel_stride[1]
  padding_dims : tuple(tuple(int, int), tuple(int, int))
      The amount of padding that was done to the images--is used to determine
      the mask. padding_dims[0] is vertical padding and padding_dims[1] is
      horizontal padding. The first component of each of these is the leading
      padding while the second component is the trailing padding.
  sparsity_weight : float
      This is the weight on the sparsity cost term in the sparse coding cost
      function. It is often denoted as \lambda
  num_iters : int
      Number of steps of ISTA/FISTA to run.
  variant : str, optional
      One of {'ista', 'fista'}. Fista is the "accelerated" version of ista.
      Default 'fista'.
  initial_codes : torch.Tensor(float32, size=(b, s, sh, sw)), optional
      Start with these initial values when computing the codes. b is the number
      of images. s is the number of basis functions, the number of channels in
      the resultant code. sh is the height of the code. sw is the width of the
      code. These can both be inferred from the image size and kernel size.
      Default None.
  early_stopping_epsilon : float, optional
      Terminate if code changes by less than this amount per component,
      normalized by stepsize. Beware, requires some overhead computation.
      Default None.
  nonnegative_only : bool, optional
      If true, our code values can only be nonnegative. We just chop off the
      left half of the ISTA thresholding function and it becomes a
      shifted RELU function. The amount of the shift from a generic RELU is
      precisely the sparsity_weight. Default False
  hard_threshold : bool, optional
      The hard thresholding function is the identity outside of the zeroed
      region. Default False.

  Returns
  -------
  codes : torch.Tensor(float32, size=(b, s, sh, sw))
      The inferred convolutional codes for this set of images. b is the number
      of images. s is the number of basis functions, the number of channels in
      the resultant code. sh is the height of the code. sw is the width of the
      code. These can both be inferred from the image size and kernel size.
  """
  assert variant in ['ista', 'fista']
  # We can take the stepsize from the largest eigenvalue of the Gram matrix,
  # which contains all the innerproducts between kernels. We only need to
  # compute this once, here. This guarantees convergence but may be a bit
  # conservative and also could be expensive to compute. If the number of
  # kernels s is > the flattened kernel size c x kh x kw, then we want to use
  # the covariance matrix instead of the gram matrix. For now we use the
  # Gram matrix. One could instead perform a linesearch to
  # find the stepsize, but in my experience this does not work well.
  flattened_dict = torch.flatten(dictionary, start_dim=1)
  gram_matrix = torch.mm(flattened_dict, flattened_dict.t())
  try:
    # NOTE(review): torch.symeig was removed in newer PyTorch releases;
    # torch.linalg.eigvalsh is the replacement -- confirm target torch version.
    lipschitz_constant = torch.symeig(gram_matrix)[0][-1]
  except RuntimeError:
    print('symeig threw an exception. Likely due to one of the dictionary',
          'elements overflowing. The norm of each dictionary element is')
    print(torch.norm(dictionary, dim=[1, 2, 3], p=2))
    raise RuntimeError()
  stepsize = 1. / lipschitz_constant
  # The difference between ISTA and FISTA is *where* we calculate the gradient
  # and make a steepest-descent update. In ISTA this is just the previous
  # estimate for the codes. In FISTA, we keep a set of auxilliary points which
  # combine the past two updates. See [1] for a nice description.
  code_height = code_dim_from_padded_img_dim(
      images_padded.shape[2], dictionary.shape[2], kernel_stride[0])
  code_width = code_dim_from_padded_img_dim(
      images_padded.shape[3], dictionary.shape[3], kernel_stride[1])
  if initial_codes is None:
    grad_eval_points = images_padded.new_zeros((
      images_padded.shape[0], dictionary.shape[0], code_height, code_width))
  else:
    # warm restart, we'll begin with these values
    assert initial_codes.shape[0] == images_padded.shape[0]
    assert initial_codes.shape[1] == dictionary.shape[0]
    assert initial_codes.shape[2] == code_height
    assert initial_codes.shape[3] == code_width
    grad_eval_points = initial_codes
  if early_stopping_epsilon is not None:
    avg_per_component_delta = float('inf')
    if variant == 'ista':
      # ISTA needs old_codes only to measure the early-stopping delta.
      old_codes = torch.zeros_like(grad_eval_points).copy_(grad_eval_points)
  if variant == 'fista':
    old_codes = torch.zeros_like(grad_eval_points).copy_(grad_eval_points)
    t_kplusone = 1.
  # Mask that zeroes the padded border so reconstruction error there is
  # ignored (see the docstring's note on boundary effects).
  reconstruction_mask = create_mask(images_padded, padding_dims)
  stop_early = False
  iter_idx = 0
  while (iter_idx < num_iters and not stop_early):
    if variant == 'fista':
      t_k = t_kplusone

    #### Proximal update ####
    # what we really want is (where gep is {g}radient {e}val {p}oints):
    # corr(dictionary, (conv(dictionary, gep) - images)), but with PyTorch's
    # weird semantics conv is conv_transpose2d and corr is conv2d...
    codes = grad_eval_points - stepsize * torch.nn.functional.conv2d(
        reconstruction_mask * (torch.nn.functional.conv_transpose2d(
          grad_eval_points, dictionary, stride=kernel_stride) - images_padded),
        dictionary, stride=kernel_stride)
    if hard_threshold:
      # Hard thresholding: zero the small entries, leave the rest untouched.
      if nonnegative_only:
        codes[codes < (sparsity_weight*stepsize)] = 0
      else:
        codes[torch.abs(codes) < (sparsity_weight*stepsize)] = 0
    else:
      if nonnegative_only:
        codes.sub_(sparsity_weight * stepsize).clamp_(min=0.)
        #^ shifted rectified linear activation
      else:
        pre_threshold_sign = torch.sign(codes)
        codes.abs_()
        codes.sub_(sparsity_weight * stepsize).clamp_(min=0.)
        codes.mul_(pre_threshold_sign)
        #^ now contains the "soft thresholded" (non-rectified) output x_{k+1}
    if variant == 'fista':
      # Nesterov momentum: extrapolate the next gradient-evaluation point.
      t_kplusone = (1 + (1 + (4 * t_k**2))**0.5) / 2
      beta_kplusone = (t_k - 1) / t_kplusone
      change_in_codes = codes - old_codes
      grad_eval_points = codes + beta_kplusone*(change_in_codes)
      #^ the above two lines are responsible for a ~30% longer per-iteration
      # cost for FISTA as opposed to ISTA. For certain problems though, FISTA
      # may require many fewer steps to get a solution of similar quality.
      old_codes.copy_(codes)
    else:
      grad_eval_points = codes
    if early_stopping_epsilon is not None:
      if variant == 'fista':
        avg_per_component_delta = torch.mean(
            torch.abs(change_in_codes) / stepsize)
      else:
        avg_per_component_delta = torch.mean(
            torch.abs(codes - old_codes) / stepsize)
        old_codes.copy_(codes)
      stop_early = (avg_per_component_delta < early_stopping_epsilon
                    and iter_idx > 0)
    iter_idx += 1
  return codes
| 47.818182 | 79 | 0.717892 |
af74c22cf779026b4d0b5884603292a51eb01b9b | 25,189 | py | Python | src/elf.py | madushan1000/ghidra2dwarf | 51c6c3810010a564a31a2d1de93a1067d02080b7 | [
"MIT"
] | null | null | null | src/elf.py | madushan1000/ghidra2dwarf | 51c6c3810010a564a31a2d1de93a1067d02080b7 | [
"MIT"
] | null | null | null | src/elf.py | madushan1000/ghidra2dwarf | 51c6c3810010a564a31a2d1de93a1067d02080b7 | [
"MIT"
] | null | null | null | import struct
class ElfBase(object):
    """Base class for decoded ELF records.

    Subclasses pass a *map*: a list of field names (or ``(name, converter)``
    tuples) that is zipped against the decoded struct *values*; each pair
    becomes an attribute, optionally run through the converter first.
    """
    def __init__(self, file_offset, map, values, **kwargs):
        # Write 'map' straight into __dict__ so it bypasses __setattr__ and
        # therefore never appears in repr_pos / __repr__.
        self.__dict__['map'] = map
        self.file_offset = file_offset
        for n, v in zip(map, values):
            if isinstance(n, tuple):
                # (name, converter) entry: decode the raw value (e.g. into a
                # DumbEnumValue) before storing it.
                n, f = n
                v = f(v)
            setattr(self, n, v)

    @property
    def values(self):
        """Raw field values in map order, for re-packing with struct.

        DumbEnumValue attributes are unwrapped back to their integer code.
        """
        vv = (getattr(self, n[0] if isinstance(n, tuple) else n) for n in self.map)
        return [v.code if isinstance(v, DumbEnumValue) else v for v in vv]

    def __setattr__(self, name, value):
        # Record the order in which attributes are first assigned, so
        # __repr__ can list them in assignment order.
        if not hasattr(self, 'repr_pos'):
            object.__setattr__(self, 'repr_pos', {})
        if name not in self.repr_pos:
            self.repr_pos[name] = len(self.repr_pos)
        return object.__setattr__(self, name, value)

    def __repr__(self):
        args = ', '.join('%s=%r' % (n, getattr(self, n)) for n, _ in sorted(self.repr_pos.items(), key=lambda x: x[1]))
        return '%s(%s)' % (self.__class__.__name__, args)
class ElfIdent(ElfBase):
    """Decoded e_ident block: magic, word size, endianness, ABI info."""

    def __init__(self, values, file_offset):
        # Field layout; class/data bytes are decoded into DumbEnum values.
        field_map = [
            'magic',
            ('elf_class', ElfClass.__getitem__),
            ('elf_data', ElfData.__getitem__),
            'file_version',
            'osabi',
            'abi_version',
        ]
        ElfBase.__init__(self, file_offset, field_map, values)
class ElfHeader(ElfBase):
    """Decoded ELF file header (everything after the e_ident block)."""

    def __init__(self, values, file_offset):
        # Field layout; type/machine words are decoded into DumbEnum values.
        field_map = [
            ('type', ET.__getitem__),
            ('machine', EM.__getitem__),
            'version',
            'entry',
            'phoff',
            'shoff',
            'flags',
            'ehsize',
            'phentsize',
            'phnum',
            'shentsize',
            'shnum',
            'shstrndx',
        ]
        ElfBase.__init__(self, file_offset, field_map, values)
class ElfSectionHeader(ElfBase):
    """Decoded section header table entry.

    The human-readable ``name`` is filled in later from the section name
    string table; ``name_offset`` is the raw index into that table.
    """

    def __init__(self, values, file_offset):
        # Assign name first so it leads the __repr__ attribute ordering.
        self.name = ''
        field_map = [
            'name_offset',
            ('type', SHT.__getitem__),
            'flags',
            'addr',
            'offset',
            'section_size',
            'link',
            'info',
            'addralign',
            'entsize',
        ]
        ElfBase.__init__(self, file_offset, field_map, values)
# Pre-compiled struct coders for each on-disk record type.  The e_ident
# block has a fixed 16-byte layout; the file header and section header
# layouts depend on word size and endianness, so those are keyed by
# '32le'/'32be'/'64le'/'64be' (see Elf.type).
struct_coders = {
    'ElfIdent': struct.Struct('=4sBBBBBxxxxxxx'),
    'ElfHeader': {
        '32le': struct.Struct('<HHIIIIIHHHHHH'),
        '32be': struct.Struct('>HHIIIIIHHHHHH'),
        '64le': struct.Struct('<HHIQQQIHHHHHH'),
        '64be': struct.Struct('>HHIQQQIHHHHHH'),
    },
    'ElfSectionHeader': {
        '32le': struct.Struct('<IIIIIIIIII'),
        '32be': struct.Struct('>IIIIIIIIII'),
        '64le': struct.Struct('<IIQQQQIIQQ'),
        '64be': struct.Struct('>IIQQQQIIQQ'),
    }
}
class Elf:
    """Minimal ELF reader/patcher used to append sections to a binary.

    Parses just enough structure (e_ident, file header, section headers)
    to append new section bodies plus an updated section header table at
    the end of the file.

    NOTE(review): byte handling mixes str and bytearray the way
    Python 2 / Jython allows (``str(bytearray)``, ``bytearray += str``);
    as written this is not Python 3 compatible -- confirm the Jython
    target before porting.
    """
    def __init__(self, bytes):
        self.bytes = bytearray(bytes)
        self.extract_ident()
        bits = '64' if self.ident.elf_class == ElfClass.ELFCLASS64 else '32'
        endianness = 'le' if self.ident.elf_data == ElfData.ELFDATA2LSB else 'be'
        # Key into struct_coders, e.g. '64le'.
        self.type = bits + endianness
        # (name, body) pairs queued by add_section().
        self.new_sections = []

    def _get_struct(self, cls):
        """Return the struct coder for *cls*, honoring word size/endianness."""
        s = struct_coders[cls.__name__]
        return s[self.type] if isinstance(s, dict) else s

    def _dump_struct(self, cls, off):
        """Decode one *cls* record from file offset *off*."""
        s = self._get_struct(cls)
        # unpack_from doesn't work with jython, so slice and unpack instead.
        bb = self.bytes[off:off+s.size]
        return cls(s.unpack(str(bb)), file_offset=off)

    def _export_struct(self, val, off):
        """Re-encode *val* over its bytes at file offset *off*."""
        s = self._get_struct(val.__class__)
        # pack_into doesn't work with jython, so pack and splice instead.
        self.bytes[off:off+s.size] = s.pack(*val.values)

    def extract_ident(self):
        """Decode (and memoize) the e_ident block at the start of the file."""
        if hasattr(self, 'ident'):
            return self.ident
        self.ident = self._dump_struct(ElfIdent, 0)
        # The file header starts right after the fixed-size ident block.
        self.header_off = self._get_struct(ElfIdent).size
        return self.ident

    def extract_header(self):
        """Decode (and memoize) the ELF file header."""
        if hasattr(self, 'header'):
            return self.header
        self.header = self._dump_struct(ElfHeader, self.header_off)
        return self.header

    def extract_section_headers(self):
        """Decode (and memoize) all section headers and resolve their names."""
        if hasattr(self, 'section_headers'):
            return self.section_headers
        self.section_headers = []
        h = self.extract_header()
        for i in range(h.shnum):
            self.section_headers.append(self._dump_struct(ElfSectionHeader, h.shoff + i * h.shentsize))
        # The section at index shstrndx is the section name string table.
        self.section_names = self.extract_section(self.section_headers[h.shstrndx])
        for s in self.section_headers:
            s.name = self.section_names[s.name_offset:self.section_names.find('\x00', s.name_offset)]
        return self.section_headers

    def extract_section(self, section_header):
        """Return the raw bytes of the section described by *section_header*."""
        return self.bytes[section_header.offset:section_header.offset+section_header.section_size]

    def encode_section_header(self, section_header):
        """Pack *section_header* back into its on-disk byte representation."""
        return self._get_struct(ElfSectionHeader).pack(*section_header.values)

    def add_section(self, name, body):
        """Queue a section for generate_updated_elf(); not written yet."""
        self.new_sections.append((name, body))

    def generate_updated_elf(self):
        """Append the queued sections and return the rewritten ELF bytes.

        Existing sections with a matching name are repointed at the new
        body; unknown names get a fresh header.  New bodies, the (possibly
        grown) name table, and a rebuilt section header table are appended
        at the end of the file.
        """
        section_headers = self.extract_section_headers()
        added_sections = False
        for name, body in self.new_sections:
            try:
                s = next(s for s in section_headers if s.name == name)
            except StopIteration:
                # No section with this name: create a SHT_PROGBITS-style
                # header and extend the name string table.
                added_sections = True
                name_off = len(self.section_names)
                self.section_names += name + '\x00'
                s = ElfSectionHeader([name_off, 1, 0, 0, -1, -1, 0, 0, 1, 0], file_offset=-1)
                s.name = name
                section_headers.append(s)
            # Point the section at its new body, appended at the file end.
            s.offset = len(self.bytes)
            s.section_size = len(body)
            self.bytes += body
        h = self.header
        if added_sections:
            # Re-emit the grown name string table and relocate the section
            # header table past everything we appended.
            shstr = section_headers[h.shstrndx]
            shstr.section_size = len(self.section_names)
            shstr.offset = len(self.bytes)
            self.bytes += self.section_names
            h.shoff = len(self.bytes)
            h.shnum = len(section_headers)
            self.bytes += '\x00' * h.shentsize * h.shnum
        self._export_struct(h, self.header_off)
        for i, s in enumerate(section_headers):
            s.file_offset = h.shoff + i * h.shentsize
            self._export_struct(s, s.file_offset)
        return self.bytes
def add_sections_to_elf(from_file, to_file, sections):
    """Copy the ELF at *from_file* to *to_file* with *sections* appended.

    *sections* is an iterable of ``(name, body)`` pairs handed to
    :meth:`Elf.add_section`.
    """
    with open(from_file, 'rb') as src:
        elf = Elf(src.read())
    for section_name, section_body in sections:
        elf.add_section(section_name, section_body)
    updated = elf.generate_updated_elf()
    with open(to_file, 'wb') as dst:
        dst.write(updated)
class DumbEnumValue:
    """One enum member: a symbolic name, its numeric code, and a description."""

    def __init__(self, name, code, desc):
        self.name = name
        self.code = code
        self.desc = desc

    def __repr__(self):
        return '%s(%r, %r)' % (self.name, self.code, self.desc)
class DumbEnum(object):
    """Poor man's enum base class.

    Subclasses declare members as ``NAME = code, 'description'`` tuples;
    the metaclass rewrites each tuple into a DumbEnumValue and builds a
    code -> value index so ``SubClass[code]`` resolves a member.

    NOTE(review): the ``__metaclass__`` hook is Python 2 / Jython only;
    Python 3 ignores it, so member tuples would stay plain tuples there.
    """
    class __metaclass__(type):
        def __init__(cls, *args):
            cls._bycode = {}
            for n in dir(cls):
                if n[0] != '_':
                    # Wrap the (code, description) tuple into a value object
                    # carrying its own attribute name.
                    v = DumbEnumValue(n, *getattr(cls, n))
                    setattr(cls, n, v)
                    cls._bycode[v.code] = v

        def __getitem__(cls, idx):
            # Class-level indexing: e.g. ElfClass[2] -> ELFCLASS64.
            try:
                return cls._bycode[idx]
            except KeyError:
                raise Exception('enum %s has no entry with code %d.' % (cls.__name__, idx))
# All the constants are parsed from https://github.com/slorquet/elffile2/blob/master/elffile.py
class ElfClass(DumbEnum):
    """
    Encodes the word size of the elf file as from the `ident portion
    of the ELF file header
    <http://www.sco.com/developers/gabi/latest/ch4.eheader.html#elfid>`_.

    This encodes :py:attr:`ElfFileIdent.elfClass`.
    """
    # Each member is (code, description); DumbEnum's metaclass converts it
    # into a DumbEnumValue indexed by code.
    ELFCLASSNONE = 0, 'Invalid class'
    ELFCLASS32 = 1, '32-bit objects'
    ELFCLASS64 = 2, '64-bit objects'
    ELFCLASSNUM = 3, '' # from libelf
class ElfData(DumbEnum):
    """
    Encodes the byte-wise endianness of the elf file as from the
    `ident portion of the elf file header
    <http://www.sco.com/developers/gabi/latest/ch4.eheader.html#elfid>`_.

    This encodes :py:attr:`ElfFileIdent.elfData`.
    """
    # Each member is (code, description); see DumbEnum.
    ELFDATANONE = 0, 'Invalid data encoding'
    ELFDATA2LSB = 1, 'least significant byte first'
    ELFDATA2MSB = 2, 'most significant byte first'
    ELFDATANUM = 3, ''
class EV(DumbEnum):
    """
    Encodes the elf file format version of this elf file as from the `ident portion of the elf file
    header
    <http://www.sco.com/developers/gabi/latest/ch4.eheader.html#elfid>`_.
    """
    # Each member is (code, description); see DumbEnum.
    EV_NONE = 0, 'Invalid version'
    EV_CURRENT = 1, 'Current version'
    EV_NUM = 2, ''
class ElfOsabi(DumbEnum):
    """
    Encodes OSABI values which represent operating system ELF format
    extensions as from the `'ident' portion of the elf file header
    <http://www.sco.com/developers/gabi/latest/ch4.eheader.html#elfid>`_.

    This encodes :py:attr:`ElfFileIdent.osabi`.
    """
    # Each member is (code, description); see DumbEnum.  Note that some
    # codes intentionally alias (e.g. NONE/SYSV share code 0), so only one
    # of each pair survives in the metaclass's code index.
    ELFOSABI_NONE = 0, 'No extensions or unspecified'
    ELFOSABI_SYSV = 0, 'No extensions or unspecified'
    ELFOSABI_HPUX = 1, 'Hewlett-Packard HP-UX'
    ELFOSABI_NETBSD = 2, 'NetBSD'
    ELFOSABI_LINUX = 3, 'Linux'
    ELFOSABI_SOLARIS = 6, 'Sun Solaris'
    ELFOSABI_AIX = 7, 'AIX'
    ELFOSABI_IRIX = 8, 'IRIX'
    ELFOSABI_FREEBSD = 9, 'FreeBSD'
    ELFOSABI_TRU64 = 10, 'Compaq TRU64 UNIX'
    ELFOSABI_MODESTO = 11, 'Novell Modesto'
    ELFOSABI_OPENBSD = 12, 'Open BSD'
    ELFOSABI_OPENVMS = 13, 'Open VMS'
    ELFOSABI_NSK = 14, 'Hewlett-Packard Non-Stop Kernel'
    ELFOSABI_AROS = 15, 'Amiga Research OS'
    ELFOSABI_FENIXOS = 16, 'The FenixOS highly scalable multi-core OS'
    ELFOSABI_ARM_EABI = 64, 'ARM EABI'
    ELFOSABI_ARM = 97, 'ARM'
    ELFOSABI_STANDALONE = 255, 'Standalone (embedded) application'
class ET(DumbEnum):
    """
    Encodes the type of this elf file, (relocatable, executable,
    shared library, etc.), as represented in the `ELF file header
    <http://www.sco.com/developers/gabi/latest/ch4.eheader.html>`_.

    This encodes :py:attr:`ElfFileHeader.type`.
    """
    # Each member is (code, description); see DumbEnum.
    ET_NONE = 0, 'No file type'
    ET_REL = 1, 'Relocatable file'
    ET_EXEC = 2, 'Executable file'
    ET_DYN = 3, 'Shared object file'
    ET_CORE = 4, 'Core file'
    ET_NUM = 5, ''
    ET_LOOS = 0xfe00, 'Operating system-specific'
    ET_HIOS = 0xfeff, 'Operating system-specific'
    ET_LOPROC = 0xff00, 'Processor-specific'
    ET_HIPROC = 0xffff, 'Processor-specific'
class EM(DumbEnum):
    """
    Encodes the processor type represented in this elf file as
    recorded in the `ELF file header <http://www.sco.com/developers/gabi/latest/ch4.eheader.html>`_.

    This encodes :py:attr:`ElfFileHeader.machine`.
    """
    # Each member is (code, description); see DumbEnum.
    EM_NONE = 0, 'No machine'
    EM_M32 = 1, 'AT&T WE 32100'
    EM_SPARC = 2, 'SPARC'
    EM_386 = 3, 'Intel 80386'
    EM_68K = 4, 'Motorola 68000'
    EM_88K = 5, 'Motorola 88000'
    EM_486 = 6, 'Reserved for future use (was EM_486)'
    EM_860 = 7, 'Intel 80860'
    EM_MIPS = 8, 'MIPS I Architecture'
    EM_S370 = 9, 'IBM System/370 Processor'
    EM_MIPS_RS3_LE = 10, 'MIPS RS3000 Little-endian'
    # 11 - 14 reserved
    EM_PARISC = 15, 'Hewlett-Packard PA-RISC'
    # 16 reserved
    EM_VPP500 = 17, 'Fujitsu VPP500'
    EM_SPARC32PLUS = 18, 'Enhanced instruction set SPARC'
    EM_960 = 19, 'Intel 80960'
    EM_PPC = 20, 'PowerPC'
    EM_PPC64 = 21, '64-bit PowerPC'
    EM_S390 = 22, 'IBM System/390 Processor'
    EM_SPU = 23, 'IBM SPU/SPC'
    # 24 - 35 reserved
    EM_V800 = 36, 'NEC V800'
    EM_FR20 = 37, 'Fujitsu FR20'
    EM_RH32 = 38, 'TRW RH-32'
    EM_RCE = 39, 'Motorola RCE'
    EM_ARM = 40, 'Advanced RISC Machines ARM'
    EM_ALPHA = 41, 'Digital Alpha'
    EM_SH = 42, 'Hitachi SH'
    EM_SPARCV9 = 43, 'SPARC Version 9'
    EM_TRICORE = 44, 'Siemens TriCore embedded processor'
    EM_ARC = 45, 'Argonaut RISC Core, Argonaut Technologies Inc.'
    EM_H8_300 = 46, 'Hitachi H8/300'
    EM_H8_300H = 47, 'Hitachi H8/300H'
    EM_H8S = 48, 'Hitachi H8S'
    EM_H8_500 = 49, 'Hitachi H8/500'
    EM_IA_64 = 50, 'Intel IA-64 processor architecture'
    EM_MIPS_X = 51, 'Stanford MIPS-X'
    EM_COLDFIRE = 52, 'Motorola ColdFire'
    EM_68HC12 = 53, 'Motorola M68HC12'
    EM_MMA = 54, 'Fujitsu MMA Multimedia Accelerator'
    EM_PCP = 55, 'Siemens PCP'
    EM_NCPU = 56, 'Sony nCPU embedded RISC processor'
    EM_NDR1 = 57, 'Denso NDR1 microprocessor'
    EM_STARCORE = 58, 'Motorola Star*Core processor'
    EM_ME16 = 59, 'Toyota ME16 processor'
    EM_ST100 = 60, 'STMicroelectronics ST100 processor'
    EM_TINYJ = 61, 'Advanced Logic Corp. TinyJ embedded processor family'
    EM_X86_64 = 62, 'AMD x86-64 architecture'
    EM_PDSP = 63, 'Sony DSP Processor'
    EM_PDP10 = 64, 'Digital Equipment Corp. PDP-10'
    EM_PDP11 = 65, 'Digital Equipment Corp. PDP-11'
    EM_FX66 = 66, 'Siemens FX66 microcontroller'
    EM_ST9PLUS = 67, 'STMicroelectronics ST9+ 8/16 bit microcontroller'
    EM_ST7 = 68, 'STMicroelectronics ST7 8-bit microcontroller'
    EM_68HC16 = 69, 'Motorola MC68HC16 Microcontroller'
    EM_68HC11 = 70, 'Motorola MC68HC11 Microcontroller'
    EM_68HC08 = 71, 'Motorola MC68HC08 Microcontroller'
    EM_68HC05 = 72, 'Motorola MC68HC05 Microcontroller'
    EM_SVX = 73, 'Silicon Graphics SVx'
    EM_ST19 = 74, 'STMicroelectronics ST19 8-bit microcontroller'
    EM_VAX = 75, 'Digital VAX'
    EM_CRIS = 76, 'Axis Communications 32-bit embedded processor'
    EM_JAVELIN = 77, 'Infineon Technologies 32-bit embedded processor'
    EM_FIREPATH = 78, 'Element 14 64-bit DSP Processor'
    EM_ZSP = 79, 'LSI Logic 16-bit DSP Processor'
    EM_MMIX = 80, 'Donald Knuth\'s educational 64-bit processor'
    EM_HUANY = 81, 'Harvard University machine-independent object files'
    EM_PRISM = 82, 'SiTera Prism'
    EM_AVR = 83, 'Atmel AVR 8-bit microcontroller'
    EM_FR30 = 84, 'Fujitsu FR30'
    EM_D10V = 85, 'Mitsubishi D10V'
    EM_D30V = 86, 'Mitsubishi D30V'
    EM_V850 = 87, 'NEC v850'
    EM_M32R = 88, 'Mitsubishi M32R'
    EM_MN10300 = 89, 'Matsushita MN10300'
    EM_MN10200 = 90, 'Matsushita MN10200'
    EM_PJ = 91, 'picoJava'
    EM_OPENRISC = 92, 'OpenRISC 32-bit embedded processor'
    EM_ARC_COMPACT = 93, 'ARC International ARCompact processor (old spelling/synonym: EM_ARC_A5)'
    EM_XTENSA = 94, 'Tensilica Xtensa Architecture'
    EM_VIDEOCORE = 95, 'Alphamosaic VideoCore processor'
    EM_TMM_GPP = 96, 'Thompson Multimedia General Purpose Processor'
    EM_NS32K = 97, 'National Semiconductor 32000 series'
    EM_TPC = 98, 'Tenor Network TPC processor'
    EM_SNP1K = 99, 'Trebia SNP 1000 processor'
    EM_ST200 = 100, 'STMicroelectronics (www.st.com) ST200 microcontroller'
    EM_IP2K = 101, 'Ubicom IP2xxx microcontroller family'
    EM_MAX = 102, 'MAX Processor'
    EM_CR = 103, 'National Semiconductor CompactRISC microprocessor'
    EM_F2MC16 = 104, 'Fujitsu F2MC16'
    EM_MSP430 = 105, 'Texas Instruments embedded microcontroller msp430'
    EM_BLACKFIN = 106, 'Analog Devices Blackfin (DSP) processor'
    EM_SE_C33 = 107, 'S1C33 Family of Seiko Epson processors'
    EM_SEP = 108, 'Sharp embedded microprocessor'
    EM_ARCA = 109, 'Arca RISC Microprocessor'
    EM_UNICORE = 110, 'Microprocessor series from PKU-Unity Ltd. and MPRC of Peking University'
    EM_EXCESS = 111, 'eXcess: 16/32/64-bit configurable embedded CPU'
    EM_DXP = 112, 'Icera Semiconductor Inc. Deep Execution Processor'
    EM_ALTERA_NIOS2 = 113, 'Altera Nios II soft-core processor'
    EM_CRX = 114, 'National Semiconductor CompactRISC CRX microprocessor'
    EM_XGATE = 115, 'Motorola XGATE embedded processor'
    EM_C166 = 116, 'Infineon C16x/XC16x processor'
    EM_M16C = 117, 'Renesas M16C series microprocessors'
    EM_DSPIC30F = 118, 'Microchip Technology dsPIC30F Digital Signal Controller'
    EM_CE = 119, 'Freescale Communication Engine RISC core'
    EM_M32C = 120, 'Renesas M32C series microprocessors'
    # 121 - 130 reserved
    EM_TSK3000 = 131, 'Altium TSK3000 core'
    EM_RS08 = 132, 'Freescale RS08 embedded processor'
    # 133 reserved
    EM_ECOG2 = 134, 'Cyan Technology eCOG2 microprocessor'
    EM_SCORE7 = 135, 'Sunplus S+core7 RISC processor'
    EM_DSP24 = 136, 'New Japan Radio (NJR) 24-bit DSP Processor'
    EM_VIDEOCORE3 = 137, 'Broadcom VideoCore III processor'
    EM_LATTICEMICO32 = 138, 'RISC processor for Lattice FPGA architecture'
    EM_SE_C17 = 139, 'Seiko Epson C17 family'
    EM_TI_C6000 = 140, 'The Texas Instruments TMS320C6000 DSP family'
    EM_TI_C2000 = 141, 'The Texas Instruments TMS320C2000 DSP family'
    EM_TI_C5500 = 142, 'The Texas Instruments TMS320C55x DSP family'
    # 143 - 159 reserved
    EM_MMDSP_PLUS = 160, 'STMicroelectronics 64bit VLIW Data Signal Processor'
    EM_CYPRESS_M8C = 161, 'Cypress M8C microprocessor'
    EM_R32C = 162, 'Renesas R32C series microprocessors'
    EM_TRIMEDIA = 163, 'NXP Semiconductors TriMedia architecture family'
    EM_QDSP6 = 164, 'QUALCOMM DSP6 Processor'
    EM_8051 = 165, 'Intel 8051 and variants'
    EM_STXP7X = 166, 'STMicroelectronics STxP7x family of configurable and extensible RISC processors'
    EM_NDS32 = 167, 'Andes Technology compact code size embedded RISC processor family'
    EM_ECOG1 = 168, 'Cyan Technology eCOG1X family'
    EM_ECOG1X = 168, 'Cyan Technology eCOG1X family'
    EM_MAXQ30 = 169, 'Dallas Semiconductor MAXQ30 Core Micro-controllers'
    EM_XIMO16 = 170, 'New Japan Radio (NJR) 16-bit DSP Processor'
    EM_MANIK = 171, 'M2000 Reconfigurable RISC Microprocessor'
    EM_CRAYNV2 = 172, 'Cray Inc. NV2 vector architecture'
    EM_RX = 173, 'Renesas RX family'
    EM_METAG = 174, 'Imagination Technologies META processor architecture'
    EM_MCST_ELBRUS = 175, 'MCST Elbrus general purpose hardware architecture'
    EM_ECOG16 = 176, 'Cyan Technology eCOG16 family'
    EM_CR16 = 177, 'National Semiconductor CompactRISC CR16 16-bit microprocessor'
    EM_ETPU = 178, 'Freescale Extended Time Processing Unit'
    EM_SLE9X = 179, 'Infineon Technologies SLE9X core'
    # 180-182 Reserved for future Intel use
    # 183-184 Reserved for future ARM use
    EM_AVR32 = 185, 'Atmel Corporation 32-bit microprocessor family'
    EM_STM8 = 186, 'STMicroeletronics STM8 8-bit microcontroller'
    EM_TILE64 = 187, 'Tilera TILE64 multicore architecture family'
    EM_TILEPRO = 188, 'Tilera TILEPro multicore architecture family'
    EM_MICROBLAZE = 189, 'Xilinx MicroBlaze 32-bit RISC soft processor core'
    EM_CUDA = 190, 'NVIDIA CUDA architecture'
    EM_TILEGX = 191, 'Tilera TILE-Gx multicore architecture family'
    EM_CLOUDSHIELD = 192, 'CloudShield architecture family'
    EM_COREA_1ST = 193, 'KIPO-KAIST Core-A 1st generation processor family'
    EM_COREA_2ND = 194, 'KIPO-KAIST Core-A 2nd generation processor family'
class SHN(DumbEnum):
    """
    Encodes special section indices into the section header table.
    This is a subclass of :py:class:`DumbEnum`.
    Several members deliberately share the same numeric value (aliases):
    0xff00 is both the start of the reserved range and SHN_BEFORE/SHN_LOPROC,
    and 0xffff is both SHN_XINDEX and SHN_HIRESERVE.
    """
    SHN_UNDEF = 0, 'marks an undefined, missing, irrelevant, or otherwise meaningless section reference'
    SHN_LORESERVE = 0xff00, 'specifies the lower bound of the range of reserved indexes'
    SHN_BEFORE = 0xff00, 'Order section before all others (Solaris).'
    SHN_LOPROC = 0xff00, ''
    SHN_AFTER = 0xff01, 'Order section after all others (Solaris).'
    SHN_HIPROC = 0xff1f, ''
    SHN_LOOS = 0xff20, ''
    SHN_HIOS = 0xff3f, ''
    SHN_ABS = 0xfff1, 'specifies absolute values for the corresponding reference'
    SHN_COMMON = 0xfff2, 'symbols defined relative to this section are common symbols, such as FORTRAN COMMON or unallocated C external variables.'
    SHN_XINDEX = 0xffff, 'This value is an escape value. It indicates that the actual section header index is too large to fit in the containing field and is to be found in another location (specific to the structure where it appears). '
    SHN_HIRESERVE = 0xffff, 'specifies the upper bound of the range of reserved indexes'
class SHT(DumbEnum):
    """
    Encodes the type of a section as represented in the section header
    entry of `the section header table
    <http://www.sco.com/developers/gabi/latest/ch4.sheader.html#section_header>`_.
    This encodes :py:attr:`ElfSectionHeader.type`.
    Values at or above SHT_LOOS are OS-, processor- or application-specific
    ranges; several high values are deliberate aliases (e.g. 0x6fffffff).
    """
    SHT_NULL = 0, 'marks the section header as inactive; it does not have an associated section. Other members of the section header have undefined values.'
    SHT_PROGBITS = 1, 'The section holds information defined by the program, whose format and meaning are determined solely by the program.'
    SHT_SYMTAB = 2, 'provides symbols for link editing, though it may also be used for dynamic linking.'
    SHT_STRTAB = 3, 'section holds a string table. An object file may have multiple string table sections.'
    SHT_RELA = 4, 'section holds relocation entries with explicit addends, such as type Elf32_Rela for the 32-bit class of object files or type Elf64_Rela for the 64-bit class of object files.'
    SHT_HASH = 5, 'section holds a symbol hash table'
    SHT_DYNAMIC = 6, 'section holds information for dynamic linking'
    SHT_NOTE = 7, 'section holds information that marks the file in some way'
    SHT_NOBITS = 8, 'A section of this type occupies no space in the file but otherwise resembles SHT_PROGBITS'
    SHT_REL = 9, 'section holds relocation entries without explicit addends'
    SHT_SHLIB = 10, 'section type is reserved but has unspecified semantics'
    SHT_DYNSYM = 11, 'holds a minimal set of dynamic linking symbols,'
    SHT_INIT_ARRAY = 14, 'section contains an array of pointers to initialization functions'
    SHT_FINI_ARRAY = 15, 'section contains an array of pointers to termination functions'
    SHT_PREINIT_ARRAY = 16, 'section contains an array of pointers to functions that are invoked before all other initialization functions'
    SHT_GROUP = 17, 'section defines a section group'
    SHT_SYMTAB_SHNDX = 18, 'section is associated with a section of type SHT_SYMTAB and is required if any of the section header indexes referenced by that symbol table contain the escape value SHN_XINDEX'
    SHT_LOOS = 0x60000000, ''
    SHT_GNU_ATTRIBUTES = 0x6ffffff5, 'Object attributes.'
    SHT_GNU_HASH = 0x6ffffff6, 'GNU-style hash table.'
    # Fixed truncated description ('Prelink library lis' -> 'Prelink library list').
    SHT_GNU_LIBLIST = 0x6ffffff7, 'Prelink library list'
    SHT_CHECKSUM = 0x6ffffff8, 'Checksum for DSO content.'
    SHT_LOSUNW = 0x6ffffffa, 'Sun-specific low bound.'
    # Fixed garbled description (was 'efine SHT_SUNW_COMDAT', a mangled
    # '#define' line from the C header this table was derived from).
    SHT_SUNW_move = 0x6ffffffa, ''
    SHT_SUNW_COMDAT = 0x6ffffffb, ''
    SHT_SUNW_syminfo = 0x6ffffffc, ''
    SHT_GNU_verdef = 0x6ffffffd, 'Version definition section.'
    SHT_GNU_verneed = 0x6ffffffe, 'Version needs section.'
    SHT_GNU_versym = 0x6fffffff, 'Version symbol table.'
    SHT_HISUNW = 0x6fffffff, 'Sun-specific high bound.'
    SHT_HIOS = 0x6fffffff, ''
    SHT_LOPROC = 0x70000000, ''
    SHT_HIPROC = 0x7fffffff, ''
    SHT_LOUSER = 0x80000000, ''
    SHT_HIUSER = 0xffffffff, ''
class SHF(DumbEnum):
    """
    Encodes the section flags as represented in the section header
    entry of `the section header table
    <http://www.sco.com/developers/gabi/latest/ch4.sheader.html#section_header>`_.
    This encodes :py:attr:`ElfSectionHeader.flags`. These are bit flags which are
    or'd together.
    """
    SHF_WRITE = 0x1, 'section contains data that should be writable during process execution'
    SHF_ALLOC = 0x2, 'section occupies memory during process execution'
    SHF_EXECINSTR = 0x4, 'section contains executable machine instructions'
    SHF_MERGE = 0x10, 'data in the section may be merged to eliminate duplication'
    SHF_STRINGS = 0x20, 'data elements in the section consist of null-terminated character strings'
    SHF_INFO_LINK = 0x40, 'The sh_info field of this section header holds a section header table index'
    SHF_LINK_ORDER = 0x80, 'adds special ordering requirements for link editors'
    SHF_OS_NONCONFORMING = 0x100, 'section requires special OS-specific processing'
    SHF_GROUP = 0x200, 'section is a member of a section group'
    SHF_TLS = 0x400, 'section holds Thread-Local Storage'
    SHF_MASKOS = 0x0ff00000, 'All bits included in this mask are reserved for operating system-specific semantics'
    SHF_MASKPROC = 0xf0000000, 'All bits included in this mask are reserved for processor-specific semantics'
    # The following two are vendor extensions that live inside the reserved
    # mask ranges above.
    SHF_ORDERED = (1 << 30), 'Special ordering requirement (Solaris).'
    SHF_EXCLUDE = (1 << 31), 'Section is excluded unless referenced or allocated (Solaris).'
class PT(DumbEnum):
    """
    Encodes the segment type as recorded in the `program header
    <http://www.sco.com/developers/gabi/latest/ch5.pheader.html>`_.
    This encodes :py:attr:`ElfProgramHeader.type`.
    Values from PT_LOOS upward are OS- and processor-specific ranges; the
    GNU_* and SUNW* members are vendor extensions inside those ranges.
    """
    PT_NULL = 0, 'array element is unused'
    PT_LOAD = 1, 'array element specifies a loadable segment'
    PT_DYNAMIC = 2, 'array element specifies dynamic linking information'
    PT_INTERP = 3, 'array element specifies the location and size of a null-terminated path name to invoke as an interpreter'
    PT_NOTE = 4, 'array element specifies the location and size of auxiliary information'
    PT_SHLIB = 5, 'segment type is reserved'
    PT_PHDR = 6, 'specifies the location and size of the program header table itself'
    PT_TLS = 7, 'array element specifies the Thread-Local Storage template'
    PT_LOOS = 0x60000000, ''
    PT_GNU_EH_FRAME = 0x6474e550, 'GCC .eh_frame_hdr segment'
    PT_GNU_STACK = 0x6474e551, 'Indicates stack executability'
    PT_GNU_RELRO = 0x6474e552, 'Read only after relocation'
    PT_LOSUNW = 0x6ffffffa, ''
    PT_SUNWBSS = 0x6ffffffa, 'Sun Specific segment'
    PT_SUNWSTACK = 0x6ffffffb, 'Stack segment'
    PT_HISUNW = 0x6fffffff, ''
    PT_HIOS = 0x6fffffff, ''
    PT_LOPROC = 0x70000000, ''
    PT_HIPROC = 0x7fffffff, ''
class PF(DumbEnum):
    """
    Encodes the segment flags as recorded in the `program header
    <http://www.sco.com/developers/gabi/latest/ch5.pheader.html>`_.
    This encodes :py:attr:`ElfProgramHeader.flags`.
    These are bit flags (values 1/2/4 plus reserved masks) that may be
    or'd together.
    """
    PF_X = 0x1, 'Execute'
    PF_W = 0x2, 'Write'
    PF_R = 0x4, 'Read'
    PF_MASKOS = 0x0ff00000, 'Unspecified'
    PF_MASKPROC = 0xf0000000, 'Unspecified'
class GRP(DumbEnum):
    """
    Encodes section-group flags (GRP_*), bit flags used with SHT_GROUP
    sections.
    """
    GRP_COMDAT = 0x1, 'This is a COMDAT group'
    GRP_MASKOS = 0x0ff00000, 'All bits included in this mask are reserved for operating system-specific semantics'
    GRP_MASKPROC = 0xf0000000, 'All bits included in this mask are reserved for processor-specific semantics'
if __name__ == '__main__':
    # CLI: <input-elf> <output-elf> <section-file-glob>...
    # Each matched file is added to the ELF as a section named after the file.
    from sys import argv
    from glob import glob
    from_file = argv[1]
    to_file = argv[2]
    section_names = argv[3:]
    sections = []
    for pattern in section_names:
        for name in glob(pattern):
            # Use a context manager so every section file is closed
            # (the original leaked one open handle per section).
            with open(name, 'rb') as section_file:
                sections.append((name, section_file.read()))
    add_sections_to_elf(from_file, to_file, sections)
| 41.703642 | 235 | 0.718607 |
cbff7f0a241e27ac588cbc07b859704fed2f931f | 4,130 | py | Python | pytheos/utils.py | nilsbeck/pytheos | de4f3a03330ddb28e68ddcaa7b4888ea9a25e238 | [
"MIT"
] | null | null | null | pytheos/utils.py | nilsbeck/pytheos | de4f3a03330ddb28e68ddcaa7b4888ea9a25e238 | [
"MIT"
] | 1 | 2021-10-30T16:31:41.000Z | 2021-10-30T16:31:41.000Z | pytheos/utils.py | nilsbeck/pytheos | de4f3a03330ddb28e68ddcaa7b4888ea9a25e238 | [
"MIT"
] | 1 | 2021-10-30T14:24:58.000Z | 2021-10-30T14:24:58.000Z | #!/usr/bin/env python
""" General utility functions """
from __future__ import annotations
import re
from socket import socket
from typing import Optional
import netifaces
# Characters that must be percent-encoded in outgoing HEOS command
# parameters (see _encode_characters / _decode_characters below).
CHARACTER_REPLACE_MAP = {
    '&': '%26',
    '=': '%3D',
    '%': '%25',
}
def extract_host(url: str) -> Optional[str]:
    """ Extracts the hostname or IP address from the supplied URL.

    :param url: URL string
    :return: Matching string or None if not found
    """
    found = re.match(r"https?://([^:/]+)[:/]?", url)  # any valid http(s) URL host
    if found is None:
        return None
    return found.group(1)
def build_command_string(group: str, command: str, **kwargs) -> str:
    """ Builds the command string to send to the HEOS service.

    :param group: Group name (e.g. system, player, etc)
    :param command: Command name (e.g. heart_beat)
    :param kwargs: Any parameters that should be sent along with the command
    :return: The command string, newline-terminated
    """
    pairs = []
    for name, value in kwargs.items():
        pairs.append(name + '=' + _encode_characters(value))

    result = f"heos://{group}/{command}"
    if pairs:
        result = result + '?' + '&'.join(pairs)
    return result + "\n"
def _encode_characters(input_string) -> str:
    """ Encodes certain special characters as defined by the HEOS specification.

    :param input_string: String (or any object; it is stringified first) to encode
    :return: New string with encoded characters
    """
    text = input_string if isinstance(input_string, str) else str(input_string)
    # Replace each mapped character with its percent-encoded form.
    return ''.join(CHARACTER_REPLACE_MAP.get(ch, ch) for ch in text)
def parse_var_string(input_string: str) -> dict:
    """ Parses a URL parameter string (sorta) like "var1='val1'&var2='val2'" - also supports the special case
    where there is no value specified, such as "signed_in&un=username", for the player/signed_in command.

    :param input_string: Input string to parse (None yields an empty dict)
    :return: dict
    """
    parsed = {}
    if input_string is None:
        return parsed

    for chunk in input_string.split('&'):
        pieces = chunk.split('=')
        key = pieces[0]
        # Vars with no value use their own name as the value
        # (e.g. "signed_in" in "signed_in&un=username").
        raw_value = pieces[1] if len(pieces) > 1 else key
        parsed[key] = _decode_characters(raw_value.strip("'"))
    return parsed
def _decode_characters(input_string: str) -> str:
    """ Decodes certain special characters as defined by the HEOS specification.

    :param input_string: String to decode
    :return: New string with decoded characters
    """
    decoded = input_string
    for plain_char, encoded_char in CHARACTER_REPLACE_MAP.items():
        decoded = decoded.replace(encoded_char, plain_char)
    return decoded
def get_default_ip(address_family: socket.AddressFamily) -> str:
    """ Retrieves the IP address on the default interface

    :param address_family: Address family
    :return: str
    """
    _gateway, interface = get_default_interface(address_family)
    return get_interface_ip(interface, address_family)
def get_interface_ip(interface: str, address_family: socket.AddressFamily) -> Optional[str]:
    """ Retrieves the IP address of the specified interface.

    :param interface: Interface name
    :param address_family: Address family
    :return: str or None if not found
    """
    family_addresses = netifaces.ifaddresses(interface).get(address_family)
    if not family_addresses:
        return None
    # First entry for the family; 'addr' may itself be absent.
    return family_addresses[0].get('addr')
def get_default_interface(address_family: socket.AddressFamily) -> tuple:
    """ Retrieves the default gateway and interface for the specified address family.

    :param address_family: Address family
    :return: tuple (gateway, interface) or None if the family has no default route
    """
    default_routes = netifaces.gateways()['default']
    return default_routes.get(address_family)
| 30.145985 | 109 | 0.67724 |
2f298a84489542127f81d495823158d13ea333fd | 2,579 | py | Python | io_scene_xray/details/types.py | vika-sonne/blender-xray | a9b9b9e3c83500e5431ec9e692ed57aff63ea43c | [
"BSD-2-Clause"
] | null | null | null | io_scene_xray/details/types.py | vika-sonne/blender-xray | a9b9b9e3c83500e5431ec9e692ed57aff63ea43c | [
"BSD-2-Clause"
] | null | null | null | io_scene_xray/details/types.py | vika-sonne/blender-xray | a9b9b9e3c83500e5431ec9e692ed57aff63ea43c | [
"BSD-2-Clause"
] | null | null | null | import bpy
from . import utils
class XRayObjectDetailsSlotsMeshesProperties(bpy.types.PropertyGroup):
    """Names of the four detail-mesh channels of a slots object."""
    # NOTE(review): presumably these hold Blender object/image names used by
    # the level.details importer/exporter — confirm against the operator code.
    mesh_0 = bpy.props.StringProperty()
    mesh_1 = bpy.props.StringProperty()
    mesh_2 = bpy.props.StringProperty()
    mesh_3 = bpy.props.StringProperty()
class XRayObjectDetailsSlotsLightingProperties(bpy.types.PropertyGroup):
    """Lighting settings for detail slots: file-format version plus the
    names of the images holding the baked lighting data."""
    # level.details format selector: version 3 (builds 1569-CoP) or
    # version 2 (builds 1096-1558).
    format = bpy.props.EnumProperty(
        name='Format',
        items=(
            (
                'builds_1569-cop',
                'Builds 1569-CoP',
                'level.details version 3 (builds 1569-CoP)'
            ),
            (
                'builds_1096-1558',
                'Builds 1096-1558',
                'level.details version 2 (builds 1096-1558)'
            )
        ),
        default='builds_1569-cop'
    )
    # Names of the lighting/hemi/shadow images (Blender image datablock
    # names — TODO confirm).
    lights_image = bpy.props.StringProperty()
    hemi_image = bpy.props.StringProperty()
    shadows_image = bpy.props.StringProperty()
class XRayObjectDetailsSlotsProperties(bpy.types.PropertyGroup):
    """Groups the per-slot sub-properties (meshes + lighting) and the names
    of the related scene objects."""
    meshes = bpy.props.PointerProperty(
        type=XRayObjectDetailsSlotsMeshesProperties
    )
    # NOTE: attribute name 'ligthing' is a typo kept for file/addon
    # compatibility (renaming would break saved data and callers).
    ligthing = bpy.props.PointerProperty(
        type=XRayObjectDetailsSlotsLightingProperties
    )
    # Names of the objects holding the detail meshes and the base/top
    # slot geometry.
    meshes_object = bpy.props.StringProperty()
    slots_base_object = bpy.props.StringProperty()
    slots_top_object = bpy.props.StringProperty()
class XRayObjectDetailsModelProperties(bpy.types.PropertyGroup):
    """Per-detail-model settings: waving flag, scale range, palette index
    and the preview color derived from that index."""
    no_waving = bpy.props.BoolProperty(
        description='No Waving',
        options={'SKIP_SAVE'},
        default=False
    )
    # Random scale range applied to instances of this detail model.
    min_scale = bpy.props.FloatProperty(default=1.0, min=0.1, max=100.0)
    max_scale = bpy.props.FloatProperty(default=1.0, min=0.1, max=100.0)

    def _update_detail_color_by_index(self, context):
        # Keep the preview color in sync with the palette index whenever
        # the index property changes (update callback of 'index').
        if hasattr(context.object, 'xray'):
            color_indices = utils.generate_color_indices()
            context.object.xray.detail.model.color = \
                color_indices[context.object.xray.detail.model.index][0 : 3]

    # Palette index of the detail model (0..62); changing it refreshes
    # 'color' via the callback above.
    index = bpy.props.IntProperty(
        default=0,
        min=0,
        max=62,
        update=_update_detail_color_by_index
    )
    # RGB preview color; normally derived from 'index', not set directly.
    color = bpy.props.FloatVectorProperty(
        default=(1.0, 0.0, 0.0),
        max=1.0,
        min=0.0,
        subtype='COLOR_GAMMA',
        size=3
    )
class XRayObjectDetailsProperties(bpy.types.PropertyGroup):
    """Root X-Ray details property group: one pointer for detail-model
    options and one for detail-slots options."""
    # detail model options
    model = bpy.props.PointerProperty(type=XRayObjectDetailsModelProperties)
    # detail slots options
    slots = bpy.props.PointerProperty(type=XRayObjectDetailsSlotsProperties)
| 27.43617 | 76 | 0.645599 |
453150774e4ada6553f9a6fae527082c95ab8352 | 9,536 | py | Python | sgnlp/models/lif_3way_ap/modules/layers.py | jonheng/sgnlp | aeee85b78de2e449ca1dc6b18686a060cb938d07 | [
"MIT"
] | null | null | null | sgnlp/models/lif_3way_ap/modules/layers.py | jonheng/sgnlp | aeee85b78de2e449ca1dc6b18686a060cb938d07 | [
"MIT"
] | null | null | null | sgnlp/models/lif_3way_ap/modules/layers.py | jonheng/sgnlp | aeee85b78de2e449ca1dc6b18686a060cb938d07 | [
"MIT"
] | null | null | null | import numpy as np
import torch
import torch.nn as nn
from allennlp.nn.util import masked_softmax
class CharCNNEmbedding(nn.Module):
    """Character-level CNN embedder: embeds characters, convolves over the
    character axis, max-pools, then applies ReLU and dropout."""

    def __init__(self, num_embeddings, embedding_dim, num_filters, kernel_size, padding_idx=0, stride=1, dropout=0.3):
        super(CharCNNEmbedding, self).__init__()
        self.embedding = nn.Embedding(num_embeddings=num_embeddings, embedding_dim=embedding_dim,
                                      padding_idx=padding_idx)
        self.conv1d = nn.Conv1d(in_channels=embedding_dim, out_channels=num_filters,
                                kernel_size=kernel_size, stride=stride)
        self.activation = nn.ReLU()
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        """
        Assumes x is of shape (batch_size, max_num_words, max_num_chars).
        Returns (batch_size, max_num_words, num_filters).
        """
        leading = list(x.shape[:2])  # (batch_size, max_num_words)

        # Fold batch and word axes together so each word is one conv sample.
        flat = x.contiguous().view(*([-1] + list(x.shape[2:])))
        embedded = self.embedding(flat)                    # (B*W, chars, emb)

        # Conv1d wants channels second: (B*W, emb, chars) -> (B*W, filters, strides)
        convolved = self.conv1d(embedded.transpose(1, 2))

        # Max pool over the stride axis, then restore (batch, words, filters).
        pooled = convolved.max(dim=2).values
        out = pooled.contiguous().view(*(leading + [-1]))
        return self.dropout(self.activation(out))
class WordEmbedding(nn.Module):
    """Word embedding lookup with optional loading of pre-trained
    (GloVe-style) vectors; weights are frozen unless trainable=True."""

    def __init__(self, num_embeddings, embedding_dim, padding_idx=0, trainable=False):
        super(WordEmbedding, self).__init__()
        self.embedding = nn.Embedding(num_embeddings=num_embeddings, embedding_dim=embedding_dim,
                                      padding_idx=padding_idx)
        # Freeze the table by default; callers opt in to fine-tuning.
        self.embedding.weight.requires_grad = trainable

    def forward(self, x):
        # x: integer token ids; returns the corresponding embedding vectors.
        return self.embedding(x)

    def load_pretrained_embeddings(self, file_path, vocab):
        # NOTE(review): vocab is assumed to expose len() and a .stoi
        # word->index mapping (torchtext-style) — confirm against callers.
        assert len(vocab) == self.embedding.num_embeddings
        # Process glove embeddings: File format is word followed by weights (space-separated)
        word_to_embedding = {}
        with open(file_path, "r", encoding="utf-8") as f:
            for index, line in enumerate(f):
                try:
                    values = line.split()
                    word = values[0]
                    word_embedding = np.asarray(values[1:], dtype=np.float32)
                    word_to_embedding[word] = word_embedding
                except ValueError as e:
                    # Some words have spaces in them, leading to an error in the above logic
                    # Skip these words
                    pass
        # Take arbitrary word
        emb_dim = word_to_embedding["a"].shape[0]
        weights_matrix = np.zeros((len(vocab), emb_dim))
        for word, idx in vocab.stoi.items():
            try:
                weights_matrix[idx] = word_to_embedding[word]
            except KeyError:
                # Out-of-vocabulary words get a random vector.
                weights_matrix[idx] = np.random.normal(scale=0.6, size=(emb_dim,))
        self.embedding.load_state_dict({'weight': torch.Tensor(weights_matrix)})
class Linear(torch.nn.Module):
    """Stack of fully connected layers; each layer is optionally followed by
    an activation ('relu' or 'tanh') and dropout.

    hidden_dims may be a list (one entry per layer) or a single int when
    num_layers == 1.

    :raises ValueError: if hidden_dims is neither a list nor an int paired
        with num_layers == 1.
    """

    def __init__(self, dropout, input_dim, hidden_dims, num_layers=None, activation=None):
        super(Linear, self).__init__()
        if num_layers:
            assert isinstance(num_layers, int)
            if num_layers > 1:
                assert len(hidden_dims) == num_layers
            elif num_layers == 1:
                assert isinstance(hidden_dims, int)
        self.layers = []
        if isinstance(hidden_dims, list):
            prev_dim = input_dim
            for hidden_dim in hidden_dims:
                self.layers.append(nn.Linear(in_features=prev_dim, out_features=hidden_dim))
                # Bug fix: advance prev_dim so each layer consumes the
                # previous layer's output size. The original left prev_dim
                # at input_dim, which made any multi-layer stack with
                # differing sizes fail with a shape mismatch in forward().
                prev_dim = hidden_dim
        elif isinstance(hidden_dims, int) and num_layers == 1:
            self.layers.append(nn.Linear(in_features=input_dim, out_features=hidden_dims))
        else:
            raise ValueError
        self.layers = nn.ModuleList(self.layers)
        self.dropout = nn.Dropout(dropout)
        if activation == "relu":
            self.activation = torch.relu
        elif activation == "tanh":
            self.activation = torch.tanh
        else:
            self.activation = None

    def forward(self, x):
        # Apply each layer, then (optionally) activation, then dropout.
        for linear in self.layers:
            x = linear(x)
            if self.activation:
                x = self.activation(x)
            x = self.dropout(x)
        return x
class SeqAttnMat(torch.nn.Module):
    """
    Given sequences X and Y, calculate the attention matrix.
    """
    def __init__(self, projector_args: dict = None,
                 identity: bool = True) -> None:
        super(SeqAttnMat, self).__init__()
        # identity=True compares raw vectors; otherwise both sequences are
        # first passed through a shared Linear projection.
        if not identity:
            assert projector_args is not None
            self.linear = Linear(**projector_args)
        else:
            self.linear = None

    def forward(self, x, y, y_mask):
        """
        Args:
            x: batch * len1 * hdim
            y: batch * len2 * hdim
            y_mask: batch * len2
        Output:
            scores: batch * len1 * len2 (raw dot-product scores)
            alpha: batch * len1 * len2 (scores softmax-normalized over len2,
                   with padded y positions masked out)
        """
        # Project vectors
        if self.linear:
            x_proj = self.linear(x.contiguous().view(-1, x.size(2))).view(x.size())
            y_proj = self.linear(y.contiguous().view(-1, y.size(2))).view(y.size())
        else:
            x_proj = x
            y_proj = y

        # Compute scores
        scores = x_proj.bmm(y_proj.transpose(2, 1))  # batch * len1 * len2

        # Mask padding
        y_mask = y_mask.unsqueeze(1).expand(scores.size())  # b * l1 * l2

        # masked_softmax is AllenNLP's softmax that ignores masked positions.
        alpha = masked_softmax(scores, y_mask, dim=-1).view(-1, x.size(1), y.size(1))

        return scores, alpha
class GatedEncoding(torch.nn.Module):
    """
    Gating over a sequence:

    * o_i = sigmoid(Wx_i) * x_i for x_i in X.
    """
    def __init__(self, gate_args: dict):
        super(GatedEncoding, self).__init__()
        self.linear = Linear(**gate_args)

    def forward(self, x):
        """
        Args:
            x: batch * len * hdim
        Output:
            gated_x: batch * len * hdim
        """
        # Compute the gate on the flattened sequence, restore the shape,
        # squash to (0, 1) and apply element-wise.
        flattened = x.view(-1, x.size(2))
        gate = torch.sigmoid(self.linear(flattened).view(x.size()))
        return gate * x
class GatedMultifactorSelfAttnEnc(torch.nn.Module):
    """
    Gated multi-factor self attentive encoding over a sequence:
    computes num_factor self-attention matrices, pools them, applies the
    pooled attention, concatenates with the input and gates the result.
    Output feature size is twice the input's (input || attended).
    """
    def __init__(self, projector_args: dict,
                 gate_args: dict,
                 num_factor: int = 4,
                 attn_pooling: str = 'max'):
        super(GatedMultifactorSelfAttnEnc, self).__init__()
        self.num_factor = num_factor
        # num_factor == 0 falls back to plain (unprojected) self-attention.
        if self.num_factor > 0:
            self.linear = Linear(**projector_args)
        else:
            self.linear = None
        self.linear_gate = Linear(**gate_args)
        self.attn_pooling = attn_pooling

    def forward(self, x, x_mask):
        """
        Args:
            x: batch * len * hdim
            x_mask: batch * len
        Output:
            gated_multi_attentive_enc: batch * len * 2hdim
        """
        x = x.contiguous()
        if self.linear is not None:
            self_attn_multi = []
            # Projector emits num_factor views of each position; one
            # attention matrix is built per factor.
            y_multi = self.linear(x.view(-1, x.size(2)))
            y_multi = y_multi.view(x.size(0), x.size(1), x.size(2), self.num_factor)
            for fac in range(self.num_factor):
                y = y_multi.narrow(3, fac, 1).squeeze(-1)
                attn_fac = y.bmm(y.transpose(2, 1))
                attn_fac = attn_fac.unsqueeze(-1)
                self_attn_multi.append(attn_fac)
            self_attn_multi = torch.cat(self_attn_multi, -1)  # batch * len * len * num_factor

            # Pool the factor axis down to a single attention matrix
            # ('max' default; 'min' or mean otherwise).
            if self.attn_pooling == 'max':
                self_attn, _ = torch.max(self_attn_multi, 3)  # batch * len * len
            elif self.attn_pooling == 'min':
                self_attn, _ = torch.min(self_attn_multi, 3)
            else:
                self_attn = torch.mean(self_attn_multi, 3)
        else:
            self_attn = x.bmm(x.transpose(2, 1))  # batch * len * len

        # Pairwise validity mask from the padding mask...
        mask = x_mask.reshape(x_mask.size(0), x_mask.size(1), 1) \
               * x_mask.reshape(x_mask.size(0), 1, x_mask.size(1))  # batch * len * len
        self_mask = torch.eye(x_mask.size(1), x_mask.size(1), device=x_mask.device)
        self_mask = self_mask.reshape(1, x_mask.size(1), x_mask.size(1))
        # ...with the diagonal zeroed so a position never attends to itself.
        mask = mask * (1 - self_mask.long())

        # Normalize with softmax
        alpha = masked_softmax(self_attn, mask, dim=-1)  # batch * len * len

        # multifactor attentive enc
        multi_attn_enc = alpha.bmm(x)  # batch * len * hdim

        # merge with original x
        gate_input = [x, multi_attn_enc]
        joint_ctx_input = torch.cat(gate_input, 2)

        # gating
        gate_joint_ctx_self_match = self.linear_gate(joint_ctx_input.view(-1, joint_ctx_input.size(2))).view(
            joint_ctx_input.size())
        gate_joint_ctx_self_match = torch.sigmoid(gate_joint_ctx_self_match)

        gated_multi_attentive_enc = torch.mul(gate_joint_ctx_self_match, joint_ctx_input)

        return gated_multi_attentive_enc
| 36.396947 | 118 | 0.586514 |
c4320e6ecae62d0c63235d40353267caece1fc85 | 13,676 | py | Python | protocol/udp_chat_server.py | AntoineHus/C2W | 8313ef9a8407ff6fe68cc410d6fc656966751485 | [
"Apache-2.0"
] | null | null | null | protocol/udp_chat_server.py | AntoineHus/C2W | 8313ef9a8407ff6fe68cc410d6fc656966751485 | [
"Apache-2.0"
] | null | null | null | protocol/udp_chat_server.py | AntoineHus/C2W | 8313ef9a8407ff6fe68cc410d6fc656966751485 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from twisted.internet.protocol import DatagramProtocol
from c2w.main.lossy_transport import LossyTransport
from c2w.main.constants import ROOM_IDS
import logging
import ctypes
import struct
import random
from twisted.internet import reactor
from c2w.main.movie import *
from c2w.main.user import *
from c2w.main.constants import *
import core
#version 53
# Module-level logger used by this protocol implementation.
logging.basicConfig()
moduleLogger = logging.getLogger('c2w.protocol.udp_chat_server_protocol')
class c2wUdpChatServerProtocol(DatagramProtocol):
    def __init__(self, serverProxy, lossPr):
        """
        :param serverProxy: The serverProxy, which the protocol must use
            to interact with the user and movie store (i.e., the list of users
            and movies) in the server.
        :param lossPr: The packet loss probability for outgoing packets.  Do
            not modify this value!

        Class implementing the UDP version of the client protocol.

        .. note::
            You must write the implementation of this class.

        Each instance must have at least the following attribute:

        .. attribute:: serverProxy

            The serverProxy, which the protocol must use
            to interact with the user and movie store in the server.

        .. attribute:: lossPr

            The packet loss probability for outgoing packets.  Do
            not modify this value!  (It is used by startProtocol.)

        .. note::
            You must add attributes and methods to this class in order
            to have a working and complete implementation of the c2w
            protocol.
        """
        self.serverProxy = serverProxy
        self.lossPr = lossPr
        # Per-client bookkeeping: tuples of
        # (username, next sequence number, (host, port)) — see datagramReceived.
        self.ListOfNames_sqNum_hp = []
        self.ack = 0
        self.seq_num = 0
        # Handshake state per client: tuples of
        # ((host, port), login-response acked, user id, user-list acked,
        #  movie-list acked) — flags flipped as type-15 ACKs arrive.
        self.listOfloginResponseReceived=[]
        # Handle of the pending reactor.callLater retransmission, if any.
        self.idcall = None
    def startProtocol(self):
        """
        DO NOT MODIFY THE FIRST TWO LINES OF THIS METHOD!!

        If in doubt, do not add anything to this method.  Just ignore it.
        It is used to randomly drop outgoing packets if the -l
        command line option is used.
        """
        self.transport = LossyTransport(self.transport, self.lossPr)
        DatagramProtocol.transport = self.transport
    def datagramReceived(self, datagram, (host, port)):
        """
        :param string datagram: the payload of the UDP packet.
        :param host: the IP address of the source.
        :param port: the source port.

        Called **by Twisted** when the server has received a UDP
        packet.  Dispatches on the message type parsed from the header.
        """
        # Extract the message type and header fields into a list
        message_client = core.parseReceivedData(datagram)
        TYPE=message_client[0]
        data=message_client[6]
        # TYPE 1: login request carrying the user name.
        if (TYPE == 1):
            username=message_client[6]
            if not(self.serverProxy.userExists(username)):
                User_id=self.serverProxy.addUser(username,ROOM_IDS.MAIN_ROOM,None,None)
                # Refresh the user list of every already-connected client.
                for a in self.ListOfNames_sqNum_hp :
                    self.sendUserMessage(a[2])
                a=(username,0,(host,port))
                self.ListOfNames_sqNum_hp.append(a)
                self.listOfloginResponseReceived.append(((host,port),False,User_id,False,False))
                self.sendLoginResponse(message_client[4],User_id,(host,port))
            else:
                # Name already taken: resend the login response to the same
                # endpoint, or report error code 1 to a different one.
                v=0
                while(not(self.listOfloginResponseReceived[v][0]==(host,port))):
                    v+=1
                if(self.listOfloginResponseReceived[v][1]==False):
                    self.sendLoginResponse(message_client[4],self.listOfloginResponseReceived[v][2],(host,port))
                else:
                    self.sendAck(message_client[5],(host, port))
                    error = 1
                    self.sendError(message_client[4],error,(host,port))
        # TYPE 6: user joins a movie room (6-byte movie id in the payload).
        if (TYPE==6):
            user_id = struct.unpack_from('!H',data[0:])[0]
            movie_id1 = struct.unpack_from('!I',data[2:])[0]
            movie_id2 = struct.unpack_from('!H',data[6:])[0]
            movie_id = (movie_id1 << 2*8) + (movie_id2 << 0)
            UserName = self.serverProxy.getUserById(user_id).userName
            MovieName = self.serverProxy.getMovieById(movie_id).movieTitle
            self.serverProxy.updateUserChatroom(UserName,MovieName)
            # Broadcast the updated presence to main-room and same-movie users.
            for a in self.ListOfNames_sqNum_hp :
                user = self.serverProxy.getUserByName(a[0])
                if (user.userChatRoom == ROOM_IDS.MAIN_ROOM):
                    self.sendUserMessage(a[2])
                elif(user.userChatRoom == MovieName):
                    self.sendUserMessageForMovieRoom(MovieName,a[2])
            self.serverProxy.startStreamingMovie(MovieName)
            self.sendAck(message_client[5],(host, port))
        # TYPE 7: user leaves a movie room and returns to the main room.
        if (TYPE==7):
            user_id=struct.unpack_from('!H',data[0:])[0]
            UserName = self.serverProxy.getUserById(user_id).userName
            self.serverProxy.updateUserChatroom(UserName,ROOM_IDS.MAIN_ROOM)
            for a in self.ListOfNames_sqNum_hp :
                user = self.serverProxy.getUserByName(a[0])
                if (user.userChatRoom == ROOM_IDS.MAIN_ROOM):
                    self.sendUserMessage(a[2])
                else:
                    self.sendUserMessageForMovieRoom(user.userChatRoom,a[2])
            self.sendAck(message_client[5],(host, port))
        # TYPE 8: logout; remove the user and notify remaining clients.
        if (TYPE == 8) :
            userId = struct.unpack_from('!H',data)[0]
            userName = self.serverProxy.getUserById(userId).userName
            self.serverProxy.removeUser(userName)
            for p in self.ListOfNames_sqNum_hp:
                if (p[0] == userName):
                    self.ListOfNames_sqNum_hp.remove(p)
            for a in self.ListOfNames_sqNum_hp :
                if not(userName==a[0]):
                    self.sendUserMessage(a[2])
            self.sendAck(message_client[5],(host, port))
        # TYPE 15: client ACK; message_client[4] tells which server message
        # is being acknowledged (0: login response, 1: movie list,
        # 2: user list), driving the handshake state machine.
        if(TYPE==15):
            if(message_client[4]==0):
                v = 0
                while(not(self.listOfloginResponseReceived[v][0]==(host,port))):
                    v+=1
                if(self.listOfloginResponseReceived[v][1]==False):
                    a=((host,port),True,self.listOfloginResponseReceived[v][2],False,False)
                    self.listOfloginResponseReceived.remove(((host,port),False,self.listOfloginResponseReceived[v][2],False,False))
                    self.listOfloginResponseReceived.append(a)
                    self.sendMovieMessage((host, port))
                else:
                    self.sendUserMessage((host, port))
            if(message_client[4]==1):
                # Movie list acked: stop retransmitting and send the user list.
                self.idcall.cancel()
                v = 0
                while(not(self.listOfloginResponseReceived[v][0]==(host,port))):
                    v+=1
                a=((host,port),True,self.listOfloginResponseReceived[v][2],True,False)
                self.listOfloginResponseReceived.remove(((host,port),True,self.listOfloginResponseReceived[v][2],False,False))
                self.listOfloginResponseReceived.append(a)
                self.sendUserMessage((host, port))
            if (message_client[4]==2):
                v=0
                while(not(self.listOfloginResponseReceived[v][0]==(host,port))):
                    v+=1
                a=((host,port),True,self.listOfloginResponseReceived[v][2],True,True)
                self.listOfloginResponseReceived.remove(((host,port),True,self.listOfloginResponseReceived[v][2],True,False))
                self.listOfloginResponseReceived.append(a)
                self.sendMoviesIdlist((host, port))
                self.idcall.cancel()
                return
        # TYPE 4: public chat message; forward to every user in the same room.
        if(TYPE == 4):
            user_id=struct.unpack_from('!H',data[0:])[0]
            message = data[8:]
            user=self.serverProxy.getUserById(user_id)
            CurrentUserName = user.userName
            self.sendAck(message_client[5],(host, port))
            for userName_seq in self.ListOfNames_sqNum_hp:
                if not( CurrentUserName == userName_seq[0]):
                    dest_user=self.serverProxy.getUserByName(userName_seq[0])
                    if (user.userChatRoom == dest_user.userChatRoom):
                        (h,p) = self.getHost_Port(userName_seq[0])
                        self.send_Public_Message(userName_seq[1],user_id,message,(h,p))
        # TYPE 5: private chat message; forward to the destination user only.
        if(TYPE == 5):
            ScrId=struct.unpack_from('!H',data[0:])[0]
            DestId=struct.unpack_from('!H',data[2:])[0]
            UserNameDest = self.serverProxy.getUserById(DestId).userName
            (h,p) = self.getHost_Port(UserNameDest)
            message= data[4:]
            self.sendAck(message_client[5],(host, port))
            self.send_Private_Message(self.seq_num,ScrId,DestId,message,(h,p))
    def sendError(self,ACK_NUM,error,(host,port)):
        """Send an error packet (TYPE 0, A=1) carrying the 1-byte *error*
        code to (host, port)."""
        TYPE = '0000'
        A='1'
        R='0'
        buffer_length= 7
        buf= ctypes.create_string_buffer(buffer_length)
        # Header word: 4-bit type, A/R flags, 13-bit ack number, 13-bit seq.
        I= (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(ACK_NUM),2) << 13) + (int(bin(0),2) << 0)
        struct.pack_into('>IHB',buf,0,I,buffer_length,error)
        self.transport.write(buf.raw,(host,port))
    def sendLoginResponse(self,ACK_NUM,UserId,(host,port)):
        """Send a login response (TYPE 14, A=1) carrying the assigned
        *UserId* to (host, port)."""
        TYPE = '1110'
        A='1'
        R='0'
        buffer_length= 8
        buf= ctypes.create_string_buffer(buffer_length)
        fourbyte= (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(ACK_NUM),2) << 13) + (int(bin(0),2) << 0)
        struct.pack_into('>IHH',buf,0,fourbyte,buffer_length,UserId)
        self.transport.write(buf.raw,(host,port))
    def sendAck(self,SEQ_NUM,(host,port)):
        """Acknowledge the client packet with sequence number *SEQ_NUM*
        (TYPE 15, A=1, no payload)."""
        SEQ_NUM=bin(SEQ_NUM)
        TYPE='1111'
        A='1'
        R='0'
        buffer_length= 6
        buf= ctypes.create_string_buffer(buffer_length)
        fourbyte= (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(str(SEQ_NUM),2) << 13) + (int('0',2) << 0)
        struct.pack_into('>IH',buf,0,fourbyte,buffer_length)
        self.transport.write(buf.raw,(host,port))
    def sendUserMessage(self,(host,port)):
        """Send the full user list (TYPE 2) to (host, port); entries carry
        per-user (length|status, id, name).  Retransmits every 0.5 s until
        the client's user-list ACK flag is set."""
        TYPE = '0010'
        A='0'
        R='0'
        ROO='0'
        list_users=self.serverProxy.getUserList()
        Number = len(list_users)
        Name_Length = 0
        for user in list_users:
            Name_Length += len(user.userName)
        # Header (8 bytes) + 3 fixed bytes + name length per user.
        buffer_length = 8 + Number*3 + Name_Length
        buf=ctypes.create_string_buffer(buffer_length)
        fourbyte= (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(0),2) << 13) + (int(bin(2),2) << 0)
        ROO_Number = (int(ROO,2) << 14 ) + (int(bin(Number),2) << 0)
        struct.pack_into('>IHH',buf,0,fourbyte,buffer_length,ROO_Number)
        offset = 8
        for user in list_users:
            # Status bit: 1 when the user is in the main room.
            current_status='0'
            if user.userChatRoom==ROOM_IDS.MAIN_ROOM:
                current_status='1'
            lenUsername_status= (int(bin(len(user.userName)),2) <<1) + (int(current_status,2) << 0 )
            struct.pack_into('>BH'+str(len(user.userName))+'s',buf,offset,lenUsername_status,user.userId,user.userName)
            offset+=3+len(user.userName)
        self.transport.write(buf.raw,(host, port))
        # Schedule a retransmission while the list remains unacknowledged.
        v=0
        while(not(self.listOfloginResponseReceived[v][0]==(host,port))):
            v+=1
        if(self.listOfloginResponseReceived[v][4]==False):
            self.idcall = reactor.callLater(0.5,self.sendUserMessage,(host,port))
    def sendUserMessageForMovieRoom(self,movieName,(host,port)):
        """Send the user list restricted to *movieName*'s room (TYPE 2) to
        (host, port).  The buffer is sized for the full user list, so unused
        tail bytes remain zero-filled."""
        TYPE = '0010'
        A='0'
        R='0'
        ROO='0'
        list_users=self.serverProxy.getUserList()
        Number = len(list_users)
        Name_Length = 0
        for user in list_users:
            Name_Length += len(user.userName)
        buffer_length = 8 + Number*3 + Name_Length
        buf=ctypes.create_string_buffer(buffer_length)
        fourbyte= (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(0),2) << 13) + (int(bin(2),2) << 0)
        ROO_Number = (int(ROO,2) << 14 ) + (int(bin(Number),2) << 0)
        struct.pack_into('>IHH',buf,0,fourbyte,buffer_length,ROO_Number)
        offset = 8
        for user in list_users:
            # Only users currently in this movie room (and with a real name).
            if user.userChatRoom==movieName and not(user.userName==''):
                current_status='0'
                lenUsername_status= (int(bin(len(user.userName)),2) <<1) + (int(current_status,2) << 0 )
                struct.pack_into('>BH'+str(len(user.userName))+'s',buf,offset,lenUsername_status,user.userId,user.userName)
                offset+=3+len(user.userName)
        self.transport.write(buf.raw,(host, port))
# NOTE: Python 2 syntax (tuple parameter unpacking in the signature).
def sendMovieMessage(self, (host, port)):
    """Send the movie-list datagram to (host, port), retrying until flagged.

    Wire format (big endian): 4-byte header word, 2-byte total length,
    2-byte movie count, then per movie: 4 address bytes, 2-byte port,
    1-byte title length and the title bytes.
    """
    TYPE = '0011'  # message type, as a string of binary digits
    A = '0'
    R = '0'
    ACK_NUM = '0'
    listOfMovies = self.serverProxy.getMovieList()
    Number = len(listOfMovies)
    fourbyte = (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(ACK_NUM,2) << 13) + (int(bin(1),2) << 0)
    Name_Length = 0
    for movie in listOfMovies:
        Name_Length += len(movie.movieTitle)
    # 8 header bytes + 7 fixed bytes per movie + all title bytes.
    buffer_length = 8 + Number*7 + Name_Length
    buf = ctypes.create_string_buffer(buffer_length)
    struct.pack_into('>IHH', buf, 0, fourbyte, buffer_length, Number)
    offset = 8
    for movie in listOfMovies:
        # presumably core.tohex() yields the four address bytes — confirm.
        current_ip_adress = core.tohex(movie.movieIpAddress)
        struct.pack_into('>BBBBHB'+str(len(movie.movieTitle))+'s', buf, offset, current_ip_adress[0], current_ip_adress[1], current_ip_adress[2], current_ip_adress[3], movie.moviePort, len(movie.movieTitle), movie.movieTitle)
        offset += 7 + len(movie.movieTitle)
    self.transport.write(buf.raw, (host, port))
    # Locate this client's login-state entry; if the flag at index 3 is
    # still False, resend this message in 0.5 s.
    v = 0
    while(not(self.listOfloginResponseReceived[v][0] == (host,port))):
        v += 1
    if(self.listOfloginResponseReceived[v][3] == False):
        self.idcall = reactor.callLater(0.5, self.sendMovieMessage, (host,port))
# NOTE: Python 2 syntax (tuple parameter unpacking in the signature).
def sendMoviesIdlist(self, (host, port)):
    """Send the dot-separated list of movie ids to (host, port).

    Payload: 4-byte code (12) followed by an ASCII string such as
    "1.2.3." with a trailing dot after every id.
    """
    l = self.serverProxy.getMovieList()
    s = ""
    for movie in l:
        s += str(movie.movieId)
        s += "."
    buf = ctypes.create_string_buffer(len(s)+4)
    struct.pack_into('!I'+str(len(s))+'s', buf, 0, 12, s)
    self.transport.write(buf.raw, (host, port))
def getHost_Port(self, userName):
    """Return the (host, port) recorded for *userName*, or None when unknown."""
    matches = (entry[2] for entry in self.ListOfNames_sqNum_hp
               if entry[0] == userName)
    return next(matches, None)
# NOTE: Python 2 syntax (tuple parameter unpacking in the signature).
def send_Public_Message(self, SEQ_NUM, UserId, MessageText, (host,port)):
    """Send a public chat message datagram to (host, port).

    Layout (big endian): 4-byte header (TYPE/A/R/ACK/SEQ), 2-byte total
    length, 8-byte word carrying the sender id in its top two bytes,
    then the message text.
    """
    TYPE = '0100'
    A = '0'
    R = '0'
    ACK_NUM = 0
    # 4 header bytes + 2 length bytes + 8 id bytes = 14.
    buffer_length = 14 + len(MessageText)
    buf = ctypes.create_string_buffer(buffer_length)
    fourbyte = (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(ACK_NUM),2) << 13) + (int(bin(SEQ_NUM),2) << 0)
    Message = (UserId << 6*8) + (int(str(0),2) << 0)
    struct.pack_into('>IHQ'+str(len(MessageText))+'s', buf, 0, fourbyte, buffer_length, Message, MessageText)
    # Remember the type of the last message sent — presumably consulted
    # by the acknowledgement handling elsewhere; confirm.
    self.type_last_message = int(TYPE,2)
    self.transport.write(buf.raw, (host, port))
# NOTE: Python 2 syntax (tuple parameter unpacking in the signature).
def send_Private_Message(self, SEQ_NUM, ScrId, DestId, MessageText, (host,port)):
    """Send a private chat message datagram to (host, port).

    Layout (big endian): 4-byte header (TYPE/A/R/ACK/SEQ), 2-byte total
    length, 4-byte word packing the source id (high 2 bytes) and the
    destination id (low 2 bytes), then the message text.
    """
    TYPE = '0101'
    A = '0'
    R = '0'
    ACK_NUM = 0
    # 4 header bytes + 2 length bytes + 4 id bytes = 10.
    buffer_length = 10 + len(MessageText)
    buf = ctypes.create_string_buffer(buffer_length)
    fourbyte = (int(TYPE,2) << 28) + (int(A,2) << 27) + (int(R,2) << 26) + (int(bin(ACK_NUM),2) << 13) + (int(bin(SEQ_NUM),2) << 0)
    Message = (ScrId << 2*8) + (DestId << 0)
    struct.pack_into('>IHI'+str(len(MessageText))+'s', buf, 0, fourbyte, buffer_length, Message, MessageText)
    self.type_last_message = int(TYPE,2)
    self.transport.write(buf.raw, (host, port))
# NOTE: Python 2 syntax (tuple parameter unpacking in the signature).
def sendUpdate(self, (host,port)):
    """Send a one-byte update notification (TYPE 1010) to (host, port)."""
    TYPE = '1010'
    A = '0000'
    buffer_length = 1
    buf = ctypes.create_string_buffer(buffer_length)
    # Single byte: TYPE in the high nibble, A in the low nibble.
    byte = (int(TYPE,2) << 4) + (int(A,2) << 0)
    struct.pack_into('>B', buf, 0, byte)
    self.transport.write(buf.raw, (host,port))
| 37.264305 | 211 | 0.690333 |
8ef8360c1d179f0fa647ad68098261cbc9b6ad8b | 756 | py | Python | Q012_LCM_Variant.py | latika18/learning | a57c9aacc0157bf7c318f46c1e7c4971d1d55aea | [
"Unlicense"
] | null | null | null | Q012_LCM_Variant.py | latika18/learning | a57c9aacc0157bf7c318f46c1e7c4971d1d55aea | [
"Unlicense"
] | null | null | null | Q012_LCM_Variant.py | latika18/learning | a57c9aacc0157bf7c318f46c1e7c4971d1d55aea | [
"Unlicense"
] | null | null | null | #Find LCM (Lowest Common Multiple) of two numbers a, b
#Example Inputs >>> 9, 3 o/p 9
# >>> 10, 75 o/p 150
# >>> 190, 51 o/p 9690
#Using the LCM formula LCM = a*b / gcd(a,b)
def LCM(x, y):
    """Return the lowest common multiple of two integers.

    Uses LCM(a, b) = a*b // GCD(a, b).  Fixes two defects of the
    original: it returned the *string* "0" for a zero input
    (inconsistent with the int returned otherwise), and it used true
    division, which yields a float under Python 3.
    """
    if x == 0 or y == 0:
        return 0
    return (x * y) // GCD(x, y)
def GCD(a, b):
    """Return the greatest common divisor of two integers.

    Uses the Euclidean algorithm, O(log min(a, b)), instead of the
    original O(min(a, b)) trial division.  For positive integers the
    result is identical; GCD(0, n) now correctly yields n (the original
    returned 1), which does not affect LCM since it guards zero inputs.
    """
    while b:
        a, b = b, a % b
    return a
# Ad-hoc manual checks (Python 2 print statements).
print LCM(350,1)
print LCM(920,350)
| 28 | 129 | 0.527778 |
8fa1cbc28e6a7bb986475ce16dda5a3a2fe2276e | 6,389 | py | Python | vjezba12/Vjezba12-Vedran Milkovic.py | Miillky/uvod_u_programiranje | 209611e38c8fe84c727649df4b868a4278eb77c3 | [
"MIT"
] | null | null | null | vjezba12/Vjezba12-Vedran Milkovic.py | Miillky/uvod_u_programiranje | 209611e38c8fe84c727649df4b868a4278eb77c3 | [
"MIT"
] | null | null | null | vjezba12/Vjezba12-Vedran Milkovic.py | Miillky/uvod_u_programiranje | 209611e38c8fe84c727649df4b868a4278eb77c3 | [
"MIT"
] | null | null | null | from tkinter import *
def prikaziObavijest(v):
    """Show a thank-you message box containing the entered name.

    Bug fix: ``messagebox`` is a submodule that ``from tkinter import *``
    does not provide, so the original raised NameError at runtime.
    """
    from tkinter import messagebox
    messagebox.showinfo("ZAHVALA",str(v)+" hvala na suradnji!")
# --- Demo 1: simple pack()-based form (label, entry, button) -------------
glavni = Tk()
oznaka = Label(glavni, text="Unesi ime:")
oznaka.pack()
unos = Entry(glavni)
unos.pack()
tipka = Button(glavni, text="Poruka", command=lambda:prikaziObavijest(unos.get()))
tipka.pack()
# --- Demo 2: nested Canvas widgets laid out with pack() ------------------
glavni = Tk()
c1 = Canvas(glavni, bg="blue", height=200, width=400)
c1.pack(expand=1,fill=BOTH)
c2 = Canvas(glavni, bg="red", height=200, width=400)
c2.pack(expand=1,fill=BOTH)
c11 = Canvas(c1, bg="brown", height=200, width=200)
c11.pack(side=LEFT,expand=1,fill=BOTH )
c12 = Canvas(c1, bg="green", height=200, width=200)
c12.pack(side=LEFT,expand=1,fill=BOTH )
c4 = Canvas(c12, bg="yellow", height=100, width=200)
c4.pack(expand=1,fill=BOTH)
c5 = Canvas(c12, bg="pink", height=100, width=200)
c5.pack(expand=1,fill=BOTH)
c21 = Canvas(c2, bg="green", height=200, width=200)
c21.pack(side=LEFT,expand=1,fill=BOTH )
c22 = Canvas(c2, bg="brown", height=200, width=200)
c22.pack(side=LEFT,expand=1,fill=BOTH )
c6 = Canvas(c21, bg="teal", height=100, width=200)
c6.pack(expand=1,fill=BOTH )
c7 = Canvas(c21, bg="teal", height=100, width=200)
c7.pack(expand=1,fill=BOTH )
c8 = Canvas(c6, bg="orange", height=100, width=100)
c8.pack(side=LEFT,expand=1,fill=BOTH )
c9 = Canvas(c6, bg="green", height=100, width=100)
c9.pack(side=LEFT,expand=1,fill=BOTH )
# --- Demo 3: the same mosaic built with the grid() geometry manager ------
glavni = Tk()
c1 = Canvas(glavni, bg="brown", height=200, width=200)
c1.grid(row =1,column=1,rowspan=2,columnspan=2)
c2 = Canvas(glavni, bg="yellow", height=100, width=200)
c2.grid(row =1,column=3,columnspan=2)
c3 = Canvas(glavni, bg="pink", height=100, width=200)
c3.grid(row =2,column=3,columnspan=2)
c4 = Canvas(glavni, bg="orange", height=100, width=100)
c4.grid(row =3,column=1)
c5 = Canvas(glavni, bg="green", height=100, width=100)
c5.grid(row =3,column=2)
c6 = Canvas(glavni, bg="teal", height=100, width=200)
c6.grid(row =4,column=1,columnspan=2)
c7 = Canvas(glavni, bg="brown", height=200, width=200)
c7.grid(row =3,column=3,rowspan=2,columnspan=2)
# --- Demo 4: the same mosaic with absolute place() coordinates -----------
glavni = Tk()
glavni.geometry("400x400")
c1 = Canvas(glavni, bg="brown")
c1.place(height=200, width=200)
c2 = Canvas(glavni, bg="yellow")
c2.place(height=100, width=200,x=200)
c3 = Canvas(glavni, bg="pink")
c3.place(height=100, width=200,x=200,y=100)
c4 = Canvas(glavni, bg="orange")
c4.place(height=100, width=100,y=200)
c5 = Canvas(glavni, bg="green")
c5.place(height=100, width=100,x=100,y=200)
c6 = Canvas(glavni, bg="teal")
c6.place(height=100, width=200,y=300)
c7 = Canvas(glavni, bg="brown")
c7.place(height=200, width=200,x=200,y=200)
def pohrani(ime, pre, god, dat):
    """Append one "name surname age" record to the text file ``dat``.txt.

    Uses a context manager so the file handle is closed even when the
    write raises (the original left it open on error).
    """
    with open(dat + ".txt", "a") as datoteka:
        datoteka.write(str(ime) + " " + str(pre) + " " + str(god) + "\n")
# --- Form window: collect person data and store it via pohrani() ---------
glavni = Tk()
oznaka1 = Label(glavni, text="Ime osobe:")
oznaka1.pack()
unos1 = Entry(glavni, bd =1)
unos1.pack()
oznaka2 = Label(glavni, text="Prezime osobe:")
oznaka2.pack()
unos2 = Entry(glavni, bd =1)
unos2.pack()
oznaka3 = Label(glavni, text="Godine starosti(1-99):")
oznaka3.pack()
unos3 = Spinbox(glavni, from_=1,to=99)
unos3.pack()
# NOTE: unos4 is referenced in the lambda but created below; this works
# because the lambda is only evaluated when the button is clicked.
tipka = Button (glavni, text ="Pohrani podatke", command = lambda:pohrani(unos1.get(),unos2.get(),unos3.get(),unos4.get()))
tipka.pack()
oznaka4 = Label(glavni, text="Ime datoteke za pohranu:")
oznaka4.pack()
unos4 = Entry(glavni, bd =1)
unos4.pack()
def pohrani(ime, pre, god, dat):
    """Append one "name surname age" record to the text file ``dat``.txt.

    (Re-definition of the helper above, kept for the exercise script.)
    Uses a context manager so the file handle is closed even when the
    write raises (the original left it open on error).
    """
    with open(dat + ".txt", "a") as datoteka:
        datoteka.write(str(ime) + " " + str(pre) + " " + str(god) + "\n")
def citaj(dat):
    """Show a message box with the number of records in ``dat``.txt.

    Fixes two issues: ``messagebox`` is imported explicitly (it is not
    provided by ``from tkinter import *``, so the original raised
    NameError) and the file is read inside a ``with`` block so the
    handle is always closed.
    """
    from tkinter import messagebox
    with open(dat + ".txt", "r") as datoteka:
        lista = datoteka.readlines()
    messagebox.showinfo("INFORMACIJA","Datoteka sadrži {0} zapisa".format(len(lista)))
# --- Same store/read form, first packed then re-laid-out with place() ----
glavni = Tk()
oznaka1 = Label(glavni, text="Ime osobe:")
oznaka1.pack()
unos1 = Entry(glavni, bd =1)
unos1.pack()
oznaka2 = Label(glavni, text="Prezime osobe:")
oznaka2.pack()
unos2 = Entry(glavni, bd =1)
unos2.pack()
oznaka3 = Label(glavni, text="Godine starosti(1-99):")
oznaka3.pack()
unos3 = Spinbox(glavni, from_=1,to=99)
unos3.pack()
tipka = Button (glavni, text ="Pohrani podatke", command =lambda:pohrani(unos1.get(),unos2.get(),unos3.get(),unos4.get()))
tipka.pack()
oznaka4 = Label(glavni, text="Ime datoteke za pohranu:")
oznaka4.pack()
unos4 = Entry(glavni, bd =1)
unos4.pack()
tipka2 = Button (glavni, text ="Čitaj", command = lambda:citaj(unos5.get()))
tipka2.pack()
oznaka5 = Label(glavni, text="Ime datoteke za čitanje:")
oznaka5.pack()
unos5 = Entry(glavni, bd =1)
unos5.pack()
# Re-layout: four Canvas quadrants with absolutely-placed widgets.
glavni.geometry("600x300")
c1 = Canvas(glavni)
c1.place(height=150, width=300)
c2 = Canvas(glavni)
c2.place(height=150, width=300,x=300)
c3 = Canvas(glavni)
c3.place(height=150, width=300,y=150)
c4 = Canvas(glavni)
c4.place(height=150, width=300,x=300,y=150)
oznaka1 = Label(c1, text="Ime osobe:")
oznaka1.place(x=40,y=40)
unos1 = Entry(c1, bd =1)
unos1.place(x=140,y=40)
oznaka2 = Label(c1, text="Prezime osobe:")
oznaka2.place(x=40,y=100)
unos2 = Entry(c1, bd =1)
unos2.place(x=140,y=100)
oznaka3 = Label(c2, text="Godine starosti(1-99):")
oznaka3.place(x=20,y=70)
unos3 = Spinbox(c2, from_=1,to=99)
unos3.place(x=150,y=70)
tipka = Button (c3, text ="Pohrani podatke", command =lambda:pohrani(unos1.get(),unos2.get(),unos3.get(),unos4.get()))
tipka.place(x=100,y=40)
oznaka4 = Label(c3, text="Ime datoteke za pohranu:")
oznaka4.place(x=80,y=70)
unos4 = Entry(c3, bd =1)
unos4.place(x=90,y=90)
tipka2 = Button (c4, text ="Čitaj", command = lambda:citaj(unos5.get()))
tipka2.place(x=130,y=40)
oznaka5 = Label(c4, text="Ime datoteke za čitanje:")
oznaka5.place(x=86,y=70)
unos5 = Entry(c4, bd =1)
unos5.place(x=90,y=90)
# --- Final demo: relative place() coordinates (rel* options) -------------
glavni = Tk()
glavni.geometry("400x400")
c1 = Canvas(glavni, bg="blue")
c1.place(relheight=0.5, relwidth=0.5)
c2 = Canvas(glavni, bg="yellow")
c2.place(relheight=0.25, relwidth=0.5,relx=0.5)
c3 = Canvas(glavni, bg="pink")
c3.place(relheight=0.25, relwidth=0.5,relx=0.5,rely=0.25)
c4 = Canvas(glavni, bg="orange")
c4.place(relheight=0.25, relwidth=0.25,rely=0.5)
c5 = Canvas(glavni, bg="green")
c5.place(relheight=0.25, relwidth=0.25,relx=0.25,rely=0.5)
c6 = Canvas(glavni, bg="teal")
c6.place(relheight=0.25, relwidth=0.5,rely=0.75)
c7 = Canvas(glavni, bg="blue")
c7.place(relheight=0.5, relwidth=0.5,relx=0.5,rely=0.5)
glavni.mainloop() | 34.722826 | 127 | 0.674753 |
0adf6ca24287003cee5561020caa7ba36d192e85 | 1,665 | py | Python | renewable_energy_analysis/cleaning/meridian_countries.py | Batto1300/renewable-energy-analysis | 0825f024779a9469ac46175a80069f2e3ae0943a | [
"MIT"
] | 1 | 2018-11-25T14:46:21.000Z | 2018-11-25T14:46:21.000Z | renewable_energy_analysis/cleaning/meridian_countries.py | Batto1300/renewable-energy-analysis | 0825f024779a9469ac46175a80069f2e3ae0943a | [
"MIT"
] | 4 | 2018-10-10T14:48:54.000Z | 2018-11-20T12:10:30.000Z | renewable_energy_analysis/cleaning/meridian_countries.py | Batto1300/renewable-energy-analysis | 0825f024779a9469ac46175a80069f2e3ae0943a | [
"MIT"
] | 1 | 2018-12-18T06:38:08.000Z | 2018-12-18T06:38:08.000Z | """ This script cleans the original data set
containing the latitude distance.
Firstly, we filter the dataset for those
countries which are common to all datasets.
Secondly, just as a data manipulation exercise,
we convert the latitude figures from degrees to
kilometres (meridian distance).
The analysis does not change, however the units of
measure are more informative.
"""
import pandas as pd
import math
# import class with full paths to files (project-local module)
import file_names as fn
# original data (csv file) - countries latitude
ORIGINAL_LATITUDE = fn.OriginalPaths.LATITUDE
# countries common to all datasets
COUNTRIES = fn.CleanedPaths.COUNTRIES
# destination file for the transformed dataframe - meridian distance (km)
CLEANED_MERIDIAN = fn.CleanedPaths.MERIDIAN
# import only countries and their latitude from the LATITUDE file
df_latitude = pd.read_csv(ORIGINAL_LATITUDE, usecols=["country", "latitude"])
# import the list of countries common to all dataframes
df_countries = pd.read_csv(COUNTRIES)
# keep only the common countries (inner merge on the country name)
df_latitude = pd.merge(df_latitude, df_countries, on="country", how="inner")
# average earth radius (km)
earth_radius = 6371
# convert latitude (degrees) to meridian distance (km): arc = radius * angle[rad]
df_latitude["latitude"] = ((df_latitude["latitude"] * math.pi)/180) * earth_radius
# rename the column "latitude" to "meridian"
df_latitude.rename(columns = {"latitude":"meridian"}, inplace = True)
# set country as the index to avoid an extra column of row indices
df_latitude.set_index("country", inplace = True)
# save the new dataframe to CLEANED_MERIDIAN
df_latitude.to_csv(CLEANED_MERIDIAN)
| 40.609756 | 82 | 0.772372 |
d9bda51ac189f0284fe03ccff3526079dbc3137b | 4,428 | py | Python | kubernetes/client/models/v1_local_volume_source.py | sgwilliams-ebsco/python | 35e6406536c96d4769ff7e2a02bf0fdcb902a509 | [
"Apache-2.0"
] | 1 | 2021-06-10T23:44:11.000Z | 2021-06-10T23:44:11.000Z | kubernetes/client/models/v1_local_volume_source.py | sgwilliams-ebsco/python | 35e6406536c96d4769ff7e2a02bf0fdcb902a509 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1_local_volume_source.py | sgwilliams-ebsco/python | 35e6406536c96d4769ff7e2a02bf0fdcb902a509 | [
"Apache-2.0"
] | 1 | 2018-11-06T16:33:43.000Z | 2018-11-06T16:33:43.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.12.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1LocalVolumeSource(object):
    """Swagger-generated model for a Kubernetes local volume source.

    NOTE: originally produced by the swagger code generator; edit with care.
    """

    # attribute name -> swagger type
    swagger_types = {
        'fs_type': 'str',
        'path': 'str'
    }

    # attribute name -> JSON key used on the wire
    attribute_map = {
        'fs_type': 'fsType',
        'path': 'path'
    }

    def __init__(self, fs_type=None, path=None):
        """Build a V1LocalVolumeSource; ``path`` is required (see setter)."""
        self._fs_type = None
        self._path = None
        self.discriminator = None

        if fs_type is not None:
            self.fs_type = fs_type
        self.path = path

    @property
    def fs_type(self):
        """Filesystem type to mount, e.g. "ext4", "xfs", "ntfs".

        Applies only when the path is a block device; a filesystem is
        auto-selected when unspecified.

        :rtype: str
        """
        return self._fs_type

    @fs_type.setter
    def fs_type(self, fs_type):
        """Set the filesystem type to mount.

        :type: str
        """
        self._fs_type = fs_type

    @property
    def path(self):
        """Full path to the volume on the node (directory or block device).

        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Set the full path to the volume on the node.

        :raises ValueError: when ``path`` is None (the field is required).
        """
        if path is None:
            raise ValueError("Invalid value for `path`, must not be `None`")

        self._path = path

    def to_dict(self):
        """Return the model properties as a dict."""
        result = {}

        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[name] = value

        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True when both objects are equal."""
        if not isinstance(other, V1LocalVolumeSource):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True when both objects are not equal."""
        return not self == other
| 28.384615 | 245 | 0.567299 |
1bea130de285c629059c1e70fa855ff1b261e2e7 | 124 | py | Python | hackerrank/miniMaxSum.py | irvandindaprakoso/online-test-py | a7a6cd98ba3e0b74558ecb7e431eb2729077a38a | [
"W3C"
] | null | null | null | hackerrank/miniMaxSum.py | irvandindaprakoso/online-test-py | a7a6cd98ba3e0b74558ecb7e431eb2729077a38a | [
"W3C"
] | null | null | null | hackerrank/miniMaxSum.py | irvandindaprakoso/online-test-py | a7a6cd98ba3e0b74558ecb7e431eb2729077a38a | [
"W3C"
] | null | null | null | def miniMaxSum(arr):
arr.sort()
sum1 = sum(arr[0:len(arr)-1])
sum2 = sum(arr[1:len(arr)])
print(sum1, sum2)
| 20.666667 | 33 | 0.572581 |
f60747181629588d612a87e4abc99c0f764efc65 | 485 | py | Python | env/Lib/site-packages/plotly/validators/scattercarpet/_hovertemplate.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 11,750 | 2015-10-12T07:03:39.000Z | 2022-03-31T20:43:15.000Z | env/Lib/site-packages/plotly/validators/scattercarpet/_hovertemplate.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,951 | 2015-10-12T00:41:25.000Z | 2022-03-31T22:19:26.000Z | env/Lib/site-packages/plotly/validators/scattercarpet/_hovertemplate.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,623 | 2015-10-15T14:40:27.000Z | 2022-03-28T16:05:50.000Z | import _plotly_utils.basevalidators
class HovertemplateValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``scattercarpet.hovertemplate`` attribute."""

    def __init__(
        self, plotly_name="hovertemplate", parent_name="scattercarpet", **kwargs
    ):
        # Pull the schema defaults out of kwargs first so that values
        # supplied by the caller take precedence over the defaults.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "none")
        super(HovertemplateValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            **kwargs
        )
| 32.333333 | 80 | 0.659794 |
09d31ab502ec9eacfdea44661c80aaad11c1fff3 | 2,359 | py | Python | pybo/views/answer_views.py | PARKINHYO/pybo-docker | 04e4aba61ae7b40f03f865ecf3b9a59cbaca90da | [
"MIT"
] | 1 | 2021-09-25T23:40:53.000Z | 2021-09-25T23:40:53.000Z | pybo/views/answer_views.py | PARKINHYO/pybo-docker | 04e4aba61ae7b40f03f865ecf3b9a59cbaca90da | [
"MIT"
] | null | null | null | pybo/views/answer_views.py | PARKINHYO/pybo-docker | 04e4aba61ae7b40f03f865ecf3b9a59cbaca90da | [
"MIT"
] | null | null | null | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, redirect, resolve_url
from django.utils import timezone
from ..forms import AnswerForm
from ..models import Question, Answer
@login_required(login_url='common:login')
def answer_create(request, question_id):
    """
    pybo answer creation view.

    POST: validate the submitted AnswerForm, attach author, create_date
    and question, save, then redirect to the question detail page
    anchored on the new answer.  GET (or an invalid form): render the
    question detail page with the form.
    """
    question = get_object_or_404(Question, pk=question_id)
    if request.method == "POST":
        form = AnswerForm(request.POST)
        if form.is_valid():
            answer = form.save(commit=False)
            answer.author = request.user  # set the added `author` attribute
            answer.create_date = timezone.now()
            answer.question = question
            answer.save()
            return redirect('{}#answer_{}'.format(
                resolve_url('pybo:detail', question_id=question.id), answer.id))
    else:
        form = AnswerForm()
    context = {'question': question, 'form': form}
    return render(request, 'pybo/question_detail.html', context)
@login_required(login_url='common:login')
def answer_modify(request, answer_id):
    """
    pybo answer modification view.

    Only the author may edit; anyone else is redirected back to the
    question detail page with an error message.  POST saves the edited
    answer and stamps modify_date; GET shows the form bound to the
    existing answer.
    """
    answer = get_object_or_404(Answer, pk=answer_id)
    if request.user != answer.author:
        messages.error(request, 'You do not have permission to edit.')
        return redirect('pybo:detail', question_id=answer.question.id)
    if request.method == "POST":
        form = AnswerForm(request.POST, instance=answer)
        if form.is_valid():
            answer = form.save(commit=False)
            answer.author = request.user
            answer.modify_date = timezone.now()
            answer.save()
            return redirect('{}#answer_{}'.format(
                resolve_url('pybo:detail', question_id=answer.question.id), answer.id))
    else:
        form = AnswerForm(instance=answer)
    context = {'answer': answer, 'form': form}
    return render(request, 'pybo/answer_form.html', context)
@login_required(login_url='common:login')
def answer_delete(request, answer_id):
    """Delete an answer (pybo); only its author is allowed to do so."""
    answer = get_object_or_404(Answer, pk=answer_id)
    if request.user == answer.author:
        answer.delete()
    else:
        messages.error(request, 'You do not have permission to delete.')
    return redirect('pybo:detail', question_id=answer.question.id)
| 34.691176 | 87 | 0.65621 |
5855a6c49fdb5f15e80d23f53b524c32db1a704d | 2,273 | py | Python | scripts/process_TCGA_clinical.py | pdxgx/immunorx_response_pipeline | 5a3faa72370e96545b46caa790090d022eaa5ece | [
"MIT"
] | 4 | 2020-07-19T19:31:41.000Z | 2020-12-18T06:46:21.000Z | scripts/process_TCGA_clinical.py | pdxgx/immunorx_response_pipeline | 5a3faa72370e96545b46caa790090d022eaa5ece | [
"MIT"
] | null | null | null | scripts/process_TCGA_clinical.py | pdxgx/immunorx_response_pipeline | 5a3faa72370e96545b46caa790090d022eaa5ece | [
"MIT"
] | 1 | 2021-05-05T14:28:53.000Z | 2021-05-05T14:28:53.000Z | #!/usr/bin/env python
from __future__ import print_function
import argparse
import glob
import os
import pandas as pd
from collections import defaultdict
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--maf-dir', '-m', type=str,required=True, help='path to directory containing MAFs')
    parser.add_argument('--clinical', '-c', type=str,required=True, help='path to file containing clinical_data')
    parser.add_argument("--output-file", "-o", type=str, required=True, help="path to output file")
    args = parser.parse_args()
    # Tumour mutational burden: one count per MAF row, keyed by the
    # lower-cased sample barcode taken from the file name.
    tmb_dict = defaultdict(int)
    maf_files = glob.glob(os.path.join(os.path.abspath(args.maf_dir), '*.maf.txt'))
    for maf in maf_files:
        # Get barcode from the file name (strip directory and extension)
        barcode = maf.split('/')[-1].replace('.maf.txt', '').lower()
        df = pd.read_csv(maf, sep='\t', encoding='latin-1')
        # tally mutations (every MAF row counts as one mutation)
        for index, row in df.iterrows():
            tmb_dict[barcode] += 1
    # Parse the clinical table: one row per field, patients in columns.
    # NOTE(review): days_to_birth is collected but never used below.
    with open(os.path.abspath(args.clinical)) as f:
        for line in f:
            tokens = line.strip().split('\t')
            if tokens[0] == 'patient.days_to_birth':
                days_to_birth = tokens[1:]
            elif tokens[0] == 'patient.days_to_death':
                days_to_death = tokens[1:]
            elif tokens[0] == 'patient.days_to_last_followup':
                days_to_censor = tokens[1:]
            elif tokens[0] == 'patient.stage_event.pathologic_stage':
                cancer_stage = [x.replace('stage ', '').replace(' nos', '').upper() for x in tokens[1:]]
            elif tokens[0] == 'patient.samples.sample.bcr_sample_barcode':
                barcodes = [x[:-1] for x in tokens[1:]]
    # Write the output table; only samples with at least one mutation and
    # a usable survival/censoring time produce a row.
    with open(os.path.abspath(args.output_file), 'w') as o:
        header = ['Patient', 'Stage', 'TMB', 'Overall_survival', 'Days_to_last_followup', 'OS_event', 'Censored']
        print('\t'.join(header), file=o)
        for i in range(len(barcodes)):
            if tmb_dict[barcodes[i]] > 0:
                out_line = [barcodes[i].upper(), cancer_stage[i], str(tmb_dict[barcodes[i]])]
                if days_to_death[i] != 'NA':
                    # Death observed: overall survival known, not censored.
                    out_line.extend([days_to_death[i], 'NA', '1', '0'])
                    print('\t'.join(out_line), file=o)
                else:
                    if days_to_censor[i] != 'NA':
                        if int(days_to_censor[i]) > 0:
                            # Alive at last follow-up: censored observation.
                            out_line.extend([days_to_censor[i], days_to_censor[i], '0', '1'])
                            print('\t'.join(out_line), file=o)
| 39.189655 | 113 | 0.66564 |
6e824bfce6ff31500f330d14622190f045363c83 | 25,446 | py | Python | Jupyter/ChapitresTheoriques/ReguLabFct.py | AlbanVl/ReguCoursGramme | 292a063ac9a4bb23631e53bdb6fa05f5953308f8 | [
"MIT"
] | null | null | null | Jupyter/ChapitresTheoriques/ReguLabFct.py | AlbanVl/ReguCoursGramme | 292a063ac9a4bb23631e53bdb6fa05f5953308f8 | [
"MIT"
] | null | null | null | Jupyter/ChapitresTheoriques/ReguLabFct.py | AlbanVl/ReguCoursGramme | 292a063ac9a4bb23631e53bdb6fa05f5953308f8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue May 11 10:23:05 2021
Fonctions utiles pour utiliser python en régu
@author: Alban Van Laethem
"""
from control.matlab import *
#from control.freqplot import default_frequency_range
import control.nichols as cn
import numpy as np
import matplotlib.pyplot as plt
import scipy as sp
import math
from matplotlib.offsetbox import AnchoredText # To print text inside a plot
#%% Object to store all the important informations provided by the step
class info():
    """
    Container for the characteristic values of a step response.

    All attributes default to None and are filled in by step_info().
    """
    RiseTime = None      # rise time between the two thresholds [s]
    SettlingTime = None  # time to stay within the settling band [s]
    SettlingMin = None
    SettlingMax = None
    Overshoot = None     # overshoot, in % of the final value
    Undershoot = None
    Peak = None          # peak amplitude
    PeakTime = None      # time at which the peak occurs [s]
    DCGain = None        # steady-state (DC) gain
    # NOTE: the original listed `RiseTime = None` twice; the duplicate
    # assignment was removed.
#%% Function stepWithInfo
def stepWithInfo(sys, info, T=None, SettlingTimeThreshold=0.05,
                 RiseTimeThresholdMin=.10, RiseTimeThresholdMax=.90,
                 resolution = 10000, NameOfFigure = "", sysName = '',
                 linestyle='-'):
    """
    Plot the step response of *sys*, annotate its characteristic points
    and store them in *info*.

    WARNING: Overshoot is in %!

    Parameters
    ----------
    sys: Linear Time Invariant (LTI)
        System analysed.
    info: info
        Object in which to store all the informations of the step response
    T: 1D array, optional
        Time vector.
    SettlingTimeThreshold: float, optional
        Threshold of the settling time.
    RiseTimeThresholdMin: float, optional
        Lower rise time threshold.
    RiseTimeThresholdMax: float, optional
        Upper rise time threshold.
    resolution: long, optional
        Number of points calculated to trace the step response.
    NameOfFigure: String, optional
        Name of the figure in which plot the step response.
    sysName: String, optional
        Name of the system to plot.
    linestyle: '-.' , '--' , ':' , '-' , optional
        The line style used to plot the step response (default is '-').

    Returns
    -------
    None
    """
    # Draw the step response, then overlay the characteristic points.
    response, times = step_(sys, T, resolution, NameOfFigure, sysName,
                            linestyle=linestyle)
    step_info(times, response, info, SettlingTimeThreshold,
              RiseTimeThresholdMin, RiseTimeThresholdMax)
#%% Fonction pour tracer les résultats du step
def step_(sys, T=None, resolution = 10000, NameOfFigure = "",
          sysName = '', linestyle='-'):
    """
    Plot the step response of *sys* and return the trace.

    Parameters
    ----------
    sys: Linear Time Invariant (LTI)
        System analysed.
    T: 1D array
        Time vector.
    resolution: long
        Number of points calculated to trace the step response.
    NameOfFigure: String
        Name of the figure in which plot the step response.
    sysName: String
        Name of the system to plot (used as the legend label).
    linestyle: '-.' , '--' , ':' , '-' , optional
        The line style used to plot the step response (default is '-').

    Returns
    -------
    [yout, t]: list
        Amplitudes and time points of the computed step response.
    """
    # Reuse the named figure when one is given, otherwise open a new one.
    if NameOfFigure:
        plt.figure(NameOfFigure)
    else:
        plt.figure()
    yout, t = step(sys, T)
    if T is None:
        # Recompute on the same span with `resolution` points to refine
        # the default time grid.
        yout, t = step(sys, np.linspace(t[0], t[-1], resolution))
    plt.plot(t, yout, label=sysName, linestyle=linestyle)
    plt.title("Step Response")
    plt.ylabel("Amplitude")
    plt.xlabel("Time (seconds)")
    if sysName:
        plt.legend()
    return [yout, t]
#%% Fonction step_info
def step_info(t, yout, info, SettlingTimeThreshold=0.05,
              RiseTimeThresholdMin=.10, RiseTimeThresholdMax=.90):
    """
    Trace the interesting points of a given step plot.

    Parameters
    ----------
    t: 1D array
        Time vector.
    yout: 1D array
        Response of the system.
    info: info
        Object in which to store all the informations of the step response
    SettlingTimeThreshold: float, optional
        Threshold of the settling time (default is 0.05).
    RiseTimeThresholdMin: float, optional
        Lower rise time threshold (default is 0.10).
    RiseTimeThresholdMax: float, optional
        Upper rise time threshold (default is 0.90).

    Returns
    -------
    None
    """
    # Overshoot: take the maximum value returned by step() and divide it
    # by the final value.
    osIndice = np.where(yout == np.amax(yout)) # returns a tuple of arrays
    osIndice = osIndice[-1][-1] # keep the last index matching the condition
    plt.plot(t[osIndice], yout[osIndice],'ko')
    plt.plot([t[osIndice], t[osIndice]], [0, yout[osIndice]], 'k-.', linewidth=0.5) # vertical marker line
    plt.plot([t[0], t[osIndice]], [yout[osIndice], yout[osIndice]], 'k-.', linewidth=0.5) # horizontal marker line
    info.Peak = yout.max()
    info.Overshoot = (yout.max()/yout[-1]-1)*100  # in % of the final value
    info.PeakTime = float(t[osIndice])
    #print ("Overshoot:", info.Overshoot)
    # Rise time according to the thresholds (default: from 10% to 90% of
    # the final value).
    RiseTimeThresholdMinIndice = next(i for i in range(0,len(yout)-1) if yout[i]>yout[-1]*RiseTimeThresholdMin)
    RiseTimeThresholdMaxIndice = next(i for i in range(0,len(yout)-1) if yout[i]>yout[-1]*RiseTimeThresholdMax)
    RiseTimeThreshold = [None] * 2
    RiseTimeThreshold[0] = t[RiseTimeThresholdMinIndice]-t[0]
    RiseTimeThreshold[1] = t[RiseTimeThresholdMaxIndice]-t[0]
    info.RiseTime = RiseTimeThreshold[1] - RiseTimeThreshold[0]
    #print ("RiseTime:", info.RiseTime)
    plt.plot(t[RiseTimeThresholdMaxIndice], yout[RiseTimeThresholdMaxIndice],'ko')
    plt.plot([t[RiseTimeThresholdMinIndice], t[RiseTimeThresholdMinIndice]], [0, yout[RiseTimeThresholdMaxIndice]], 'k-.', linewidth=0.5) # left limit
    plt.plot([t[RiseTimeThresholdMaxIndice], t[RiseTimeThresholdMaxIndice]], [0, yout[RiseTimeThresholdMaxIndice]], 'k-.', linewidth=0.5) # right limit
    plt.plot([t[0], t[RiseTimeThresholdMaxIndice]], [yout[RiseTimeThresholdMaxIndice], yout[RiseTimeThresholdMaxIndice]], 'k-.', linewidth=0.5) # horizontal limit
    # Settling time at x% (5% by default): last index still outside the band.
    settlingTimeIndice = next(i for i in range(len(yout)-1, 1, -1) if abs(yout[i]/yout[-1])>(1+SettlingTimeThreshold) or abs(yout[i]/yout[-1])<(1-SettlingTimeThreshold))
    info.SettlingTime= t[settlingTimeIndice]-t[0]
    #print ("SettlingTime:", info.SettlingTime)
    plt.plot(t[settlingTimeIndice], yout[settlingTimeIndice],'ko')
    plt.plot([0, max(t)], [yout[-1]*(1+SettlingTimeThreshold), yout[-1]*(1+SettlingTimeThreshold)], 'k-.', linewidth=0.5) # upper band limit
    plt.plot([0, max(t)], [yout[-1]*(1-SettlingTimeThreshold), yout[-1]*(1-SettlingTimeThreshold)], 'k-.', linewidth=0.5) # lower band limit
    plt.plot([t[settlingTimeIndice], t[settlingTimeIndice]], [0, yout[settlingTimeIndice]], 'k-.', linewidth=0.5) # vertical marker line
    # Static (DC) gain: final value of the response.
    info.DCGain = yout[-1]
    plt.plot(t[-1], yout[-1],'ko')
    plt.plot([0, max(t)], [yout[-1], yout[-1]], 'k:', linewidth=0.5)
    #print ("DC gain:", info.DCGain)
#%% Get the gain and the frequency at a given phase
def getValues(sys, phaseValue, mag = None, phase = None, omega = None,
              printValue = True, NameOfFigure = ""):
    """
    Get the values of the gain and the frequency at a given phase of the
    system.

    Get the values of the gain and the frequency at a given phase from given
    arrays of gains, phases and frequencies.

    Parameters
    ----------
    sys: Linear Time Invariant (LTI)
        System analysed.
    phaseValue: float
        Phase at which we want to get the gain and frequency values.
    mag: 1D array, optional
        Array of gains (not in dB).
    phase: 1D array, optional
        Array of phases (in radians; converted to degrees internally).
    omega: 1D array, optional
        Array of frequencies (in radians).
    printValue: boolean, optional
        print values if True (by default).
    NameOfFigure: String, optional
        Name of the figure in which to plot.

    Returns
    -------
    mag: float
        The gain value for the given phase.
    omega: float
        The frequency value in rad/sec for the given phase.
    """
    lowLimit = -2
    highLimit = 2
    if NameOfFigure == "" :
        plt.figure()
    else:
        plt.figure(NameOfFigure)
    if(np.all(mag == None) and np.all(phase == None) and np.all(omega == None)):
        # No data given: compute the Bode response on a dense frequency
        # grid (the default bode resolution is only 50 points).
        w = np.logspace(lowLimit, highLimit, 10000)
        mag, phase, omega = bode(sys, w, dB = True, Hz = False, deg = True)
        phase = phase*180/math.pi # convert the phases from radians to degrees
        idx = (np.abs(phase-phaseValue)).argmin()
        # If the best match sits on the edge of the grid, widen the
        # frequency range on that side and try again.
        while idx == np.size(phase)-1 or idx == 0:
            if idx == 0:
                lowLimit-=1
            else:
                highLimit+=1
            w = np.logspace(lowLimit, highLimit, 10000)
            mag, phase, omega = bode(sys, w, dB = True, Hz = False, deg = True)
            phase = phase*180/math.pi # convert the phases from radians to degrees
            idx = (np.abs(phase-phaseValue)).argmin()
    else:
        phase = phase*180/math.pi # convert the phases from radians to degrees
        idx = (np.abs(phase-phaseValue)).argmin()
    if printValue:
        mag_dB = 20*np.log10(mag[idx]) # gain in dB
        print(f"Gain à {phaseValue}° = {mag_dB} dB")
        print(f"Fréquence à {phaseValue}° = {omega[idx]} rad/sec")
    return mag[idx], omega[idx]
#%% Compute reasonable defaults for axes
def default_frequency_range(syslist):
    """Compute a reasonable default frequency range for frequency
    domain plots.

    Finds a reasonable default frequency range by examining the features
    (poles and zeros) of the systems in syslist.  The range spans one
    decade below the smallest feature frequency up to one decade above
    the largest one, rounded to integer decades.  Poles and zeros at the
    origin are excluded; if no features are found at all, the range
    defaults to logspace(-1, 1).

    Parameters
    ----------
    syslist: list of Lti
        List of linear input/output systems (single system is OK)

    Returns
    -------
    omega: array
        Range of frequencies in rad/sec

    Examples
    --------
    >>> from matlab import ss
    >>> sys = ss("1. -2; 3. -4", "5.; 7", "6. 8", "9.")
    >>> omega = default_frequency_range(sys)
    """
    # Accept a bare system as well as an iterable of systems.
    if not getattr(syslist, '__iter__', False):
        syslist = (syslist,)
    # Collect the magnitudes of every pole and zero of every system.
    features = np.array(())
    for sys in syslist:
        try:
            # Add new features to the list
            features = np.concatenate((features, np.abs(sys.pole())))
            features = np.concatenate((features, np.abs(sys.zero())))
        except Exception:
            # Best effort: systems without pole()/zero() simply
            # contribute no features.
            pass
    # Poles/zeros at the origin would make log10 diverge; drop them.
    features = features[features != 0]
    # Fall back to a single feature at 1 rad/s so the range below is
    # always well defined (this yields logspace(-1, 1)).
    if features.shape[0] == 0:
        features = [1]
    #! TODO: Add a check in discrete case to make sure we don't get aliasing
    # Set the range to be an order of magnitude beyond any features,
    # rounded to integer decades.  np.logspace replaces the removed
    # scipy top-level alias sp.logspace (identical behaviour).
    features = np.log10(features)
    omega = np.logspace(np.floor(np.min(features)) - 1,
                        np.ceil(np.max(features)) + 1)
    return omega
#%% Function to trace Nichols as needed for the laboratory
# Nichols function reviewed by Alban Van Laethem
def nichols(sys_list, omega=None, grid=None, labels=None, NameOfFigure = "",
            data = False, fig = None, ax = None, linestyle='-'):
    """Nichols plot for a system

    Plots a Nichols plot for the system over a (optional) frequency range.

    Parameters
    ----------
    sys_list: list of LTI, or LTI
        List of linear input/output systems (single system is OK)
    omega: array_like, optional
        Range of frequencies (list or bounds) in rad/sec
    grid: boolean, optional
        True if the plot should include a Nichols-chart grid
        (no grid is drawn by default).
    labels: list of str, optional
        One legend label per system; no legend is drawn by default.
    NameOfFigure: String, optional
        Name of the figure in which to plot.
    data: boolean, optional
        True if we must return x and y (default is False)
    fig: Figure, optional
        Unused; kept for backward compatibility.
    ax: axes.subplots.AxesSubplot, optional
        The axe on which to plot
    linestyle: '-.' , '--' , ':' , '-' , optional
        The line style used to plot the nichols graph (default is '-').

    Returns
    -------
    if data == True:
        x: 1D array
            Abscisse vector
        y: 1D array
            Ordinate vector
    """
    # Normalise the default here instead of using a mutable default
    # argument ([''] would be shared across calls).
    if labels is None:
        labels = ['']
    # Open a figure with the given name or open a new one
    if NameOfFigure == "":
        plt.figure()
    else:
        plt.figure(NameOfFigure)
    ax = ax or plt.gca()
    # Get parameter values
    #grid = config._get_param('nichols', 'grid', grid, True)
    # If argument was a singleton, turn it into a list
    if not getattr(sys_list, '__iter__', False):
        sys_list = (sys_list,)
    # Select a default range if none is provided
    if omega is None:
        omega = default_frequency_range(sys_list)
    for idx, sys in enumerate(sys_list):
        # Get the magnitude and phase of the system
        mag_tmp, phase_tmp, omega = sys.freqresp(omega)
        mag = np.squeeze(mag_tmp)
        phase = np.squeeze(phase_tmp)
        # Convert to Nichols-plot format (phase in degrees,
        # and magnitude in dB).  np.degrees/np.log10 replace the
        # removed scipy top-level aliases.
        x = unwrap(np.degrees(phase), 360)
        y = 20 * np.log10(mag)
        # Generate the plot
        if labels != ['']:
            ax.plot(x, y, label=labels[idx], linestyle=linestyle)
        else:
            ax.plot(x, y, linestyle=linestyle)
    ax.set_xlabel('Phase (deg)')
    ax.set_ylabel('Magnitude (dB)')
    ax.set_title('Nichols Plot')
    # Mark the -180 point
    ax.plot([-180], [0], 'r+', label='_nolegend_')
    # Add grid
    if grid:
        nichols_grid()
    # Add legend
    if labels != ['']:
        plt.legend()
    if data:
        return x, y
#%% Function to generate a second order transfer function based on its typical characteristics.
def generateTfFromCharac(G, wn, zeta):
    """
    Build the transfer function of a canonical second order system from
    its typical characteristics.

    Parameters
    ----------
    G: float
        Gain of the transfer function.
    wn: float
        Frequency of the transfer function.
    zeta: float
        Damping coefficient of the transfer function.

    Returns
    -------
    ft: TransferFunction
        The linear system with those characteristics.
    """
    # Canonical denominator: s**2 / wn**2 + 2*zeta*s / wn + 1
    denominator = [1 / wn ** 2, 2 * zeta / wn, 1]
    return tf([G], denominator)
#%% Function to add noise to a given signal
def addNoise(t, signal, variance=0.05, rndNb=None):
    """
    Add gaussian noise to a given signal and plot both versions.

    Parameters
    ----------
    t: 1D array
        Time vector.
    signal: 1D array
        Signal at which to add noise.
    variance: float, optional
        Scale (standard deviation) passed to np.random.normal.
        The default is 0.05.
    rndNb: int, optional
        Seed for RandomState. The default is None.

    Returns
    -------
    signal_noisy: 1D array
        Noisy signal.
    """
    # Identity comparison is the correct way to test for "no seed given".
    if rndNb is not None:
        np.random.seed(rndNb)  # To master the random numbers
    noise = np.random.normal(0, variance, len(signal))
    signal_noisy = signal + noise
    plt.figure()
    plt.plot(t, signal, label="Original")
    plt.plot(t, signal_noisy, label="With noise")
    plt.legend()  # Without this call the labels above are never shown
    return signal_noisy
#%% Save data into a csv file
def saveFT(t, y, x=None, name="data"):
    """
    Save the data of the transfert function into a csv file.

    Parameters
    ----------
    t: 1D array
        Time vector.
    y: 1D array
        Response of the system.
    x: 1D array, optional
        Input of the system (default = [0, 1, ..., 1])
    name: String
        Name of the csv file, without extension (default = "data").

    Returns
    -------
    None
    """
    # ``x == None`` would be evaluated element-wise on an ndarray and
    # make the ``if`` raise ValueError, so an identity test is required.
    if x is None:
        # Default input: a unit step (0 at the first sample, then 1).
        x = np.ones(len(t))
        x[0] = 0
    np.savetxt(name + ".csv",
               np.transpose([t, x, y]),
               delimiter=",",
               fmt='%s',
               header='Temps(s),Consigne,Mesure',
               comments='')
#%% Load data from a csv file
def loadFT(file="data.csv"):
    """
    Load the data of the transfert function from a given csv file.

    The file is expected to contain one header row followed by rows of
    "time,input,measure" values, as written by saveFT.

    Parameters
    ----------
    file: String
        Name of the csv file (default = "data.csv").

    Returns
    -------
    [t, x, y]: list of three 1D float arrays
        Time vector, input of the system and response of the system.
    """
    # Reading of the data headers with a comma as delimiter.
    # np.str/np.float were removed from NumPy (1.24); the builtin
    # str/float dtypes behave identically here.
    head = np.loadtxt(file, delimiter=',', max_rows=1, dtype=str)
    # Reading of the data; ndmin=2 keeps the 2-D shape even when the
    # file contains a single data row.
    data = np.loadtxt(file, delimiter=',', skiprows=1, dtype=str, ndmin=2)
    # Printing of the headers
    print(head)
    # Data selections based on header and convert to float
    t = np.asarray(data[:, 0], dtype=float, order='C').flatten()
    x = np.asarray(data[:, 1], dtype=float, order='C').flatten()
    y = np.asarray(data[:, 2], dtype=float, order='C').flatten()
    return [t, x, y]
#%% Function to get the class of a given system.
def getClass(sys):
    """
    Get the class of the given system, i.e. the power of the lowest
    order non-zero coefficient of its denominator.

    Parameters
    ----------
    sys: LTI
        System analysed.

    Returns
    -------
    sysClass: int
        Class of the given system.
    """
    _, den = tfdata(sys)
    coefficients = den[0][0]  # The denominator comes wrapped in nested lists
    # The constant term is stored last, so walk the coefficients
    # backwards and stop at the first non-zero one.
    for power, coefficient in enumerate(reversed(coefficients)):
        if coefficient != 0:
            return power
#%% Function to get the order of a given system.
def getOrder(sys):
    """
    Get the order of the given system.

    Parameters
    ----------
    sys: LTI
        System analysed.

    Returns
    -------
    sysOrder: int
        Order of the given system (one less than the number of
        denominator coefficients).
    """
    _, den = tfdata(sys)
    # The denominator comes wrapped in nested lists.
    return len(den[0][0]) - 1
#%% PID Tuner to see the modifications of the PID parameters on a given system.
def pidTuner(H, Kp=1, Ki=0, Kd=0):
    """
    PID Tuner to see the modifications of the PID parameters on a given system.

    Opens an interactive matplotlib window with one slider per PID gain,
    a Step/Nichols radio selector, an "Infos" button printing the current
    gains and step/margin information to the console, and a "Reset"
    button restoring the initial gains.

    Parameters
    ----------
    H: LTI
        Transfert function of the system (open loop) to regulate.
    Kp: float, optionnal
        Proportionnal parameter of the PID controller (default = 1).
    Ki: float, optionnal
        Integral parameter of the PID controller (default = 0).
        Reminder: Ki = Kp/tI
    Kd: float, optionnal
        Derivative parameter of the PID controller (default = 0).
        Reminder: Kd = Kp*tD

    Returns
    -------
    (sKp, print_button, reset_button, radio): tuple
        References to the interactive widgets; the caller must keep them
        alive, otherwise the callbacks stop working (see final comment).
    """
    # Local import: the widgets are only needed inside this tool.
    from matplotlib.widgets import Slider, Button, RadioButtons
    # Create the figure
    fig = plt.figure("PID Tuner")
    axGraph = fig.subplots()
    plt.subplots_adjust(bottom=0.3)
    # Frames to contain the sliders
    axcolor = 'lightgoldenrodyellow'
    axKp = plt.axes([0.125, 0.2, 0.775, 0.03], facecolor=axcolor)
    axKi = plt.axes([0.125, 0.15, 0.775, 0.03], facecolor=axcolor)
    axKd = plt.axes([0.125, 0.1, 0.775, 0.03], facecolor=axcolor)
    # Slider
    # Each gain is tunable from 1/20th to 20 times its initial value; a
    # gain starting at 0 gets a fixed 0..100 range instead (a *20 range
    # around 0 would be empty).
    sKp = Slider(axKp, 'Kp', Kp/20, Kp*20, valinit=Kp)
    if Ki == 0:
        sKi = Slider(axKi, 'Ki', 0, 100, valinit=Ki)
    else:
        sKi = Slider(axKi, 'Ki', Ki/20, Ki*20, valinit=Ki)
    if Kd == 0:
        sKd = Slider(axKd, 'Kd', 0, 100, valinit=Kd)
    else:
        sKd = Slider(axKd, 'Kd', Kd/20, Kd*20, valinit=Kd)
    # Slider callback: rebuild the controller and redraw whichever view
    # is selected on the radio button.
    def update(val):
        KpNew = sKp.val
        KiNew = sKi.val
        KdNew = sKd.val
        # PID in parallel form: Kp + Ki/s + Kd*s
        c = KpNew*tf(1,1) + KiNew*tf(1,[1, 0]) + KdNew*tf([1, 0],1)
        Gbo = c*H
        if radio.value_selected == 'Step':
            axGraph.clear()
            plotStep(axGraph, Gbo)
        elif radio.value_selected == 'Nichols':
            axGraph.clear()
            plotNichols(axGraph, Gbo)
        fig.canvas.draw_idle() # Refresh the plots
    sKp.on_changed(update)
    sKi.on_changed(update)
    sKd.on_changed(update)
    # Reset button
    resetax = plt.axes([0.8, 0.025, 0.1, 0.04])
    reset_button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')
    # Restore the three sliders (and therefore the plots) to their
    # initial values.
    def reset(event):
        sKp.reset()
        sKi.reset()
        sKd.reset()
    reset_button.on_clicked(reset)
    # Draw the Nichols chart of the initial and of the current open loop
    # (Gbo_init is captured from the enclosing scope, defined below).
    def plotNichols(ax, Gbo):
        nichols([Gbo_init, Gbo], NameOfFigure = "PID Tuner", ax=ax)
        # Print infos inside the plot
        textInfo = getNicholsTextInfos(Gbo)
        at = AnchoredText(textInfo,
                          prop=dict(size=10), frameon=True,
                          loc='lower right',
                          )
        at.patch.set_boxstyle("round,pad=0.,rounding_size=0.2")
        axGraph.add_artist(at)
    # Draw the closed-loop step response of the initial system (Y, T
    # from the enclosing scope) and of the modified one.
    def plotStep(ax, Gbo):
        Gbf = feedback(Gbo)
        [Y_New, T_new]= step(Gbf, T)
        ax.plot(T, linspace(1, 1, len(T)), linestyle=':', lw=1, color='grey') # 1 line
        l0, = ax.plot(T, Y, label="Initial system", lw=1) # Original
        l, = ax.plot(T, Y_New, label="Modified system", lw=1)
        #l.set_ydata(Y_New)
        ax.set_title("Step Response")
        ax.set_ylabel("Amplitude")
        ax.set_xlabel("Time (seconds)")
    # Print button
    printax = plt.axes([0.6, 0.025, 0.1, 0.04])
    print_button = Button(printax, 'Infos', color=axcolor, hovercolor='0.975')
    # Function to create a string with the usefull info fo nichols plot
    def getNicholsTextInfos(Gbo):
        gm, pm, wg, wp = margin(Gbo) # Extract the gain margin (Gm) and the phase margin (Pm)
        gm = 20*np.log10(gm) # Conversion of gm in dB
        return """Phase Margin = {PM}°
Gain Margin = {GM} dB""".format(PM=pm, GM=gm)
    # "Infos" button callback: print the gains, the controller and the
    # open loop; then, depending on the selected view, either the step
    # characteristics or the stability margins.
    def printInfos(event):
        KpNew = sKp.val
        KiNew = sKi.val
        KdNew = sKd.val
        print("") # To let space before the informations
        print("Kp =", KpNew)
        print("Ki =", KiNew)
        print("Kd =", KdNew)
        c = KpNew*tf(1,1) + KiNew*tf(1,[1, 0]) + KdNew*tf([1, 0],1)
        print("Corr =", c)
        Gbo = c*H
        print("Gbo =", Gbo)
        if radio.value_selected == 'Step':
            Gbf = feedback(Gbo)
            [Y_New, T_new]= step(Gbf, T)
            stepInfo = info()
            plt.sca(axGraph) # To change the current axes to be the graphs's one
            step_info(T, Y_New, stepInfo)
            # Printing of the step infos
            temp = vars(stepInfo) # Transform into a dict to be able to iterate
            for item in temp:
                print(item, ':', temp[item])
        elif radio.value_selected == 'Nichols':
            gm, pm, wg, wp = margin(Gbo) # Extract the gain margin (Gm) and the phase margin (Pm)
            print("Phase Margin =", pm, "°")
            gm = 20*np.log10(gm) # Conversion of gm in dB
            print("Gain Margin =", gm, "dB")
            # Plotting: materialise the margins on the Nichols chart
            # (only when they are finite).
            if pm != math.inf:
                axGraph.plot([-180, -180+pm], [0, 0], 'k-', linewidth=1)
                axGraph.plot(-180+pm, 0,'ko')
            if gm != math.inf:
                axGraph.plot([-180, -180], [-gm, 0], 'k-', linewidth=1)
                axGraph.plot(-180, -gm,'ko')
    print_button.on_clicked(printInfos)
    # Radio button
    rax = plt.axes([0.905, 0.5, 0.09, 0.1], facecolor=axcolor)
    radio = RadioButtons(rax, ('Step', 'Nichols'), active=0)
    # Radio callback: switch between the step and the Nichols views.
    def changeGraph(label):
        # Get the new parameters values
        KpNew = sKp.val
        KiNew = sKi.val
        KdNew = sKd.val
        c = KpNew*tf(1,1) + KiNew*tf(1,[1, 0]) + KdNew*tf([1, 0],1)
        Gbo = c*H
        # Deleting of the graphs
        axGraph.clear()
        # Base for the original graph
        if label == 'Step':
            plotStep(axGraph, Gbo)
        elif label == 'Nichols':
            plotNichols(axGraph, Gbo)
        fig.canvas.draw_idle() # To refresh the plots
    radio.on_clicked(changeGraph)
    # Declaration of the transfer function of the system in BO and BF with the given control parameters
    c = Kp*tf(1,1) + Ki*tf(1,[1, 0]) + Kd*tf([1, 0],1)
    Gbo_init = c*H
    Gbf_init = feedback(Gbo_init)
    # Y and T are captured by the nested plot functions defined above.
    [Y, T] = step(Gbf_init)
    # Plot the step
    plotStep(axGraph, Gbo_init)
    plt.show()
    return sKp, print_button, reset_button, radio # It's needed to return those variables to keep the widgets references or they don't work.
| 31.76779 | 169 | 0.589641 |
ebb98d1a921c468667c4f091165a4cfc08797776 | 13,969 | py | Python | recipes/protobuf/all/conanfile.py | dpronin/conan-center-index | 5c6e41a618097d04e731c9831118a51dcb39ab3f | [
"MIT"
] | null | null | null | recipes/protobuf/all/conanfile.py | dpronin/conan-center-index | 5c6e41a618097d04e731c9831118a51dcb39ab3f | [
"MIT"
] | 1 | 2021-11-22T13:54:48.000Z | 2021-11-22T14:09:45.000Z | recipes/protobuf/all/conanfile.py | dpronin/conan-center-index | 5c6e41a618097d04e731c9831118a51dcb39ab3f | [
"MIT"
] | null | null | null | from conan.tools.files import rename
from conan.tools.microsoft import msvc_runtime_flag
from conans import ConanFile, CMake, tools
from conans.errors import ConanInvalidConfiguration
import functools
import os
import textwrap
required_conan_version = ">=1.43.0"
class ProtobufConan(ConanFile):
    """Conan recipe building Protocol Buffers (libprotobuf, libprotoc,
    optional libprotobuf-lite) and the protoc compiler with CMake."""
    name = "protobuf"
    description = "Protocol Buffers - Google's data interchange format"
    topics = ("protocol-buffers", "protocol-compiler", "serialization", "rpc", "protocol-compiler")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/protocolbuffers/protobuf"
    license = "BSD-3-Clause"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "with_zlib": [True, False],
        "with_rtti": [True, False],
        "lite": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "with_zlib": True,
        "with_rtti": True,
        "lite": False,
    }
    # short_paths works around Windows MAX_PATH issues with deep trees.
    short_paths = True
    generators = "cmake"
    # Folder where the upstream sources are unpacked.
    @property
    def _source_subfolder(self):
        return "source_subfolder"
    # Out-of-source CMake build folder.
    @property
    def _build_subfolder(self):
        return "build_subfolder"
    # True for both the legacy "Visual Studio" and the new "msvc" compilers.
    @property
    def _is_msvc(self):
        return str(self.settings.compiler) in ["Visual Studio", "msvc"]
    # clang targeting the MSVC ABI on Windows (clang-cl).
    @property
    def _is_clang_cl(self):
        return self.settings.compiler == 'clang' and self.settings.os == 'Windows'
    @property
    def _is_clang_x86(self):
        return self.settings.compiler == "clang" and self.settings.arch == "x86"
    # The protobuf_DISABLE_RTTI CMake switch only exists from 3.15.4 on.
    @property
    def _can_disable_rtti(self):
        return tools.Version(self.version) >= "3.15.4"
    def export_sources(self):
        self.copy("CMakeLists.txt")
        for patch in self.conan_data.get("patches", {}).get(self.version, []):
            self.copy(patch["patch_file"])
    def config_options(self):
        # fPIC is meaningless on Windows; with_rtti is only configurable
        # on versions that support disabling it.
        if self.settings.os == "Windows":
            del self.options.fPIC
        if not self._can_disable_rtti:
            del self.options.with_rtti
    def configure(self):
        # fPIC is implied for shared libraries.
        if self.options.shared:
            del self.options.fPIC
    def requirements(self):
        if self.options.with_zlib:
            self.requires("zlib/1.2.12")
    def validate(self):
        # Reject configurations known not to build.
        if self.options.shared and str(self.settings.compiler.get_safe("runtime")) in ["MT", "MTd", "static"]:
            raise ConanInvalidConfiguration("Protobuf can't be built with shared + MT(d) runtimes")
        if self.settings.compiler == "Visual Studio":
            if tools.Version(self.settings.compiler.version) < "14":
                raise ConanInvalidConfiguration("On Windows Protobuf can only be built with "
                                                "Visual Studio 2015 or higher.")
        if self.settings.compiler == "clang":
            if tools.Version(self.version) >= "3.15.4" and tools.Version(self.settings.compiler.version) < "4":
                raise ConanInvalidConfiguration("protobuf {} doesn't support clang < 4".format(self.version))
        if hasattr(self, "settings_build") and tools.cross_building(self) and \
           self.settings.os == "Macos" and self.options.shared:
            # FIXME: should be allowed, actually build succeeds but it fails at build time of test package due to SIP
            raise ConanInvalidConfiguration("protobuf shared not supported yet in CCI while cross-building on Macos")
    def source(self):
        tools.get(**self.conan_data["sources"][self.version],
                  destination=self._source_subfolder, strip_root=True)
    # Where the CMake config files are installed inside the package.
    @property
    def _cmake_install_base_path(self):
        return os.path.join("lib", "cmake", "protobuf")
    # Cached so build() and package() share the same configured CMake.
    @functools.lru_cache(1)
    def _configure_cmake(self):
        cmake = CMake(self)
        cmake.definitions["CMAKE_INSTALL_CMAKEDIR"] = self._cmake_install_base_path.replace("\\", "/")
        cmake.definitions["protobuf_WITH_ZLIB"] = self.options.with_zlib
        cmake.definitions["protobuf_BUILD_TESTS"] = False
        cmake.definitions["protobuf_BUILD_PROTOC_BINARIES"] = True
        if tools.Version(self.version) >= "3.14.0":
            cmake.definitions["protobuf_BUILD_LIBPROTOC"] = True
        if self._can_disable_rtti:
            cmake.definitions["protobuf_DISABLE_RTTI"] = not self.options.with_rtti
        if self._is_msvc or self._is_clang_cl:
            runtime = msvc_runtime_flag(self)
            if not runtime:
                runtime = self.settings.get_safe("compiler.runtime")
            cmake.definitions["protobuf_MSVC_STATIC_RUNTIME"] = "MT" in runtime
        if tools.Version(self.version) < "3.18.0" and self._is_clang_cl:
            cmake.definitions["CMAKE_RC_COMPILER"] = os.environ.get("RC", "llvm-rc")
        cmake.configure(build_folder=self._build_subfolder)
        return cmake
    def _patch_sources(self):
        for patch in self.conan_data.get("patches", {}).get(self.version, []):
            tools.patch(**patch)
        # Provide relocatable protobuf::protoc target and Protobuf_PROTOC_EXECUTABLE cache variable
        # TODO: some of the following logic might be disabled when conan will
        # allow to create executable imported targets in package_info()
        protobuf_config_cmake = os.path.join(self._source_subfolder, "cmake", "protobuf-config.cmake.in")
        tools.replace_in_file(
            protobuf_config_cmake,
            "@_protobuf_FIND_ZLIB@",
            "# BEGIN CONAN PATCH\n#_protobuf_FIND_ZLIB@\n# END CONAN PATCH"
        )
        exe_ext = ".exe" if self.settings.os == "Windows" else ""
        protoc_filename = "protoc" + exe_ext
        module_folder_depth = len(os.path.normpath(self._cmake_install_base_path).split(os.path.sep))
        protoc_rel_path = "{}bin/{}".format("".join(["../"] * module_folder_depth), protoc_filename)
        # Imported target pointing at the packaged protoc binary; when
        # cross-compiling, a protoc found in PATH takes precedence.
        protoc_target = textwrap.dedent("""\
            if(NOT TARGET protobuf::protoc)
                if(CMAKE_CROSSCOMPILING)
                    find_program(PROTOC_PROGRAM protoc PATHS ENV PATH NO_DEFAULT_PATH)
                endif()
                if(NOT PROTOC_PROGRAM)
                    set(PROTOC_PROGRAM \"${{CMAKE_CURRENT_LIST_DIR}}/{protoc_rel_path}\")
                endif()
                get_filename_component(PROTOC_PROGRAM \"${{PROTOC_PROGRAM}}\" ABSOLUTE)
                set(Protobuf_PROTOC_EXECUTABLE ${{PROTOC_PROGRAM}} CACHE FILEPATH \"The protoc compiler\")
                add_executable(protobuf::protoc IMPORTED)
                set_property(TARGET protobuf::protoc PROPERTY IMPORTED_LOCATION ${{Protobuf_PROTOC_EXECUTABLE}})
            endif()
        """.format(protoc_rel_path=protoc_rel_path))
        tools.replace_in_file(
            protobuf_config_cmake,
            "include(\"${CMAKE_CURRENT_LIST_DIR}/protobuf-targets.cmake\")",
            protoc_target
        )
        # Set DYLD_LIBRARY_PATH in command line to avoid issues with shared protobuf
        # (even with virtualrunenv, this fix might be required due to SIP)
        # Only works with cmake, cmake_find_package or cmake_find_package_multi generators
        if tools.is_apple_os(self.settings.os):
            tools.replace_in_file(
                protobuf_config_cmake,
                "add_custom_command(",
                ("set(CUSTOM_DYLD_LIBRARY_PATH ${CONAN_LIB_DIRS} ${Protobuf_LIB_DIRS} ${Protobuf_LIB_DIRS_RELEASE} ${Protobuf_LIB_DIRS_DEBUG} ${Protobuf_LIB_DIRS_RELWITHDEBINFO} ${Protobuf_LIB_DIRS_MINSIZEREL})\n"
                 "string(REPLACE \";\" \":\" CUSTOM_DYLD_LIBRARY_PATH \"${CUSTOM_DYLD_LIBRARY_PATH}\")\n"
                 "add_custom_command(")
            )
            # NOTE(review): both branches of this conditional are the same
            # string here — presumably upstream's 3.20 branch differs only
            # in whitespace; confirm against the upstream recipe.
            cmd_str = "COMMAND protobuf::protoc" if tools.Version(self.version) < "3.20.0" else "COMMAND protobuf::protoc"
            tools.replace_in_file(
                protobuf_config_cmake,
                cmd_str,
                "COMMAND ${CMAKE_COMMAND} -E env \"DYLD_LIBRARY_PATH=${CUSTOM_DYLD_LIBRARY_PATH}\" $<TARGET_FILE:protobuf::protoc>"
            )
        # Disable a potential warning in protobuf-module.cmake.in
        # TODO: remove this patch? Is it really useful?
        protobuf_module_cmake = os.path.join(self._source_subfolder, "cmake", "protobuf-module.cmake.in")
        tools.replace_in_file(
            protobuf_module_cmake,
            "if(DEFINED Protobuf_SRC_ROOT_FOLDER)",
            "if(0)\nif(DEFINED Protobuf_SRC_ROOT_FOLDER)",
        )
        tools.replace_in_file(
            protobuf_module_cmake,
            "# Define upper case versions of output variables",
            "endif()",
        )
    def build(self):
        self._patch_sources()
        cmake = self._configure_cmake()
        cmake.build()
    def package(self):
        self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
        cmake = self._configure_cmake()
        cmake.install()
        # Drop the files Conan regenerates or replaces with its own logic.
        tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
        os.unlink(os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-config-version.cmake"))
        os.unlink(os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-targets.cmake"))
        os.unlink(os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-targets-{}.cmake".format(str(self.settings.build_type).lower())))
        # Keep only the protobuf_generate() helper from the config file.
        rename(self, os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-config.cmake"),
                     os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-generate.cmake"))
        if not self.options.lite:
            tools.remove_files_by_mask(os.path.join(self.package_folder, "lib"), "libprotobuf-lite.*")
            tools.remove_files_by_mask(os.path.join(self.package_folder, "bin"), "libprotobuf-lite.*")
    def package_info(self):
        self.cpp_info.set_property("cmake_find_mode", "both")
        self.cpp_info.set_property("cmake_module_file_name", "Protobuf")
        self.cpp_info.set_property("cmake_file_name", "protobuf")
        self.cpp_info.set_property("pkg_config_name", "protobuf_full_package") # unofficial, but required to avoid side effects (libprotobuf component "steals" the default global pkg_config name)
        build_modules = [
            os.path.join(self._cmake_install_base_path, "protobuf-generate.cmake"),
            os.path.join(self._cmake_install_base_path, "protobuf-module.cmake"),
            os.path.join(self._cmake_install_base_path, "protobuf-options.cmake"),
        ]
        self.cpp_info.set_property("cmake_build_modules", build_modules)
        # MSVC-style toolchains prefix the import libs with "lib";
        # debug builds suffix them with "d".
        lib_prefix = "lib" if (self._is_msvc or self._is_clang_cl) else ""
        lib_suffix = "d" if self.settings.build_type == "Debug" else ""
        # libprotobuf
        self.cpp_info.components["libprotobuf"].set_property("cmake_target_name", "protobuf::libprotobuf")
        self.cpp_info.components["libprotobuf"].set_property("pkg_config_name", "protobuf")
        self.cpp_info.components["libprotobuf"].builddirs.append(self._cmake_install_base_path)
        self.cpp_info.components["libprotobuf"].libs = [lib_prefix + "protobuf" + lib_suffix]
        if self.options.with_zlib:
            self.cpp_info.components["libprotobuf"].requires = ["zlib::zlib"]
        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.components["libprotobuf"].system_libs.append("pthread")
            if self._is_clang_x86 or "arm" in str(self.settings.arch):
                self.cpp_info.components["libprotobuf"].system_libs.append("atomic")
        if self.settings.os == "Android":
            self.cpp_info.components["libprotobuf"].system_libs.append("log")
        if self.settings.os == "Windows":
            if self.options.shared:
                self.cpp_info.components["libprotobuf"].defines = ["PROTOBUF_USE_DLLS"]
        # libprotoc
        self.cpp_info.components["libprotoc"].set_property("cmake_target_name", "protobuf::libprotoc")
        self.cpp_info.components["libprotoc"].libs = [lib_prefix + "protoc" + lib_suffix]
        self.cpp_info.components["libprotoc"].requires = ["libprotobuf"]
        # libprotobuf-lite
        if self.options.lite:
            self.cpp_info.components["libprotobuf-lite"].set_property("cmake_target_name", "protobuf::libprotobuf-lite")
            self.cpp_info.components["libprotobuf-lite"].set_property("pkg_config_name", "protobuf-lite")
            self.cpp_info.components["libprotobuf-lite"].builddirs.append(self._cmake_install_base_path)
            self.cpp_info.components["libprotobuf-lite"].libs = [lib_prefix + "protobuf-lite" + lib_suffix]
            if self.settings.os in ["Linux", "FreeBSD"]:
                self.cpp_info.components["libprotobuf-lite"].system_libs.append("pthread")
                if self._is_clang_x86 or "arm" in str(self.settings.arch):
                    self.cpp_info.components["libprotobuf-lite"].system_libs.append("atomic")
            if self.settings.os == "Windows":
                if self.options.shared:
                    self.cpp_info.components["libprotobuf-lite"].defines = ["PROTOBUF_USE_DLLS"]
            if self.settings.os == "Android":
                self.cpp_info.components["libprotobuf-lite"].system_libs.append("log")
        bindir = os.path.join(self.package_folder, "bin")
        self.output.info("Appending PATH environment variable: {}".format(bindir))
        self.env_info.PATH.append(bindir)
        # TODO: to remove in conan v2 once cmake_find_package* & pkg_config generators removed
        self.cpp_info.filenames["cmake_find_package"] = "Protobuf"
        self.cpp_info.filenames["cmake_find_package_multi"] = "protobuf"
        self.cpp_info.names["pkg_config"] ="protobuf_full_package"
        self.cpp_info.components["libprotobuf"].build_modules = build_modules
        if self.options.lite:
            self.cpp_info.components["libprotobuf-lite"].build_modules = build_modules
| 49.014035 | 213 | 0.654163 |
b85bbda1225dadb2b31cb216674462c452473c1d | 18,093 | py | Python | numba/cuda/tests/cudapy/test_intrinsics.py | skailasa/numba | 38ab89dd369a14b8826d3fa30d080aa083aed00b | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 6,620 | 2015-01-04T08:51:04.000Z | 2022-03-31T12:52:18.000Z | numba/cuda/tests/cudapy/test_intrinsics.py | skailasa/numba | 38ab89dd369a14b8826d3fa30d080aa083aed00b | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 6,457 | 2015-01-04T03:18:41.000Z | 2022-03-31T17:38:42.000Z | numba/cuda/tests/cudapy/test_intrinsics.py | skailasa/numba | 38ab89dd369a14b8826d3fa30d080aa083aed00b | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 930 | 2015-01-25T02:33:03.000Z | 2022-03-30T14:10:32.000Z | import itertools
import numpy as np
import re
from numba import cuda, int64
from numba.cuda.testing import unittest, CUDATestCase, skip_on_cudasim
# Kernel fixtures compiled with cuda.jit by the tests below.  Their exact
# statement structure is part of what the tests check, so they are kept
# deliberately minimal.
# Writes cuda.threadIdx.x into ary[0].
def simple_threadidx(ary):
    i = cuda.threadIdx.x
    ary[0] = i
# Each thread stores its own threadIdx.x at its own slot.
def fill_threadidx(ary):
    i = cuda.threadIdx.x
    ary[i] = i
# Fills a 3-D array from the three thread index components.
def fill3d_threadidx(ary):
    i = cuda.threadIdx.x
    j = cuda.threadIdx.y
    k = cuda.threadIdx.z
    ary[i, j, k] = (i + 1) * (j + 1) * (k + 1)
# Stores the cuda.grid(1) index of each thread at that index.
def simple_grid1d(ary):
    i = cuda.grid(1)
    ary[i] = i
# Stores the sum of the two cuda.grid(2) indices at each position.
def simple_grid2d(ary):
    i, j = cuda.grid(2)
    ary[i, j] = i + j
# Thread 0 records cuda.gridsize(1) into ary[0].
def simple_gridsize1d(ary):
    i = cuda.grid(1)
    x = cuda.gridsize(1)
    if i == 0:
        ary[0] = x
# Thread (0, 0) records both cuda.gridsize(2) components.
def simple_gridsize2d(ary):
    i, j = cuda.grid(2)
    x, y = cuda.gridsize(2)
    if i == 0 and j == 0:
        ary[0] = x
        ary[1] = y
# Strided double loop starting at the thread's grid position, stepping by
# gridDim * blockDim along each axis.
def intrinsic_forloop_step(c):
    startX, startY = cuda.grid(2)
    gridX = cuda.gridDim.x * cuda.blockDim.x
    gridY = cuda.gridDim.y * cuda.blockDim.y
    height, width = c.shape
    for x in range(startX, width, gridX):
        for y in range(startY, height, gridY):
            c[y, x] = x + y
# Thin kernels wrapping one CUDA intrinsic each, so the tests can check
# the intrinsic's result (and, for selp, the PTX it generates).
# Population count: number of set bits in c.
def simple_popc(ary, c):
    ary[0] = cuda.popc(c)
# Fused multiply-add: a * b + c.
def simple_fma(ary, a, b, c):
    ary[0] = cuda.fma(a, b, c)
# Cube root of a.
def simple_cbrt(ary, a):
    ary[0] = cuda.cbrt(a)
# Bit reversal of c.
def simple_brev(ary, c):
    ary[0] = cuda.brev(c)
# Count of leading zero bits in c.
def simple_clz(ary, c):
    ary[0] = cuda.clz(c)
# Position of the first (least significant) set bit in c.
def simple_ffs(ary, c):
    ary[0] = cuda.ffs(c)
# Python round() inside a kernel.
def simple_round(ary, c):
    ary[0] = round(c)
# Python round() with an explicit digit count.
def simple_round_to(ary, c, ndigits):
    ary[0] = round(c, ndigits)
# Piecewise assignment written with nested ifs; test_selp asserts this
# form emits branch instructions in PTX.
def branching_with_ifs(a, b, c):
    i = cuda.grid(1)
    if a[i] > 4:
        if b % 2 == 0:
            a[i] = c[i]
        else:
            a[i] = 13
    else:
        a[i] = 3
# Same assignment expressed with cuda.selp; test_selp asserts this form
# is branch-free in PTX.
def branching_with_selps(a, b, c):
    i = cuda.grid(1)
    inner = cuda.selp(b % 2 == 0, c[i], 13)
    a[i] = cuda.selp(a[i] > 4, inner, 3)
# Each thread records its cuda.laneid.
def simple_laneid(ary):
    i = cuda.grid(1)
    ary[i] = cuda.laneid
# Records cuda.warpsize into ary[0].
def simple_warpsize(ary):
    ary[0] = cuda.warpsize
class TestCudaIntrinsic(CUDATestCase):
    def test_simple_threadidx(self):
        # Single-thread launch: threadIdx.x must be 0, overwriting the 1.
        compiled = cuda.jit("void(int32[:])")(simple_threadidx)
        ary = np.ones(1, dtype=np.int32)
        compiled[1, 1](ary)
        self.assertTrue(ary[0] == 0)
    def test_fill_threadidx(self):
        # N threads each write their own index at their own slot.
        compiled = cuda.jit("void(int32[:])")(fill_threadidx)
        N = 10
        ary = np.ones(N, dtype=np.int32)
        exp = np.arange(N, dtype=np.int32)
        compiled[1, N](ary)
        self.assertTrue(np.all(ary == exp))
    def test_fill3d_threadidx(self):
        X, Y, Z = 4, 5, 6
        # Compile the same kernel for C- and F-contiguous layouts; the
        # two results must agree element-wise.
        def c_contigous():
            compiled = cuda.jit("void(int32[:,:,::1])")(fill3d_threadidx)
            ary = np.zeros((X, Y, Z), dtype=np.int32)
            compiled[1, (X, Y, Z)](ary)
            return ary
        def f_contigous():
            compiled = cuda.jit("void(int32[::1,:,:])")(fill3d_threadidx)
            ary = np.asfortranarray(np.zeros((X, Y, Z), dtype=np.int32))
            compiled[1, (X, Y, Z)](ary)
            return ary
        c_res = c_contigous()
        f_res = f_contigous()
        self.assertTrue(np.all(c_res == f_res))
    def test_simple_grid1d(self):
        # grid(1) must enumerate all ntid * nctaid threads as 0..nelem-1.
        compiled = cuda.jit("void(int32[::1])")(simple_grid1d)
        ntid, nctaid = 3, 7
        nelem = ntid * nctaid
        ary = np.empty(nelem, dtype=np.int32)
        compiled[nctaid, ntid](ary)
        self.assertTrue(np.all(ary == np.arange(nelem)))
    def test_simple_grid2d(self):
        compiled = cuda.jit("void(int32[:,::1])")(simple_grid2d)
        ntid = (4, 3)
        nctaid = (5, 6)
        shape = (ntid[0] * nctaid[0], ntid[1] * nctaid[1])
        ary = np.empty(shape, dtype=np.int32)
        exp = ary.copy()
        compiled[nctaid, ntid](ary)
        # Expected: every cell holds the sum of its two indices.
        for i in range(ary.shape[0]):
            for j in range(ary.shape[1]):
                exp[i, j] = i + j
        self.assertTrue(np.all(ary == exp))
    def test_simple_gridsize1d(self):
        # gridsize(1) is the total thread count of the launch.
        compiled = cuda.jit("void(int32[::1])")(simple_gridsize1d)
        ntid, nctaid = 3, 7
        ary = np.zeros(1, dtype=np.int32)
        compiled[nctaid, ntid](ary)
        self.assertEqual(ary[0], nctaid * ntid)
    @skip_on_cudasim('Tests PTX emission')
    def test_selp(self):
        # Both kernels implement the same piecewise assignment; the test
        # checks that the `ifs` form emits 2 `bra` (branch) instructions
        # in PTX while the `selp` form is branch-free, and that both
        # produce identical results.
        sig = (int64[:], int64, int64[:])
        cu_branching_with_ifs = cuda.jit(sig)(branching_with_ifs)
        cu_branching_with_selps = cuda.jit(sig)(branching_with_selps)
        n = 32
        b = 6
        c = np.full(shape=32, fill_value=17, dtype=np.int64)
        # a = arange, so a[i] > 4 fails only for the first five elements
        # (-> 3); b is even, so every other element takes c[i] (17).
        expected = c.copy()
        expected[:5] = 3
        a = np.arange(n, dtype=np.int64)
        cu_branching_with_ifs[n, 1](a, b, c)
        ptx = cu_branching_with_ifs.inspect_asm(sig)
        self.assertEqual(2, len(re.findall(r'\s+bra\s+', ptx)))
        np.testing.assert_array_equal(a, expected, err_msg='branching')
        a = np.arange(n, dtype=np.int64)
        cu_branching_with_selps[n, 1](a, b, c)
        ptx = cu_branching_with_selps.inspect_asm(sig)
        self.assertEqual(0, len(re.findall(r'\s+bra\s+', ptx)))
        np.testing.assert_array_equal(a, expected, err_msg='selp')
    def test_simple_gridsize2d(self):
        compiled = cuda.jit("void(int32[::1])")(simple_gridsize2d)
        ntid = (4, 3)
        nctaid = (5, 6)
        ary = np.zeros(2, dtype=np.int32)
        compiled[nctaid, ntid](ary)
        # gridsize(2) returns the total thread count along each axis.
        self.assertEqual(ary[0], nctaid[0] * ntid[0])
        self.assertEqual(ary[1], nctaid[1] * ntid[1])
    def test_intrinsic_forloop_step(self):
        compiled = cuda.jit("void(int32[:,::1])")(intrinsic_forloop_step)
        ntid = (4, 3)
        nctaid = (5, 6)
        shape = (ntid[0] * nctaid[0], ntid[1] * nctaid[1])
        ary = np.empty(shape, dtype=np.int32)
        compiled[nctaid, ntid](ary)
        gridX, gridY = shape
        height, width = ary.shape
        # Spot-check a subset of positions (zip truncates to the shorter
        # of the two thread-index ranges).
        for i, j in zip(range(ntid[0]), range(ntid[1])):
            startX, startY = gridX + i, gridY + j
            for x in range(startX, width, gridX):
                for y in range(startY, height, gridY):
                    self.assertTrue(ary[y, x] == x + y, (ary[y, x], x + y))
    def test_3dgrid(self):
        # Every cell must receive the total thread count of the 3-D launch.
        @cuda.jit
        def foo(out):
            x, y, z = cuda.grid(3)
            a, b, c = cuda.gridsize(3)
            out[x, y, z] = a * b * c
        arr = np.zeros(9 ** 3, dtype=np.int32).reshape(9, 9, 9)
        foo[(3, 3, 3), (3, 3, 3)](arr)
        np.testing.assert_equal(arr, 9 ** 3)
    def test_3dgrid_2(self):
        # Checks grid()/gridsize() against their definitions in terms of
        # threadIdx/blockIdx/blockDim/gridDim, for every thread.
        @cuda.jit
        def foo(out):
            x, y, z = cuda.grid(3)
            a, b, c = cuda.gridsize(3)
            grid_is_right = (
                x == cuda.threadIdx.x + cuda.blockIdx.x * cuda.blockDim.x and
                y == cuda.threadIdx.y + cuda.blockIdx.y * cuda.blockDim.y and
                z == cuda.threadIdx.z + cuda.blockIdx.z * cuda.blockDim.z
            )
            gridsize_is_right = (a == cuda.blockDim.x * cuda.gridDim.x and
                                 b == cuda.blockDim.y * cuda.gridDim.y and
                                 c == cuda.blockDim.z * cuda.gridDim.z)
            out[x, y, z] = grid_is_right and gridsize_is_right
        x, y, z = (4 * 3, 3 * 2, 2 * 4)
        arr = np.zeros((x * y * z), dtype=np.bool_).reshape(x, y, z)
        foo[(4, 3, 2), (3, 2, 4)](arr)
        self.assertTrue(np.all(arr))
def test_popc_u4(self):
compiled = cuda.jit("void(int32[:], uint32)")(simple_popc)
ary = np.zeros(1, dtype=np.int32)
compiled[1, 1](ary, 0xF0)
self.assertEquals(ary[0], 4)
def test_popc_u8(self):
compiled = cuda.jit("void(int32[:], uint64)")(simple_popc)
ary = np.zeros(1, dtype=np.int32)
compiled[1, 1](ary, 0xF00000000000)
self.assertEquals(ary[0], 4)
def test_fma_f4(self):
compiled = cuda.jit("void(f4[:], f4, f4, f4)")(simple_fma)
ary = np.zeros(1, dtype=np.float32)
compiled[1, 1](ary, 2., 3., 4.)
np.testing.assert_allclose(ary[0], 2 * 3 + 4)
def test_fma_f8(self):
compiled = cuda.jit("void(f8[:], f8, f8, f8)")(simple_fma)
ary = np.zeros(1, dtype=np.float64)
compiled[1, 1](ary, 2., 3., 4.)
np.testing.assert_allclose(ary[0], 2 * 3 + 4)
def test_cbrt_f32(self):
    """Cube root of a float32 scalar matches arg ** (1 / 3)."""
    kernel = cuda.jit("void(float32[:], float32)")(simple_cbrt)
    out = np.zeros(1, dtype=np.float32)
    arg = 2.
    kernel[1, 1](out, arg)
    np.testing.assert_allclose(out[0], arg ** (1 / 3))

def test_cbrt_f64(self):
    """Cube root of a float64 scalar matches arg ** (1 / 3)."""
    kernel = cuda.jit("void(float64[:], float64)")(simple_cbrt)
    out = np.zeros(1, dtype=np.float64)
    arg = 6.
    kernel[1, 1](out, arg)
    np.testing.assert_allclose(out[0], arg ** (1 / 3))
def test_brev_u4(self):
    """Bit reversal of a uint32."""
    compiled = cuda.jit("void(uint32[:], uint32)")(simple_brev)
    ary = np.zeros(1, dtype=np.uint32)
    compiled[1, 1](ary, 0x000030F0)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(ary[0], 0x0F0C0000)

@skip_on_cudasim('only get given a Python "int", assumes 32 bits')
def test_brev_u8(self):
    """Bit reversal of a uint64."""
    compiled = cuda.jit("void(uint64[:], uint64)")(simple_brev)
    ary = np.zeros(1, dtype=np.uint64)
    compiled[1, 1](ary, 0x000030F0000030F0)
    self.assertEqual(ary[0], 0x0F0C00000F0C0000)
def test_clz_i4(self):
    """clz counts leading zero bits of an int32 (bit 20 set -> 11 zeros)."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_clz)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x00100000)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(ary[0], 11)

def test_clz_u4(self):
    """
    Although the CUDA Math API
    (http://docs.nvidia.com/cuda/cuda-math-api/group__CUDA__MATH__INTRINSIC__INT.html)
    only says int32 & int64 arguments are supported in C code, the LLVM
    IR input supports i8, i16, i32 & i64 (LLVM doesn't have a concept of
    unsigned integers, just unsigned operations on integers).
    http://docs.nvidia.com/cuda/nvvm-ir-spec/index.html#bit-manipulations-intrinics
    """
    compiled = cuda.jit("void(int32[:], uint32)")(simple_clz)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x00100000)
    self.assertEqual(ary[0], 11)

def test_clz_i4_1s(self):
    """All bits set -> zero leading zeros."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_clz)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0xFFFFFFFF)
    self.assertEqual(ary[0], 0)

def test_clz_i4_0s(self):
    """Zero input -> all 32 bits are leading zeros (CUDA semantics)."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_clz)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x0)
    self.assertEqual(ary[0], 32, "CUDA semantics")

@skip_on_cudasim('only get given a Python "int", assumes 32 bits')
def test_clz_i8(self):
    """64-bit input: bit 16 set -> 47 leading zeros."""
    compiled = cuda.jit("void(int32[:], int64)")(simple_clz)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x000000000010000)
    self.assertEqual(ary[0], 47)
def test_ffs_i4(self):
    """ffs returns the 1-based position of the lowest set bit of an int32."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_ffs)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x00100000)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(ary[0], 21)
    compiled[1, 1](ary, 0x80000000)
    self.assertEqual(ary[0], 32)

def test_ffs_u4(self):
    """Same as test_ffs_i4 but with an unsigned 32-bit argument."""
    compiled = cuda.jit("void(int32[:], uint32)")(simple_ffs)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x00100000)
    self.assertEqual(ary[0], 21)
    compiled[1, 1](ary, 0x80000000)
    self.assertEqual(ary[0], 32)

def test_ffs_i4_1s(self):
    """All bits set -> lowest set bit is position 1."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_ffs)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0xFFFFFFFF)
    self.assertEqual(ary[0], 1)

def test_ffs_i4_0s(self):
    """Zero input -> ffs is 0 (no set bit)."""
    compiled = cuda.jit("void(int32[:], int32)")(simple_ffs)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x0)
    self.assertEqual(ary[0], 0)

@skip_on_cudasim('only get given a Python "int", assumes 32 bits')
def test_ffs_i8(self):
    """64-bit input, including a bit above the 32-bit range."""
    compiled = cuda.jit("void(int32[:], int64)")(simple_ffs)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary, 0x000000000010000)
    self.assertEqual(ary[0], 17)
    compiled[1, 1](ary, 0x100000000)
    self.assertEqual(ary[0], 33)
def test_simple_laneid(self):
    """Each thread's laneid is its index within a 32-thread warp, so two
    warps yield the pattern 0..31 repeated twice."""
    compiled = cuda.jit("void(int32[:])")(simple_laneid)
    count = 2
    ary = np.zeros(count * 32, dtype=np.int32)
    exp = np.tile(np.arange(32, dtype=np.int32), count)
    compiled[1, count * 32](ary)
    self.assertTrue(np.all(ary == exp))

def test_simple_warpsize(self):
    """cuda.warpsize is 32 per CUDA semantics."""
    compiled = cuda.jit("void(int32[:])")(simple_warpsize)
    ary = np.zeros(1, dtype=np.int32)
    compiled[1, 1](ary)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(ary[0], 32, "CUDA semantics")
def test_round_f4(self):
    """round() of a float32 matches Python's round (round-half-to-even)."""
    compiled = cuda.jit("void(int64[:], float32)")(simple_round)
    ary = np.zeros(1, dtype=np.int64)
    for i in [-3.0, -2.5, -2.25, -1.5, 1.5, 2.25, 2.5, 2.75]:
        compiled[1, 1](ary, i)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual instead.
        self.assertEqual(ary[0], round(i))

def test_round_f8(self):
    """round() of a float64 matches Python's round (round-half-to-even)."""
    compiled = cuda.jit("void(int64[:], float64)")(simple_round)
    ary = np.zeros(1, dtype=np.int64)
    for i in [-3.0, -2.5, -2.25, -1.5, 1.5, 2.25, 2.5, 2.75]:
        compiled[1, 1](ary, i)
        self.assertEqual(ary[0], round(i))
def test_round_to_f4(self):
    """round(val, ndigits) on float32 matches Python for random values and
    the special values inf, -inf and nan."""
    compiled = cuda.jit("void(float32[:], float32, int32)")(simple_round_to)
    ary = np.zeros(1, dtype=np.float32)
    np.random.seed(123)
    vals = np.random.random(32).astype(np.float32)
    # Bug fix: the result of np.concatenate was previously discarded, so
    # the special values were never actually exercised. Keep dtype float32
    # so the comparison precision is unchanged.
    vals = np.concatenate((vals, np.array([np.inf, -np.inf, np.nan],
                                          dtype=np.float32)))
    digits = (
        # Common case branch of round_to_impl
        -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5,
        # The algorithm currently implemented can only round to 13 digits
        # with single precision. Note that this doesn't trigger the
        # "overflow safe" branch of the implementation, which can only be
        # hit when using double precision.
        13
    )
    for val, ndigits in itertools.product(vals, digits):
        with self.subTest(val=val, ndigits=ndigits):
            compiled[1, 1](ary, val, ndigits)
            self.assertPreciseEqual(ary[0], round(val, ndigits),
                                    prec='single')
# CPython on most platforms rounds via dtoa.c, while the CUDA round-to
# implementation follows CPython's fallback algorithm, which behaves
# slightly differently at the edges of the domain. The simulator executes
# on CPython itself, so this test must be skipped there.
@skip_on_cudasim('Overflow behavior differs on CPython')
def test_round_to_f4_overflow(self):
    """The input must come back unchanged when y in round_ndigits overflows."""
    kernel = cuda.jit("void(float32[:], float32, int32)")(simple_round_to)
    out = np.zeros(1, dtype=np.float32)
    largest = np.finfo(np.float32).max
    # The typing computes y as a float64, so an unusually large digit
    # count is needed to make it overflow.
    digit_count = 300
    kernel[1, 1](out, largest, digit_count)
    self.assertEqual(out[0], largest)
def test_round_to_f4_halfway(self):
    """A half-way float32 value exercises the round-to-even branch."""
    kernel = cuda.jit("void(float32[:], float32, int32)")(simple_round_to)
    out = np.zeros(1, dtype=np.float32)
    # 0.3425 rounded to 3 digits sits exactly half-way, triggering the
    # "round to even" branch of the implementation.
    halfway_value = 0.3425
    digit_count = 3
    kernel[1, 1](out, halfway_value, digit_count)
    self.assertPreciseEqual(out[0], round(halfway_value, digit_count),
                            prec='single')
def test_round_to_f8(self):
    """round(val, ndigits) on float64 matches Python exactly, for random
    values, the special values, and the "overflow safe" branch."""
    compiled = cuda.jit("void(float64[:], float64, int32)")(simple_round_to)
    ary = np.zeros(1, dtype=np.float64)
    np.random.seed(123)
    vals = np.random.random(32)
    # Bug fix: the result of np.concatenate was previously discarded, so
    # inf, -inf and nan were never actually exercised.
    vals = np.concatenate((vals, np.array([np.inf, -np.inf, np.nan])))
    digits = (-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)
    for val, ndigits in itertools.product(vals, digits):
        with self.subTest(val=val, ndigits=ndigits):
            compiled[1, 1](ary, val, ndigits)
            self.assertPreciseEqual(ary[0], round(val, ndigits),
                                    prec='exact')

    # Trigger the "overflow safe" branch of the implementation
    val = 0.12345678987654321 * 10e-15
    ndigits = 23
    with self.subTest(val=val, ndigits=ndigits):
        compiled[1, 1](ary, val, ndigits)
        self.assertPreciseEqual(ary[0], round(val, ndigits),
                                prec='double')
# Skipped on cudasim for the same reasons as test_round_to_f4 above.
@skip_on_cudasim('Overflow behavior differs on CPython')
def test_round_to_f8_overflow(self):
    """The input must come back unchanged when y in round_ndigits overflows."""
    kernel = cuda.jit("void(float64[:], float64, int32)")(simple_round_to)
    out = np.zeros(1, dtype=np.float64)
    largest = np.finfo(np.float64).max
    # Unlike the float32 variant, a modest digit count already overflows
    # y in round_ndigits here.
    digit_count = 12
    kernel[1, 1](out, largest, digit_count)
    self.assertEqual(out[0], largest)

def test_round_to_f8_halfway(self):
    """A half-way value representable only in float64 exercises the
    round-to-even branch at double precision."""
    kernel = cuda.jit("void(float64[:], float64, int32)")(simple_round_to)
    out = np.zeros(1, dtype=np.float64)
    halfway_value = 0.5425
    digit_count = 3
    kernel[1, 1](out, halfway_value, digit_count)
    self.assertPreciseEqual(out[0], round(halfway_value, digit_count),
                            prec='double')
# Allow running this test file directly (outside the Numba test runner).
if __name__ == '__main__':
    unittest.main()
| 34.267045 | 90 | 0.577848 |
9711f1393ff56670e6a9f73fac8aa90b49363fcc | 3,422 | py | Python | selfdrive/car/toyota/radar_interface.py | Orbbec-3DPG/openpilot | 27e0adc434906e37fd7674160a0a8c2dd5c3c180 | [
"MIT"
] | 41 | 2018-07-30T17:36:37.000Z | 2021-04-05T13:24:19.000Z | selfdrive/car/toyota/radar_interface.py | Orbbec-3DPG/openpilot | 27e0adc434906e37fd7674160a0a8c2dd5c3c180 | [
"MIT"
] | 14 | 2018-06-18T17:47:59.000Z | 2019-01-10T00:18:00.000Z | selfdrive/car/toyota/radar_interface.py | Orbbec-3DPG/openpilot | 27e0adc434906e37fd7674160a0a8c2dd5c3c180 | [
"MIT"
] | 29 | 2018-07-31T07:43:25.000Z | 2020-08-30T16:44:27.000Z | #!/usr/bin/env python
import os
import zmq
import time
from selfdrive.can.parser import CANParser
from cereal import car
from common.realtime import sec_since_boot
from selfdrive.services import service_list
import selfdrive.messaging as messaging
from selfdrive.car.toyota.values import NO_DSU_CAR
# CAN message IDs for the Toyota radar: the "A" messages (0x210-0x21f)
# carry per-track measurements, the "B" messages (0x220-0x22f) carry the
# matching track scores (B id = A id + 16).
RADAR_A_MSGS = list(range(0x210, 0x220))
RADAR_B_MSGS = list(range(0x220, 0x230))


def _create_radard_can_parser():
    """Build a CANParser for the radar messages on CAN bus 1.

    For each "A" message the parser extracts LONG_DIST (default 255 ==
    no measurement), NEW_TRACK, LAT_DIST, REL_SPEED and VALID; for each
    "B" message it extracts SCORE. Every message is expected at 20Hz.
    """
    dbc_f = 'toyota_prius_2017_adas.dbc'
    msg_a_n = len(RADAR_A_MSGS)
    msg_b_n = len(RADAR_B_MSGS)
    # (signal name, message id, default value) triples, zipped column-wise.
    signals = zip(['LONG_DIST'] * msg_a_n + ['NEW_TRACK'] * msg_a_n + ['LAT_DIST'] * msg_a_n +
                  ['REL_SPEED'] * msg_a_n + ['VALID'] * msg_a_n + ['SCORE'] * msg_b_n,
                  RADAR_A_MSGS * 5 + RADAR_B_MSGS,
                  [255] * msg_a_n + [1] * msg_a_n + [0] * msg_a_n + [0] * msg_a_n + [0] * msg_a_n + [0] * msg_b_n)
    # (message id, expected frequency in Hz) pairs for timeout checking.
    checks = zip(RADAR_A_MSGS + RADAR_B_MSGS, [20]*(msg_a_n + msg_b_n))
    return CANParser(os.path.splitext(dbc_f)[0], signals, checks, 1)
class RadarInterface(object):
    """Reads Toyota radar CAN messages and publishes them as RadarState.

    Tracks are keyed by their "A" message id; a per-track hit counter
    (`valid_cnt`) debounces noisy VALID flags.
    """

    def __init__(self, CP):
        # radar
        self.pts = {}                                     # active tracks, keyed by "A" message id
        self.valid_cnt = {key: 0 for key in RADAR_A_MSGS} # debounce counter per track
        self.track_id = 0                                 # monotonically increasing track id
        self.delay = 0.0  # Delay of radar
        self.rcp = _create_radard_can_parser()
        # Cars without a DSU have no adas CAN messages to parse yet.
        self.no_dsu_car = CP.carFingerprint in NO_DSU_CAR
        context = zmq.Context()
        self.logcan = messaging.sub_sock(context, service_list['can'].port)

    def update(self):
        """Poll the CAN parser until a full radar frame arrived and return
        a populated car.RadarState message."""
        ret = car.RadarState.new_message()
        if self.no_dsu_car:
            # TODO: make a adas dbc file for dsu-less models
            time.sleep(0.05)
            return ret
        canMonoTimes = []
        updated_messages = set()
        # Accumulate CAN updates until the last "B" message of the frame
        # has been seen, which marks the end of a radar cycle.
        while 1:
            tm = int(sec_since_boot() * 1e9)
            updated_messages.update(self.rcp.update(tm, True))
            if RADAR_B_MSGS[-1] in updated_messages:
                break
        errors = []
        if not self.rcp.can_valid:
            errors.append("commIssue")
        ret.errors = errors
        ret.canMonoTimes = canMonoTimes
        for ii in updated_messages:
            if ii in RADAR_A_MSGS:
                cpt = self.rcp.vl[ii]
                # LONG_DIST == 255 means "no measurement" for this slot.
                if cpt['LONG_DIST'] >=255 or cpt['NEW_TRACK']:
                    self.valid_cnt[ii] = 0  # reset counter
                if cpt['VALID'] and cpt['LONG_DIST'] < 255:
                    self.valid_cnt[ii] += 1
                else:
                    self.valid_cnt[ii] = max(self.valid_cnt[ii] -1, 0)
                # The matching "B" message (id + 16) carries this track's score.
                score = self.rcp.vl[ii+16]['SCORE']
                # print ii, self.valid_cnt[ii], score, cpt['VALID'], cpt['LONG_DIST'], cpt['LAT_DIST']

                # radar point only valid if it's a valid measurement and score is above 50
                if cpt['VALID'] or (score > 50 and cpt['LONG_DIST'] < 255 and self.valid_cnt[ii] > 0):
                    if ii not in self.pts or cpt['NEW_TRACK']:
                        self.pts[ii] = car.RadarState.RadarPoint.new_message()
                        self.pts[ii].trackId = self.track_id
                        self.track_id += 1
                    self.pts[ii].dRel = cpt['LONG_DIST']  # from front of car
                    self.pts[ii].yRel = -cpt['LAT_DIST']  # in car frame's y axis, left is positive
                    self.pts[ii].vRel = cpt['REL_SPEED']
                    self.pts[ii].aRel = float('nan')
                    self.pts[ii].yvRel = float('nan')
                    self.pts[ii].measured = bool(cpt['VALID'])
                else:
                    # Track lost: drop it from the active set.
                    if ii in self.pts:
                        del self.pts[ii]
        ret.points = self.pts.values()
        return ret
if __name__ == "__main__":
RI = RadarInterface(None)
while 1:
ret = RI.update()
print(chr(27) + "[2J")
print(ret)
| 31.685185 | 112 | 0.622735 |
c90e6f1e84e410b6925321e475251b91bbf5c7cf | 7,183 | py | Python | train.py | mikweeh/pyperview | 3360ec318f74b434b3b21f22e798242d7862a38c | [
"MIT"
] | null | null | null | train.py | mikweeh/pyperview | 3360ec318f74b434b3b21f22e798242d7862a38c | [
"MIT"
] | null | null | null | train.py | mikweeh/pyperview | 3360ec318f74b434b3b21f22e798242d7862a38c | [
"MIT"
] | null | null | null | import argparse
import json
import os
import pickle
import sys
import sagemaker_containers
import pandas as pd
import torch
import torch.optim as optim
import torch.utils.data
from model import LSTMClassifier
def model_fn(model_dir):
    """Reconstruct the trained LSTMClassifier saved under ``model_dir``.

    Reads three artifacts written by the training job: model_info.pth
    (constructor hyperparameters), model.pth (trained weights) and
    word_dict.pkl (vocabulary), then returns the model in eval mode on
    the best available device.
    """
    print("Loading model.")

    # Hyperparameters needed to rebuild the network architecture.
    with open(os.path.join(model_dir, 'model_info.pth'), 'rb') as f:
        model_info = torch.load(f)
    print("model_info: {}".format(model_info))

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = LSTMClassifier(model_info['embedding_dim'],
                           model_info['hidden_dim'],
                           model_info['vocab_size'])

    # Trained weights.
    with open(os.path.join(model_dir, 'model.pth'), 'rb') as f:
        model.load_state_dict(torch.load(f))

    # Vocabulary used to convert review text to integer sequences.
    with open(os.path.join(model_dir, 'word_dict.pkl'), 'rb') as f:
        model.word_dict = pickle.load(f)

    model.to(device).eval()
    print("Done loading model.")
    return model
def _get_train_data_loader(batch_size, training_dir):
    """Build a DataLoader over train.csv: label in column 0, features after."""
    print("Get train data loader.")
    csv_path = os.path.join(training_dir, "train.csv")
    frame = pd.read_csv(csv_path, header=None, names=None)
    # Column 0 holds the binary label; the remaining columns are the
    # integer-encoded review words.
    labels = torch.from_numpy(frame[[0]].values).float().squeeze()
    features = torch.from_numpy(frame.drop([0], axis=1).values).long()
    dataset = torch.utils.data.TensorDataset(features, labels)
    return torch.utils.data.DataLoader(dataset, batch_size=batch_size)
def train(model, train_loader, epochs, optimizer, loss_fn, device):
    """
    This is the training method that is called by the PyTorch training script. The parameters
    passed are as follows:
    model - The PyTorch model that we wish to train.
    train_loader - The PyTorch DataLoader that should be used during training.
    epochs - The total number of epochs to train for.
    optimizer - The optimizer to use during training.
    loss_fn - The loss function used for training.
    device - Where the model and data should be loaded (gpu or cpu).
    """
    for epoch in range(1, epochs + 1):
        model.train()
        total_loss = 0
        for batch_X, batch_y in train_loader:
            batch_X = batch_X.to(device)
            batch_y = batch_y.to(device)

            # Gradients accumulate across backward passes, so clear them
            # before computing new ones.
            optimizer.zero_grad()

            # Call the model instance rather than model.forward() so that
            # any registered forward/backward hooks are honoured (the
            # original called .forward() directly, which bypasses them).
            output = model(batch_X)

            # Compute the loss, backpropagate and update the parameters.
            loss = loss_fn(output, batch_y)
            loss.backward()
            optimizer.step()

            # loss.item() replaces the legacy loss.data.item() access.
            total_loss += loss.item()
        print("Epoch: {}, BCELoss: {}".format(epoch, total_loss / len(train_loader)))
if __name__ == '__main__':
    # All of the model parameters and training parameters are sent as arguments when the script
    # is executed. Here we set up an argument parser to easily access the parameters.
    parser = argparse.ArgumentParser()

    # Training Parameters
    parser.add_argument('--batch-size', type=int, default=512, metavar='N',
                        help='input batch size for training (default: 512)')
    parser.add_argument('--epochs', type=int, default=10, metavar='N',
                        help='number of epochs to train (default: 10)')
    parser.add_argument('--seed', type=int, default=1, metavar='S',
                        help='random seed (default: 1)')

    # Model Parameters
    parser.add_argument('--embedding_dim', type=int, default=32, metavar='N',
                        help='size of the word embeddings (default: 32)')
    parser.add_argument('--hidden_dim', type=int, default=100, metavar='N',
                        help='size of the hidden dimension (default: 100)')
    parser.add_argument('--vocab_size', type=int, default=5000, metavar='N',
                        help='size of the vocabulary (default: 5000)')

    # SageMaker Parameters -- defaults come from environment variables that
    # the SageMaker training container injects before launching the script.
    parser.add_argument('--hosts', type=list, default=json.loads(os.environ['SM_HOSTS']))
    parser.add_argument('--current-host', type=str, default=os.environ['SM_CURRENT_HOST'])
    parser.add_argument('--model-dir', type=str, default=os.environ['SM_MODEL_DIR'])
    parser.add_argument('--data-dir', type=str, default=os.environ['SM_CHANNEL_TRAINING'])
    parser.add_argument('--num-gpus', type=int, default=os.environ['SM_NUM_GPUS'])

    args = parser.parse_args()

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print("Using device {}.".format(device))

    torch.manual_seed(args.seed)

    # Load the training data.
    train_loader = _get_train_data_loader(args.batch_size, args.data_dir)

    # Build the model.
    model = LSTMClassifier(args.embedding_dim, args.hidden_dim, args.vocab_size).to(device)

    # Attach the vocabulary produced during preprocessing.
    with open(os.path.join(args.data_dir, "word_dict.pkl"), "rb") as f:
        model.word_dict = pickle.load(f)

    print("Model loaded with embedding_dim {}, hidden_dim {}, vocab_size {}.".format(
        args.embedding_dim, args.hidden_dim, args.vocab_size
    ))

    # Train the model.
    optimizer = optim.Adam(model.parameters())
    loss_fn = torch.nn.BCELoss()

    train(model, train_loader, args.epochs, optimizer, loss_fn, device)

    # Save the parameters used to construct the model
    model_info_path = os.path.join(args.model_dir, 'model_info.pth')
    with open(model_info_path, 'wb') as f:
        model_info = {
            'embedding_dim': args.embedding_dim,
            'hidden_dim': args.hidden_dim,
            'vocab_size': args.vocab_size,
        }
        torch.save(model_info, f)

    # Save the word_dict
    word_dict_path = os.path.join(args.model_dir, 'word_dict.pkl')
    with open(word_dict_path, 'wb') as f:
        pickle.dump(model.word_dict, f)

    # Save the model parameters
    model_path = os.path.join(args.model_dir, 'model.pth')
    with open(model_path, 'wb') as f:
        torch.save(model.cpu().state_dict(), f)
| 38.61828 | 107 | 0.653348 |
ec5e89434fc166ed3ee1439727f390f38cf029dc | 8,736 | py | Python | service/tests/unittests/testExceptions.py | ehenneken/metrics_service | b910e57f3a923637a71049b96ffa4ec11ec1fdea | [
"MIT"
] | null | null | null | service/tests/unittests/testExceptions.py | ehenneken/metrics_service | b910e57f3a923637a71049b96ffa4ec11ec1fdea | [
"MIT"
] | null | null | null | service/tests/unittests/testExceptions.py | ehenneken/metrics_service | b910e57f3a923637a71049b96ffa4ec11ec1fdea | [
"MIT"
] | null | null | null | import sys
import os
PROJECT_HOME = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../'))
sys.path.append(PROJECT_HOME)
from flask.ext.testing import TestCase
from flask import request
from flask import url_for, Flask
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean
from sqlalchemy.dialects import postgresql
import glob
from datetime import datetime
from math import sqrt
import itertools
import unittest
import requests
import time
import app
import json
import httpretty
import mock
from models import db, Bind, MetricsModel
# Canonical set of bibcodes used as input throughout these tests.
testset = ['1997ZGlGl..33..173H', '1997BoLMe..85..475M',
           '1997BoLMe..85...81M', '2014bbmb.book..243K', '2012opsa.book..253H']
# Import the JSON document with expected results
results_file = "%s/tests/unittests/testdata/expected_results" % PROJECT_HOME
with open(results_file) as data_file:
    expected_results = json.load(data_file)
# The mockdata to be returned by the Solr mock, which is supposed
# to return just the bibcodes for our test set
mockdata = [
    {'id': '1', 'bibcode': '1997ZGlGl..33..173H'},
    {'id': '2', 'bibcode': '1997BoLMe..85..475M'},
    {'id': '3', 'bibcode': '1997BoLMe..85...81M'},
    {'id': '4', 'bibcode': '2014bbmb.book..243K'},
    {'id': '5', 'bibcode': '2012opsa.book..253H', }
]
def get_test_data(bibcodes=None):
    """Load stub MetricsModel records from the testdata JSON files,
    optionally restricted to *bibcodes*, sorted by citation count
    (descending)."""
    # The service validates that reads/downloads have one entry per year
    # from 1996 through the current year, so generate exactly that many
    # (the stub JSON files never change, hence the dynamic length here).
    current_year = datetime.now().year
    Nentries = current_year - 1996 + 1
    records = []
    for dfile in glob.glob("%s/tests/unittests/testdata/*.json" % PROJECT_HOME):
        with open(dfile) as data_file:
            data = json.load(data_file)
        if bibcodes and data['bibcode'] not in bibcodes:
            continue
        records.append(MetricsModel(
            id=data['id'],
            bibcode=data['bibcode'],
            refereed=data['refereed'],
            rn_citation_data=data['rn_citation_data'],
            downloads=[1] * Nentries,
            reads=[1] * Nentries,
            refereed_citation_num=data['refereed_citation_num'],
            citation_num=data['citation_num'],
            citations=data['citations'],
            refereed_citations=data['refereed_citations'],
            author_num=data['author_num'],
        ))
    return sorted(records, key=lambda rec: rec.citation_num, reverse=True)
# INTERNAL EXCEPTIONS
#
# exceptions generated by calling method with conflicting data
#
class TestUnknownMetricsType(TestCase):
    '''Check exception when requesting unknown metrics type'''

    def create_app(self):
        '''Create the wsgi application'''
        flask_app = app.create_app()
        # Stub out both database sessions so they return the stub records.
        db.session = mock.Mock()
        db.metrics = mock.Mock()
        db.session.execute.return_value = get_test_data(bibcodes=testset)
        db.metrics.execute.return_value = get_test_data(bibcodes=testset)
        return flask_app

    def test_get_unknown_metrics_type(self):
        '''When no metrics types are specified an exception is thrown'''
        from metrics import generate_metrics
        # An unknown metrics type should return an empty dictionary
        res = generate_metrics(bibcodes=testset, metrics_types=[])
        self.assertEqual(res, {})
class TestNoIdentiersFound(TestCase):
    '''Check exception when no identifiers are found'''

    def create_app(self):
        '''Create the wsgi application'''
        flask_app = app.create_app()
        # Stub out both database sessions so they return no records at all.
        db.session = mock.Mock()
        db.metrics = mock.Mock()
        db.session.execute.return_value = []
        db.metrics.execute.return_value = []
        return flask_app

    def test_no_identifiers_found(self):
        '''When no identifiers are found an exception is thrown'''
        from metrics import generate_metrics
        # No identifiers (i.e. no records found in database) should return
        # an empty dictionary
        res = generate_metrics(bibcodes=testset, metrics_types=[])
        self.assertEqual(res, {})
class TestNoRecordInfoFound(TestCase):
    '''Check exception when no record info is found'''

    def create_app(self):
        '''Create the wsgi application'''
        app_ = app.create_app()
        # The database mocks return no records for any query.
        db.session = mock.Mock()
        db.metrics = mock.Mock()
        exe = db.session.execute
        mtr = db.metrics.execute
        exe.return_value = []
        mtr.return_value = []
        return app_

    def test_illegal_retrieval_method(self):
        '''No record info is found when an unsupported retrieval method
        is specified'''
        from metrics import get_record_info
        data = get_record_info(other="foo")
        expected = {'Status Code': 200,
                    'Error Info': 'Unsupported metrics request',
                    'Error': 'Unable to get results!'}
        self.assertEqual(data, expected)

    @httpretty.activate
    def test_solr_failure(self):
        '''No record info is found because Solr failed to return results'''
        from metrics import get_record_info
        httpretty.register_uri(
            httpretty.GET, self.app.config.get('METRICS_SOLRQUERY_URL'),
            content_type='application/json',
            status=500,
            body="""{
            "responseHeader":{
            "status":0, "QTime":0,
            "params":{ "fl":"bibcode", "indent":"true", "wt":"json", "q":"*"}},
            "response":{"numFound":0,"start":0,"docs":[]
            }}""")
        data = get_record_info(bibcodes=None, query="foo")
        # assertEqual/assertIn report the offending values on failure,
        # unlike assertTrue on a boolean comparison expression.
        self.assertEqual(data['Status Code'], 500)
        self.assertIn('Error', data)
# EXTERNAL EXCEPTIONS
#
# exceptions generated by calling the endpoint with problematic data
#
class TestBadRequests(TestCase):
    '''Tests that no or too many submitted bibcodes result in the
    proper responses'''

    def create_app(self):
        '''Create the wsgi application'''
        app_ = app.create_app()
        return app_

    def testEmptyBibcodeListSubmitted(self):
        '''When an empty list of bibcodes is submitted an error should
        be returned'''
        r = self.client.post(
            url_for('metrics'),
            content_type='application/json',
            data=json.dumps({'bibcodes': []}))
        # assertEqual/assertIn report the offending values on failure,
        # unlike assertTrue on a boolean comparison expression.
        self.assertEqual(r.status_code, 403)
        self.assertIn('Error', r.json)
        self.assertEqual(r.json.get('Error'), 'Unable to get results!')

    def testTooManyBibcodes(self):
        '''When more than the maximum input bibcodes are submitted an error
        should be returned'''
        bibcodes = ["bibcode"] * \
            (self.app.config.get('METRICS_MAX_SUBMITTED') + 1)
        r = self.client.post(
            url_for('metrics'),
            content_type='application/json',
            data=json.dumps({'bibcodes': bibcodes}))
        self.assertEqual(r.status_code, 403)
        self.assertIn('Error', r.json)
        self.assertEqual(r.json.get('Error'), 'Unable to get results!')

    def testNothingSubmitted(self):
        '''When no bibcodes nor a query is submitted an error should
        be returned'''
        r = self.client.post(
            url_for('metrics'),
            content_type='application/json',
            data=json.dumps({}))
        self.assertEqual(r.status_code, 403)
        self.assertIn('Error', r.json)
        self.assertEqual(r.json.get('Error'), 'Unable to get results!')
class TestMetricsSingleInvalidBibcode(TestCase):
    '''Check getting exception for a single bibcode'''

    def create_app(self):
        '''Create the wsgi application'''
        app_ = app.create_app()
        # The database mocks return no records for any query.
        db.session = mock.Mock()
        db.metrics = mock.Mock()
        exe = db.session.execute
        mtr = db.metrics.execute
        exe.return_value = []
        mtr.return_value = []
        return app_

    def test_get_metrics_single_invalid_bibcode(self):
        '''Test getting exception for a single bibcode'''
        url = url_for('pubmetrics', bibcode='foo')
        r = self.client.get(url)
        # The response should have a status code 200.
        # assertEqual/assertIn report the offending values on failure,
        # unlike assertTrue on a boolean comparison expression.
        self.assertEqual(r.status_code, 200)
        self.assertIn('Error', r.json)
        self.assertEqual(r.json.get('Error'), 'Unable to get results!')
# Run this module's tests directly with verbose output.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 34.529644 | 79 | 0.632898 |
cee75e3085623490095d97d1193e88d8d1e175b2 | 121 | py | Python | class and objects/Getting Parents classes of a class.py | ZephyrAveryl777/Python-Programs | 26de85c31af28382d406d27d54186b966a7b1bfc | [
"MIT"
] | 6 | 2020-08-13T11:49:29.000Z | 2021-03-07T05:46:17.000Z | class and objects/Getting Parents classes of a class.py | ZephyrAveryl777/Python-Programs | 26de85c31af28382d406d27d54186b966a7b1bfc | [
"MIT"
] | null | null | null | class and objects/Getting Parents classes of a class.py | ZephyrAveryl777/Python-Programs | 26de85c31af28382d406d27d54186b966a7b1bfc | [
"MIT"
# Minimal multiple-inheritance demo: C derives from both A and B, and
# C.__bases__ lists its direct parent classes in declaration order.
class A(object):
    pass


class B(object):
    pass


class C(A, B):
    pass


print(f'Parent Classes are: {C.__bases__}')
2a601ea707a20e3e958d11a989829c8062d1aef9 | 2,034 | py | Python | matplotlib_examples/examples_src/pylab_examples/mri_with_eeg.py | xzlmark/webspider | 133c620c65aa45abea1718b0dada09618c2115bf | [
"Apache-2.0"
] | 3 | 2020-04-09T02:35:26.000Z | 2021-02-27T17:00:21.000Z | matplotlib_examples/examples_src/pylab_examples/mri_with_eeg.py | colorworlds/webspider | 133c620c65aa45abea1718b0dada09618c2115bf | [
"Apache-2.0"
] | null | null | null | matplotlib_examples/examples_src/pylab_examples/mri_with_eeg.py | colorworlds/webspider | 133c620c65aa45abea1718b0dada09618c2115bf | [
"Apache-2.0"
"""Displays a set of subplots with an MRI image, its intensity histogram and
some EEG traces.
"""
from __future__ import division, print_function

import numpy as np

import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
import matplotlib.cm as cm
from matplotlib.collections import LineCollection
from matplotlib.ticker import MultipleLocator

fig = plt.figure("MRI_with_EEG")

# Load the MRI data (256x256 16 bit integers)
dfile = cbook.get_sample_data('s1045.ima.gz')
# np.fromstring is deprecated for binary input; np.frombuffer interprets
# the bytes directly (astype then makes a writable float copy).
im = np.frombuffer(dfile.read(), np.uint16).astype(float)
im.shape = (256, 256)
dfile.close()

# Plot the MRI image
ax0 = fig.add_subplot(2, 2, 1)
ax0.imshow(im, cmap=cm.gray)
ax0.axis('off')

# Plot the histogram of MRI intensity
ax1 = fig.add_subplot(2, 2, 2)
im = np.ravel(im)
im = im[np.nonzero(im)]  # Ignore the background
im = im / (2**16 - 1)  # Normalize
ax1.hist(im, bins=100)
ax1.xaxis.set_major_locator(MultipleLocator(0.4))
ax1.minorticks_on()
ax1.set_yticks([])
ax1.set_xlabel('Intensity (a.u.)')
ax1.set_ylabel('MRI density')

# Load the EEG data
numSamples, numRows = 800, 4
eegfile = cbook.get_sample_data('eeg.dat', asfileobj=False)
print('Loading EEG %s' % eegfile)
data = np.fromfile(eegfile, dtype=float)
data.shape = (numSamples, numRows)
t = 10.0 * np.arange(numSamples) / numSamples

# Plot the EEG
ticklocs = []
ax2 = fig.add_subplot(2, 1, 2)
ax2.set_xlim(0, 10)
ax2.set_xticks(np.arange(10))
dmin = data.min()
dmax = data.max()
dr = (dmax - dmin) * 0.7  # Crowd them a bit.
y0 = dmin
y1 = (numRows - 1) * dr + dmax
ax2.set_ylim(y0, y1)

# One line segment per EEG channel, vertically offset by `dr` each.
segs = []
for i in range(numRows):
    segs.append(np.hstack((t[:, np.newaxis], data[:, i, np.newaxis])))
    ticklocs.append(i * dr)

offsets = np.zeros((numRows, 2), dtype=float)
offsets[:, 1] = ticklocs

lines = LineCollection(segs, offsets=offsets, transOffset=None)
ax2.add_collection(lines)

# Set the yticks to use axes coordinates on the y axis
ax2.set_yticks(ticklocs)
ax2.set_yticklabels(['PG3', 'PG5', 'PG7', 'PG9'])

ax2.set_xlabel('Time (s)')

plt.tight_layout()
plt.show()
| 25.425 | 76 | 0.713864 |
8b6b0f9bfcc2c84236e512ae26b01ef6ff7ac3cd | 23,530 | py | Python | tasks-deploy/dfa/generate.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | tasks-deploy/dfa/generate.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | tasks-deploy/dfa/generate.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | TITLE = "Старый друг"
STATEMENT_TEMPLATE = '''
Только что увиделись с моим давним другом.
Он не богат, да и не красавец. Но говорит, что всегда носит с собой автомат.
Как ни странно, оружия я у него не увидел. Удивился, переспросил.
Он лишь усмехнулся и дал мне флешку с этим [файлом](/static/files/10k5ze383e/{}).
'''
def generate(context):
    """Build the task statement for the participant's assigned team.

    The participant id is mapped onto one of the pre-generated team
    entries; the entry's file id is substituted into the statement link.
    """
    participant = context['participant']
    index = participant.id % len(team_data)
    team_id, _flag = team_data[index]
    return TaskStatement(TITLE, STATEMENT_TEMPLATE.format(team_id))
team_data = [('BVoEHO6B0T2vLnU8ZJrW', 'QCTF{EEdkidEEd1}'), ('jvnBmvXDi1Wqdr47RRZ2', 'QCTF{ZZfO0fZZfx}'), ('TAwSsItDMe0SCvedzm2V', 'QCTF{mm9W29mm9C}'), ('9zT32btaHf3lXUS1oJ6l', 'QCTF{CCXbyXCCX7}'), ('jRUU5y8NAWf9hEvKcWuf', 'QCTF{JJKpfKJJKx}'), ('W9l3ze5AkdqdpdLE58Pv', 'QCTF{oojzcjooje}'), ('sPl3VJ36Ux79s1Y60OGP', 'QCTF{ddqiRqddqu}'), ('LmO4PuPsHwpNzHjsFaWL', 'QCTF{IIz5lzIIzf}'), ('04K0LfSOe7xjWN1quP4A', 'QCTF{fftZBtfftD}'), ('rf4aTErDjcQ3TBF3DpuG', 'QCTF{dd84G8dd87}'), ('K6oMsJHgF0l7Q16tGg7F', 'QCTF{VV15p1VV1f}'), ('L3B6pLuA4M31uslzce2t', 'QCTF{00zPpz00zs}'), ('gMvxoMHWHMn2nYAZJCCD', 'QCTF{GGHqmHGGHk}'), ('4SCBlSAqAgSExyY72ihx', 'QCTF{yyZM7ZyyZT}'), ('hQSjBsF78sLmWDI3Xv5U', 'QCTF{88O74O88Oa}'), ('lWdqCUBsna8QgLgwGesC', 'QCTF{ttzU6zttzL}'), ('Rtt4qgy5KYK1kpB0QfIB', 'QCTF{88lQrl88lY}'), ('GbmRK5Fdkme6k3xLnCMl', 'QCTF{VVvDHvVVvK}'), ('lqbwM1UixPaBIzLGWNq9', 'QCTF{00gh6g00gf}'), ('TjdmAybaPAKuqw3XJt6r', 'QCTF{ooxyjxooxQ}'), ('8w9xvfTZP4HPaULgChzT', 'QCTF{88b4hb88bj}'), ('SUDhfqL3Q6TTaz5Ru3bB', 'QCTF{bbIYuIbbID}'), ('yLyDcBzrHVVT6g1e6lJb', 'QCTF{OO3wZ3OO3t}'), ('OUYhHW85rzQlirGsaoKn', 'QCTF{uuidViuuiG}'), ('TBKgSq3FvLMhuIvEE89m', 'QCTF{mmDO2DmmDY}'), ('TP7dAyrpT9lCw36IGthA', 'QCTF{AAT5jTAATq}'), ('kxRaGy9T7SwZkoAxOqP6', 'QCTF{88zV7z88zy}'), ('Mc8TcobTYLMM3YoQN8H4', 'QCTF{77PVMP77P3}'), ('7xvoNvhkkzYDdX0HPaRl', 'QCTF{BBn5knBBnc}'), ('0ay3Y6rzcQBshw5VQPXY', 'QCTF{NNkH9kNNkR}'), ('yZiFdczrFox0ejDMyAk7', 'QCTF{99n5Wn99nc}'), ('mzJtCOAsHuBJvBVBQQaw', 'QCTF{LLUuMULLUn}'), ('1kYOoSMu02exN16SdxMw', 'QCTF{EEMtPMEEM8}'), ('HQ4pSoy7TTVMbGETgc0s', 'QCTF{WWoJHoWWoO}'), ('8PlKezjTKJVCmUTAv4i2', 'QCTF{zz5xs5zz5A}'), ('ys77XYvzY1rww4NzJGmE', 'QCTF{ddQOxQddQW}'), ('HsZoLuHTcRwvqvynNaR4', 'QCTF{eeEwcEeeEf}'), ('fTA80F0yny9KgrRHbo6I', 'QCTF{DD0l70DD0y}'), ('pVvlF5CmMGvXgpT42kgY', 'QCTF{RR9MK9RR9y}'), ('YjbZ4egMLIaFOzkw6D1K', 'QCTF{55eIOe55eT}'), ('YXEGWBeO1ZkN1NysgmoH', 'QCTF{tta2Patta1}'), ('UkeaPfBpkMXLAJB6fVuC', 'QCTF{mmajKammaJ}'), ('Bt4A1F6TLCZPaJq17HDo', 'QCTF{00xfpx00xq}'), 
('8wyorVdBddCDxJEguil7', 'QCTF{nnB7vBnnBD}'), ('f70h8EtgxJ3XNFvOqCIu', 'QCTF{uu35e3uu3t}'), ('O6XcWLzVjdBJnk9bGp3k', 'QCTF{LLpo5pLLpP}'), ('wyc5aNDMd38nZ0qj6s6s', 'QCTF{iiHUTHiiHn}'), ('iDyPPB7MBX7zZ8uP6Q5g', 'QCTF{55zWhz55z4}'), ('IJ3PxNSAjLcXdSP27I2z', 'QCTF{88GODG88GY}'), ('PxILA49UaDgKX8vkeP7i', 'QCTF{uuLRnLuuLy}'), ('VHuoZ5S01T6opJYGPfor', 'QCTF{77qQOq77qa}'), ('IpMbHdS2tGP902leZDYa', 'QCTF{xxa3eaxxan}'), ('I6OP04wjQOo4qgkx2E62', 'QCTF{GGyDzyGGys}'), ('5brbhNOl29oMuzdax09K', 'QCTF{RRG5dGRRGA}'), ('oxV5JhnQQvh5QPPPncd9', 'QCTF{33saFs33sV}'), ('k0uh9qBqPlnXUbtvcUgO', 'QCTF{WWAk5AWWAj}'), ('PQIVd0cpACdUM1BBAeP2', 'QCTF{ddnUCnddng}'), ('AQg80ei0nIcN2liAOVM0', 'QCTF{TT1SW1TT1Q}'), ('F33TiACepW7stoxVpZQf', 'QCTF{JJ3gj3JJ3c}'), ('4LI1bjabUXmrqqxFxuwQ', 'QCTF{NNuTSuNNu2}'), ('FHtXqm645U51uiBXtgK6', 'QCTF{MM3qd3MM37}'), ('6LMs0RU77a5KmsWyaoBP', 'QCTF{88YWJY88YM}'), ('q2fvuBDWOSy0hLHmXsc8', 'QCTF{ppGmVGppGx}'), ('w44v2941Nkz2QZhR0RJV', 'QCTF{ppayEappaV}'), ('Sj9Q8kHHLhjDtuMOogua', 'QCTF{OOxz7xOOxe}'), ('seyh7pi6fsApMLEIkStT', 'QCTF{HHVjqVHHVu}'), ('Wu1VaUnTADRBuy0hnJil', 'QCTF{aaquGqaaqP}'), ('js7Hyk9mvD6Xi2RWA99F', 'QCTF{RR3QG3RR3V}'), ('pO0b2XbPR9gdhCz9XoI2', 'QCTF{JJvMmvJJvL}'), ('yJ4cU0QZo5vuElrF7v8r', 'QCTF{PPHIOHPPHd}'), ('vt42lHYUmvSnuDwmwxbM', 'QCTF{nnwYfwnnwV}'), ('9U8LMdT4bnZSMbx3jU8U', 'QCTF{77EioE77E2}'), ('xKiJxbku3tvZKPUhQNfr', 'QCTF{hhaLNahhao}'), ('M92NOCMxaGg3Xtsti6a7', 'QCTF{hhMRGMhhMv}'), ('4rIVI1PBzVJYt4jlQc4z', 'QCTF{66ynfy66yh}'), ('32cnsJRmLNCh4qMcq7y9', 'QCTF{llyGNyllyv}'), ('xn86QoJ4huBluFesg4zE', 'QCTF{wwt5WtwwtF}'), ('iMSF77ytsoJpsWwfVLrV', 'QCTF{VVXZnXVVXj}'), ('dPys3SynsKKFkHA6jEr5', 'QCTF{ggbJXbggbY}'), ('Qnif44RSLbnOMDTDe1V9', 'QCTF{SStTNtSStG}'), ('Qg3hnXebmvKuYG9uSstf', 'QCTF{XXrDirXXrv}'), ('f15ANlWaXeSU5MmrMaOX', 'QCTF{443dy3443b}'), ('DBR9bvglLEtXR6xUQw8S', 'QCTF{iiGHaGiiGh}'), ('7HcZd9zysjDlsSoLPvCZ', 'QCTF{yyWJQWyyWI}'), ('7pjFU3PaqGhvy2qNQClz', 'QCTF{eebY8beebn}'), ('ElkNHEp3bUDdW68iM9ho', 'QCTF{FF6XJ6FF64}'), 
('GlRH4zxSoBTu9AvwkV82', 'QCTF{ccfmvfccft}'), ('9CLWMfw2VSsNYrI7rRn7', 'QCTF{FFgU9gFFgV}'), ('adU1NLeCcl12Tzb6HPTN', 'QCTF{II0Ol0II0U}'), ('Fb2QrOmvinAbj64Hw09J', 'QCTF{55qJZq55qU}'), ('XbVTu0NwPueYAURW8jIP', 'QCTF{cc1ZC1cc1g}'), ('WF98C52dHqXUD1SezC9y', 'QCTF{FFPvHPFFP6}'), ('WsIwAEFojhR7D8eo4Cnq', 'QCTF{XXaZ3aXXai}'), ('5XCdNcSipn9Z1jPSt1PY', 'QCTF{GG2I12GG29}'), ('W2SZtUCmjruXwgHsPRaB', 'QCTF{eezxSzeezI}'), ('OCpich9jAlNR5a5J5t0D', 'QCTF{nnBdzBnnBS}'), ('eqwjuugzmV6K0GVbTyfl', 'QCTF{UUO0lOUUOh}'), ('c4OXiSrmGWFWyscwM57d', 'QCTF{ppMUvMppM0}'), ('rNxWb1CKkMB8S0RMsTHF', 'QCTF{ZZlqilZZlU}'), ('5ZP0j7p8Aohjw75nEhnV', 'QCTF{jj2IY2jj2v}'), ('TF5I1B4TJNXqlQ1Zvy1T', 'QCTF{uuIA2IuuIk}'), ('XeBCAhK86G2Bgk6od6xR', 'QCTF{llzUSzllz2}'), ('QmyYfada3z77BfQTvY9R', 'QCTF{ffChZCffCJ}'), ('fphwMtrTEkw3DcgoL49o', 'QCTF{33BpOB33Bn}'), ('sVNEHMbZbHFEXxeyP3nu', 'QCTF{OOMZtMOOMH}'), ('NPwwASMBj2U7eJLG4B2e', 'QCTF{WWpXkpWWp6}'), ('V4cGUYjvitKmn3bC0uYE', 'QCTF{ppWqKWppWJ}'), ('lUnxedURTau8ZBSjpqgV', 'QCTF{UUjBVjUUjE}'), ('o86TaeOOdYeTrAiRn3yJ', 'QCTF{BBQd6QBBQm}'), ('jcyGHBSuHb5zrriFgqsT', 'QCTF{JJWalWJJWO}'), ('7fiESMsd7mP4jOKXHX4k', 'QCTF{KK9fd9KK9a}'), ('H2bPHJZpcnzH92K1rFfh', 'QCTF{TTzxQzTTzp}'), ('OeiQFnt5gpYlzv3S2S7W', 'QCTF{MMqU0qMMqa}'), ('08ejxTofjQrKkey7yvCL', 'QCTF{ttv6hvttvF}'), ('3C96wmZhaaeDSLhyt48k', 'QCTF{GGRA9RGGRC}'), ('Cup5K3SKcBCPmFfZlwCR', 'QCTF{UUGz8GUUGR}'), ('SLLhnnAoS62rqK4FsPu3', 'QCTF{CCLiALCCLZ}'), ('aitSFeVLsm1pmWkPhWJv', 'QCTF{KKMDBMKKMV}'), ('3B8YiC4IKZGYLkeOsdhN', 'QCTF{zzn6Nnzznk}'), ('HWXTSwLbELM31Hp6cJmS', 'QCTF{22za6z22zT}'), ('CNtTYkni4GjILHpkcmIv', 'QCTF{ppqx7qppqa}'), ('UPjjkjyOGQYxo8A8Aa74', 'QCTF{ppqv4qppqT}'), ('jBAB4GrDn6EUqVE7VLBf', 'QCTF{88LkSL88LE}'), ('rMlNCZhMgbiJZmcfS4Ww', 'QCTF{YYRkDRYYRb}'), ('H8J5vZr8v5j40PIAFSfb', 'QCTF{ccfJDfccfe}'), ('5S4Jfi7g75OAwlUMIeSF', 'QCTF{bbFe6FbbFm}'), ('YXuCX4H25Pr10v3QHTdF', 'QCTF{22LIvL22Ld}'), ('zS4qusBAc7yRAcgFjjEI', 'QCTF{ddU48UddUj}'), ('qH6rbGgtx0feIUcJ0d1r', 'QCTF{ggCFJCggC3}'), 
('wzoc9SnCdHQnAOBXn5Fq', 'QCTF{zzjHFjzzje}'), ('WsJOlFUUq4QrDl588lZ9', 'QCTF{33LzHL33LP}'), ('cE79r4ZLyQ6xAqWUhd06', 'QCTF{kkOToOkkON}'), ('1TbnQGhRqafJJfEMM8A7', 'QCTF{ttZsVZttZn}'), ('2gIuhe5QHkpEv01iEFae', 'QCTF{YY2gs2YY21}'), ('mGieerNpQ73yldls72ax', 'QCTF{RR1Nk1RR1n}'), ('wn9s1FPXeKSnrw1lH7pf', 'QCTF{yyOr0OyyOG}'), ('32WKePKnQ71jrwLXdJe7', 'QCTF{OOTZUTOOTy}'), ('Fa279g56C4mgsUxpkRVZ', 'QCTF{SSCXFCSSC0}'), ('37vNwFNkZKaFrNlUPbt0', 'QCTF{WWeZgeWWem}'), ('Z2qrPIq1YCVX0GgserOR', 'QCTF{99SyFS99SA}'), ('wQMQVZ5hToSqn0VT4ZiG', 'QCTF{ddK5TKddKc}'), ('X8QwUAs24dfW6SYtZsfM', 'QCTF{qqbwAbqqb3}'), ('EykYrEjbmDby9ONW97fM', 'QCTF{nno9Lonnow}'), ('OW2vcRyjE3qgdskOhsGX', 'QCTF{GGFEUFGGFp}'), ('jnAJSAcfU1x8mZALmFfM', 'QCTF{11nkUn11nP}'), ('nxl3cqP9ufzilY70r2gX', 'QCTF{ZZF2pFZZF5}'), ('dNhYXsaemRlkShWPJGQ1', 'QCTF{ssfdbfssf4}'), ('RI7Y2Vkm15f575Do91OG', 'QCTF{ttebGetteH}'), ('oBr6O339GMTRVK3AVGDh', 'QCTF{II57e5II5q}'), ('6gUAXUt2dJteazkZ2N49', 'QCTF{qqjtLjqqjy}'), ('yQGCX1eIdgMmzRTNmjBX', 'QCTF{22LquL22LN}'), ('8DJeFPP39m25k5imBmTi', 'QCTF{VVThtTVVTB}'), ('7oourXaDDFPcxMPIie1k', 'QCTF{NNx0dxNNxU}'), ('0PtPI3fw5VfqOghsg3XD', 'QCTF{00MwXM00MW}'), ('6VvvfvAtlzTiHfOQQ901', 'QCTF{33lu0l33l5}'), ('AUsNfX2whskVl7RMnsoa', 'QCTF{JJdiudJJdR}'), ('KFaitHo5r5SitXGaJsvl', 'QCTF{88FOcF88FA}'), ('GgccjsAizZLquYtDCGrR', 'QCTF{wwmU9mwwm6}'), ('4t2k1hY8qnMhpMxh4YK1', 'QCTF{bb0U40bb0K}'), ('hghW6pbFfyhe880MW4QF', 'QCTF{QQFB3FQQFJ}'), ('Bur6DZYRCVrvh8pHRLAb', 'QCTF{hhoZXohho1}'), ('2A0247ADUdatoFeaMsaQ', 'QCTF{II9xt9II9o}'), ('CU7Zlt7Yu2PIdcJZKceD', 'QCTF{mm6TH6mm6J}'), ('zSDG4uKSELexSfzykhVk', 'QCTF{YYN6cNYYNr}'), ('GfL7FfEDBI2sXHX27SOf', 'QCTF{KKILkIKKIa}'), ('pMcHKPiAX5KwQa6vcG6p', 'QCTF{77JIYJ77Js}'), ('GtVHDHMhcR5iXXbouypk', 'QCTF{uutrytuutL}'), ('4Tud3wDLCuuQT3MuCv93', 'QCTF{nnR28RnnRG}'), ('j9tQHIqbxse0tdjSbelW', 'QCTF{XXLRiLXXLh}'), ('KnztarOW0Pl1Z9BUGASF', 'QCTF{22YTdY22YA}'), ('YkueEqn76zhDCewNXn60', 'QCTF{EEAY8AEEAm}'), ('rEd623G7M81dtIitPxjW', 'QCTF{YYDrODYYDf}'), 
('RtRPoSkzPHnnHWuTgE6S', 'QCTF{XXM7GMXXMV}'), ('Drk3WEr10V52NRazy6NB', 'QCTF{BBHKUHBBHY}'), ('PZ8D18Dff7FYktLK8bPo', 'QCTF{ppXKlXppX4}'), ('ZMWaLjOoIfqkzH3UCqse', 'QCTF{xxZDEZxxZ0}'), ('1tfbx2LhtjYX6DMDHKmf', 'QCTF{WWzJhzWWzG}'), ('u9nX6wGDCVjt6nkaopsi', 'QCTF{JJgAKgJJgE}'), ('aPDtCJujjb6ld9lBKVD8', 'QCTF{BBIoOIBBIA}'), ('iHohNx8kk2jvg5t3sjUt', 'QCTF{PP1wU1PP18}'), ('h9YDgTj0KCcgoBf3GfNT', 'QCTF{mmbRQbmmbd}'), ('IyHGzIgw9km2oL5sm5gH', 'QCTF{VVHeoHVVHz}'), ('HJuU8IaOOdrTyYD38vBl', 'QCTF{iiEsSEiiEu}'), ('BL8ZwNdSwWaiWjBt0JQq', 'QCTF{JJ4Eo4JJ4L}'), ('OFMVJk8irVqPCJroUFA9', 'QCTF{iiSEvSiiSr}'), ('PbcgCeZvqTk4AjZ084zJ', 'QCTF{1157H51158}'), ('xmpGTv8SXwavetLAB8Po', 'QCTF{ssI7BIssIU}'), ('dNtBkLS30sn7wSvVDTFA', 'QCTF{kk9wv9kk9l}'), ('1n0U3LNp0VkR0DxhMEob', 'QCTF{ttRy5RttRf}'), ('Nti6euwkQfDAPJgT9G8x', 'QCTF{ffoKnoffoD}'), ('VKw4PnzmuVdWRAyN6OtX', 'QCTF{KKyJXyKKyC}'), ('CkeE9VSFNX20JGBC2DRR', 'QCTF{ggKdnKggKA}'), ('X2G54t3qeeIW4GGy86Iy', 'QCTF{77MFiM77MO}'), ('IBxgaqocX5yMoV59B1zf', 'QCTF{eeVsqVeeVJ}'), ('egvQH0nuRmRlbiE9aLMx', 'QCTF{kkTt7TkkTK}'), ('7CdGn2Nztd53DmrXKkGw', 'QCTF{llS5QSllSx}'), ('fImVFPaABU1aDuWExIgP', 'QCTF{66WONW66Wx}'), ('MVH3C1qQ1gM1rdgDPJS4', 'QCTF{BBaVpaBBaU}'), ('0KcK1AcuYD8nJyaP22m4', 'QCTF{zzXxFXzzXM}'), ('YWer8Ht4RA2KY8sqJ5LF', 'QCTF{JJUgoUJJUI}'), ('K9PQQj5PInfzmJUxUO3Y', 'QCTF{IIQe5QIIQz}'), ('oLxdsqCFqP4IF75f6Ozt', 'QCTF{EE1mo1EE1l}'), ('dybgZeH6nAgCsEPQjWwf', 'QCTF{bb1Js1bb1u}'), ('ORVhEGcX3sCpPmBFxBVq', 'QCTF{ppxmUxppxt}'), ('QVawaBcTN0qVJjGJEfST', 'QCTF{yyoXQoyyof}'), ('06EK3Tbggz7XWsLqrKpQ', 'QCTF{ff3BO3ff3C}'), ('2HQvRkFYImcRCliXJ1FI', 'QCTF{xxN4GNxxNn}'), ('w22FmqGfmScf37I1RWaH', 'QCTF{ZZidXiZZi2}'), ('NJPs7dkXZha5Kenoz0Cw', 'QCTF{zzG53GzzGw}'), ('h8qHGA8N9Zy4XbShO6JC', 'QCTF{JJfhPfJJfN}'), ('CYl5ravT8jzZS6kwhzRN', 'QCTF{NNmHJmNNmK}'), ('GfQQtyaPC9oF1fH4EUwM', 'QCTF{88pLMp88pz}'), ('epDSkY7rgGDUQR8BP6m2', 'QCTF{44fbnf44fB}'), ('PC3sFACvCn0sFoAuHm3U', 'QCTF{YYJtLJYYJ2}'), ('wTosHOsMfrw7cQgryx1c', 'QCTF{44MQjM44Mo}'), 
('OVD3AoTkNs97O938ny3d', 'QCTF{EEVeKVEEVx}'), ('b3rCWlNzYHzhlLDd6FU4', 'QCTF{55GQgG55Gk}'), ('ZbW7H5R6x8xjcfLoqTWo', 'QCTF{44H9zH44Hn}'), ('98YxzbDqqbIv2uPq8EIe', 'QCTF{hhHNYHhhHo}'), ('yIquikXTqz5CdAvykIax', 'QCTF{GG4fr4GG4h}'), ('xCG5DTKx1KdFbYmC4xyz', 'QCTF{11h8Nh11hO}'), ('co8N9ZUI7NmUe6Fpwklp', 'QCTF{iivGYviiv8}'), ('EAxZYjAdbJgGSOzkYJd5', 'QCTF{SSfMufSSfx}'), ('Xasfh3cH4MYabvNoEiNA', 'QCTF{22grUg22gx}'), ('tZyGtoYcvZpa7v5cQn8e', 'QCTF{GGzCnzGGzk}'), ('YB4YmyZCNYwYcBvWcZXu', 'QCTF{OOHZRHOOHr}'), ('l9oXtcska0241JEY4Np6', 'QCTF{KKTaFTKKTV}'), ('YDWfSgOjbD4f1yv1voMG', 'QCTF{llqB0qllqI}'), ('9rEiyxsLtE1exuNxc7QS', 'QCTF{KKnkCnKKnJ}'), ('hqauap5Bj8C4Jyp0BRyD', 'QCTF{GGD8IDGGDr}'), ('AeiwCGipjzmyyDttx5XN', 'QCTF{QQV9HVQQVN}'), ('vAN4BhofQ6ZO4azFisBV', 'QCTF{qqKUCKqqKO}'), ('gS3k2gf3hSnlTvoGVfCn', 'QCTF{NNDYHDNNDU}'), ('10HZwo4itycx7jjru9Yc', 'QCTF{ppiCRippit}'), ('zKfLClwtpWzGCNNyFUNU', 'QCTF{33EVcE33EC}'), ('ZXYi8e546WYkwkfFhlMl', 'QCTF{qq3fU3qq3t}'), ('xFOSQSDNdZfoRXunSCLA', 'QCTF{BBDLKDBBDC}'), ('iwzhMBB5E8lue5Ef9nOp', 'QCTF{11KS2K11KM}'), ('ECwjM6e7KaP1XNwVzJcj', 'QCTF{oocYZcooc4}'), ('ZaAi3BnF74P4RjJQYTJb', 'QCTF{SSrZFrSSrl}'), ('3hW8DALZkItGbYYybbXR', 'QCTF{CCAKzACCAM}'), ('tnsNmeGBtxLLJCWnGTzV', 'QCTF{wwU14UwwUc}'), ('JdMthkyKpsZF1ldlovGa', 'QCTF{ggq1MqggqW}'), ('ZowTuEVTtKNpxBuMtokW', 'QCTF{kk0HR0kk0C}'), ('xFDkwLzYTFA2aNvQGrgO', 'QCTF{DDJHuJDDJr}'), ('KAwRQcSkz7BMdFtHIvl4', 'QCTF{hhb5jbhhbx}'), ('ZHQvHhFkhi2Ndrn1f0Tl', 'QCTF{JJ7Np7JJ7X}'), ('xYx3ohapC74AQbSWurUu', 'QCTF{bbyUvybbyu}'), ('I3qDQvP2EIN1hmkCfPFG', 'QCTF{33KMtK33Kl}'), ('PtN7Q1fSCXSgxxZwAjoO', 'QCTF{ddMheMddMn}'), ('wuPumHE4MwDcnn5iXAdH', 'QCTF{88ZKeZ88ZF}'), ('z6z0FRRp2GYo3rKcTY4x', 'QCTF{UUoGNoUUoZ}'), ('neeFA9AUdeF7uTP64vO4', 'QCTF{ttFbjFttF1}'), ('DCq1VwUhoB1FOlhubkga', 'QCTF{JJaOZaJJaf}'), ('zZHAJPNAAneYcKAwBUHu', 'QCTF{PPQrtQPPQB}'), ('R6BNwLBAMW9VbWK0yLMt', 'QCTF{zzBWCBzzBY}'), ('iwqd8e9AGx3Fo0cZfzMc', 'QCTF{MM7V67MM7t}'), ('Nk8XOaDEjYslYQ2Hy81Z', 'QCTF{DDxrexDDxO}'), 
('J30yjXQ85lHeq4P6eASG', 'QCTF{MMHgCHMMHA}'), ('g4HcLSwRJuLuiTS66RMi', 'QCTF{YYWZEWYYWJ}'), ('F14fksku6bHL4Cnqhpht', 'QCTF{77MzVM77M1}'), ('InIMpPZu7KCUcIpS2n4n', 'QCTF{AAouzoAAo6}'), ('g68xW6UaaF5v0eZDr6Te', 'QCTF{JJuFHuJJuR}'), ('bE2fWLvBTJiiqopmWTC9', 'QCTF{992qY2992o}'), ('AHMV3t3D2zIIdFh8YJqB', 'QCTF{66mHwm66mA}'), ('YZ6DiGyrLpA0mmnOC7VE', 'QCTF{ffp0epffpm}'), ('72WhzqDEl3Pm0Uw3AcN9', 'QCTF{yydotdyydE}'), ('9CJso0wEXZ969xUTfmmP', 'QCTF{ffzYuzffzJ}'), ('1zJqcWLDucjmNKYjHxdN', 'QCTF{UUDkPDUUDQ}'), ('Vh0XzKUq8p0Wr6dweC6z', 'QCTF{JJSlRSJJSm}'), ('rhKnPMF4cOPOXtmAHbWu', 'QCTF{ww7Dy7ww7h}'), ('JS1gwwJ0agqPjcTXPJKi', 'QCTF{88FxGF88FP}'), ('MxU6rH4nAHPIfcqSh1NP', 'QCTF{ll2Ue2ll2t}'), ('e2js8REgKtPYjPnokt2U', 'QCTF{TTylZyTTy0}'), ('cz2gojIiSUKuPKMkOaUl', 'QCTF{DD5nE5DD5h}'), ('Th5x7cQJmR8TkysYGmkz', 'QCTF{KKeNbeKKe9}'), ('1lUKV79KVsbl3MjHRnoy', 'QCTF{rrqwxqrrqX}'), ('TG4Leo5hDNledzCleSU5', 'QCTF{22xXfx22x1}'), ('RvczmR0dADsbK2FWbHrr', 'QCTF{ccn0Fnccnw}'), ('ZGzMB6GKthgAPEtN9DVM', 'QCTF{hhEvFEhhEn}'), ('HWsVJ3oJDq57J9Mp5VbQ', 'QCTF{YY1kr1YY1o}'), ('QhVAamGv7O1zwBNLf1WN', 'QCTF{44SC3S44Sy}'), ('ykCyUa6LgbGOHjaJBHP6', 'QCTF{ll3uj3ll3V}'), ('TgiHKlc96zZ1kVyiqXuF', 'QCTF{CCTwYTCCTj}'), ('uAnJsf0V6Vp9C9knfmyV', 'QCTF{22JTSJ22Jg}'), ('KVBhO5MlkoeFHZQ4TAer', 'QCTF{llEfTEllEj}'), ('U43nrYNgpdmWZpG4d1M0', 'QCTF{88LNwL88Ln}'), ('vGO9hJ9byYAmg2oTIsMt', 'QCTF{446Np6446t}'), ('F42xRqe22m5MktYrGCmz', 'QCTF{yyE9bEyyE3}'), ('Nmhu6YEx7HgR6EL88g0i', 'QCTF{99JCxJ99Jo}'), ('2T6Lkx0Mg2o6Cl5Wwtl6', 'QCTF{uuIdNIuuIS}'), ('GoeUihZHvNLIfq7sGvH1', 'QCTF{CCoz7oCCoP}'), ('VMmkxF0pE5uitg5y8Qhj', 'QCTF{AAzH2zAAze}'), ('OtQjW4DlRjwzAVPRbePe', 'QCTF{GGcBscGGcT}'), ('v3EQkbvN24CujT3I6k4R', 'QCTF{WW3TR3WW3Y}'), ('T0bvI96MH68ys3hWmD0V', 'QCTF{PPwI9wPPwY}'), ('ToZoVUv8e1k2CVzWetEt', 'QCTF{ssNuGNssNW}'), ('71FmXzn3l6H77Iry6Ca8', 'QCTF{vvwdDwvvwc}'), ('OR6kSMEJEWaLib7pZ2SF', 'QCTF{yyUv9UyyUb}'), ('AprANYeEUiQrbuXhQENv', 'QCTF{TTdhfdTTdB}'), ('yzQPupRavyO7imClWQSt', 'QCTF{kk4Y14kk4y}'), 
('2KXBxpzaarGgEy07X4XW', 'QCTF{XXSxDSXXSJ}'), ('VpBueWyedd9pnE5qdzWv', 'QCTF{66tTYt66tq}'), ('1gihk5CsdmzNTrqW0QSL', 'QCTF{KKPOTPKKPx}'), ('imyg9epvKLfBCk0GCxQR', 'QCTF{BBpHLpBBpi}'), ('z2CmTWZsLVsZ8GAhZyHP', 'QCTF{NN8D58NN8S}'), ('0r9KVFzYUhvArgZYnOTj', 'QCTF{JJryNrJJrs}'), ('OyASXax1ckrzLgKE69y5', 'QCTF{uuvLdvuuvr}'), ('LMwADrVRKeVVWvcQU90c', 'QCTF{44ZA8Z44Z2}'), ('rcRpnlwiHRqDWMLnjZgp', 'QCTF{BBlDMlBBlP}'), ('N9yFHtd0yLGCUxKvzYCK', 'QCTF{ffA7dAffAS}'), ('yeuijpFeUi3PGFJT1OJf', 'QCTF{OOiG1iOOiy}'), ('n4mqxVxQHa5OouywTqpl', 'QCTF{ssEm2EssEO}'), ('w9xAKvwPl4FZYPufn1Jf', 'QCTF{iifFZfiifa}'), ('sMqa6DFNMrfhr0voojPg', 'QCTF{RRkWHkRRkh}'), ('OqHLlBWyL6gf4QYFw8B9', 'QCTF{LLDIlDLLDv}'), ('Wx9m5wlE7qid0CVaXc20', 'QCTF{ccxWTxccxy}'), ('zfaLcNdGHHYcLY0Osmz4', 'QCTF{ssYSCYssYK}'), ('Xe7gK4Sg7xpnRGOGKZ64', 'QCTF{VVheAhVVhB}'), ('PuFX29hvf3iF5ez8aiKB', 'QCTF{WWRHdRWWR4}'), ('eEZa6CLzfz8D8yktbEf2', 'QCTF{KKMvVMKKMr}'), ('sQ4jUqlYEsPcZHSVN0Xd', 'QCTF{lluasullud}'), ('bUCpstkoKoDKpjPtjiOK', 'QCTF{FFfdHfFFfy}'), ('WBfu6Ki89pAVrKRpVfrK', 'QCTF{WWHQNHWWHB}'), ('GuUIXMWvqkbDRexrBKb9', 'QCTF{PP6nk6PP6F}'), ('ho72ruumFtPvtwlMdcVZ', 'QCTF{jjl1Fljjle}'), ('mdWJd0CZ49niyUPbsXXM', 'QCTF{ddjFbjddj4}'), ('a7LpjGp4MJrny20Q0UaP', 'QCTF{bbiREibbi7}'), ('WkUe9U2Z0sz5OT593j31', 'QCTF{mmD5QDmmDa}'), ('stPw2IKS4upEceBUqEVX', 'QCTF{nnaUYanna2}'), ('Rxkgi2PZByAAXnAnndNm', 'QCTF{66P4vP66Pu}'), ('h2UFJHzEYcxXkP8azvWo', 'QCTF{nneFqenne9}'), ('w4rNwbP2s7iRgnspdqqZ', 'QCTF{PPBpqBPPB4}'), ('Cwj4xqfR43j6TjTKAcPw', 'QCTF{TTQryQTTQV}'), ('ADXU2F10mt7YEDrS4obP', 'QCTF{XXd2xdXXd0}'), ('nyYpVJqemZJv9CJOca9d', 'QCTF{99FEqF99Fy}'), ('UW43qG3bOaCPWCWTXXq7', 'QCTF{ppMmYMppMV}'), ('quPeJTPqBJT0K2Elg5U3', 'QCTF{mm75o7mm7Z}'), ('qEXVNfekuXbiE56p50In', 'QCTF{wwWbnWwwWr}'), ('1VcBLrSuVuq6X6RRW1gJ', 'QCTF{zzCDnCzzC8}'), ('oK5cWKAPTc2iuam5F4lr', 'QCTF{AAp4kpAApI}'), ('ZpLOnUveQsvo962k5vkB', 'QCTF{ccD6pDccDY}'), ('1dojRWHNdUZeyTjiRsCz', 'QCTF{UUod9oUUol}'), ('b4PHUfYWATQDF0nq5Ovy', 'QCTF{iiaPOaiiaY}'), 
('dxFRzgianTqCcq1F4j12', 'QCTF{WWGvLGWWG0}'), ('9jAhlcsbl6EzkJ7CGusJ', 'QCTF{ppjoyjppjq}'), ('V58JhD42eHeGyoa0GD8Q', 'QCTF{tt8ZR8tt8O}'), ('SxEPogXdnZBkKorKA1wR', 'QCTF{UU6Nj6UU61}'), ('POSeQHMio4OhMpgfueRC', 'QCTF{aa2iW2aa2I}'), ('AcQz1Mpmrc08omSCkGub', 'QCTF{IIX8hXIIXO}'), ('7mQgFUMVPbQoLj4UbbDr', 'QCTF{WWkUFkWWk1}'), ('MaLgpgp4K5tUP1VHCDOC', 'QCTF{44STyS44SF}'), ('V5mHFNIypqiD73Ey0Ok4', 'QCTF{WW1Fx1WW1R}'), ('yQF375fx4LmNN60QmNTa', 'QCTF{ggt5btggtS}'), ('37d1TjcoGAYgdizxqlli', 'QCTF{NNDq7DNNDP}'), ('hfrTUHDweHQCnWcF80y5', 'QCTF{NNPAWPNNPO}'), ('Ccnh9o0HaE6FrseRVU4M', 'QCTF{II6Xm6II6Q}'), ('GGNoMNXd6Nh3htZJVdR5', 'QCTF{zzTV1TzzT4}'), ('sIG1feaidXJEI75MXmmi', 'QCTF{55aXFa55aq}'), ('am22eg0fMBdSVHb4N42I', 'QCTF{ddgbwgddgv}'), ('16zkOhVEDxmJQCX88eYa', 'QCTF{IIGcHGIIG3}'), ('hKqY8sHxEnOzE8gzVUqu', 'QCTF{xxuVGuxxu4}'), ('df8Z1JpoGOVyGHjDg5rQ', 'QCTF{JJtT9tJJty}'), ('CZi6aHssXKHFcXkEGAWZ', 'QCTF{SSR3MRSSRi}'), ('cr2nkPHfAd1RoFjWYdQH', 'QCTF{66DMxD66D8}'), ('MD4JZXnXRjMYuTPBIXNd', 'QCTF{RRFlxFRRFa}'), ('ItAUbHO1o5myaakZycyA', 'QCTF{ggFEJFggFK}'), ('HArEXFpJoJbZVV5pGICD', 'QCTF{QQESPEQQEh}'), ('rBz5C2l0mhIpss72W0xW', 'QCTF{SSBlzBSSBf}'), ('SDkqKB1ATpUzSQdp7C9q', 'QCTF{ffBlhBffBi}'), ('z4Q9uTd3h0EpdDs80gzu', 'QCTF{ggRj9RggRW}'), ('wyqO8s07ECfRr3T5vvio', 'QCTF{nnBDpBnnBX}'), ('tSi5NS9dtcx6ppafNlgW', 'QCTF{vvoQ3ovvoA}'), ('QhsIgccL1GFLOac0NyJl', 'QCTF{nnfZUfnnfm}'), ('ZGVBLDutda5U2HGEXzCC', 'QCTF{22HD4H22H1}'), ('is8t1YUaSokoNpm9DEYP', 'QCTF{oo72u7oo7y}'), ('VZAqVnWqae9UDQhQT3N0', 'QCTF{LL4lB4LL4M}'), ('xI7bsw4gh8gYPjO9rhO6', 'QCTF{AACGxCAACB}'), ('ksl7ZAro3FxsbK5RYtbV', 'QCTF{nnMvSMnnME}'), ('N4jxyROQ6lbvfvMGM7PR', 'QCTF{PPX7yXPPX2}'), ('NoEqR5wU1SYmWYafJMP9', 'QCTF{66wy3w66wa}'), ('69hhuFhtkYpM5bSA2As4', 'QCTF{33EONE33E4}'), ('rLTsIVbFROhv8JCEwwj1', 'QCTF{eeXLMXeeXH}'), ('s4KBjh5IBvgvZSe7xQv0', 'QCTF{hhJGVJhhJX}'), ('FQmxQjXRjpo0YGUJDFFF', 'QCTF{EESzCSEES0}'), ('eKylRIq57ZJPESNCxxob', 'QCTF{MMrtWrMMrQ}'), ('tmKS1LlLP8XUOWU3dcyJ', 'QCTF{00stws00si}'), 
('2aDSvuWnOiku7A3K6J9W', 'QCTF{VV4SK4VV41}'), ('GXQ8DoxpKRohN28Z2h9m', 'QCTF{xx6cd6xx6h}'), ('gRQ7Lv86qypeiA2ylk1w', 'QCTF{vv9oO9vv92}'), ('cNGuEZ565Adpxbk1fL8S', 'QCTF{uudYFduudl}'), ('ekxMLzRU9f89kgjyOpah', 'QCTF{uuiofiuui8}'), ('esGLqL55xb1bJEVegLFq', 'QCTF{RR4qE4RR4B}'), ('iXvWFWIerziJzC5sN6io', 'QCTF{nnQmOQnnQZ}'), ('KQdo7hMTJu3u7m303N4n', 'QCTF{ZZdySdZZdc}'), ('2MaWqsyr2Hk1i0dAnKds', 'QCTF{ssQafQssQW}'), ('Y2vEG4VjVShN6imYI5qa', 'QCTF{ww4H74ww4m}'), ('dhGbFA9r4SnfN5HDtQO5', 'QCTF{oo5RC5oo52}'), ('aduJRSDHm5XhRfQ3gHsx', 'QCTF{FFlDqlFFlJ}'), ('1jnfPplor6PVBR3cNI7a', 'QCTF{00GioG00Gg}'), ('UWNfp0frrqnPt8IiBjUt', 'QCTF{PPcg4cPPci}'), ('a2I8jI2ZnqH24zYdnS1X', 'QCTF{DDnrQnDDn8}'), ('e2oUSWyTDuvBhkAC6waN', 'QCTF{44awka44ac}'), ('vi5o4iu0YKGOsF3q4sOs', 'QCTF{ZZDs9DZZDT}'), ('6cy12ZnsurfNvxuFJD8b', 'QCTF{66fLNf66fU}'), ('J5vRP17xac9h2Le4aZsK', 'QCTF{BB2UN2BB28}'), ('0SXNr4NCkL864Au2rSfK', 'QCTF{ddxZzxddx2}'), ('nPN7LJSIIlCh1mX85lUe', 'QCTF{665Wl5665Y}'), ('DvoLtk7kt0pRTxT50GJm', 'QCTF{PPjJfjPPjd}'), ('SVP72tfCGaZqq6a4Db9Y', 'QCTF{ttmKPmttmB}'), ('XNTV33aENXtTHGjBQoq0', 'QCTF{88IpxI88IQ}'), ('rR36Xp1qbw4dS9bh8TYK', 'QCTF{II8Zs8II8U}'), ('c9A2hczO4eqmygshxvID', 'QCTF{33wnTw33wD}'), ('QqXrQqKjFGCCfn8xLpHc', 'QCTF{jjcYDcjjcJ}'), ('XIKnNVZpc7I6YaZSelkp', 'QCTF{CCH3eHCCHg}'), ('L04C7YvFFdI9GfMfVnPR', 'QCTF{HHlaSlHHl3}'), ('M26kVOoMShFdtdnVhdPe', 'QCTF{115sy51150}'), ('VqsHa5K1GuKzJUS2wsy0', 'QCTF{qqbE9bqqbD}'), ('1MKjzQM47LWSOlJkgWbg', 'QCTF{nnGiJGnnGc}'), ('pTWlaw54UdKJkZsdr0CF', 'QCTF{wwIC3IwwIs}'), ('6FtCraXyCCWHelfRVnTJ', 'QCTF{llHo4HllH7}'), ('HSqlfaM7yqIoYbY0V2zJ', 'QCTF{OONlJNOONc}'), ('3ESWvBuiwagLbbZSGuJS', 'QCTF{99UCaU99Ut}'), ('Priaw3PI5sMeS7gJT5QS', 'QCTF{ppl9jlpplz}'), ('yW3rpfDMlLLrOrsX7aTO', 'QCTF{ddpIApddpB}'), ('VfTvfa6ebLtzurPqlFEg', 'QCTF{YYGrxGYYGQ}'), ('U35eqwishC4i5T3kAq9I', 'QCTF{vvdajdvvdx}'), ('tlbgNzoDpDePM16BKgbS', 'QCTF{ppgoDgppge}'), ('MdvA1BD0a53q6QF6Gpoy', 'QCTF{uutSJtuutk}'), ('0lHDFLpJwvNQE6fhJ3Iu', 'QCTF{xxbifbxxbu}'), 
('9ucDWaVWX7JkzQnbJJ5h', 'QCTF{99OJeO99O3}'), ('46LIcTvWaqhpnl0VdtLu', 'QCTF{CCeEfeCCec}'), ('FgN796aOy0FQrm0tSsVZ', 'QCTF{KK6nZ6KK6h}'), ('MengcruInEUkEVBKajCj', 'QCTF{NNaIXaNNal}'), ('tOa8L8lWqqMO4pFVwthc', 'QCTF{ffbNubffbX}'), ('aZ2n9FV55pJRgE51eAiV', 'QCTF{nniD2inniA}'), ('HJW37sTE3TtzejHYCmd3', 'QCTF{vvChmCvvCM}'), ('vS8V0NfEMvwQ6WmQzt0O', 'QCTF{kk1YT1kk1i}'), ('Pwh4O1VOK4afMuvp7K3j', 'QCTF{ttfzXfttfq}'), ('3LFMMCjyQ4yM40jiRGm9', 'QCTF{WWIDoIWWIU}'), ('Hef1gouDTkdHRKDCyc17', 'QCTF{LLmcnmLLmP}'), ('zOcCtQrxRU5CDRqYo5wh', 'QCTF{ooi4Xioois}'), ('owdw3HqTgNB9T4tH1LSq', 'QCTF{MM4ZB4MM4b}'), ('WZzIqk9eBtJmaFpGiysG', 'QCTF{ll2BM2ll2j}'), ('hylBp9H4URKgP0vnKYKs', 'QCTF{88vFGv88vQ}'), ('UjxtygPh1zkapAcO6S1b', 'QCTF{ttXzaXttXL}'), ('UcboZZOT4RNokNBYa5vD', 'QCTF{CCF8AFCCF7}'), ('3qkqdNdy2akthmBeID4b', 'QCTF{ddhuAhddhz}'), ('2PSuI0P0IArlPCqLD1W2', 'QCTF{cc0Dy0cc0M}'), ('1lQJnvfFTxPE80DfmgYf', 'QCTF{kkUmzUkkUJ}'), ('pjTIzIGEhzfCEksCbIfH', 'QCTF{CCUgHUCCUV}'), ('1vFjpGczI11VqpKXtSSA', 'QCTF{bb2em2bb2k}'), ('l8zTq3IhXq2WNt5ZPTrO', 'QCTF{mmZDaZmmZ1}'), ('FZfzTgd5nfWGs95Cfohh', 'QCTF{rr5cz5rr51}'), ('tfOBvlJNwdXO7zcEhBSl', 'QCTF{AA3Ld3AA3k}'), ('6EsMRqwXphvDdNwEXxsv', 'QCTF{NNubGuNNux}'), ('nN0rUagP93FrGieaxyd4', 'QCTF{33RatR33RG}'), ('o1We9HGRompjkyoh6AnA', 'QCTF{UUpLmpUUpX}'), ('eoHRbuQIS5MiuJzP6YzW', 'QCTF{ddFMlFddFi}'), ('TBnhJQx9Ts4YP7bpT7HO', 'QCTF{xxwAtwxxwU}'), ('58NbfywZKcCzcOCcQYEM', 'QCTF{rr5nO5rr58}'), ('KevFCrBPgJKrql0y4rZs', 'QCTF{hhHQxHhhHG}'), ('Lceo5uJf6tZ55nEFr7YR', 'QCTF{11jlAj11jS}'), ('WCR6NqO3rSfrgfg5QE3H', 'QCTF{tteanettes}'), ('tKFo5M2MzCVymQDwKBQq', 'QCTF{yy64p6yy6C}'), ('xwArBVrvcs3mxidNyPn5', 'QCTF{bbH92HbbHM}'), ('0kjC2ZddAoAvgBVAd3NP', 'QCTF{AANuONAANY}'), ('2BiTIK0vGiAvf751d4XD', 'QCTF{99QD5Q99QC}'), ('lAo51enr4qUoVnk15Nd0', 'QCTF{55yKiy55yA}'), ('oIGDABbkAEOOfizxKv3i', 'QCTF{113BM3113I}'), ('b3Vwusn2OqYYz3KVlBZe', 'QCTF{XXgGZgXXgy}'), ('9np6p85yNRye7J6Gozbr', 'QCTF{LLi1uiLLi4}'), ('UY9JJnSkmyxFvt9OetSJ', 'QCTF{jjqy2qjjqs}'), 
('mouAlP3WBjIiPxwcyEyI', 'QCTF{XXj6ZjXXj8}'), ('Rp3aWgZaj8sbv941wUNB', 'QCTF{ggFxJFggFf}'), ('dR8ZLckwGmm9J99v0o96', 'QCTF{mmH4OHmmHB}'), ('SEOZNw15uQOApgueqYX0', 'QCTF{WWtYdtWWth}'), ('yy3THBbtleMoWwCwAnVL', 'QCTF{BBsDysBBs7}'), ('RTTWvhA4s6t5TgY2eiEN', 'QCTF{iig67giigq}'), ('6p10UrZoQ30i7yXwXQN4', 'QCTF{qqPVxPqqPE}'), ('fwRUIlF4nee9U7ozl1xN', 'QCTF{00bKXb00bS}'), ('1HMOjtdH4arWxQAfsLqV', 'QCTF{22McYM22Mb}'), ('VhqcUVwZdNrJXxWGOp0w', 'QCTF{rrGKCGrrGj}'), ('UL4i6auicRptM7XSZcbi', 'QCTF{wwZCcZwwZ4}'), ('Sc7QngwzI6UAv3j5L0fS', 'QCTF{DDaWEaDDaY}'), ('QzVb85BZGI8UCUHIXPZT', 'QCTF{cc1Bd1cc1v}'), ('W6b5sZL9binj5RmteWQd', 'QCTF{PP1Jk1PP1O}'), ('VOKvKMpz5tYFpsKZyirP', 'QCTF{UUlJVlUUlY}'), ('Zuv0hxRdP2X2NzcRoNbu', 'QCTF{NNv2EvNNvT}'), ('QRc6T39DRn3UMma63L39', 'QCTF{nnmd6mnnm1}'), ('dXM0mgZke7m20dhj8ebM', 'QCTF{ooXpiXooXf}'), ('gaJzlD0SGS44dO0DfDss', 'QCTF{YYw9ewYYwO}'), ('SSLZRhMejHbbtxku7pWP', 'QCTF{QQcFLcQQcW}'), ('JevgZz3i2TvBCIrY4hME', 'QCTF{XXIeaIXXIs}'), ('I0gfqVSZkT5mUhw8R72k', 'QCTF{eeAI3AeeAG}'), ('Vj4yZs9txlkrC8CW28H9', 'QCTF{ssD80DssDe}'), ('hZ24sgRUAR4JNPvUr2Ka', 'QCTF{ssWJyWssWb}'), ('XIgEB2n8Ym44J1bdvSky', 'QCTF{OO0lq0OO0a}'), ('CrzmgKMV7TcAZvnX2LF2', 'QCTF{vvnQInvvnt}'), ('mcUoJjF0xdkp6hBWuJia', 'QCTF{BBhgqhBBhj}')] | 1,384.117647 | 23,012 | 0.739099 |
cc5e302b02a97e67828a50dc4996dda870ae6919 | 8,952 | py | Python | components/isceobj/Util/ImageUtil/DemImageLib.py | vincentschut/isce2 | 1557a05b7b6a3e65abcfc32f89c982ccc9b65e3c | [
"ECL-2.0",
"Apache-2.0"
] | 1,133 | 2022-01-07T21:24:57.000Z | 2022-01-07T21:33:08.000Z | components/isceobj/Util/ImageUtil/DemImageLib.py | vincentschut/isce2 | 1557a05b7b6a3e65abcfc32f89c982ccc9b65e3c | [
"ECL-2.0",
"Apache-2.0"
] | 276 | 2019-02-10T07:18:28.000Z | 2022-03-31T21:45:55.000Z | components/isceobj/Util/ImageUtil/DemImageLib.py | vincentschut/isce2 | 1557a05b7b6a3e65abcfc32f89c982ccc9b65e3c | [
"ECL-2.0",
"Apache-2.0"
] | 235 | 2019-02-10T05:00:53.000Z | 2022-03-18T07:37:24.000Z | #
# Author: Eric Gurrola
# Date: 2016
#
import os
from isceobj import createDemImage
from iscesys.DataManager import createManager
def createDem(usnwe, info, insar, demStitcher, useHighResOnly=False, useZeroTiles=False):
    """
    Create a WGS84-referenced DEM covering the frame (plus an optional user
    bounding box) and attach it to the insar object as insar.demImage.

    The DEM is obtained from the first available of: an image already held
    by the demStitcher, files in the current directory, files in the DEMDB
    directory (when that environment variable is set), or by downloading
    and stitching new tiles.

    Parameters:
    usnwe          -- user specified (south, north, west, east) bounding box
                      in degrees, or a falsy value to use only the frame box
    info           -- frame information object providing getExtremes(pad)
    insar          -- insar container object; receives the demImage and
                      demInitFile attributes
    demStitcher    -- configured DEM stitcher/manager object
    useHighResOnly -- accept only the high resolution DEM, zero-filling
                      missing tiles (default False)
    useZeroTiles   -- proceed with zero-filled DEM tiles when tiles are
                      unavailable (default False)

    Raises Exception if no DEM can be formed for the region of interest.
    """
    # The module never defined a logger at file scope, so the original
    # error path raised NameError; create a local logger instead.
    import logging
    logger = logging.getLogger(__name__)
    # get the south, north latitude and west, east longitude extremes (snwe)
    # from the frame information with additional padding of 0.2 degrees
    snwe = info.getExtremes(0.2)
    # take the larger bounding region of the frame snwe and the user's
    # usnwe, if given: min for south/west (even indices), max for
    # north/east (odd indices)
    if usnwe:
        op1 = (min, max)
        snwe = [op1[i % 2](usnwe[i], snwe[i]) for i in range(4)]
    # round outwards (relative to the bounding box) to the nearest integer
    # latitude/longitude: floor for south/west, ceil for north/east
    import math
    op2 = (math.floor, math.ceil)
    snwe = [op2[i % 2](snwe[i]) for i in range(4)]
    # record the preference for using zeroed out tiles when a DEM tile is
    # not available
    demStitcher.proceedIfZeroDem = useZeroTiles
    # names of the dem, the wgs84 dem, and their metadata files
    demName = demStitcher.defaultName(snwe)
    demNameXml = demName + '.xml'
    wgs84demName = demName + '.wgs84'
    wgs84demNameXml = wgs84demName + '.xml'
    # save the name just in case
    insar.demInitFile = wgs84demNameXml
    # 1) the demStitcher may already hold a valid DEM image we can use
    demImage = demStitcher.image
    if demImage:
        # get the wgs84 version
        insar.demImage = get_wgs84dem(demStitcher, demImage)
        return
    # 2) a wgs84 DEM already in the local directory
    if os.path.isfile(wgs84demNameXml):
        wgs84demImage = createDemImage()
        wgs84demImage.load(wgs84demNameXml)
        insar.demImage = wgs84demImage
        return
    # ... or create one from a local non-wgs84 version
    if os.path.isfile(demNameXml):
        demImage = createDemImage()
        demImage.load(demNameXml)
        insar.demImage = get_wgs84dem(demStitcher, demImage)
        return
    # 3) the DEMDB directory: the wgs84 dem ...
    if "DEMDB" in os.environ and os.path.isfile(os.path.join(os.environ["DEMDB"], wgs84demNameXml)):
        dbwgs84dem = os.path.join(os.environ["DEMDB"], wgs84demNameXml)
        wgs84demImage = createDemImage()
        wgs84demImage.load(dbwgs84dem)
        insar.demImage = wgs84demImage
        return
    # ... or the non-wgs84 version
    if "DEMDB" in os.environ and os.path.isfile(os.path.join(os.environ["DEMDB"], demNameXml)):
        dbdem = os.path.join(os.environ["DEMDB"], demNameXml)
        demImage = createDemImage()
        demImage.load(dbdem)
        insar.demImage = get_wgs84dem(demStitcher, demImage)
        return
    # 4) finally, have the demStitcher download and stitch a new one
    if useHighResOnly:
        # use the high res DEM; fill the missing tiles
        demStitcher.noFilling = False
        stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4])
    else:
        # try the demStitcher (high resolution DEM by default) and do not
        # allow missing tiles
        demStitcher.noFilling = True
        stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4])
    # if high resolution stitching was not OK, maybe try the lower
    # resolution DEM
    if not stitchOk:
        newDemStitcher = createManager('dem3')
        # and do not allow missing tiles
        newDemStitcher.noFilling = True
        # set the preference for proceeding if the server does not return
        # a tile
        newDemStitcher.proceedIfNoServer = useZeroTiles
        # try again only if it's not already a low res instance
        if type(newDemStitcher) != type(demStitcher):
            stitchOk = newDemStitcher.stitch(snwe[0:2], snwe[2:4])
            if stitchOk:
                # if low res was ok change the stitcher to dem3
                demStitcher = newDemStitcher
    # if we cannot form a full dem with either high or low res, use
    # whatever we have with high res (allow filling)
    if not stitchOk:
        demStitcher.noFilling = False
        stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4])
    # check if stitching worked
    if stitchOk:
        # get the image
        demImage = demStitcher.image
        # set the metadatalocation and _extraFilename attributes
        demImage.metadatalocation = demImage.filename + ".xml"
        demImage._extraFilename = demImage.metadatalocation.replace(".xml", ".vrt")
        # get the wgs84 dem
        wgs84demImage = get_wgs84dem(demStitcher, demImage)
        # if there is a global store, move the dem files to it
        if "DEMDB" in os.environ and os.path.exists(os.environ["DEMDB"]):
            # modify the filenames in the metadata to include the path to
            # the global store
            # the demImage
            demImage.filename = os.path.join(os.environ["DEMDB"],
                                             demImage.filename)
            demImage.metadatalocation = os.path.join(os.environ["DEMDB"],
                                                     demImage.metadatalocation)
            demImage._extraFilename = os.path.join(os.environ["DEMDB"],
                                                   demImage._extraFilename)
            demImage.dump(demNameXml)
            # the wgs84demImage
            wgs84demImage.load(wgs84demNameXml)
            wgs84demImage.filename = os.path.join(os.environ["DEMDB"],
                                                  wgs84demImage.filename)
            wgs84demImage.metadatalocation = os.path.join(os.environ["DEMDB"],
                                                          wgs84demImage.metadatalocation)
            wgs84demImage._extraFilename = os.path.join(os.environ["DEMDB"],
                                                        wgs84demImage._extraFilename)
            wgs84demImage.dump(wgs84demNameXml)
            # remove the local demLat*.vrt files: a side effect of the
            # dump() calls above was to create the vrt at the location
            # indicated by the path in the xml file
            os.remove(demNameXml.replace('.xml', '.vrt'))
            os.remove(wgs84demNameXml.replace('.xml', '.vrt'))
            # move the dem files to the global store
            import glob
            dwlist = glob.glob(demName + "*")
            import shutil
            for dwfile in dwlist:
                shutil.move(dwfile, os.environ["DEMDB"])
        # put the wgs84demImage in the InsarProc object
        insar.demImage = wgs84demImage
        return
    # exhausted all options; ask the user for help
    msg = ("Cannot form the DEM for the region of interest. "
           "If you have one, set the appropriate DEM "
           "component in the input file.")
    logger.error(msg)
    raise Exception(msg)
def get_wgs84dem(demStitcher, demImage):
    """
    Return a WGS84-ellipsoid-referenced version of demImage.

    If demImage is referenced to EGM96, a previously corrected ``.wgs84``
    version is loaded when both its data and metadata files exist;
    otherwise the demStitcher corrects the DEM to the WGS84 ellipsoid.
    If demImage is already WGS84 it is returned unchanged.

    Raises Exception for any other datum reference.
    """
    # The module never defined a logger at file scope, so the original
    # error path raised NameError; create a local logger instead.
    import logging
    logger = logging.getLogger(__name__)
    # demImage referenced to the EGM96 geoid, as expected
    if demImage.reference.upper() == 'EGM96':
        # look for a wgs84 version of the dem with the expected name
        wgs84demName = demImage.filename + ".wgs84"
        wgs84demNameXml = wgs84demName + ".xml"
        if os.path.isfile(wgs84demName) and os.path.isfile(wgs84demNameXml):
            # create a DemImage instance and load its state
            wgs84demImage = createDemImage()
            wgs84demImage.load(wgs84demNameXml)
        else:
            # correct the dem reference to the WGS84 ellipsoid
            wgs84demImage = demStitcher.correct(demImage)
            # set the metadatalocation
            wgs84demImage.metadatalocation = wgs84demNameXml
            # set the vrt filename (even though it is not yet created)
            wgs84demImage._extraFilename = wgs84demImage.metadatalocation.replace('.xml', '.vrt')
    # demImage is already referenced to WGS84
    elif demImage.reference.upper() == 'WGS84':
        wgs84demImage = demImage
    # all expectations have been exhausted; give up
    else:
        msg = ("Cannot form the WGS84 DEM for the region of interest. "
               "If you have one, set the appropriate DEM "
               "component in the input file.")
        logger.error(msg)
        raise Exception(msg)
    # return the wgs84demImage
    return wgs84demImage
#end-of-file
| 39.964286 | 100 | 0.64723 |
9ebd24f2666d8d23ba57c62d091a8533573ca5aa | 7,223 | py | Python | merge2.py | unis369/Mytubedownload | 73011e4f18165a43f6750a4ce375885ade5f64c6 | [
"MIT"
] | null | null | null | merge2.py | unis369/Mytubedownload | 73011e4f18165a43f6750a4ce375885ade5f64c6 | [
"MIT"
] | null | null | null | merge2.py | unis369/Mytubedownload | 73011e4f18165a43f6750a4ce375885ade5f64c6 | [
"MIT"
] | null | null | null | # standard library imports
import argparse
import os
import platform
import subprocess
# third party imports
import cursor
# local library imports
from pytube import YouTube
# Module-level state shared between main(), download_media() and the
# progress callbacks (presumably onProgress/onComplete -- their bodies are
# not visible here; TODO confirm).
video_audio = 'video codec'  # label of the stream type currently downloading
resolution = ''  # resolution string of the selected video stream (e.g. '720p')
abr = ''  # audio bit rate of the selected audio stream
fps = 0  # frames per second of the selected video stream
args = {}  # replaced by an argparse.Namespace in main()
fileobj = {}  # downloaded-file bookkeeping; not used in the visible code -- TODO confirm
download_count = 1  # ordinal of the media item being downloaded
def main():
    """Parse the command line and start the download.

    Flags: -sd/-hd/-fhd pick a resolution, -a downloads audio only.
    Stores the parsed namespace in the module-global ``args`` (read later
    by the download callbacks).
    """
    global args
    global video_audio
    parser = argparse.ArgumentParser()
    parser.add_argument('url', help='指定YouTube視訊網址')
    parser.add_argument('-sd', action='store_true', help='選擇普通(480P)畫質')
    parser.add_argument('-hd', action='store_true', help='選擇HD(720P)畫質')
    parser.add_argument('-fhd', action='store_true', help='選擇Full HD(1080P)畫質')
    parser.add_argument('-a', action='store_true', help='僅下載聲音')
    args = parser.parse_args()
    video_audio = 'video codec'
    download_media(args)  # download the video codec first
def download_media(args):
    """Download one codec (video or audio) of the YouTube URL in *args*.

    Picks a stream by the requested resolution (or the best available one),
    falls back to an interactive prompt when the requested resolution does
    not exist, then downloads it into pyTube_folder().  The onProgress /
    onComplete callbacks drive the rest of the pipeline (audio download and
    merge happen from onComplete).
    """
    global video_audio, resolution, abr, fps
    print()
    try:
        yt = YouTube(args.url,
                     on_progress_callback=onProgress,
                     on_complete_callback=onComplete
                     )
    except:  # broad catch: any network/URL problem just aborts with a message
        print('下載影片時發生錯誤,請確認網路連線和YouTube網址無誤。')
        return
    filter = yt.streams.filter
    resolution_str = ''
    if args.a:  # audio-only download requested
        target = filter(type='audio').first()
    elif args.fhd:
        resolution_str = 'Full HD(1080P)'
        target = filter(type='video', resolution='1080p').first()
    elif args.hd:
        resolution_str = 'HD(720P)'
        target = filter(type='video', resolution='720p').first()
    elif args.sd:
        resolution_str = 'SD(480P)'
        target = filter(type='video', resolution='480p').first()
    else:
        # No explicit choice: try resolutions from best to worst.
        resolutions = ('1080p', '720p', '480p', '360p', '240p')
        for r in resolutions:
            target = filter(type='video', resolution=r).first()
            if target is not None:
                break
        if target is None:
            target = filter(type='video').first()
    if target is None:
        # Requested resolution unavailable: list what exists and let the
        # user pick interactively (defaults to the best one).
        print(f'沒有您指定的{resolution_str}解析度,可用的解析度如下:')
        res_list = video_res(yt)
        for i, res in enumerate(res_list):
            print(f'  {i+1}) {res}')
        print()
        val = input(f'請選擇代碼(預設1):')
        try:
            res = res_list[int(val)-1]
        except:  # non-numeric / out-of-range input falls back to default
            res = res_list[0]
        print(f'您選擇的解析度是 {res}。')
        target = filter(type='video', resolution=res).first()
    print()
    # Record the chosen stream's properties for the final summary print.
    if args.a:
        abr = target.abr
        video_audio += '(' + abr + ')'
    else:
        resolution = target.resolution
        fps = target.fps
        video_audio += '(' + resolution + ')'
    cursor.hide()
    # start the actual download (callbacks fire from here)
    target.download(output_path=pyTube_folder())
    cursor.show()
# 檔案下載的回呼函式
def onProgress(stream, chunk, remains):
    """Progress callback: print the percentage of the file fetched so far."""
    global video_audio
    size = stream.filesize
    fetched = size - remains
    percent = fetched / size * 100
    print(f'下載{video_audio}中...{percent:6.2f}%', end='\r')
# 檔案下載的回呼函式
def onComplete(stream, file_name):
    """Completion callback fired by pytube after each download finishes.

    First call (download_count == 1): the video codec is done.  Since
    check_media always reports "no audio", rename it to temp_video.mp4 and
    re-run download_media with args.a forced on to fetch the audio codec.
    Second call: rename the audio codec to temp_audio.mp4 and merge both.
    """
    global download_count, fileobj, video_audio
    fileobj['name'] = os.path.basename(file_name)
    fileobj['dir'] = os.path.dirname(file_name)
    # print('\r')
    print(file_name)
    if download_count == 1:
        if check_media(file_name) == -1:
            # print('此影片沒有聲音。')
            download_count += 1
            try:
                # rename the video codec file
                os.rename(file_name, os.path.join(
                    fileobj['dir'], 'temp_video.mp4'))
            except:
                print('視訊檔重新命名失敗。')
                return
            # print('準備下載聲音檔...')
            vars(args)['a'] = True  # force the -a flag: download audio only
            video_audio = 'audio codec'
            # print()
            # cursor.hide()
            download_media(args)  # download the audio codec
            # cursor.show()
        else:
            print('此影片有聲音,下載完畢。')
    else:
        try:
            # rename the audio codec file
            os.rename(file_name, os.path.join(
                fileobj['dir'], 'temp_audio.mp4'))
        except:
            print('聲音檔重新命名失敗。')
        # merge video + audio into the final file
        merge_media(file_name)
# 檢查影片檔是否包含聲音
def check_media(file_name):
    """Report whether the downloaded video codec contains an audio track.

    Historically this shelled out to ffprobe, but ffprobe occasionally
    mis-detected audio, which made the script skip downloading the audio
    codec and produced silent videos.  We therefore always answer
    "no audio" (-1): the caller then downloads the audio codec and merges
    it, which is harmless even when the video already had sound.
    """
    return -1
# 合併video/audio
def merge_media(file_name):
    """Merge temp_video.mp4 and temp_audio.mp4 into the final file via ffmpeg.

    Uses stream copy (no re-encode), renames the output back to the
    original download name and deletes the temp files.  Prints a summary
    with resolution/fps/abr collected by download_media.
    """
    global resolution, abr, fps
    print('video codec和audio codec合併中...')
    print()
    temp_video = os.path.join(fileobj['dir'], 'temp_video.mp4')
    temp_audio = os.path.join(fileobj['dir'], 'temp_audio.mp4')
    temp_output = os.path.join(fileobj['dir'], 'output.mp4')
    cmd = f'ffmpeg -i {temp_video} -i {temp_audio} \
        -map 0:v -map 1:a -c copy -y {temp_output}'
    try:
        subprocess.call(cmd, shell=True)
        # put the merged output under the original download name
        os.rename(temp_output, os.path.join(fileobj['dir'], fileobj['name']))
        os.remove(temp_audio)
        os.remove(temp_video)
        print()
        print()
        print('video codec和audio codec合併完成。存放路徑和檔名:')
        print(file_name)
        spaces = ' ' * 7
        print(f'\tresolution: {resolution} / fps: {fps}{spaces}abr: {abr}')
        print()
    except:  # any failure (ffmpeg missing, rename error) is reported, not raised
        print()
        print('video codec和audio codec合併失敗。')
    finally:
        print()
def pyTube_folder():
    """Return the directory downloads are saved into.

    Earlier versions picked a per-platform ~/Videos or ~/Movies/PyTube
    folder; the script now simply uses the current working directory.
    """
    return os.getcwd()
def video_res(yt):
    """Return the distinct video resolutions of *yt*, best first.

    Resolution labels such as '1080p' are sorted numerically on the value
    before the trailing 'p'.
    """
    distinct = {stream.resolution for stream in yt.streams.filter(type='video')}
    return sorted(distinct, key=lambda label: int(label[:-1]), reverse=True)
if __name__ == '__main__':
main() | 27.568702 | 79 | 0.564447 |
90b032d8b7f3f36ba120c464a2c0d0735b0fd4ec | 3,252 | py | Python | create_opencv_homography_submission_example.py | hosang/ransac-tutorial-2020-data | 52810309d8341d538e24a13577c44ae2b4a5ec77 | [
"Apache-2.0"
] | null | null | null | create_opencv_homography_submission_example.py | hosang/ransac-tutorial-2020-data | 52810309d8341d538e24a13577c44ae2b4a5ec77 | [
"Apache-2.0"
] | null | null | null | create_opencv_homography_submission_example.py | hosang/ransac-tutorial-2020-data | 52810309d8341d538e24a13577c44ae2b4a5ec77 | [
"Apache-2.0"
] | null | null | null | # select the data
import numpy as np
import h5py
import cv2
from utils import *
from metrics import *
from tqdm import tqdm
def create_cv2_submission(split = 'val', inlier_th = 3.0, match_th = 0.85, n_iter=100000):
    """Estimate a homography for every image pair with OpenCV RANSAC.

    :param split: dataset split to read ('val' or 'test')
    :param inlier_th: RANSAC reprojection (inlier) threshold, in pixels
    :param match_th: keep tentative matches with confidence <= this value
                     (printed elsewhere as the SNN ratio)
    :param n_iter: maximum RANSAC iterations
    :return: {dataset_name: {pair_key: 3x3 homography, or None on failure}}
    """
    DIR = 'homography'
    out_model = {}
    for ds in ['EVD', 'HPatchesSeq']:
        out_model[ds] = {}
        matches = load_h5(f'{DIR}/{ds}/{split}/matches.h5')
        matches_scores = load_h5(f'{DIR}/{ds}/{split}/match_conf.h5')
        for k, m in tqdm(matches.items()):
            ms = matches_scores[k].reshape(-1)
            # filter tentative correspondences by matching confidence
            mask = ms <= match_th
            tentatives = m[mask]
            # each row is (x1, y1, x2, y2): source point, destination point
            src_pts = tentatives[:,:2]
            dst_pts = tentatives[:,2:]
            H, mask_inl = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,
                                             inlier_th, maxIters=n_iter, confidence=0.9999)
            out_model[ds][k] = H
    return out_model
def evaluate_results(results_dict, split='val'):
    """Compute the reprojection error of each estimated homography.

    :param results_dict: output of create_cv2_submission
    :param split: dataset split the ground truth is read from
    :return: {dataset_name: {pair_key: mean absolute reprojection error}}
    """
    DIR = 'homography'
    MAEs = {}
    for ds in ['EVD', 'HPatchesSeq']:
        Hgt_dict = load_h5(f'{DIR}/{ds}/{split}/Hgt.h5')
        models = results_dict[ds]
        MAEs[ds] = {}
        for k, H_est in tqdm(models.items()):
            H_gt = Hgt_dict[k]
            img1, img2 = get_h_imgpair(k, ds, split)
            # error is averaged over the part of img1 visible in img2
            MAE = get_visible_part_mean_absolute_reprojection_error(img1, img2, H_gt, H_est)
            MAEs[ds][k] = MAE
    return MAEs
def grid_search_hypers_opencv(INL_THs = [0.75, 1.0, 1.5, 2.0, 3.0, 4.0],
                              MATCH_THs = [0.75, 0.8, 0.85, 0.9, 0.95]):
    """Grid-search RANSAC inlier threshold and SNN match ratio on 'val'.

    Runs create_cv2_submission for every (inlier_th, match_th) pair,
    scores it with mAA averaged over both datasets, and returns the best
    combination as ``(inl_good, match_good, max_MAA)``.
    """
    res = {}
    for inl_th in INL_THs:
        for match_th in MATCH_THs:
            # key encodes both hyperparameters; parsed back below
            key = f'{inl_th}_{match_th}'
            print (f"inlier_th = {inl_th}, snn_ration = {match_th}")
            cv2_results = create_cv2_submission(split = 'val',
                                        inlier_th = inl_th,
                                        match_th = match_th,
                                        n_iter=50000)
            MAEs = evaluate_results(cv2_results)
            mAA = calc_mAA(MAEs)
            final = (mAA['EVD'] + mAA['HPatchesSeq'])/2.0
            print (f'Validation mAA = {final}')
            res[key] = final
    max_MAA = 0
    inl_good = 0
    match_good = 0
    # pick the argmax over the grid and decode the hyperparameters
    for k, v in res.items():
        if max_MAA < v:
            max_MAA = v
            pars = k.split('_')
            match_good = float(pars[1])
            inl_good = float(pars[0])
    return inl_good, match_good, max_MAA
if __name__ == '__main__':
# Search for the best hyperparameters on the validation set
print ("Searching hypers")
inl_good, match_good, max_MAA = grid_search_hypers_opencv()
print (f"The best hyperparameters for OpenCV H RANSAC are")
print (f"inlier_th = {inl_good}, snn_ration = {match_good}. Validation mAA = {max_MAA}")
print ("Creating submission")
cv2_test_submission = create_cv2_submission(split = 'test', inlier_th = inl_good, match_th = match_good,
n_iter=50000)
for ds_name, models in cv2_test_submission.items():
save_h5(models, f'homography_opencv_{ds_name}_submission.h5')
print (f"Saved to homography_opencv_{ds_name}_submission.h5")
| 39.180723 | 110 | 0.561501 |
3f6964479476d8bc01d036d660a8b135200e0679 | 661 | py | Python | tests/dist1/setup.py | tirkarthi/humpty | 8652cf7b18a09d1a1d73465afd38581ef4e2369e | [
"BSD-3-Clause"
] | 14 | 2015-09-05T20:20:50.000Z | 2021-04-08T08:53:20.000Z | tests/dist1/setup.py | tirkarthi/humpty | 8652cf7b18a09d1a1d73465afd38581ef4e2369e | [
"BSD-3-Clause"
] | 6 | 2017-05-12T20:46:40.000Z | 2020-02-08T05:05:03.000Z | tests/dist1/setup.py | tirkarthi/humpty | 8652cf7b18a09d1a1d73465afd38581ef4e2369e | [
"BSD-3-Clause"
] | 8 | 2017-02-13T15:38:53.000Z | 2020-11-11T20:16:58.000Z | # -*- coding: utf-8 -*-
from setuptools import setup
# Distribution metadata for the dummy "dist1" package used by the test suite.
setup(
    name='dist1',
    version='0.1',
    description="A dummy distribution",
    long_description=u"Long description.\n\nGruß.\n",
    classifiers=[
        "Topic :: Software Development :: Testing",
    ],
    author='Jeff Dairiki',
    author_email='dairiki@dairiki.org',
    keywords='dummy testing',
    # the package ships a single module plus one raw script
    py_modules=['dist1'],
    install_requires=[],
    scripts=[
        'dist1_script',
    ],
    # console-script wrapper exercising entry-point generation
    entry_points={
        'console_scripts': [
            'dist1_wrapper = dist1:main',
        ],
    },
    # optional extra pulling in another test distribution
    extras_require={
        'extras': ['extension_dist'],
    },
)
| 23.607143 | 53 | 0.555219 |
d685bca8e6543952c82391a2c1141fb12252210f | 462 | py | Python | tests/test_swf/models/test_timeout.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 1 | 2021-03-06T22:01:41.000Z | 2021-03-06T22:01:41.000Z | tests/test_swf/models/test_timeout.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 1 | 2021-12-13T20:51:54.000Z | 2021-12-13T20:51:54.000Z | tests/test_swf/models/test_timeout.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 1 | 2017-10-19T00:53:28.000Z | 2017-10-19T00:53:28.000Z | from freezegun import freeze_time
from moto.swf.models import Timeout
from ..utils import make_workflow_execution
def test_timeout_creation():
    """A Timeout is reached exactly once the (frozen) clock hits its epoch."""
    wfe = make_workflow_execution()
    # epoch 1420117200 == "2015-01-01 13:00:00" UTC (the original comment
    # cited 1420113600, which is actually 12:00:00 UTC)
    timeout = Timeout(wfe, 1420117200, "START_TO_CLOSE")
    with freeze_time("2015-01-01 12:00:00"):
        timeout.reached.should.be.falsy
    with freeze_time("2015-01-01 13:00:00"):
        timeout.reached.should.be.truthy
e0c4cac8a8f82722bf27628a7d66131a0e820bed | 1,526 | py | Python | Model/initialize.py | Taoooo9/Cail_Text_similarity_esimtribert | 10b0314fdc3fcc60e39737ac563e8538b96ceb19 | [
"Apache-2.0"
] | 5 | 2019-12-10T06:49:13.000Z | 2022-03-01T15:01:09.000Z | Model/initialize.py | Taoooo9/Cail_Text_similarity_esimtribert | 10b0314fdc3fcc60e39737ac563e8538b96ceb19 | [
"Apache-2.0"
] | null | null | null | Model/initialize.py | Taoooo9/Cail_Text_similarity_esimtribert | 10b0314fdc3fcc60e39737ac563e8538b96ceb19 | [
"Apache-2.0"
] | 2 | 2019-12-23T06:46:31.000Z | 2021-03-22T11:05:52.000Z | import torch.nn as nn
import numpy as np
def init_embedding(input_embedding):
    """Initialise an embedding layer's weights uniformly in [-b, b],
    where b = sqrt(3 / embedding_dim)."""
    embedding_dim = input_embedding.weight.size(1)
    bound = np.sqrt(3.0 / embedding_dim)
    nn.init.uniform_(input_embedding.weight, -bound, bound)
def init_lstm_weight(lstm, num_layer=1):
    """Xavier-style uniform initialisation for an LSTM.

    Weight matrices are drawn from U(-b, b) with
    b = sqrt(6 / (hidden_size + fan_in)) (the 4-gate dimension is divided
    out of size(0)).  Biases are zeroed except the forget-gate slice
    [hidden_size:2*hidden_size], which is set to 1.
    """
    for layer in range(num_layer):
        for name in ('weight_hh_l{0}'.format(layer), 'weight_ih_l{0}'.format(layer)):
            weight = getattr(lstm, name)
            bound = np.sqrt(6.0 / (weight.size(0) / 4. + weight.size(1)))
            nn.init.uniform_(weight, -bound, bound)
    if lstm.bias:
        for layer in range(num_layer):
            for name in ('bias_ih_l{0}'.format(layer), 'bias_hh_l{0}'.format(layer)):
                bias = getattr(lstm, name)
                bias.data.zero_()
                bias.data[lstm.hidden_size:2 * lstm.hidden_size] = 1
def init_linear(input_linear):
    """Xavier-uniform initialisation for a fully connected layer.

    Weight bound is sqrt(6 / (fan_out + fan_in)); the bias (when present)
    uses sqrt(6 / (fan_out + 1)).
    """
    fan_out = input_linear.weight.size(0)
    fan_in = input_linear.weight.size(1)
    w_bound = np.sqrt(6.0 / (fan_out + fan_in))
    nn.init.uniform_(input_linear.weight, -w_bound, w_bound)
    if input_linear.bias is not None:
        b_bound = np.sqrt(6.0 / (input_linear.bias.size(0) + 1))
        input_linear.bias.data.uniform_(-b_bound, b_bound)
| 32.468085 | 86 | 0.621887 |
7d571f8a8ec6869005e87358fee52170db7b050a | 96 | py | Python | venv/lib/python3.8/site-packages/numpy/core/generate_numpy_api.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/numpy/core/generate_numpy_api.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/numpy/core/generate_numpy_api.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/91/1c/a9/f83043538eb547bf87a5df04206e96b5c1f06d71db1a312c0d9e710e38 | 96 | 96 | 0.895833 |
d27fae0ffacba3795d16b19541f7898b88f3491c | 3,854 | py | Python | fuzzers/afl/fuzzer.py | hsachinraj/fuzzbench | 9639821132b0bfb70bc7d025ad2964d26b363b25 | [
"Apache-2.0"
] | null | null | null | fuzzers/afl/fuzzer.py | hsachinraj/fuzzbench | 9639821132b0bfb70bc7d025ad2964d26b363b25 | [
"Apache-2.0"
] | null | null | null | fuzzers/afl/fuzzer.py | hsachinraj/fuzzbench | 9639821132b0bfb70bc7d025ad2964d26b363b25 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration code for AFL fuzzer."""
import shutil
import subprocess
import os
from fuzzers import utils
def prepare_build_environment():
    """Set environment variables used to build targets for AFL-based
    fuzzers."""
    coverage_flags = ['-fsanitize-coverage=trace-pc-guard']
    for flag_var in ('CFLAGS', 'CXXFLAGS'):
        utils.append_flags(flag_var, coverage_flags)
    os.environ.update({
        'CC': 'clang',
        'CXX': 'clang++',
        'FUZZER_LIB': '/libAFL.a',
    })
def build():
    """Build the benchmark with AFL instrumentation enabled."""
    prepare_build_environment()
    utils.build_benchmark()
    print('[post_build] Copying afl-fuzz to $OUT directory')
    # Copy out the afl-fuzz binary as a build artifact.
    shutil.copy('/afl/afl-fuzz', os.environ['OUT'])
def prepare_fuzz_environment(input_corpus):
    """Prepare to fuzz with AFL or another AFL-based fuzzer."""
    os.environ.update({
        # Disable AFL's terminal UI so we get usable logs.
        'AFL_NO_UI': '1',
        # Skip AFL's CPU frequency check (fails on Docker).
        'AFL_SKIP_CPUFREQ': '1',
        # Docker already enforces single-core usage; no need to pin affinity.
        'AFL_NO_AFFINITY': '1',
        # AFL aborts on startup if the core pattern notifies external
        # programs; we don't care about that.
        'AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES': '1',
        # Don't exit on crashing seeds (common with OSS-Fuzz corpora).
        'AFL_SKIP_CRASHES': '1',
    })
    # AFL needs at least one non-empty seed to start.
    utils.create_seed_file_for_empty_corpus(input_corpus)
def run_afl_fuzz(input_corpus,
                 output_corpus,
                 target_binary,
                 additional_flags=None,
                 hide_output=False):
    """Run afl-fuzz against *target_binary*.

    :param input_corpus: directory of seed inputs
    :param output_corpus: directory where AFL writes its findings
    :param additional_flags: extra afl-fuzz CLI flags, inserted before '--'
    :param hide_output: if True, discard afl-fuzz's stdout/stderr
    """
    # Spawn the afl fuzzing process.
    # FIXME: Currently AFL will exit if it encounters a crashing input in seed
    # corpus (usually timeouts). Add a way to skip/delete such inputs and
    # re-run AFL.
    print('[run_afl_fuzz] Running target with afl-fuzz')
    command = [
        './afl-fuzz',
        '-i',
        input_corpus,
        '-o',
        output_corpus,
        # Use deterministic mode as it does best when we don't have
        # seeds which is often the case.
        '-d',
        # Use no memory limit as ASAN doesn't play nicely with one.
        '-m',
        'none',
        '-t',
        '1000+',  # Use same default 1 sec timeout, but add '+' to skip hangs.
    ]
    if additional_flags:
        command.extend(additional_flags)
    # Pass the target's dictionary to AFL when one exists.
    dictionary_path = utils.get_dictionary_path(target_binary)
    if dictionary_path:
        command.extend(['-x', dictionary_path])
    command += [
        '--',
        target_binary,
        # Pass INT_MAX to afl to maximize the number of persistent loops it
        # performs.
        '2147483647'
    ]
    print('[run_afl_fuzz] Running command: ' + ' '.join(command))
    output_stream = subprocess.DEVNULL if hide_output else None
    subprocess.check_call(command, stdout=output_stream, stderr=output_stream)
def fuzz(input_corpus, output_corpus, target_binary):
    """Run afl-fuzz on target: set up the AFL environment, then fuzz
    *target_binary* with seeds from *input_corpus*, writing findings to
    *output_corpus*."""
    prepare_fuzz_environment(input_corpus)
    run_afl_fuzz(input_corpus, output_corpus, target_binary)
| 34.106195 | 78 | 0.672289 |
c3160832aa22fdfd7f48f3cddabdd019be2fc111 | 474 | py | Python | prototyping/auto-segmentation/mc_old_2/config_prostate_wdsc.py | ethanio12345/pymedphys | cb34c992de8d442eced3385018a194364060092d | [
"Apache-2.0"
] | 2 | 2020-02-04T03:21:20.000Z | 2020-04-11T14:17:53.000Z | prototyping/auto-segmentation/mc_old_2/config_prostate_wdsc.py | ethanio12345/pymedphys | cb34c992de8d442eced3385018a194364060092d | [
"Apache-2.0"
] | 6 | 2020-10-06T15:36:46.000Z | 2022-02-27T05:15:17.000Z | examples/protyping/pymedphys-segmentation/config_prostate_wdsc.py | cpbhatt/pymedphys | 177b3db8e2a6e83c44835d0007d1d5c7a420fd99 | [
"Apache-2.0"
] | 1 | 2020-12-20T14:14:00.000Z | 2020-12-20T14:14:00.000Z | import loss as loss
import tensorflow as tf
# DATA
DATA_PATH = "./dataset_prostate_cleaned/"  # root folder of the cleaned prostate dataset
MODEL_SAVE = "./prostate_wdsc/wdsc"  # checkpoint path prefix for this run
BATCH_SIZE = 1
OUTPUT_CHANNELS = 3  # presumably the number of segmentation classes -- confirm against the model
# COMPILING MODEL
LOSS = loss.weighted_dsc_loss  # weighted Dice loss from the local loss module
INITIAL_LR = 1e-5
OPTIMIZER = tf.keras.optimizers.Adam(lr=INITIAL_LR)
METRICS = [loss.dice_metric, tf.keras.metrics.Precision(), tf.keras.metrics.Recall()]
INITIAL_WEIGHTS = None  # path to pre-trained weights, or None to train from scratch
# TRAINING MODEL
EPOCHS = 150
LR_SCALE = 0.5  # presumably the LR-reduction factor -- confirm in the training script
LR_PATIENCE = 3  # presumably epochs without improvement before scaling LR -- confirm
STOP_PATIENCE = 30  # presumably early-stopping patience in epochs -- confirm
8c595cb2431bc8ce9021445aa563a5dd60c46c5f | 2,753 | py | Python | karbor-1.3.0/karbor/tests/unit/clients/test_glance_client.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 1 | 2021-05-23T01:48:25.000Z | 2021-05-23T01:48:25.000Z | karbor-1.3.0/karbor/tests/unit/clients/test_glance_client.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | karbor-1.3.0/karbor/tests/unit/clients/test_glance_client.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from karbor.context import RequestContext
from karbor.services.protection.clients import glance
from karbor.tests import base
class GlanceClientTest(base.TestCase):
    """Tests for karbor's Glance client factory (clients.glance.create)."""
    def setUp(self):
        """Build a request context whose catalog advertises a Glance endpoint."""
        super(GlanceClientTest, self).setUp()
        self._public_url = 'http://127.0.0.1:9292'
        service_catalog = [
            {
                'endpoints': [{'publicURL': self._public_url}],
                'type': 'image',
                'name': 'glance',
            },
        ]
        self._context = RequestContext(user_id='demo',
                                       project_id='abcd',
                                       auth_token='efgh',
                                       service_catalog=service_catalog)
    def test_create_client_by_endpoint(self):
        """An explicitly configured glance_endpoint wins."""
        cfg.CONF.set_default('glance_endpoint',
                             'http://127.0.0.1:9292',
                             'glance_client')
        gc = glance.create(self._context, cfg.CONF)
        self.assertEqual('http://127.0.0.1:9292', gc.http_client.endpoint)
    def test_create_client_by_catalog(self):
        """With no endpoint configured, the service catalog is used."""
        gc = glance.create(self._context, cfg.CONF)
        self.assertEqual('http://127.0.0.1:9292', gc.http_client.endpoint)
    @mock.patch('karbor.services.protection.clients.utils.get_url')
    @mock.patch('glanceclient.client.Client')
    def test_create_client(self, create, get_url):
        """create() forwards the expected kwargs, with and without a session."""
        get_url.return_value = self._public_url
        client_config = cfg.CONF[glance.CONFIG_GROUP]
        client_version = glance.GLANCECLIENT_VERSION
        session = object()
        args = {
            'endpoint': self._public_url,
            'token': self._context.auth_token,
            'cacert': client_config.glance_ca_cert_file,
            'insecure': client_config.glance_auth_insecure,
        }
        glance.create(self._context, cfg.CONF)
        create.assert_called_with(client_version, **args)
        # When a session is supplied, only endpoint + session are passed.
        glance.create(self._context, cfg.CONF, session=session)
        create.assert_called_with(client_version,
                                  endpoint=self._public_url,
                                  session=session)
| 37.712329 | 78 | 0.616782 |
4b58dc456b5bfc5639f9639d3a68557c0aefda6a | 1,752 | py | Python | coin/settings.py | bluppfisk/coindicator | 4e128d5a034d39bdbac057299b9bdbc72243986a | [
"MIT"
] | null | null | null | coin/settings.py | bluppfisk/coindicator | 4e128d5a034d39bdbac057299b9bdbc72243986a | [
"MIT"
] | null | null | null | coin/settings.py | bluppfisk/coindicator | 4e128d5a034d39bdbac057299b9bdbc72243986a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# GSettings
from gi.repository import Gio
SCHEMA_ID = 'org.blf.indicator.coin'
DEFAULTS = {
'refresh': 30,
'exchange': 'kraken',
'assetpair-kraken': 'XXBTZUSD',
}
class Settings(object):
    """Read/write Coin indicator settings.

    Values come from one of three sources, in order of precedence:

    1. ``manual_settings`` -- a colon-separated read-only override string of
       the form ``"exchange:assetpair:refresh"``;
    2. GSettings, when the schema is installed;
    3. the module-level ``DEFAULTS``.

    Each accessor acts as a getter when called without ``val`` and as a
    setter otherwise (setters are no-ops for manual/default sources, as
    before).
    """

    def __init__(self, manual_settings=None):
        self.settings = None
        self.manual_settings = None
        if manual_settings:
            self.manual_settings = manual_settings.split(':')
        else:
            source = Gio.SettingsSchemaSource.get_default()
            if source.lookup(SCHEMA_ID, True):
                self.settings = Gio.Settings(SCHEMA_ID)
            else:
                print("GSettings: schema [" + SCHEMA_ID + "] not installed. Using defaults.")

    def refresh(self, val=None):
        """Get (val is None) or set the refresh interval in seconds."""
        if self.manual_settings:
            return int(self.manual_settings[2])
        elif self.settings:
            # `val is None` (not truthiness) so that setting 0 works.
            if val is None:
                return self.settings.get_int('refresh')
            return self.settings.set_int('refresh', val)
        else:
            return DEFAULTS['refresh']

    def exchange(self, val=None):
        """Get (val is None) or set the exchange name."""
        if self.manual_settings:
            return self.manual_settings[0]
        elif self.settings:
            if val is None:
                return self.settings.get_string('exchange')
            return self.settings.set_string('exchange', val)
        else:
            return DEFAULTS['exchange']

    def assetpair(self, exchange, val=None):
        """Get (val is None) or set the asset pair for *exchange*.

        Pairs are normalised to upper case.  Bug fix: the original code
        called ``val.upper()`` without assigning the result, so lower-case
        values were stored unchanged.
        """
        if val is not None:
            val = val.upper()
        if self.manual_settings:
            return self.manual_settings[1].upper()
        elif self.settings:
            if val is None:
                return self.settings.get_string('assetpair-' + exchange)
            return self.settings.set_string('assetpair-' + exchange, val)
        else:
            return DEFAULTS['assetpair-' + exchange]
22e9bce56c9e7596fab0a72c4e2725b2822f8c22 | 5,734 | py | Python | astley/transformer.py | yunruse/Astley | 31ca6b438dc46b01a463b30e92e5ee930ff8452c | [
"CC-BY-4.0"
] | 1 | 2020-11-28T23:10:38.000Z | 2020-11-28T23:10:38.000Z | astley/transformer.py | yunruse/Astley | 31ca6b438dc46b01a463b30e92e5ee930ff8452c | [
"CC-BY-4.0"
] | null | null | null | astley/transformer.py | yunruse/Astley | 31ca6b438dc46b01a463b30e92e5ee930ff8452c | [
"CC-BY-4.0"
] | null | null | null | """Modified stateful NodeTransformer with QOL functions."""
from ast import NodeTransformer
from _ast import AST
from types import CodeType
from io import TextIOBase
from functools import wraps
from .node import Node, parse, modify
from .nodes import Expression, expr
__all__ = "match Language Python".split()
def match(cls=None, **kw):
    """Advanced Language matcher that accounts for more conditions.
    Wrap this around a class, then call it around functions:
    @match
    class NewLang(Language):
        @match(kind=Add, mode=eval, bare_node=True)
        @match(kind=AugAssign, op=Add, mode='eval', bare_node=True)
        def Print_Every_Add_I_see(self, node):
            print(node.left, node.right)
    """
    # Attribute name under which per-method match conditions are stashed.
    L = "_LanguageMatch"
    # Runtime shenanigans mean we must assign properties
    # to the method, then iterate over them. Fun!
    if not isinstance(cls, type):
        # Called with keywords (decorator factory on a method): return a
        # wrapper that records the condition dict on the function.
        # this is pretty eldritch so I'd steer clear
        def new(func):
            if not hasattr(func, L):
                setattr(func, L, [])
            getattr(func, L).append(kw)
            return func
        return new
    else:
        # Called on the class itself: collect every recorded condition and
        # register (conditions, handler) under each node type in
        # cls.match_conds, keyed by the 'kind' entry (default: any node).
        for k, func in cls.__dict__.items():
            for kw in getattr(func, L, []):
                kinds = kw.pop("kind", object)
                if not isinstance(kinds, (tuple, list)):
                    kinds = (kinds,)
                for k in kinds:
                    if k not in cls.match_conds:
                        cls.match_conds[k] = []
                    cls.match_conds[k].append((kw, func))
        return cls
def parse_try(source, filename):
    """Parse *source*, preferring 'eval' mode with an 'exec' fallback.

    Returns a ``(node, mode)`` pair; a SyntaxError from the 'exec'
    attempt propagates to the caller.
    """
    try:
        node = parse(source, filename, "eval")
        mode = "eval"
    except SyntaxError:
        node = parse(source, filename, "exec")
        mode = "exec"
    return node, mode
class Language(NodeTransformer):
    """Abstract syntax tree stateful transformer.
    Instances are stateful, allowing more advanced transformations.
    If you want to take the state of the node and work with it,
    make sure you can guarantee locals and globals are provided!
    eval, exec and compile are provided. Run as:
    >>> state = Language(node_or_source)
    >>> obj = state.eval()
    >>> code = state.compile()
    You can provide the mode, but with a few exceptions Astley can
    automatically determine it from source code or node.
    """
    def _match_cond(self, kw, node):
        """Handles advanced properties of match_cond.

        Returns True when *node* satisfies every condition in *kw*:
        'mode' must equal self.mode, 'bare_node' constrains whether the
        node sits directly in self.node.body, and any AST field name is
        matched by equality or isinstance.
        """
        if not len(kw):
            return True
        correct_bare = True
        if hasattr(self.node, "body") and "bare_node" in kw:
            correct_bare = (node in self.node.body) == kw.get("bare_node")
        correct_fields = all(
            getattr(node, i) == v or isinstance(getattr(node, i), v)
            for i, v in kw.items()
            if i in node._fields
        )
        conds = (self.mode == kw.get("mode", self.mode), correct_bare, correct_fields)
        return all(conds)
    def visit(self, node):
        """Visit a node."""
        method = "visit_" + node.__class__.__name__
        visitor = getattr(self, method, None)
        if visitor:
            return visitor(node)
        # No explicit visit_* method: try the @match-registered handlers.
        matches = self.match_conds.get(type(node), [])
        for kw, func in matches:
            if self._match_cond(kw, node):
                return func(self, node)
        else:
            # for/else: reached when no handler returned -> default traversal.
            return self.generic_visit(node)
    match_conds = {}  # {node_type: [({conditions}, handler_func), ...]}
    def __init__(self, node=None, **kw):
        if node is None:
            # maintain `ast` compatibility
            self.mode = "classic"
            return
        if isinstance(node, TextIOBase):
            # Given an open file: take its name and slurp the source.
            self.filename = node.name
            node = node.read(-1)
        else:
            self.filename = kw.get("filename", "<{}>".format(self.__class__.__name__))
        mode = kw.get("mode")
        if isinstance(node, Node):
            self.node = node
        elif isinstance(node, AST):
            # Plain ast node: upgrade it to an Astley Node.
            self.node = modify(node)
        elif isinstance(node, str):
            # Mode may be given as the builtin eval/exec instead of a string.
            mode = "eval" if mode is eval else "exec" if mode is exec else mode
            if mode is None:
                self.node, mode = parse_try(node, self.filename)
            else:
                self.node = parse(node, self.filename, mode)
        else:
            raise TypeError("Must be node or source.")
        if mode is None:
            mode = "eval" if isinstance(self.node, (expr, Expression)) else "exec"
        self.mode = mode
        self.globals = kw.get("globals", globals())
        self.locals = kw.get("locals", dict())
        # Transform eagerly on construction.
        self.on_visit_start()
        self.visit(self.node)
        self.on_visit_finish()
    def as_python(self):
        """Render the (possibly transformed) tree back to Python source."""
        return self.node.as_python()
    # overwritable methods
    def on_visit_start(self):
        pass
    def on_visit_finish(self):
        pass
    def compile(self, flags=0):
        # NOTE(review): *flags* is accepted but not forwarded to the node's
        # compile -- confirm whether this is intentional.
        return self.node.compile(self.filename)
    def eval(self):
        return eval(self.compile(), self.globals, self.locals)
    def exec(self):
        return exec(self.compile(), self.globals, self.locals)
    @classmethod
    def compiles(cls, source, filename, mode, flags=0, **kw):
        """One-shot: construct an instance and return its compiled code."""
        return cls(source, filename=filename, mode=mode, **kw).compile(flags)
    @classmethod
    def evals(cls, source, globals=None, locals=None, **kw):
        """One-shot: transform *source* and evaluate it."""
        return cls(source, globals=globals, locals=locals, **kw).eval()
    @classmethod
    def execs(cls, source, globals=None, locals=None, **kw):
        """One-shot: transform *source* and execute it (no return value)."""
        cls(source, globals=globals, locals=locals, **kw).exec()
class Python(Language):
    """Base language 'transformer'. Has no effect."""
    def visit(self, node):
        # Identity visit: return every node unchanged.
        return node
| 30.994595 | 86 | 0.590862 |
c2a65bba5b9703851af1b63ff8427cf923e1ef5e | 55 | py | Python | shamus/__init__.py | vladimyr/shamus | 1e7c016298e6776c258e64e07641c78dd9953ef5 | [
"MIT"
] | 2 | 2018-04-11T06:32:47.000Z | 2018-04-13T18:50:54.000Z | shamus/__init__.py | vladimyr/shamus | 1e7c016298e6776c258e64e07641c78dd9953ef5 | [
"MIT"
] | 3 | 2018-04-10T16:32:08.000Z | 2018-04-27T01:36:49.000Z | shamus/__init__.py | marinko-peso/shamus | aac345cc4edbd7dc5a2364e8372f7dc0bc2d731d | [
"MIT"
] | null | null | null | __all__ = [
'shamus'
]
from .shamus import shamus
| 9.166667 | 26 | 0.636364 |
2f3ea452a3b95458b80f27eaa21113b627dfd053 | 5,268 | py | Python | tests/test_client.py | wankes2000/json2parquet | b3aafdb9341b0b43018625809d6243aeeec690a7 | [
"MIT"
] | null | null | null | tests/test_client.py | wankes2000/json2parquet | b3aafdb9341b0b43018625809d6243aeeec690a7 | [
"MIT"
] | null | null | null | tests/test_client.py | wankes2000/json2parquet | b3aafdb9341b0b43018625809d6243aeeec690a7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import tempfile
import pandas as pd
import pyarrow as pa
from pyarrow import parquet as pq
from json2parquet import client
def test_ingest():
    """Ingesting records against an explicit pyarrow schema keeps values."""
    fields = [pa.field(name, pa.int64()) for name in ("foo", "bar")]
    schema = pa.schema(fields)
    records = [{"foo": 1, "bar": 2}, {"foo": 10, "bar": 20}]
    table = client.ingest_data(records, schema)
    assert table.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}
def test_ingest_with_numeric_boolean():
    """
    Test ingesting data with boolean values given as numbers:
    0/1 are coerced to False/True when the schema declares bool.
    """
    schema = pa.schema([
        pa.field("foo", pa.bool_())
    ])
    data = [{"foo": 0}, {"foo": 1}]
    converted_data = client.ingest_data(data, schema)
    assert converted_data.to_pydict() == {'foo': [False, True]}
def test_ingest_with_boolean_none():
    """
    Test ingesting data with boolean values and none:
    None passes through as a null alongside coerced 0/1 values.
    """
    schema = pa.schema([
        pa.field("foo", pa.bool_())
    ])
    data = [{"foo": 0}, {"foo": 1}, {"foo": None}]
    converted_data = client.ingest_data(data, schema)
    assert converted_data.to_pydict() == {'foo': [False, True, None]}
def test_ingest_with_datetime():
    """
    Test ingesting datetime data with a given schema: ISO-like strings
    are parsed into ns-precision timestamps.
    """
    schema = pa.schema([
        pa.field("foo", pa.int64()),
        pa.field("bar", pa.int64()),
        pa.field("baz", pa.timestamp("ns"))
    ])
    data = [{"foo": 1, "bar": 2, "baz": "2018-01-01 01:02:03"}, {"foo": 10, "bar": 20, "baz": "2018-01-02 01:02:03"}]
    converted_data = client.ingest_data(data, schema)
    # expected values as pandas Timestamps (what to_pydict yields for ns timestamps)
    timestamp_values = [pd.to_datetime("2018-01-01 01:02:03"), pd.to_datetime("2018-01-02 01:02:03")]
    assert converted_data.to_pydict() == {'foo': [1, 10], 'bar': [2, 20], 'baz': timestamp_values}
def test_ingest_with_datetime_formatted():
    """
    Test ingesting datetime data with a given schema and custom date format:
    slash-separated dates are parsed via the date_format argument.
    """
    schema = pa.schema([
        pa.field("foo", pa.int64()),
        pa.field("bar", pa.int64()),
        pa.field("baz", pa.timestamp("ns"))
    ])
    data = [{"foo": 1, "bar": 2, "baz": "2018/01/01 01:02:03"}, {"foo": 10, "bar": 20, "baz": "2018/01/02 01:02:03"}]
    converted_data = client.ingest_data(data, schema, date_format="%Y/%m/%d %H:%M:%S")
    timestamp_values = [pd.to_datetime("2018-01-01 01:02:03"), pd.to_datetime("2018-01-02 01:02:03")]
    assert converted_data.to_pydict() == {'foo': [1, 10], 'bar': [2, 20], 'baz': timestamp_values}
def test_ingest_with_column_names():
    """A schema given as a plain list of column names still ingests values."""
    columns = ["foo", "bar"]
    rows = [{"foo": 1, "bar": 2}, {"foo": 10, "bar": 20}]
    table = client.ingest_data(rows, columns)
    assert table.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}
def test_ingest_with_no_schema():
    """Column names and types should be inferred when no schema is supplied."""
    records = [{"foo": 1, "bar": 2}, {"foo": 10, "bar": 20}]
    result = client.ingest_data(records)
    assert result.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}
def test_ingest_with_no_schema_and_uneven_column_names():
    """Keys missing from some records should become nulls in the inferred table."""
    records = [
        {"foo": 1, "bar": 2},
        {"foo": 10, "bar": 20},
        {"foo": 100, "bar": 200, "baz": 300},
    ]
    result = client.ingest_data(records)
    assert result.to_pydict() == {'foo': [1, 10, 100], 'bar': [2, 20, 200], 'baz': [None, None, 300]}
def test_load_json():
    """JSON records read from a fixture file should honour the supplied schema."""
    json_schema = pa.schema([
        pa.field("foo", pa.int32()),
        pa.field("bar", pa.int64()),
    ])
    fixture = "{}/tests/fixtures/simple_json.txt".format(os.getcwd())
    result = client.load_json(fixture, json_schema)
    assert result.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}
def test_convert_json():
    """Converting a JSON fixture should yield a Parquet file matching the reference."""
    json_schema = pa.schema([
        pa.field("foo", pa.int32()),
        pa.field("bar", pa.int64()),
    ])
    source = "{}/tests/fixtures/simple_json.txt".format(os.getcwd())
    reference = "{}/tests/fixtures/simple.parquet".format(os.getcwd())
    with tempfile.NamedTemporaryFile() as tmp:
        client.convert_json(source, tmp.name, json_schema)
        produced = pq.ParquetFile(tmp.name)
        expected = pq.ParquetFile(reference)
        # Compare structure first, then the actual row contents.
        assert produced.metadata.num_columns == expected.metadata.num_columns
        assert produced.metadata.num_rows == expected.metadata.num_rows
        assert produced.schema.equals(expected.schema)
        assert produced.read_row_group(0).to_pydict() == expected.read_row_group(0).to_pydict()
def test_date_conversion():
    """date32 columns should round-trip back to the original calendar dates."""
    date_schema = pa.schema([pa.field("foo", pa.date32())])
    records = [{"foo": "2018-01-01"}, {"foo": "2018-01-02"}]
    # ingest_data is side-effect free here, so call once and reuse the dict.
    result = client.ingest_data(records, date_schema).to_pydict()
    assert result['foo'][0].strftime("%Y-%m-%d") == "2018-01-01"
    assert result['foo'][1].strftime("%Y-%m-%d") == "2018-01-02"
| 30.275862 | 117 | 0.606492 |
84bc3d5912cbbfeb6a0504c24e8d9d3b9171416c | 1,526 | py | Python | src/digitalmarket/mixins.py | damansinghh/digital-marketplace | 3d1797716f39459950fcf10042603890335f7f55 | [
"MIT"
] | 104 | 2015-12-11T10:33:17.000Z | 2022-03-26T04:36:13.000Z | src/digitalmarket/mixins.py | damansinghh/digital-marketplace | 3d1797716f39459950fcf10042603890335f7f55 | [
"MIT"
] | 6 | 2020-06-06T01:20:49.000Z | 2022-03-12T00:32:27.000Z | src/digitalmarket/mixins.py | damansinghh/digital-marketplace | 3d1797716f39459950fcf10042603890335f7f55 | [
"MIT"
] | 85 | 2015-12-12T11:26:21.000Z | 2022-03-02T21:12:50.000Z | from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.shortcuts import get_object_or_404
from .decorators import ajax_required
class AjaxRequiredMixin(object):
    """CBV mixin: reject non-AJAX requests via the ajax_required decorator."""

    @method_decorator(ajax_required)
    def dispatch(self, request, *args, **kwargs):
        parent = super(AjaxRequiredMixin, self)
        return parent.dispatch(request, *args, **kwargs)
class LoginRequiredMixin(object):
    """CBV mixin: require an authenticated user via the login_required decorator."""

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        parent = super(LoginRequiredMixin, self)
        return parent.dispatch(request, *args, **kwargs)
class StatffRequiredMixin(object):
    """CBV mixin: restrict the view to staff members via staff_member_required.

    NOTE: the class name misspells "Staff"; it is kept unchanged for backward
    compatibility with existing imports.
    """

    @method_decorator(staff_member_required)
    def dispatch(self, request, *args, **kwargs):
        return super(StatffRequiredMixin, self).dispatch(request, *args, **kwargs)


# Correctly spelled alias so new code does not have to repeat the typo.
StaffRequiredMixin = StatffRequiredMixin
class MultiSlugMixin(object):
    """CBV mixin: resolve the object by slug, tolerating duplicate slugs.

    When several objects share a slug, the one with the highest title
    (descending sort) is returned instead of raising.
    """

    model = None  # concrete views must set this

    def get_object(self, *args, **kwargs):
        slug = self.kwargs.get("slug")
        if slug is None:
            # No slug in the URL: fall back to the default pk-based lookup.
            return super(MultiSlugMixin, self).get_object(*args, **kwargs)
        try:
            return get_object_or_404(self.model, slug=slug)
        except self.model.MultipleObjectsReturned:
            return self.model.objects.filter(slug=slug).order_by("-title").first()
class SubmitBtnMixin(object):
    """CBV mixin: expose a configurable submit-button label to templates."""

    submit_btn = None  # label injected into the template context

    def get_context_data(self, *args, **kwargs):
        ctx = super(SubmitBtnMixin, self).get_context_data(*args, **kwargs)
        ctx["submit_btn"] = self.submit_btn
        return ctx
| 28.259259 | 76 | 0.766055 |
8c1e42326e82239c36b3cdff6f051ebf1fde5ba5 | 2,755 | py | Python | src/sphinx-rigado-theme/sphinx_rigado_theme/conf.py | mgeier/sphinx-themes.org | 5c057b569e4084ac061692f0bae3685fc4267c80 | [
"BSD-2-Clause"
] | null | null | null | src/sphinx-rigado-theme/sphinx_rigado_theme/conf.py | mgeier/sphinx-themes.org | 5c057b569e4084ac061692f0bae3685fc4267c80 | [
"BSD-2-Clause"
] | null | null | null | src/sphinx-rigado-theme/sphinx_rigado_theme/conf.py | mgeier/sphinx-themes.org | 5c057b569e4084ac061692f0bae3685fc4267c80 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Sphinx-Themes template'
copyright = '2018, sphinx-themes.org'
author = 'sphinx-themes.org'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '1'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
#---sphinx-themes-----
html_theme = 'sphinx_rigado_theme'
| 30.611111 | 78 | 0.690381 |
e94193bd9a1d6bee948b96c0d4a28c665bd600b8 | 68,078 | py | Python | pysnmp-with-texts/ELTEX-TAU8.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/ELTEX-TAU8.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/ELTEX-TAU8.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ELTEX-TAU8 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ELTEX-TAU8
# Produced by pysmi-0.3.4 at Wed May 1 13:02:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
elHardware, = mibBuilder.importSymbols("ELTEX-SMI-ACTUAL", "elHardware")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
ModuleIdentity, ObjectIdentity, iso, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, TimeTicks, Bits, IpAddress, Integer32, Gauge32, Counter32, NotificationType, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "ObjectIdentity", "iso", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "TimeTicks", "Bits", "IpAddress", "Integer32", "Gauge32", "Counter32", "NotificationType", "Counter64")
TextualConvention, TimeStamp, RowStatus, TruthValue, DisplayString, TimeInterval = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TimeStamp", "RowStatus", "TruthValue", "DisplayString", "TimeInterval")
# Module identity for the ELTEX TAU-4/8.IP gateway MIB.
tau8 = ModuleIdentity((1, 3, 6, 1, 4, 1, 35265, 1, 55))
tau8.setRevisions(('2013-08-28 00:00',))
# Revision descriptions are only available on pysnmp builds newer than 4.4.0.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts:
        tau8.setRevisionsDescriptions(('first version',))
if mibBuilder.loadTexts:
    tau8.setLastUpdated('201308280000Z')
    tau8.setOrganization('Eltex Enterprise Ltd')
    tau8.setContactInfo(' ')
    tau8.setDescription('TAU-4/8.IP MIB')
class CallerIdType(TextualConvention, Integer32):
    """Enumerated Caller-ID generation mode for an FXS line."""
    status = 'current'
    description = 'Caller-Id generation'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(
        ("bell", 0), ("v23", 1), ("dtmf", 2), ("off", 3))
class CallTransferType(TextualConvention, Integer32):
    """Enumerated hook-flash handling (call transfer) mode."""
    status = 'current'
    description = 'Flash mode'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(
        ("transmitFlash", 0), ("attendedCT", 1), ("unattendedCT", 2), ("localCT", 3))
class RsrvModeType(TextualConvention, Integer32):
    """Enumerated reserve-proxy operating mode."""
    status = 'current'
    description = 'Proxy mode'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("off", 0), ("homing", 1), ("parking", 2))
class RsrvCheckMethodType(TextualConvention, Integer32):
    """Enumerated SIP method used to probe proxy availability."""
    status = 'current'
    description = 'Check method'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("invite", 0), ("register", 1), ("options", 2))
class OutboundType(TextualConvention, Integer32):
    """Enumerated outbound-proxy mode."""
    status = 'current'
    description = 'Outbound mode'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("off", 0), ("outbound", 1), ("outboundWithBusy", 2))
class EarlyMediaType(TextualConvention, Integer32):
    """Enumerated SIP provisional response used for incoming calls."""
    status = 'current'
    description = 'User call (SIP) (180 Ringing (0), 183 Progress (Early media) (1))'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1))
    namedValues = NamedValues(
        ("ringing180", 0), ("progress183EarlyMedia", 1))
class Option100relType(TextualConvention, Integer32):
    """Enumerated SIP 100rel (reliable provisional responses) usage."""
    status = 'current'
    description = '100rel (supported, required, off)'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("supported", 0), ("required", 1), ("off", 2))
class KeepAliveModeType(TextualConvention, Integer32):
    """Enumerated keep-alive mechanism for SIP registrations."""
    status = 'current'
    description = ' '
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    # NOTE(review): "clrf" looks like a transposition of "crlf" in the source
    # MIB; kept verbatim because the label is part of the published MIB.
    namedValues = NamedValues(
        ("off", 0), ("options", 1), ("notify", 2), ("clrf", 3))
class DtmfTransferType(TextualConvention, Integer32):
    """Enumerated DTMF transmission method."""
    status = 'current'
    description = 'DTMF transfer'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("inband", 0), ("rfc2833", 1), ("info", 2))
class FaxDirectionType(TextualConvention, Integer32):
    """Enumerated call direction(s) on which fax tones are detected."""
    status = 'current'
    description = 'Fax Direction'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(
        ("callerAndCallee", 0), ("caller", 1), ("callee", 2), ("noDetectFax", 3))
class FaxtransferType(TextualConvention, Integer32):
    """Enumerated codec/protocol used once a fax is detected."""
    status = 'current'
    # NOTE(review): the description says 'Fax Direction' but the values are
    # transfer codecs; this mismatch comes from the published MIB text and is
    # kept verbatim.
    description = 'Fax Direction'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(
        ("g711a", 0), ("g711u", 1), ("t38", 2), ("none", 3))
class FlashtransferType(TextualConvention, Integer32):
    """Enumerated hook-flash transmission method."""
    status = 'current'
    description = 'Flash transfer'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("off", 0), ("rfc2833", 1), ("info", 2))
class FlashMimeType(TextualConvention, Integer32):
    """Enumerated MIME type for hook-flash sent via SIP INFO."""
    status = 'current'
    description = 'Hook flash MIME Type (if flashtransfer = info)'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3))
    namedValues = NamedValues(
        ("hookflash", 0), ("dtmfRelay", 1), ("broadsoft", 2), ("sscc", 3))
class ModemType(TextualConvention, Integer32):
    """Enumerated codec/NSE handling for modem pass-through (V.152)."""
    status = 'current'
    description = 'Modem transfer (V.152)'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2, 3, 4))
    namedValues = NamedValues(
        ("g711a", 0), ("g711u", 1), ("g711aNse", 2), ("g711uNse", 3), ("off", 4))
class GroupType(TextualConvention, Integer32):
    """Enumerated hunt-group ring strategy."""
    status = 'current'
    description = 'Type of group (group(0),serial(1),cyclic(2))'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("group", 0), ("serial", 1), ("cyclic", 2))
class TraceOutputType(TextualConvention, Integer32):
    """Enumerated destination for trace output."""
    status = 'current'
    description = 'Output trace to'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(
        ("console", 0), ("syslogd", 1), ("disable", 2))
class ConferenceMode(TextualConvention, Integer32):
    """Enumerated conference mixing location for a SIP profile."""
    status = 'current'
    description = 'sip profile conference settings'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(0, 1))
    namedValues = NamedValues(
        ("local", 0), ("remote", 1))
pbxConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1))
fxsPorts = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1))
fxsPortsUseFxsProfile = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortsUseFxsProfile.setStatus('current')
if mibBuilder.loadTexts: fxsPortsUseFxsProfile.setDescription('Use FXS profiles settings')
fxsPortTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2), )
if mibBuilder.loadTexts: fxsPortTable.setStatus('current')
if mibBuilder.loadTexts: fxsPortTable.setDescription(' ')
fxsPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1), ).setIndexNames((0, "ELTEX-TAU8", "fxsPortIndex"))
if mibBuilder.loadTexts: fxsPortEntry.setStatus('current')
if mibBuilder.loadTexts: fxsPortEntry.setDescription(' ')
fxsPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: fxsPortIndex.setStatus('current')
if mibBuilder.loadTexts: fxsPortIndex.setDescription('FXS port index (from 1)')
fxsPortEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortEnabled.setStatus('current')
if mibBuilder.loadTexts: fxsPortEnabled.setDescription('Enabled')
fxsPortSipProfileId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortSipProfileId.setStatus('current')
if mibBuilder.loadTexts: fxsPortSipProfileId.setDescription('SIP profile')
fxsPortProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortProfile.setStatus('current')
if mibBuilder.loadTexts: fxsPortProfile.setDescription('FXS profile')
fxsPortPhone = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortPhone.setStatus('current')
if mibBuilder.loadTexts: fxsPortPhone.setDescription('Phone')
fxsPortUsername = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortUsername.setStatus('current')
if mibBuilder.loadTexts: fxsPortUsername.setDescription('Username')
fxsPortAuthName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortAuthName.setStatus('current')
if mibBuilder.loadTexts: fxsPortAuthName.setDescription('Login')
fxsPortAuthPass = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortAuthPass.setStatus('current')
if mibBuilder.loadTexts: fxsPortAuthPass.setDescription('Password')
fxsPortSipPort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortSipPort.setStatus('current')
if mibBuilder.loadTexts: fxsPortSipPort.setDescription('SIP Port')
fxsPortUseAltNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 10), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortUseAltNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortUseAltNumber.setDescription('Use alternative number')
fxsPortAltNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortAltNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortAltNumber.setDescription('Alternative number')
fxsPortCpcRus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCpcRus.setStatus('current')
if mibBuilder.loadTexts: fxsPortCpcRus.setDescription('Calling party category')
fxsPortMinOnhookTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortMinOnhookTime.setStatus('current')
if mibBuilder.loadTexts: fxsPortMinOnhookTime.setDescription('Minimal on-hook time')
fxsPortMinFlash = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortMinFlash.setStatus('current')
if mibBuilder.loadTexts: fxsPortMinFlash.setDescription('Min flash time')
fxsPortGainR = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortGainR.setStatus('current')
if mibBuilder.loadTexts: fxsPortGainR.setDescription('Gain receive (x0.1dB)')
fxsPortGainT = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortGainT.setStatus('current')
if mibBuilder.loadTexts: fxsPortGainT.setDescription('Gain transmit (x0.1dB)')
fxsPortMinPulse = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortMinPulse.setStatus('current')
if mibBuilder.loadTexts: fxsPortMinPulse.setDescription('Min pulse')
fxsPortInterdigit = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortInterdigit.setStatus('current')
if mibBuilder.loadTexts: fxsPortInterdigit.setDescription('Interdigit')
fxsPortCallerId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 19), CallerIdType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCallerId.setStatus('current')
if mibBuilder.loadTexts: fxsPortCallerId.setDescription('Caller-Id generation')
fxsPortHangupTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortHangupTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsPortHangupTimeout.setDescription('Hangup timeout')
fxsPortRbTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortRbTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsPortRbTimeout.setDescription('Ringback timeout')
fxsPortBusyTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortBusyTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsPortBusyTimeout.setDescription('Busy timeout')
fxsPortPolarityReverse = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 23), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortPolarityReverse.setStatus('current')
if mibBuilder.loadTexts: fxsPortPolarityReverse.setDescription('Polarity reversal')
fxsPortCallTransfer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 24), CallTransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCallTransfer.setStatus('current')
if mibBuilder.loadTexts: fxsPortCallTransfer.setDescription('Flash mode')
fxsPortCallWaiting = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 25), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCallWaiting.setStatus('current')
if mibBuilder.loadTexts: fxsPortCallWaiting.setDescription('Callwaiting')
fxsPortDirectnumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 26), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortDirectnumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortDirectnumber.setDescription('Direct number')
fxsPortStopDial = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 27), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortStopDial.setStatus('current')
if mibBuilder.loadTexts: fxsPortStopDial.setDescription('Stop dialing at #')
fxsPortHotLine = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 28), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortHotLine.setStatus('current')
if mibBuilder.loadTexts: fxsPortHotLine.setDescription('Hotline')
fxsPortHotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 29), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortHotNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortHotNumber.setDescription('Hot number (if Hotline is enabled)')
fxsPortHotTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 30), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortHotTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsPortHotTimeout.setDescription('Hot timeout (if Hotline is enabled)')
fxsPortCtUnconditional = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 31), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCtUnconditional.setStatus('current')
if mibBuilder.loadTexts: fxsPortCtUnconditional.setDescription('CFU')
fxsPortCfuNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 32), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCfuNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortCfuNumber.setDescription('CGU number (if CFU is enabled)')
fxsPortCtBusy = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 33), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCtBusy.setStatus('current')
if mibBuilder.loadTexts: fxsPortCtBusy.setDescription('CFB')
fxsPortCfbNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 34), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCfbNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortCfbNumber.setDescription('CFB number (if CFB is enabled)')
fxsPortCtNoanswer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 35), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCtNoanswer.setStatus('current')
if mibBuilder.loadTexts: fxsPortCtNoanswer.setDescription('CFNA')
fxsPortCfnaNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 36), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCfnaNumber.setStatus('current')
if mibBuilder.loadTexts: fxsPortCfnaNumber.setDescription('CFNA number (if CFNA is enabled)')
fxsPortCtTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 37), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortCtTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsPortCtTimeout.setDescription('CFNA timeout (if CFNA is enabled)')
fxsPortDndEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 38), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsPortDndEnable.setStatus('current')
if mibBuilder.loadTexts: fxsPortDndEnable.setDescription('DND')
fxsPortRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 2, 1, 39), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fxsPortRowStatus.setStatus('current')
if mibBuilder.loadTexts: fxsPortRowStatus.setDescription('RowStatus')
fxsPortsMIBBoundary = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fxsPortsMIBBoundary.setStatus('current')
if mibBuilder.loadTexts: fxsPortsMIBBoundary.setDescription('Dummy object to prevent GETNEXT request from poking into neighbor table.')
fxsProfiles = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2))
fxsProfileTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1), )
if mibBuilder.loadTexts: fxsProfileTable.setStatus('current')
if mibBuilder.loadTexts: fxsProfileTable.setDescription(' ')
fxsProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1), ).setIndexNames((0, "ELTEX-TAU8", "fxsProfileIndex"))
if mibBuilder.loadTexts: fxsProfileEntry.setStatus('current')
if mibBuilder.loadTexts: fxsProfileEntry.setDescription(' ')
fxsProfileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: fxsProfileIndex.setStatus('current')
if mibBuilder.loadTexts: fxsProfileIndex.setDescription('FXS Profile index (from 1)')
fxsProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileName.setStatus('current')
if mibBuilder.loadTexts: fxsProfileName.setDescription('Profile name')
fxsProfileMinOnhookTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileMinOnhookTime.setStatus('current')
if mibBuilder.loadTexts: fxsProfileMinOnhookTime.setDescription('Minimal on-hook time')
fxsProfileMinFlash = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileMinFlash.setStatus('current')
if mibBuilder.loadTexts: fxsProfileMinFlash.setDescription('Min flash time (from 80 to 1000 ms)')
fxsProfileGainR = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileGainR.setStatus('current')
if mibBuilder.loadTexts: fxsProfileGainR.setDescription('Gain receive (x0.1dB)')
fxsProfileGainT = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileGainT.setStatus('current')
if mibBuilder.loadTexts: fxsProfileGainT.setDescription('Gain transmit (x0.1dB)')
fxsProfileMinPulse = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileMinPulse.setStatus('current')
if mibBuilder.loadTexts: fxsProfileMinPulse.setDescription('Minimal pulse time (from 20 to 100 ms)')
fxsProfileInterdigit = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileInterdigit.setStatus('current')
if mibBuilder.loadTexts: fxsProfileInterdigit.setDescription('Interdigit interval (from 100 to 400 ms)')
fxsProfileCallerId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 9), CallerIdType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileCallerId.setStatus('current')
if mibBuilder.loadTexts: fxsProfileCallerId.setDescription('Caller-Id generation')
fxsProfileHangupTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileHangupTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsProfileHangupTimeout.setDescription('Hangup timeout')
fxsProfileRbTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileRbTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsProfileRbTimeout.setDescription('Ringback timeout')
fxsProfileBusyTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfileBusyTimeout.setStatus('current')
if mibBuilder.loadTexts: fxsProfileBusyTimeout.setDescription('Busy timeout')
fxsProfilePolarityReverse = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 13), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fxsProfilePolarityReverse.setStatus('current')
if mibBuilder.loadTexts: fxsProfilePolarityReverse.setDescription('Polarity reversal')
fxsProfileRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 1, 1, 14), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: fxsProfileRowStatus.setStatus('current')
if mibBuilder.loadTexts: fxsProfileRowStatus.setDescription('RowStatus')
fxsProfilesMIBBoundary = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 2, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fxsProfilesMIBBoundary.setStatus('current')
if mibBuilder.loadTexts: fxsProfilesMIBBoundary.setDescription('Dummy object to prevent GETNEXT request from poking into neighbor table.')
# --- SIP configuration subtree: common (profile-independent) scalars -------
# sipConfig (…1.3) groups all SIP settings; sipCommon (…1.3.1) holds the
# scalars below: STUN client settings, an explicit public IP override, and
# flags to disable NAPTR/SRV DNS lookups.
sipConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3))
sipCommon = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1))
sipCommonStunEnable = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonStunEnable.setStatus('current')
if mibBuilder.loadTexts: sipCommonStunEnable.setDescription('STUN enable')
sipCommonStunServer = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonStunServer.setStatus('current')
if mibBuilder.loadTexts: sipCommonStunServer.setDescription('STUN server address (:port)')
sipCommonStunInterval = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonStunInterval.setStatus('current')
if mibBuilder.loadTexts: sipCommonStunInterval.setDescription('STUN request sending interval (sec)')
sipCommonPublicIp = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonPublicIp.setStatus('current')
if mibBuilder.loadTexts: sipCommonPublicIp.setDescription('Public IP')
sipCommonNotUseNAPTR = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 5), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonNotUseNAPTR.setStatus('current')
if mibBuilder.loadTexts: sipCommonNotUseNAPTR.setDescription('Disable NAPTR DNS queries')
sipCommonNotUseSRV = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipCommonNotUseSRV.setStatus('current')
if mibBuilder.loadTexts: sipCommonNotUseSRV.setDescription('Disable SRV DNS queries')
# --- SIP profile table skeleton: table, conceptual row, index --------------
# Rows are indexed by sipProfileIndex (1-based, per its description); the
# index column itself has no MAX-ACCESS set here (not-accessible in SMI terms).
sipProfileTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2), )
if mibBuilder.loadTexts: sipProfileTable.setStatus('current')
if mibBuilder.loadTexts: sipProfileTable.setDescription(' ')
sipProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1), ).setIndexNames((0, "ELTEX-TAU8", "sipProfileIndex"))
if mibBuilder.loadTexts: sipProfileEntry.setStatus('current')
if mibBuilder.loadTexts: sipProfileEntry.setDescription(' ')
sipProfileIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: sipProfileIndex.setStatus('current')
if mibBuilder.loadTexts: sipProfileIndex.setDescription('SIP Profile index (from 1)')
# --- SIP profile columns 2-19: identity and proxy/registrar addressing -----
# Column 2 names the profile; columns 3-7 cover activation, proxy mode and
# the primary proxy/registrar; columns 8-19 repeat the (proxy, registration
# flag, registrar) triple for reserve servers Rsrv1..Rsrv4.
sipProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProfileName.setStatus('current')
if mibBuilder.loadTexts: sipProfileName.setDescription('Profile name')
sipProEnablesip = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 3), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProEnablesip.setStatus('current')
if mibBuilder.loadTexts: sipProEnablesip.setDescription('Activate profile')
sipProRsrvMode = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 4), RsrvModeType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRsrvMode.setStatus('current')
if mibBuilder.loadTexts: sipProRsrvMode.setDescription('Proxy mode')
sipProProxyip = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProProxyip.setStatus('current')
if mibBuilder.loadTexts: sipProProxyip.setDescription('Proxy address (:port)')
sipProRegistration = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistration.setStatus('current')
if mibBuilder.loadTexts: sipProRegistration.setDescription('Registration')
sipProRegistrarip = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistrarip.setStatus('current')
if mibBuilder.loadTexts: sipProRegistrarip.setDescription('Registrar address (:port) (if Registration is enabled)')
# Reserve server set 1 (columns 8-10).
sipProProxyipRsrv1 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProProxyipRsrv1.setStatus('current')
if mibBuilder.loadTexts: sipProProxyipRsrv1.setDescription('Proxy address (:port)')
sipProRegistrationRsrv1 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 9), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistrationRsrv1.setStatus('current')
if mibBuilder.loadTexts: sipProRegistrationRsrv1.setDescription('Registration')
sipProRegistraripRsrv1 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistraripRsrv1.setStatus('current')
if mibBuilder.loadTexts: sipProRegistraripRsrv1.setDescription('Registrar address (:port) (if Registration is enabled)')
# Reserve server set 2 (columns 11-13).
sipProProxyipRsrv2 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProProxyipRsrv2.setStatus('current')
if mibBuilder.loadTexts: sipProProxyipRsrv2.setDescription('Proxy address (:port)')
sipProRegistrationRsrv2 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 12), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistrationRsrv2.setStatus('current')
if mibBuilder.loadTexts: sipProRegistrationRsrv2.setDescription('Registration')
sipProRegistraripRsrv2 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistraripRsrv2.setStatus('current')
if mibBuilder.loadTexts: sipProRegistraripRsrv2.setDescription('Registrar address (:port) (if Registration is enabled)')
# Reserve server set 3 (columns 14-16).
sipProProxyipRsrv3 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 14), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProProxyipRsrv3.setStatus('current')
if mibBuilder.loadTexts: sipProProxyipRsrv3.setDescription('Proxy address (:port)')
sipProRegistrationRsrv3 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 15), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistrationRsrv3.setStatus('current')
if mibBuilder.loadTexts: sipProRegistrationRsrv3.setDescription('Registration')
sipProRegistraripRsrv3 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 16), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistraripRsrv3.setStatus('current')
if mibBuilder.loadTexts: sipProRegistraripRsrv3.setDescription('Registrar address (:port) (if Registration is enabled)')
# Reserve server set 4 (columns 17-19).
sipProProxyipRsrv4 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 17), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProProxyipRsrv4.setStatus('current')
if mibBuilder.loadTexts: sipProProxyipRsrv4.setDescription('Proxy address (:port)')
sipProRegistrationRsrv4 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 18), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistrationRsrv4.setStatus('current')
if mibBuilder.loadTexts: sipProRegistrationRsrv4.setDescription('Registration')
sipProRegistraripRsrv4 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 19), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRegistraripRsrv4.setStatus('current')
if mibBuilder.loadTexts: sipProRegistraripRsrv4.setDescription('Registrar address (:port) (if Registration is enabled)')
# --- SIP profile columns 20-33: reservation checks, registration timers, ---
# --- domain/outbound settings, call signalling flags and codec order -------
sipProRsrvCheckMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 20), RsrvCheckMethodType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRsrvCheckMethod.setStatus('current')
if mibBuilder.loadTexts: sipProRsrvCheckMethod.setDescription('Check method')
sipProRsrvKeepaliveTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRsrvKeepaliveTime.setStatus('current')
if mibBuilder.loadTexts: sipProRsrvKeepaliveTime.setDescription('Keepalive timeout (s)')
sipProDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 22), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProDomain.setStatus('current')
if mibBuilder.loadTexts: sipProDomain.setDescription('SIP domain')
sipProOutbound = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 23), OutboundType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProOutbound.setStatus('current')
if mibBuilder.loadTexts: sipProOutbound.setDescription('Outbound mode')
sipProExpires = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 24), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProExpires.setStatus('current')
if mibBuilder.loadTexts: sipProExpires.setDescription('Expires')
sipProRri = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 25), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRri.setStatus('current')
if mibBuilder.loadTexts: sipProRri.setDescription('Registration Retry Interval')
sipProDomainToReg = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 26), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProDomainToReg.setStatus('current')
if mibBuilder.loadTexts: sipProDomainToReg.setDescription('Use domain to register')
sipProEarlyMedia = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 27), EarlyMediaType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProEarlyMedia.setStatus('current')
if mibBuilder.loadTexts: sipProEarlyMedia.setDescription('User call (SIP) (180 Ringing (0), 183 Progress (Early media) (1))')
sipProDisplayToReg = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 28), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProDisplayToReg.setStatus('current')
if mibBuilder.loadTexts: sipProDisplayToReg.setDescription('Use SIP Display info in Register')
sipProRingback = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 29), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRingback.setStatus('current')
if mibBuilder.loadTexts: sipProRingback.setDescription('Ringback at 183 Progress')
sipProReduceSdpMediaCount = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 30), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProReduceSdpMediaCount.setStatus('current')
if mibBuilder.loadTexts: sipProReduceSdpMediaCount.setDescription('Remove rejected media')
sipProOption100rel = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 31), Option100relType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProOption100rel.setStatus('current')
if mibBuilder.loadTexts: sipProOption100rel.setDescription('100rel (supported, required, off)')
sipProCodecOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 32), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProCodecOrder.setStatus('current')
if mibBuilder.loadTexts: sipProCodecOrder.setDescription('List of codecs in preferred order (g711a,g711u,g723,g729x,g729a,g729b)')
sipProG711pte = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 33), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProG711pte.setStatus('current')
if mibBuilder.loadTexts: sipProG711pte.setDescription('G.711 PTE, ms')
# --- SIP profile columns 34-49: DTMF/fax/flash/modem transfer, voice -------
# --- processing (VAD/echo), RTCP and the dialplan regular expression -------
sipProDtmfTransfer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 34), DtmfTransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProDtmfTransfer.setStatus('current')
if mibBuilder.loadTexts: sipProDtmfTransfer.setDescription('DTMF transfer')
sipProFaxDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 35), FaxDirectionType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFaxDirection.setStatus('current')
if mibBuilder.loadTexts: sipProFaxDirection.setDescription('Fax Direction')
# Up to three fax transfer codecs, in preference order (columns 36-38).
sipProFaxTransfer1 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 36), FaxtransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFaxTransfer1.setStatus('current')
if mibBuilder.loadTexts: sipProFaxTransfer1.setDescription('Codec 1')
sipProFaxTransfer2 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 37), FaxtransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFaxTransfer2.setStatus('current')
if mibBuilder.loadTexts: sipProFaxTransfer2.setDescription('Codec 2')
sipProFaxTransfer3 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 38), FaxtransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFaxTransfer3.setStatus('current')
if mibBuilder.loadTexts: sipProFaxTransfer3.setDescription('Codec 3')
sipProEnableInT38 = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 39), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProEnableInT38.setStatus('current')
if mibBuilder.loadTexts: sipProEnableInT38.setDescription('Take the transition to T.38')
sipProFlashTransfer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 40), FlashtransferType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFlashTransfer.setStatus('current')
if mibBuilder.loadTexts: sipProFlashTransfer.setDescription('Flash transfer')
sipProFlashMime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 41), FlashMimeType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProFlashMime.setStatus('current')
if mibBuilder.loadTexts: sipProFlashMime.setDescription('Hook flash MIME Type (if flashtransfer = info)')
sipProModem = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 42), ModemType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProModem.setStatus('current')
if mibBuilder.loadTexts: sipProModem.setDescription('Modem transfer (V.152)')
sipProPayload = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 43), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProPayload.setStatus('current')
if mibBuilder.loadTexts: sipProPayload.setDescription('Payload ((96..127))')
sipProSilenceDetector = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 44), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProSilenceDetector.setStatus('current')
if mibBuilder.loadTexts: sipProSilenceDetector.setDescription('Silencedetector')
sipProEchoCanceler = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 45), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProEchoCanceler.setStatus('current')
if mibBuilder.loadTexts: sipProEchoCanceler.setDescription('Echocanceller')
# RTCP enable plus its timer/count parameters (columns 46-48).
sipProRtcp = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 46), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRtcp.setStatus('current')
if mibBuilder.loadTexts: sipProRtcp.setDescription('RTCP')
sipProRtcpTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 47), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRtcpTimer.setStatus('current')
if mibBuilder.loadTexts: sipProRtcpTimer.setDescription('Sending interval (if rtcp on)')
sipProRtcpCount = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 48), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProRtcpCount.setStatus('current')
if mibBuilder.loadTexts: sipProRtcpCount.setDescription('Receiving period (if rtcp on)')
sipProDialplanRegexp = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 49), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProDialplanRegexp.setStatus('current')
if mibBuilder.loadTexts: sipProDialplanRegexp.setDescription('The regular expression for dialplan')
# --- SIP profile columns 50-59: row status, keepalive, conference and ------
# --- IMS/XCAP service names, plus the table boundary scalar ----------------
# Note: several descriptions here are just ' ' in the generated MIB source.
sipProRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 50), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: sipProRowStatus.setStatus('current')
if mibBuilder.loadTexts: sipProRowStatus.setDescription('RowStatus')
sipProKeepAliveMode = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 51), KeepAliveModeType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProKeepAliveMode.setStatus('current')
if mibBuilder.loadTexts: sipProKeepAliveMode.setDescription(' ')
sipProKeepAliveInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 52), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProKeepAliveInterval.setStatus('current')
if mibBuilder.loadTexts: sipProKeepAliveInterval.setDescription('sec')
sipProConferenceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 53), ConferenceMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProConferenceMode.setStatus('current')
if mibBuilder.loadTexts: sipProConferenceMode.setDescription(' ')
sipProConferenceServer = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 54), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProConferenceServer.setStatus('current')
if mibBuilder.loadTexts: sipProConferenceServer.setDescription(' ')
sipProImsEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 55), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProImsEnable.setStatus('current')
if mibBuilder.loadTexts: sipProImsEnable.setDescription(' ')
# XCAP service-name columns (56-59); semantics not documented in this MIB.
sipProXcapCallholdName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 56), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProXcapCallholdName.setStatus('current')
if mibBuilder.loadTexts: sipProXcapCallholdName.setDescription(' ')
sipProXcapCwName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 57), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProXcapCwName.setStatus('current')
if mibBuilder.loadTexts: sipProXcapCwName.setDescription(' ')
sipProXcapConferenceName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 58), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProXcapConferenceName.setStatus('current')
if mibBuilder.loadTexts: sipProXcapConferenceName.setDescription(' ')
sipProXcapHotlineName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 2, 1, 59), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sipProXcapHotlineName.setStatus('current')
if mibBuilder.loadTexts: sipProXcapHotlineName.setDescription(' ')
# Read-only sentinel placed after the table (see its description text).
sipProfilesMIBBoundary = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 3, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sipProfilesMIBBoundary.setStatus('current')
if mibBuilder.loadTexts: sipProfilesMIBBoundary.setDescription('Dummy object to prevent GETNEXT request from poking into neighbor table.')
# --- Hunt group configuration subtree (…1.4): table, columns, boundary -----
# One row per hunt group, indexed by huntGrIndex (1-based per its
# description). Columns cover group identity, SIP account credentials,
# group type, queueing, the member port list and the RowStatus column.
groupsConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4))
huntGroupTable = MibTable((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1), )
if mibBuilder.loadTexts: huntGroupTable.setStatus('current')
if mibBuilder.loadTexts: huntGroupTable.setDescription(' ')
huntGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1), ).setIndexNames((0, "ELTEX-TAU8", "huntGrIndex"))
if mibBuilder.loadTexts: huntGroupEntry.setStatus('current')
if mibBuilder.loadTexts: huntGroupEntry.setDescription(' ')
# Index column: no MAX-ACCESS set here (not-accessible in SMI terms).
huntGrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: huntGrIndex.setStatus('current')
if mibBuilder.loadTexts: huntGrIndex.setDescription('Hunt group index (from 1)')
huntGrEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrEnable.setStatus('current')
if mibBuilder.loadTexts: huntGrEnable.setDescription('Enable group')
huntGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGroupName.setStatus('current')
if mibBuilder.loadTexts: huntGroupName.setDescription('Group name')
huntGrSipProfileId = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrSipProfileId.setStatus('current')
if mibBuilder.loadTexts: huntGrSipProfileId.setDescription('SIP profile')
huntGrPhone = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrPhone.setStatus('current')
if mibBuilder.loadTexts: huntGrPhone.setDescription('Phone')
huntGrRegistration = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrRegistration.setStatus('current')
if mibBuilder.loadTexts: huntGrRegistration.setDescription('Registration')
huntGrUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrUserName.setStatus('current')
if mibBuilder.loadTexts: huntGrUserName.setDescription('User Name')
# NOTE(review): the password column is readable over SNMP (readwrite access);
# exposure depends on the agent implementation — flagging, not changing.
huntGrPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrPassword.setStatus('current')
if mibBuilder.loadTexts: huntGrPassword.setDescription('Password')
huntGrType = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 9), GroupType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrType.setStatus('current')
if mibBuilder.loadTexts: huntGrType.setDescription('Type of group (group(0),serial(1),cyclic(2))')
huntGrCallQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrCallQueueSize.setStatus('current')
if mibBuilder.loadTexts: huntGrCallQueueSize.setDescription('Call queue size')
huntGrWaitingTime = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrWaitingTime.setStatus('current')
if mibBuilder.loadTexts: huntGrWaitingTime.setDescription('Call reply timeout, sec')
huntGrSipPort = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrSipPort.setStatus('current')
if mibBuilder.loadTexts: huntGrSipPort.setDescription('SIP Port of group')
huntGrPickupEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 13), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrPickupEnable.setStatus('current')
if mibBuilder.loadTexts: huntGrPickupEnable.setDescription('Group call pickup enable')
huntGrPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 14), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: huntGrPorts.setStatus('current')
if mibBuilder.loadTexts: huntGrPorts.setDescription('List of the ports in the group')
# RowStatus column: read-create access allows SNMP managers to add/remove rows.
huntGrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 1, 1, 15), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: huntGrRowStatus.setStatus('current')
if mibBuilder.loadTexts: huntGrRowStatus.setDescription('RowStatus')
# Read-only sentinel placed after the table (see its description text).
huntGroupsMIBBoundary = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 4, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: huntGroupsMIBBoundary.setStatus('current')
if mibBuilder.loadTexts: huntGroupsMIBBoundary.setDescription('Dummy object to prevent GETNEXT request from poking into neighbor table.')
# --- Supplementary-services subtree (…1.5): DVO dial-prefix scalars --------
# Read-write strings holding the dial prefixes for each supplementary
# service (call forwarding variants, pickup, hotline, call waiting, DND).
suppServices = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5))
dvoCfuPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoCfuPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoCfuPrefix.setDescription('Unconditional forward')
dvoCfbPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoCfbPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoCfbPrefix.setDescription('CT busy')
dvoCfnaPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoCfnaPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoCfnaPrefix.setDescription('CT noanswer')
dvoCallPickupPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoCallPickupPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoCallPickupPrefix.setDescription('Permit to pickup incoming calls')
dvoHotNumberPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoHotNumberPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoHotNumberPrefix.setDescription('Hotline')
dvoCallwaitingPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoCallwaitingPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoCallwaitingPrefix.setDescription('Callwaiting')
dvoDndPrefix = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 1, 5, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dvoDndPrefix.setStatus('current')
if mibBuilder.loadTexts: dvoDndPrefix.setDescription('DND')
# --- Network configuration subtree (…2): SNMP agent settings ---------------
# Community strings, trap/inform sinks (v1 trap, v2c trap, inform) and the
# standard system identification strings.
# NOTE(review): community strings are exposed as readwrite DisplayStrings in
# this MIB — access control is the agent's responsibility.
networkConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2))
snmpConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1))
snmpRoCommunity = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpRoCommunity.setStatus('current')
if mibBuilder.loadTexts: snmpRoCommunity.setDescription('roCommunity')
snmpRwCommunity = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpRwCommunity.setStatus('current')
if mibBuilder.loadTexts: snmpRwCommunity.setDescription('rwCommunity')
snmpTrapsink = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapsink.setStatus('current')
if mibBuilder.loadTexts: snmpTrapsink.setDescription('TrapSink, usage: HOST [COMMUNITY [PORT]]')
snmpTrap2sink = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrap2sink.setStatus('current')
if mibBuilder.loadTexts: snmpTrap2sink.setDescription('Trap2Sink, usage: HOST [COMMUNITY [PORT]]')
snmpInformsink = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpInformsink.setStatus('current')
if mibBuilder.loadTexts: snmpInformsink.setDescription('InformSink, usage: HOST [COMMUNITY [PORT]]')
snmpSysname = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpSysname.setStatus('current')
if mibBuilder.loadTexts: snmpSysname.setDescription('System name')
snmpSyscontact = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpSyscontact.setStatus('current')
if mibBuilder.loadTexts: snmpSyscontact.setDescription('System contact')
snmpSyslocation = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpSyslocation.setStatus('current')
if mibBuilder.loadTexts: snmpSyslocation.setDescription('System location')
snmpTrapCommunity = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 2, 1, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapCommunity.setStatus('current')
if mibBuilder.loadTexts: snmpTrapCommunity.setDescription('TrapCommunity')
# --- System configuration subtree (…3): tracing and logging ----------------
# Trace output destination, syslog server, local log file parameters and
# per-category VoIP/IGMP trace enable flags.
systemConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3))
traceConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1))
traceOutput = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 1), TraceOutputType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: traceOutput.setStatus('current')
if mibBuilder.loadTexts: traceOutput.setDescription('Output trace to')
syslogdIpaddr = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogdIpaddr.setStatus('current')
if mibBuilder.loadTexts: syslogdIpaddr.setDescription('Syslog server address')
syslogdPort = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: syslogdPort.setStatus('current')
if mibBuilder.loadTexts: syslogdPort.setDescription('Syslog server port')
logLocalFile = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logLocalFile.setStatus('current')
if mibBuilder.loadTexts: logLocalFile.setDescription('Log file name')
logLocalSize = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logLocalSize.setStatus('current')
if mibBuilder.loadTexts: logLocalSize.setDescription('Log file size (kB)')
# VoIP trace master switch plus per-severity flags (error/warning/debug/info).
logVoipPbxEnable = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 6), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipPbxEnable.setStatus('current')
if mibBuilder.loadTexts: logVoipPbxEnable.setDescription('VoIP trace enable')
logVoipError = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 7), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipError.setStatus('current')
if mibBuilder.loadTexts: logVoipError.setDescription('Errors')
logVoipWarning = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 8), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipWarning.setStatus('current')
if mibBuilder.loadTexts: logVoipWarning.setDescription('Warnings')
logVoipDebug = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 9), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipDebug.setStatus('current')
if mibBuilder.loadTexts: logVoipDebug.setDescription('Debug')
logVoipInfo = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 10), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipInfo.setStatus('current')
if mibBuilder.loadTexts: logVoipInfo.setDescription('Info')
# SIP trace level is the only constrained scalar in this section (1..9).
logVoipSipLevel = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logVoipSipLevel.setStatus('current')
if mibBuilder.loadTexts: logVoipSipLevel.setDescription('SIP trace level')
logIgmpEnable = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 3, 1, 12), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: logIgmpEnable.setStatus('current')
if mibBuilder.loadTexts: logIgmpEnable.setDescription('IGMP trace enable')
# --- Action commands subtree (…10): write-triggered device operations ------
# Per their descriptions, writing true(1) to these scalars triggers a
# config save or a device reboot on the agent side.
actionCommands = MibIdentifier((1, 3, 6, 1, 4, 1, 35265, 1, 55, 10))
actionSave = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 10, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: actionSave.setStatus('current')
if mibBuilder.loadTexts: actionSave.setDescription('set true(1) to save all config files')
actionReboot = MibScalar((1, 3, 6, 1, 4, 1, 35265, 1, 55, 10, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: actionReboot.setStatus('current')
if mibBuilder.loadTexts: actionReboot.setDescription('set true(1) to reboot')
tau8Group = ObjectGroup((1, 3, 6, 1, 4, 1, 35265, 1, 55, 200)).setObjects(("ELTEX-TAU8", "fxsPortsUseFxsProfile"), ("ELTEX-TAU8", "fxsPortEnabled"), ("ELTEX-TAU8", "fxsPortSipProfileId"), ("ELTEX-TAU8", "fxsPortProfile"), ("ELTEX-TAU8", "fxsPortPhone"), ("ELTEX-TAU8", "fxsPortUsername"), ("ELTEX-TAU8", "fxsPortAuthName"), ("ELTEX-TAU8", "fxsPortAuthPass"), ("ELTEX-TAU8", "fxsPortSipPort"), ("ELTEX-TAU8", "fxsPortUseAltNumber"), ("ELTEX-TAU8", "fxsPortAltNumber"), ("ELTEX-TAU8", "fxsPortCpcRus"), ("ELTEX-TAU8", "fxsPortMinOnhookTime"), ("ELTEX-TAU8", "fxsPortMinFlash"), ("ELTEX-TAU8", "fxsPortGainR"), ("ELTEX-TAU8", "fxsPortGainT"), ("ELTEX-TAU8", "fxsPortMinPulse"), ("ELTEX-TAU8", "fxsPortInterdigit"), ("ELTEX-TAU8", "fxsPortCallerId"), ("ELTEX-TAU8", "fxsPortHangupTimeout"), ("ELTEX-TAU8", "fxsPortRbTimeout"), ("ELTEX-TAU8", "fxsPortBusyTimeout"), ("ELTEX-TAU8", "fxsPortPolarityReverse"), ("ELTEX-TAU8", "fxsPortCallTransfer"), ("ELTEX-TAU8", "fxsPortCallWaiting"), ("ELTEX-TAU8", "fxsPortDirectnumber"), ("ELTEX-TAU8", "fxsPortStopDial"), ("ELTEX-TAU8", "fxsPortHotLine"), ("ELTEX-TAU8", "fxsPortHotNumber"), ("ELTEX-TAU8", "fxsPortHotTimeout"), ("ELTEX-TAU8", "fxsPortCtUnconditional"), ("ELTEX-TAU8", "fxsPortCfuNumber"), ("ELTEX-TAU8", "fxsPortCtBusy"), ("ELTEX-TAU8", "fxsPortCfbNumber"), ("ELTEX-TAU8", "fxsPortCtNoanswer"), ("ELTEX-TAU8", "fxsPortCfnaNumber"), ("ELTEX-TAU8", "fxsPortCtTimeout"), ("ELTEX-TAU8", "fxsPortDndEnable"), ("ELTEX-TAU8", "fxsPortRowStatus"), ("ELTEX-TAU8", "fxsPortsMIBBoundary"), ("ELTEX-TAU8", "fxsProfileName"), ("ELTEX-TAU8", "fxsProfileMinOnhookTime"), ("ELTEX-TAU8", "fxsProfileMinFlash"), ("ELTEX-TAU8", "fxsProfileGainR"), ("ELTEX-TAU8", "fxsProfileGainT"), ("ELTEX-TAU8", "fxsProfileMinPulse"), ("ELTEX-TAU8", "fxsProfileInterdigit"), ("ELTEX-TAU8", "fxsProfileCallerId"), ("ELTEX-TAU8", "fxsProfileHangupTimeout"), ("ELTEX-TAU8", "fxsProfileRbTimeout"), ("ELTEX-TAU8", "fxsProfileBusyTimeout"), ("ELTEX-TAU8", "fxsProfilePolarityReverse"), 
("ELTEX-TAU8", "fxsProfileRowStatus"), ("ELTEX-TAU8", "fxsProfilesMIBBoundary"), ("ELTEX-TAU8", "sipCommonStunEnable"), ("ELTEX-TAU8", "sipCommonStunServer"), ("ELTEX-TAU8", "sipCommonStunInterval"), ("ELTEX-TAU8", "sipCommonPublicIp"), ("ELTEX-TAU8", "sipCommonNotUseNAPTR"), ("ELTEX-TAU8", "sipCommonNotUseSRV"), ("ELTEX-TAU8", "sipProfileName"), ("ELTEX-TAU8", "sipProEnablesip"), ("ELTEX-TAU8", "sipProRsrvMode"), ("ELTEX-TAU8", "sipProProxyip"), ("ELTEX-TAU8", "sipProRegistration"), ("ELTEX-TAU8", "sipProRegistrarip"), ("ELTEX-TAU8", "sipProProxyipRsrv1"), ("ELTEX-TAU8", "sipProRegistrationRsrv1"), ("ELTEX-TAU8", "sipProRegistraripRsrv1"), ("ELTEX-TAU8", "sipProProxyipRsrv2"), ("ELTEX-TAU8", "sipProRegistrationRsrv2"), ("ELTEX-TAU8", "sipProRegistraripRsrv2"), ("ELTEX-TAU8", "sipProProxyipRsrv3"), ("ELTEX-TAU8", "sipProRegistrationRsrv3"), ("ELTEX-TAU8", "sipProRegistraripRsrv3"), ("ELTEX-TAU8", "sipProProxyipRsrv4"), ("ELTEX-TAU8", "sipProRegistrationRsrv4"), ("ELTEX-TAU8", "sipProRegistraripRsrv4"), ("ELTEX-TAU8", "sipProRsrvCheckMethod"), ("ELTEX-TAU8", "sipProRsrvKeepaliveTime"), ("ELTEX-TAU8", "sipProDomain"), ("ELTEX-TAU8", "sipProOutbound"), ("ELTEX-TAU8", "sipProExpires"), ("ELTEX-TAU8", "sipProRri"), ("ELTEX-TAU8", "sipProDomainToReg"), ("ELTEX-TAU8", "sipProEarlyMedia"), ("ELTEX-TAU8", "sipProDisplayToReg"), ("ELTEX-TAU8", "sipProRingback"), ("ELTEX-TAU8", "sipProReduceSdpMediaCount"), ("ELTEX-TAU8", "sipProOption100rel"), ("ELTEX-TAU8", "sipProCodecOrder"), ("ELTEX-TAU8", "sipProG711pte"), ("ELTEX-TAU8", "sipProDtmfTransfer"), ("ELTEX-TAU8", "sipProFaxDirection"), ("ELTEX-TAU8", "sipProFaxTransfer1"), ("ELTEX-TAU8", "sipProFaxTransfer2"), ("ELTEX-TAU8", "sipProFaxTransfer3"), ("ELTEX-TAU8", "sipProEnableInT38"), ("ELTEX-TAU8", "sipProFlashTransfer"), ("ELTEX-TAU8", "sipProFlashMime"), ("ELTEX-TAU8", "sipProModem"), ("ELTEX-TAU8", "sipProPayload"), ("ELTEX-TAU8", "sipProSilenceDetector"), ("ELTEX-TAU8", "sipProEchoCanceler"), ("ELTEX-TAU8", 
"sipProRtcp"), ("ELTEX-TAU8", "sipProRtcpTimer"), ("ELTEX-TAU8", "sipProRtcpCount"), ("ELTEX-TAU8", "sipProDialplanRegexp"), ("ELTEX-TAU8", "sipProRowStatus"), ("ELTEX-TAU8", "sipProKeepAliveMode"), ("ELTEX-TAU8", "sipProKeepAliveInterval"), ("ELTEX-TAU8", "sipProConferenceMode"), ("ELTEX-TAU8", "sipProConferenceServer"), ("ELTEX-TAU8", "sipProImsEnable"), ("ELTEX-TAU8", "sipProXcapCallholdName"), ("ELTEX-TAU8", "sipProXcapCwName"), ("ELTEX-TAU8", "sipProXcapConferenceName"), ("ELTEX-TAU8", "sipProXcapHotlineName"), ("ELTEX-TAU8", "sipProfilesMIBBoundary"), ("ELTEX-TAU8", "huntGrEnable"), ("ELTEX-TAU8", "huntGroupName"), ("ELTEX-TAU8", "huntGrSipProfileId"), ("ELTEX-TAU8", "huntGrPhone"), ("ELTEX-TAU8", "huntGrRegistration"), ("ELTEX-TAU8", "huntGrUserName"), ("ELTEX-TAU8", "huntGrPassword"), ("ELTEX-TAU8", "huntGrType"), ("ELTEX-TAU8", "huntGrCallQueueSize"), ("ELTEX-TAU8", "huntGrWaitingTime"), ("ELTEX-TAU8", "huntGrSipPort"), ("ELTEX-TAU8", "huntGrPickupEnable"), ("ELTEX-TAU8", "huntGrPorts"), ("ELTEX-TAU8", "huntGrRowStatus"), ("ELTEX-TAU8", "huntGroupsMIBBoundary"), ("ELTEX-TAU8", "dvoCfuPrefix"), ("ELTEX-TAU8", "dvoCfbPrefix"), ("ELTEX-TAU8", "dvoCfnaPrefix"), ("ELTEX-TAU8", "dvoCallPickupPrefix"), ("ELTEX-TAU8", "dvoHotNumberPrefix"), ("ELTEX-TAU8", "dvoCallwaitingPrefix"), ("ELTEX-TAU8", "dvoDndPrefix"), ("ELTEX-TAU8", "snmpRoCommunity"), ("ELTEX-TAU8", "snmpRwCommunity"), ("ELTEX-TAU8", "snmpTrapsink"), ("ELTEX-TAU8", "snmpTrap2sink"), ("ELTEX-TAU8", "snmpInformsink"), ("ELTEX-TAU8", "snmpSysname"), ("ELTEX-TAU8", "snmpSyscontact"), ("ELTEX-TAU8", "snmpSyslocation"), ("ELTEX-TAU8", "snmpTrapCommunity"), ("ELTEX-TAU8", "traceOutput"), ("ELTEX-TAU8", "syslogdIpaddr"), ("ELTEX-TAU8", "syslogdPort"), ("ELTEX-TAU8", "logLocalFile"), ("ELTEX-TAU8", "logLocalSize"), ("ELTEX-TAU8", "logVoipPbxEnable"), ("ELTEX-TAU8", "logVoipError"), ("ELTEX-TAU8", "logVoipWarning"), ("ELTEX-TAU8", "logVoipDebug"), ("ELTEX-TAU8", "logVoipInfo"), ("ELTEX-TAU8", "logVoipSipLevel"), 
("ELTEX-TAU8", "logIgmpEnable"), ("ELTEX-TAU8", "actionReboot"), ("ELTEX-TAU8", "actionSave"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tau8Group = tau8Group.setStatus('current')
if mibBuilder.loadTexts: tau8Group.setDescription(' ')
mibBuilder.exportSymbols("ELTEX-TAU8", GroupType=GroupType, sipProRegistraripRsrv1=sipProRegistraripRsrv1, sipProFaxTransfer3=sipProFaxTransfer3, sipProRtcpTimer=sipProRtcpTimer, traceConfig=traceConfig, ConferenceMode=ConferenceMode, sipProRegistrationRsrv2=sipProRegistrationRsrv2, actionCommands=actionCommands, sipProRegistration=sipProRegistration, dvoCallPickupPrefix=dvoCallPickupPrefix, sipProEarlyMedia=sipProEarlyMedia, logVoipPbxEnable=logVoipPbxEnable, sipProDomainToReg=sipProDomainToReg, FlashtransferType=FlashtransferType, fxsPortProfile=fxsPortProfile, fxsPortSipPort=fxsPortSipPort, sipProfileEntry=sipProfileEntry, sipProOutbound=sipProOutbound, fxsPortGainT=fxsPortGainT, snmpRoCommunity=snmpRoCommunity, fxsPortGainR=fxsPortGainR, snmpTrapsink=snmpTrapsink, sipProModem=sipProModem, snmpTrapCommunity=snmpTrapCommunity, sipProFaxTransfer2=sipProFaxTransfer2, snmpConfig=snmpConfig, sipProRegistrationRsrv3=sipProRegistrationRsrv3, fxsPortDndEnable=fxsPortDndEnable, sipProReduceSdpMediaCount=sipProReduceSdpMediaCount, traceOutput=traceOutput, TraceOutputType=TraceOutputType, RsrvModeType=RsrvModeType, fxsPortHotLine=fxsPortHotLine, sipProDomain=sipProDomain, huntGrCallQueueSize=huntGrCallQueueSize, logVoipError=logVoipError, sipProExpires=sipProExpires, sipProProxyipRsrv4=sipProProxyipRsrv4, fxsPortCfbNumber=fxsPortCfbNumber, huntGrEnable=huntGrEnable, CallTransferType=CallTransferType, logIgmpEnable=logIgmpEnable, fxsProfilesMIBBoundary=fxsProfilesMIBBoundary, sipProConferenceServer=sipProConferenceServer, tau8Group=tau8Group, fxsPortInterdigit=fxsPortInterdigit, fxsPortRowStatus=fxsPortRowStatus, sipProFlashMime=sipProFlashMime, sipProXcapConferenceName=sipProXcapConferenceName, suppServices=suppServices, fxsProfileCallerId=fxsProfileCallerId, RsrvCheckMethodType=RsrvCheckMethodType, EarlyMediaType=EarlyMediaType, fxsPortCallTransfer=fxsPortCallTransfer, sipProRegistraripRsrv4=sipProRegistraripRsrv4, huntGrSipProfileId=huntGrSipProfileId, 
networkConfig=networkConfig, dvoHotNumberPrefix=dvoHotNumberPrefix, fxsPortMinOnhookTime=fxsPortMinOnhookTime, snmpSysname=snmpSysname, FaxtransferType=FaxtransferType, fxsProfileRowStatus=fxsProfileRowStatus, sipProRowStatus=sipProRowStatus, systemConfig=systemConfig, fxsProfileRbTimeout=fxsProfileRbTimeout, fxsPortPhone=fxsPortPhone, fxsPortSipProfileId=fxsPortSipProfileId, fxsPortDirectnumber=fxsPortDirectnumber, fxsPortMinPulse=fxsPortMinPulse, fxsPortHotNumber=fxsPortHotNumber, CallerIdType=CallerIdType, huntGrUserName=huntGrUserName, sipProRegistraripRsrv3=sipProRegistraripRsrv3, sipProXcapHotlineName=sipProXcapHotlineName, fxsProfileTable=fxsProfileTable, actionReboot=actionReboot, ModemType=ModemType, fxsProfilePolarityReverse=fxsProfilePolarityReverse, sipProDialplanRegexp=sipProDialplanRegexp, sipProImsEnable=sipProImsEnable, sipProProxyipRsrv2=sipProProxyipRsrv2, fxsPortCtNoanswer=fxsPortCtNoanswer, fxsProfileGainR=fxsProfileGainR, sipProRegistrationRsrv4=sipProRegistrationRsrv4, sipProProxyipRsrv3=sipProProxyipRsrv3, huntGrPorts=huntGrPorts, huntGrWaitingTime=huntGrWaitingTime, sipProProxyipRsrv1=sipProProxyipRsrv1, fxsPortMinFlash=fxsPortMinFlash, sipCommonStunInterval=sipCommonStunInterval, dvoCfnaPrefix=dvoCfnaPrefix, snmpTrap2sink=snmpTrap2sink, sipCommonStunServer=sipCommonStunServer, fxsProfileName=fxsProfileName, fxsPortBusyTimeout=fxsPortBusyTimeout, snmpSyscontact=snmpSyscontact, huntGroupName=huntGroupName, sipProfileIndex=sipProfileIndex, actionSave=actionSave, sipProRingback=sipProRingback, logVoipSipLevel=logVoipSipLevel, logVoipDebug=logVoipDebug, sipProRtcp=sipProRtcp, fxsProfiles=fxsProfiles, sipProRegistraripRsrv2=sipProRegistraripRsrv2, huntGroupEntry=huntGroupEntry, fxsPortUseAltNumber=fxsPortUseAltNumber, sipProRsrvKeepaliveTime=sipProRsrvKeepaliveTime, sipProKeepAliveMode=sipProKeepAliveMode, OutboundType=OutboundType, huntGrRegistration=huntGrRegistration, fxsPortCtBusy=fxsPortCtBusy, fxsProfileInterdigit=fxsProfileInterdigit, 
sipProDtmfTransfer=sipProDtmfTransfer, logVoipInfo=logVoipInfo, fxsPortHotTimeout=fxsPortHotTimeout, fxsProfileEntry=fxsProfileEntry, fxsPorts=fxsPorts, fxsPortCtTimeout=fxsPortCtTimeout, logVoipWarning=logVoipWarning, fxsPortRbTimeout=fxsPortRbTimeout, huntGrPickupEnable=huntGrPickupEnable, snmpInformsink=snmpInformsink, fxsPortTable=fxsPortTable, fxsPortHangupTimeout=fxsPortHangupTimeout, sipProRri=sipProRri, sipCommonNotUseNAPTR=sipCommonNotUseNAPTR, huntGrIndex=huntGrIndex, sipProRtcpCount=sipProRtcpCount, fxsPortCfnaNumber=fxsPortCfnaNumber, fxsPortEntry=fxsPortEntry, sipProRsrvCheckMethod=sipProRsrvCheckMethod, fxsProfileMinOnhookTime=fxsProfileMinOnhookTime, huntGrRowStatus=huntGrRowStatus, sipCommon=sipCommon, sipCommonPublicIp=sipCommonPublicIp, PYSNMP_MODULE_ID=tau8, huntGroupsMIBBoundary=huntGroupsMIBBoundary, sipProSilenceDetector=sipProSilenceDetector, fxsPortCfuNumber=fxsPortCfuNumber, fxsProfileIndex=fxsProfileIndex, sipProDisplayToReg=sipProDisplayToReg, sipProEnableInT38=sipProEnableInT38, huntGrPhone=huntGrPhone, fxsPortCpcRus=fxsPortCpcRus, fxsPortCallerId=fxsPortCallerId, groupsConfig=groupsConfig, fxsPortsMIBBoundary=fxsPortsMIBBoundary, fxsPortAltNumber=fxsPortAltNumber, sipCommonNotUseSRV=sipCommonNotUseSRV, tau8=tau8, fxsPortStopDial=fxsPortStopDial, sipProEnablesip=sipProEnablesip, sipProXcapCallholdName=sipProXcapCallholdName, huntGrSipPort=huntGrSipPort, sipProFaxDirection=sipProFaxDirection, fxsPortPolarityReverse=fxsPortPolarityReverse, sipProKeepAliveInterval=sipProKeepAliveInterval, Option100relType=Option100relType, syslogdPort=syslogdPort, sipProFlashTransfer=sipProFlashTransfer, huntGroupTable=huntGroupTable, sipProConferenceMode=sipProConferenceMode, FaxDirectionType=FaxDirectionType, KeepAliveModeType=KeepAliveModeType, sipProRsrvMode=sipProRsrvMode, huntGrType=huntGrType, sipProXcapCwName=sipProXcapCwName, fxsProfileBusyTimeout=fxsProfileBusyTimeout, syslogdIpaddr=syslogdIpaddr, fxsPortAuthName=fxsPortAuthName, 
sipProCodecOrder=sipProCodecOrder, sipProfileName=sipProfileName, logLocalSize=logLocalSize, DtmfTransferType=DtmfTransferType, logLocalFile=logLocalFile, fxsPortsUseFxsProfile=fxsPortsUseFxsProfile, sipProProxyip=sipProProxyip, fxsProfileHangupTimeout=fxsProfileHangupTimeout, huntGrPassword=huntGrPassword, sipProfilesMIBBoundary=sipProfilesMIBBoundary, snmpSyslocation=snmpSyslocation, fxsPortUsername=fxsPortUsername, fxsPortCtUnconditional=fxsPortCtUnconditional, dvoCfbPrefix=dvoCfbPrefix, fxsProfileMinFlash=fxsProfileMinFlash, sipProPayload=sipProPayload, sipCommonStunEnable=sipCommonStunEnable, fxsPortCallWaiting=fxsPortCallWaiting, sipProOption100rel=sipProOption100rel, dvoCfuPrefix=dvoCfuPrefix, sipProRegistrationRsrv1=sipProRegistrationRsrv1, fxsProfileGainT=fxsProfileGainT, fxsPortEnabled=fxsPortEnabled, fxsProfileMinPulse=fxsProfileMinPulse, sipConfig=sipConfig, sipProEchoCanceler=sipProEchoCanceler, pbxConfig=pbxConfig, fxsPortIndex=fxsPortIndex, fxsPortAuthPass=fxsPortAuthPass, dvoCallwaitingPrefix=dvoCallwaitingPrefix, snmpRwCommunity=snmpRwCommunity, dvoDndPrefix=dvoDndPrefix, sipProRegistrarip=sipProRegistrarip, sipProFaxTransfer1=sipProFaxTransfer1, sipProG711pte=sipProG711pte, FlashMimeType=FlashMimeType, sipProfileTable=sipProfileTable)
| 101.457526 | 7,230 | 0.761156 |
f222b124bc8b9c7b5efc886550cf878f89e07a9e | 5,378 | py | Python | opencamlib/scripts/cutter_shapes.py | JohnyEngine/CNC | e4c77250ab2b749d3014022cbb5eb9924e939993 | [
"Apache-2.0"
] | null | null | null | opencamlib/scripts/cutter_shapes.py | JohnyEngine/CNC | e4c77250ab2b749d3014022cbb5eb9924e939993 | [
"Apache-2.0"
] | null | null | null | opencamlib/scripts/cutter_shapes.py | JohnyEngine/CNC | e4c77250ab2b749d3014022cbb5eb9924e939993 | [
"Apache-2.0"
] | null | null | null | import ocl
import camvtk
import time
import vtk
import datetime
import math
def drawLoops(myscreen, loops, loopcolor):
    """Render each point loop as a closed polyline of camvtk.Line actors.

    myscreen  -- camvtk.VTKScreen that receives the line actors
    loops     -- iterable of point sequences; points expose .x/.y/.z
    loopcolor -- RGB tuple forwarded to camvtk.Line

    The last point of a loop is joined back to the first so the loop is
    closed.  Prints one progress line per loop (Python 2 print statement).
    """
    nloop = 0
    for lop in loops:
        n = 0
        N = len(lop)
        # placeholders; both are overwritten on the first iteration below
        first_point=ocl.Point(-1,-1,5)
        previous=ocl.Point(-1,-1,5)
        for p in lop:
            if n==0: # don't draw anything on the first iteration
                previous=p
                first_point = p
            elif n== (N-1): # the last point
                myscreen.addActor( camvtk.Line(p1=(previous.x,previous.y,previous.z),p2=(p.x,p.y,p.z),color=loopcolor) ) # the normal line
                # and a line from p to the first point
                myscreen.addActor( camvtk.Line(p1=(p.x,p.y,p.z),p2=(first_point.x,first_point.y,first_point.z),color=loopcolor) )
            else:
                myscreen.addActor( camvtk.Line(p1=(previous.x,previous.y,previous.z),p2=(p.x,p.y,p.z),color=loopcolor) )
                previous=p
            n=n+1
        print "rendered loop ",nloop, " with ", len(lop), " points"
        nloop = nloop+1
def getWaterline(s, cutter, zh, sampling):
    """Compute the waterline of STL surface ``s`` at height ``zh``.

    Runs an ocl.Waterline operation with the given cutter and sampling
    distance and returns the resulting list of point loops.
    """
    operation = ocl.Waterline()
    # operation.setThreads(1)  # force a single thread for easier debugging
    operation.setSTL(s)
    operation.setCutter(cutter)
    operation.setZ(zh)
    operation.setSampling(sampling)
    operation.run()
    return operation.getLoops()
def getPathsY(s,cutter,sampling,y):
    """Adaptive drop-cutter sampling of one x-parallel line at the given y.

    The line spans +/-1.52 cutter diameters in x and is sampled at the
    given distance, adaptively refined down to sampling/700.  Returns the
    list of cutter-location (CL) points produced by the operation.
    """
    dropcutter = ocl.AdaptivePathDropCutter()  # plain ocl.PathDropCutter() also works here
    dropcutter.setSTL(s)
    dropcutter.setCutter(cutter)
    dropcutter.setZ(-20)  # NOTE(review): setZ semantics (minimum z?) not visible here -- confirm in ocl docs
    dropcutter.setSampling(sampling)
    dropcutter.setMinSampling(sampling / 700)
    half_span = 1.52 * cutter.getDiameter()
    start_point = ocl.Point(-half_span, y, -111)  # start-point of line
    end_point = ocl.Point(half_span, y, -111)     # end-point of line
    route = ocl.Path()
    route.append(ocl.Line(start_point, end_point))
    dropcutter.setPath(route)
    dropcutter.run()
    return dropcutter.getCLPoints()
def getPathsX(s,cutter,sampling,x):
    """Adaptive drop-cutter sampling of one y-parallel line at the given x.

    Mirror of getPathsY: the line spans +/-1.52 cutter diameters in y and
    is sampled at the given distance, adaptively refined down to
    sampling/700.  Returns the list of cutter-location (CL) points.
    """
    dropcutter = ocl.AdaptivePathDropCutter()  # plain ocl.PathDropCutter() also works here
    dropcutter.setSTL(s)
    dropcutter.setCutter(cutter)
    dropcutter.setZ(-20)  # NOTE(review): setZ semantics (minimum z?) not visible here -- confirm in ocl docs
    dropcutter.setSampling(sampling)
    dropcutter.setMinSampling(sampling / 700)
    half_span = 1.52 * cutter.getDiameter()
    start_point = ocl.Point(x, -half_span, -111)  # start-point of line
    end_point = ocl.Point(x, half_span, -111)     # end-point of line
    route = ocl.Path()
    route.append(ocl.Line(start_point, end_point))
    dropcutter.setPath(route)
    dropcutter.run()
    return dropcutter.getCLPoints()
if __name__ == "__main__":
    print ocl.revision()
    myscreen = camvtk.VTKScreen()
    #stl = camvtk.STLSurf("../stl/demo.stl")
    #stl = camvtk.STLSurf("../stl/30sphere.stl")
    #myscreen.addActor(stl)
    # Build a one-triangle test surface: a vertical spike from (0,0,0)
    # down to z=-tip with a narrow base of width 2*base.
    base=0.1
    tip=10
    a=ocl.Point(base,0,-tip)
    myscreen.addActor(camvtk.Point(center=(a.x,a.y,a.z), color=(1,0,1)));
    b=ocl.Point(-base,0,-tip)
    myscreen.addActor(camvtk.Point(center=(b.x,b.y,b.z), color=(1,0,1)));
    c=ocl.Point(0,0,0)
    myscreen.addActor( camvtk.Point(center=(c.x,c.y,c.z), color=(1,0,1)));
    #myscreen.addActor( camvtk.Line(p1=(1,0,0),p2=(0,0,0.3)) )
    #myscreen.addActor( camvtk.Line(p1=(0,0,0.3),p2=(0,1,0)) )
    #myscreen.addActor( camvtk.Line(p1=(1,0,0),p2=(0,1,0)) )
    t = ocl.Triangle(a,b,c)
    s = ocl.STLSurf()
    s.addTriangle(t)
    print "STL surface read,", s.size(), "triangles"
    # Heights at which waterlines are computed (0 down to -5.85 in 0.15 steps).
    Nwaterlines = 40
    zh=[-0.15*x for x in xrange(Nwaterlines)]
    #zh=[15]
    diam = 3.01
    length = 50
    loops = []
    sampling = 0.1
    # Pick one cutter shape; the commented alternatives exercise the other
    # cutter classes exported by ocl.
    #cutter = ocl.CylCutter( diam , length )
    #cutter = ocl.BallCutter( diam , length )
    #cutter = ocl.BullCutter( diam , diam/5, length )
    #cutter = ocl.ConeCutter(diam, math.pi/3, length)
    #cutter = ocl.CylConeCutter(diam/float(3),diam,math.pi/float(9))
    #cutter = ocl.BallConeCutter(diam/float(2.3),diam,math.pi/float(5))
    #cutter = ocl.BullConeCutter(diam/1.5, diam/10, diam, math.pi/10)
    cutter = ocl.ConeConeCutter(diam/2,math.pi/3,diam,math.pi/6)
    print cutter
    #raw_input("Press Enter to terminate")
    # Drop-cutter scanlines: symmetric offsets out to one cutter diameter,
    # sampled along both the x and y directions.
    ptsy_all = []
    ptsx_all = []
    yvals=[]
    Nmax=15
    for i in range(Nmax):
        yvals.append( diam* float(i)/float(Nmax) )
        yvals.append( -diam* float(i)/float(Nmax) )
    for y in yvals: #[diam*0.4, diam*0.2, 0, -diam*0.2,diam*(-0.4)]:
        ptsy = getPathsY(s,cutter,sampling, y)
        ptsx = getPathsX(s,cutter,sampling, y)
        ptsy_all.append(ptsy)
        ptsx_all.append(ptsx)
        #print " got ",len(pts)," cl-points"
        #for p in pts:
        #    print p.x," ",p.y," ",p.z
    #exit()
    # Waterlines at every height in zh, flattened into a single loop list.
    loops = []
    for z in zh:
        z_loops = getWaterline(s, cutter, z, sampling)
        for l in z_loops:
            loops.append(l)
        #for l in line:
        #drawLoops(myscreen, line, camvtk.cyan)
    #for l in cutter_loops:
    #    loops.append(l)
    print "All waterlines done. Got", len(loops)," loops in total."
    # draw the loops
    drawLoops(myscreen, loops, camvtk.cyan)
    drawLoops(myscreen, ptsy_all, camvtk.pink)
    drawLoops(myscreen, ptsx_all, camvtk.lblue)
    print "done."
    myscreen.camera.SetPosition(15, 13, 7)
    myscreen.camera.SetFocalPoint(5, 5, 0)
    camvtk.drawArrows(myscreen,center=(0,0,3))
    camvtk.drawOCLtext(myscreen)
    myscreen.render()
    myscreen.iren.Start()
    #raw_input("Press Enter to terminate")
| 32.39759 | 138 | 0.594831 |
65260e9ceaea40f24409e0e48aa179d6d6915d49 | 4,446 | py | Python | tests/pgshovel/replication/streams/kafka.py | disqus/pgshovel | dc8039f9fe3331a99f10b7e6309151c08a292879 | [
"Apache-2.0"
] | 13 | 2015-08-21T09:10:23.000Z | 2021-08-15T20:26:00.000Z | tests/pgshovel/replication/streams/kafka.py | disqus/pgshovel | dc8039f9fe3331a99f10b7e6309151c08a292879 | [
"Apache-2.0"
] | 29 | 2015-08-14T03:09:55.000Z | 2015-09-15T19:29:14.000Z | tests/pgshovel/replication/streams/kafka.py | disqus/pgshovel | dc8039f9fe3331a99f10b7e6309151c08a292879 | [
"Apache-2.0"
] | 1 | 2015-08-26T05:55:33.000Z | 2015-08-26T05:55:33.000Z | from __future__ import absolute_import
import pytest
from itertools import islice
from kafka import (
KafkaClient,
SimpleProducer,
)
from tests.pgshovel.fixtures import (
cluster,
create_temporary_database,
)
from tests.pgshovel.streams.fixtures import (
DEFAULT_PUBLISHER,
begin,
transaction,
transactions,
)
from pgshovel.interfaces.common_pb2 import Snapshot
from pgshovel.interfaces.configurations_pb2 import ReplicationSetConfiguration
from pgshovel.interfaces.replication_pb2 import (
ConsumerState,
State,
BootstrapState,
TransactionState,
)
from pgshovel.interfaces.streams_pb2 import (
Header,
Message,
)
from pgshovel.replication.streams.kafka import KafkaStream
from pgshovel.replication.validation.consumers import SequencingError
from pgshovel.replication.validation.transactions import InvalidEventError
from pgshovel.relay.streams.kafka import KafkaWriter
from pgshovel.streams.utilities import UnableToPrimeError
@pytest.yield_fixture
def configuration():
    # Base stream configuration: a single Kafka broker reachable as "kafka".
    config = {'hosts': 'kafka:9092'}
    yield config
@pytest.yield_fixture
def stream(configuration, cluster, client):
    # Build the consumer-side stream and create its topic up front so the
    # individual tests never race topic auto-creation.
    kafka_stream = KafkaStream.configure(configuration, cluster, 'default')
    client.ensure_topic_exists(kafka_stream.topic)
    yield kafka_stream
@pytest.yield_fixture
def client(configuration):
    # Raw Kafka client pointed at the configured broker(s).
    kafka_client = KafkaClient(configuration['hosts'])
    yield kafka_client
@pytest.yield_fixture
def writer(client, stream):
    # Relay-side writer publishing onto the same topic the stream consumes.
    yield KafkaWriter(SimpleProducer(client), stream.topic)
@pytest.yield_fixture
def state():
    # Replication state as it looks right after bootstrap: node '1234' with
    # a snapshot covering transactions 1..2 and no stream progress yet.
    snapshot = Snapshot(min=1, max=2)
    yield State(bootstrap_state=BootstrapState(node='1234', snapshot=snapshot))
@pytest.yield_fixture
def sliced_transaction():
    # Take two whole transactions (six messages) and drop the leading BEGIN,
    # yielding a stream that starts in the middle of a transaction.
    messages = list(islice(transactions(), 6))
    assert messages[0].batch_operation.begin_operation == begin
    yield messages[1:]
def test_starts_at_beginning_of_stream_for_bootstrapped_state(writer, stream, state):
    # A freshly bootstrapped state carries no offset, so consumption starts
    # at the beginning of the topic and yields the pushed transaction as-is.
    writer.push(transaction)
    messages = [m for _, _, m in islice(stream.consume(state), 3)]
    assert messages == transaction
def test_yields_new_update_state_after_each_message(writer, stream, state):
    # The begin/operation/commit triple should walk the transaction state
    # machine through in_transaction -> in_transaction -> committed, while
    # the consumer state tracks the offset and header of each message.
    expected_states = ('in_transaction', 'in_transaction', 'committed')
    writer.push(transaction)
    for updated, offset, message in islice(stream.consume(state), 3):
        consumer_state = updated.stream_state.consumer_state
        assert consumer_state.offset == offset
        assert consumer_state.header == message.header
        assert updated.stream_state.transaction_state.WhichOneof('state') == expected_states[offset]
def test_uses_existing_stream_state_if_it_exists(writer, stream, state):
    # Consume three messages, then restart from the state returned with the
    # third: the new consumer must resume at offset 3, not the beginning.
    writer.push(islice(transactions(), 6))
    consumer = stream.consume(state)
    for _ in range(2):
        next(consumer)
    resumed_state, _, _ = next(consumer)
    _, resumed_offset, _ = next(stream.consume(resumed_state))
    assert resumed_offset == 3
def test_crashes_on_no_state(stream):
    # Consuming without any replication state is a programming error and
    # should surface immediately as an AttributeError on the None state.
    with pytest.raises(AttributeError):
        next(stream.consume(None))
def test_validates_stream_and_crashes_when_invalid(writer, stream, state):
    # Replace the second message with a copy of the first so the sequence
    # no longer advances; stream validation must reject it.
    corrupted = list(islice(transactions(), 3))
    corrupted[1] = corrupted[0]
    writer.push(corrupted)
    with pytest.raises(SequencingError):
        list(stream.consume(state))
def test_discards_messages_until_start_of_transaction(writer, stream, state, sliced_transaction):
    # The dangling partial transaction at the head of the stream must be
    # skipped; only the complete trailing transaction is delivered.
    writer.push(sliced_transaction)
    delivered = [m for _, _, m in islice(stream.consume(state), 3)]
    assert delivered == sliced_transaction[-3:]
def test_discarded_messages_is_configurable(configuration, cluster, client, state, writer, sliced_transaction):
    # prime_threshold bounds how many messages may be discarded while hunting
    # for a transaction boundary: 1 gives up before the dangling head is
    # skipped, while 3 is enough to reach the real BEGIN.
    writer.push(sliced_transaction)
    configuration['prime_threshold'] = 1
    impatient_stream = KafkaStream.configure(configuration, cluster, 'default')
    patient_config = configuration.copy()
    patient_config['prime_threshold'] = 3
    patient_stream = KafkaStream.configure(patient_config, cluster, 'default')
    client.ensure_topic_exists(impatient_stream.topic)
    with pytest.raises(UnableToPrimeError):
        list(islice(impatient_stream.consume(state), 3))
    delivered = [m for _, _, m in islice(patient_stream.consume(state), 3)]
    assert delivered == sliced_transaction[-3:]
| 28.683871 | 111 | 0.752587 |
f58ddb1ddf62a05f0e24140373df29ae83ad3606 | 712 | py | Python | setup.py | xinabox/Python-SH01 | 7805cc375d20ebbe05d15e032b9bab61bb94f9b8 | [
"MIT"
] | null | null | null | setup.py | xinabox/Python-SH01 | 7805cc375d20ebbe05d15e032b9bab61bb94f9b8 | [
"MIT"
] | null | null | null | setup.py | xinabox/Python-SH01 | 7805cc375d20ebbe05d15e032b9bab61bb94f9b8 | [
"MIT"
] | null | null | null | import setuptools
# The PyPI long description comes straight from the repository README.
with open("README.md", "r") as readme:
    long_description = readme.read()

# Package metadata for the xinabox SH01 capacitive touch sensor driver.
setuptools.setup(
    name="xinabox-SH01",
    version="0.0.4",
    author="Luqmaan Baboo",
    author_email="luqmaanbaboo@gmail.com",
    description="Capacitive touch sensor",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/xinabox/Python-SH01",
    install_requires=["xinabox-CORE"],
    py_modules=["xSH01"],
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
)
b3dfbff3d725337976a3c708ae16db1c5e2f0e20 | 31,340 | py | Python | tools/Parser_and_Converters/python/pynmrml/nmrML.py | proccaserra/nmrML | ddddca05880c93e9f7bcdbbe5f1b2a3d0e2f8949 | [
"MIT"
] | 20 | 2015-02-04T23:23:11.000Z | 2022-03-06T12:58:01.000Z | tools/Parser_and_Converters/python/pynmrml/nmrML.py | proccaserra/nmrML | ddddca05880c93e9f7bcdbbe5f1b2a3d0e2f8949 | [
"MIT"
] | 85 | 2015-03-04T19:31:03.000Z | 2022-02-03T10:03:08.000Z | tools/Parser_and_Converters/python/pynmrml/nmrML.py | proccaserra/nmrML | ddddca05880c93e9f7bcdbbe5f1b2a3d0e2f8949 | [
"MIT"
] | 11 | 2015-01-19T06:19:16.000Z | 2022-02-03T10:04:11.000Z | #!/usr/bin/env python
#
# Generated Wed Oct 30 15:59:51 2013 by generateDS.py version 2.11a.
#
import sys
import nmrML_lib as supermod
# Resolve an ElementTree implementation for etree_, preferring lxml, then
# the stdlib C/Python ElementTree (Python 2.5+), then the standalone
# pre-2.5 cElementTree / elementtree packages.
etree_ = None
Verbose_import_ = False  # set True to print which backend was selected
(
    XMLParser_import_none, XMLParser_import_lxml,
    XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None  # one of the XMLParser_import_* codes above
try:
    # lxml
    from lxml import etree as etree_
    XMLParser_import_library = XMLParser_import_lxml
    if Verbose_import_:
        print("running with lxml.etree")
except ImportError:
    try:
        # cElementTree from Python 2.5+
        import xml.etree.cElementTree as etree_
        XMLParser_import_library = XMLParser_import_elementtree
        if Verbose_import_:
            print("running with cElementTree on Python 2.5+")
    except ImportError:
        try:
            # ElementTree from Python 2.5+
            import xml.etree.ElementTree as etree_
            XMLParser_import_library = XMLParser_import_elementtree
            if Verbose_import_:
                print("running with ElementTree on Python 2.5+")
        except ImportError:
            try:
                # normal cElementTree install
                import cElementTree as etree_
                XMLParser_import_library = XMLParser_import_elementtree
                if Verbose_import_:
                    print("running with cElementTree")
            except ImportError:
                try:
                    # normal ElementTree install
                    import elementtree.ElementTree as etree_
                    XMLParser_import_library = XMLParser_import_elementtree
                    if Verbose_import_:
                        print("running with ElementTree")
                except ImportError:
                    # No backend at all: fail loudly instead of deferring the
                    # error to the first parse call.
                    raise ImportError(
                        "Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse XML via the selected etree_ backend.

    When lxml is the active backend and the caller supplied no parser,
    default to its ElementTree-compatible parser so that, for example,
    comments are ignored.
    """
    using_lxml = XMLParser_import_library == XMLParser_import_lxml
    if using_lxml and 'parser' not in kwargs:
        kwargs['parser'] = etree_.ETCompatXMLParser()
    return etree_.parse(*args, **kwargs)
#
# Globals
#
ExternalEncoding = 'utf-8'  # module-wide output encoding; presumably consumed by the exporters in nmrML_lib -- confirm
#
# Data representation classes
#
class nmrMLType(supermod.nmrMLType):
    """Extension subclass of supermod.nmrMLType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, version=None, accession_url=None, accession=None, id=None, cvList=None, fileDescription=None, contactList=None, referenceableParamGroupList=None, sourceFileList=None, softwareList=None, instrumentConfigurationList=None, dataProcessingList=None, sampleList=None, acquisition=None, spectrumList=None):
        super(nmrMLType, self).__init__(version, accession_url, accession, id, cvList, fileDescription, contactList, referenceableParamGroupList, sourceFileList, softwareList, instrumentConfigurationList, dataProcessingList, sampleList, acquisition, spectrumList, )
supermod.nmrMLType.subclass = nmrMLType
# end class nmrMLType
class CVListType(supermod.CVListType):
    """Extension subclass of supermod.CVListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, cv=None):
        super(CVListType, self).__init__(count, cv, )
supermod.CVListType.subclass = CVListType
# end class CVListType
class CVType(supermod.CVType):
    """Extension subclass of supermod.CVType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, fullName=None, version=None, id=None, URI=None):
        super(CVType, self).__init__(fullName, version, id, URI, )
supermod.CVType.subclass = CVType
# end class CVType
class ContactListType(supermod.ContactListType):
    """Extension subclass of supermod.ContactListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, contact=None):
        super(ContactListType, self).__init__(contact, )
supermod.ContactListType.subclass = ContactListType
# end class ContactListType
class ContactRefType(supermod.ContactRefType):
    """Extension subclass of supermod.ContactRefType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, ref=None):
        super(ContactRefType, self).__init__(ref, )
supermod.ContactRefType.subclass = ContactRefType
# end class ContactRefType
class ContactRefListType(supermod.ContactRefListType):
    """Extension subclass of supermod.ContactRefListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, contactRef=None):
        super(ContactRefListType, self).__init__(count, contactRef, )
supermod.ContactRefListType.subclass = ContactRefListType
# end class ContactRefListType
class FileDescriptionType(supermod.FileDescriptionType):
    """Extension subclass of supermod.FileDescriptionType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, fileContent=None):
        super(FileDescriptionType, self).__init__(fileContent, )
supermod.FileDescriptionType.subclass = FileDescriptionType
# end class FileDescriptionType
class CVTermType(supermod.CVTermType):
    """Extension subclass of supermod.CVTermType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, cvRef=None, accession=None, name=None, extensiontype_=None):
        super(CVTermType, self).__init__(cvRef, accession, name, extensiontype_, )
supermod.CVTermType.subclass = CVTermType
# end class CVTermType
class CVParamType(supermod.CVParamType):
    """Extension subclass of supermod.CVParamType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, cvRef=None, accession=None, value=None, name=None):
        super(CVParamType, self).__init__(cvRef, accession, value, name, )
supermod.CVParamType.subclass = CVParamType
# end class CVParamType
class CVParamWithUnitType(supermod.CVParamWithUnitType):
    """Extension subclass of supermod.CVParamWithUnitType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, name=None, unitName=None, accession=None, value=None, unitAccession=None, cvRef=None, unitCvRef=None):
        super(CVParamWithUnitType, self).__init__(name, unitName, accession, value, unitAccession, cvRef, unitCvRef, )
supermod.CVParamWithUnitType.subclass = CVParamWithUnitType
# end class CVParamWithUnitType
class ValueWithUnitType(supermod.ValueWithUnitType):
    """Extension subclass of supermod.ValueWithUnitType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, unitName=None, unitCvRef=None, value=None, unitAccession=None):
        super(ValueWithUnitType, self).__init__(unitName, unitCvRef, value, unitAccession, )
supermod.ValueWithUnitType.subclass = ValueWithUnitType
# end class ValueWithUnitType
class UserParamType(supermod.UserParamType):
    """Extension subclass of supermod.UserParamType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, name=None, unitName=None, valueType=None, value=None, unitAccession=None, unitCvRef=None):
        super(UserParamType, self).__init__(name, unitName, valueType, value, unitAccession, unitCvRef, )
supermod.UserParamType.subclass = UserParamType
# end class UserParamType
class ParamGroupType(supermod.ParamGroupType):
    """Extension subclass of supermod.ParamGroupType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, extensiontype_=None):
        super(ParamGroupType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, extensiontype_, )
supermod.ParamGroupType.subclass = ParamGroupType
# end class ParamGroupType
class ReferenceableParamGroupType(supermod.ReferenceableParamGroupType):
    """Extension subclass of supermod.ReferenceableParamGroupType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, id=None, cvParam=None, userParam=None):
        super(ReferenceableParamGroupType, self).__init__(id, cvParam, userParam, )
supermod.ReferenceableParamGroupType.subclass = ReferenceableParamGroupType
# end class ReferenceableParamGroupType
class ReferenceableParamGroupRefType(supermod.ReferenceableParamGroupRefType):
    """Extension subclass of supermod.ReferenceableParamGroupRefType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, ref=None):
        super(ReferenceableParamGroupRefType, self).__init__(ref, )
supermod.ReferenceableParamGroupRefType.subclass = ReferenceableParamGroupRefType
# end class ReferenceableParamGroupRefType
class ReferenceableParamGroupListType(supermod.ReferenceableParamGroupListType):
    """Extension subclass of supermod.ReferenceableParamGroupListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, referenceableParamGroup=None):
        super(ReferenceableParamGroupListType, self).__init__(count, referenceableParamGroup, )
supermod.ReferenceableParamGroupListType.subclass = ReferenceableParamGroupListType
# end class ReferenceableParamGroupListType
class SourceFileListType(supermod.SourceFileListType):
    """Extension subclass of supermod.SourceFileListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, sourceFile=None):
        super(SourceFileListType, self).__init__(count, sourceFile, )
supermod.SourceFileListType.subclass = SourceFileListType
# end class SourceFileListType
class SampleListType(supermod.SampleListType):
    """Extension subclass of supermod.SampleListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, sample=None):
        super(SampleListType, self).__init__(count, sample, )
supermod.SampleListType.subclass = SampleListType
# end class SampleListType
class SampleType(supermod.SampleType):
    """Extension subclass of supermod.SampleType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, originalBiologicalSampleReference=None, originalBiologicalSamplepH=None, postBufferpH=None, buffer=None, fieldFrequencyLock=None, chemicalShiftStandard=None, solventType=None, additionalSoluteList=None, solventConcentration=None, concentrationStandard=None):
        super(SampleType, self).__init__(originalBiologicalSampleReference, originalBiologicalSamplepH, postBufferpH, buffer, fieldFrequencyLock, chemicalShiftStandard, solventType, additionalSoluteList, solventConcentration, concentrationStandard, )
supermod.SampleType.subclass = SampleType
# end class SampleType
class SoftwareListType(supermod.SoftwareListType):
    """Extension subclass of supermod.SoftwareListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, software=None):
        super(SoftwareListType, self).__init__(count, software, )
supermod.SoftwareListType.subclass = SoftwareListType
# end class SoftwareListType
class SoftwareType(supermod.SoftwareType):
    """Extension subclass of supermod.SoftwareType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, cvRef=None, accession=None, name=None, version=None, id=None):
        super(SoftwareType, self).__init__(cvRef, accession, name, version, id, )
supermod.SoftwareType.subclass = SoftwareType
# end class SoftwareType
class SoftwareRefType(supermod.SoftwareRefType):
    """Extension subclass of supermod.SoftwareRefType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, ref=None):
        super(SoftwareRefType, self).__init__(ref, )
supermod.SoftwareRefType.subclass = SoftwareRefType
# end class SoftwareRefType
class SoftwareRefListType(supermod.SoftwareRefListType):
    """Extension subclass of supermod.SoftwareRefListType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, count=None, softwareRef=None):
        super(SoftwareRefListType, self).__init__(count, softwareRef, )
supermod.SoftwareRefListType.subclass = SoftwareRefListType
# end class SoftwareRefListType
class SourceFileType(supermod.SourceFileType):
    """Extension subclass of supermod.SourceFileType (generateDS.py hook); registered below via ``subclass``."""
    def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, sha1=None, location=None, id=None, name=None):
        super(SourceFileType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, sha1, location, id, name, )
supermod.SourceFileType.subclass = SourceFileType
# end class SourceFileType
class SourceFileRefType(supermod.SourceFileRefType):
def __init__(self, ref=None):
super(SourceFileRefType, self).__init__(ref, )
supermod.SourceFileRefType.subclass = SourceFileRefType
# end class SourceFileRefType
class SourceFileRefListType(supermod.SourceFileRefListType):
def __init__(self, count=None, sourceFileRef=None):
super(SourceFileRefListType, self).__init__(count, sourceFileRef, )
supermod.SourceFileRefListType.subclass = SourceFileRefListType
# end class SourceFileRefListType
class InstrumentConfigurationType(supermod.InstrumentConfigurationType):
def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, id=None, softwareRef=None):
super(InstrumentConfigurationType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, id, softwareRef, )
supermod.InstrumentConfigurationType.subclass = InstrumentConfigurationType
# end class InstrumentConfigurationType
class InstrumentConfigurationListType(supermod.InstrumentConfigurationListType):
def __init__(self, count=None, instrumentConfiguration=None):
super(InstrumentConfigurationListType, self).__init__(count, instrumentConfiguration, )
supermod.InstrumentConfigurationListType.subclass = InstrumentConfigurationListType
# end class InstrumentConfigurationListType
class DataProcessingType(supermod.DataProcessingType):
def __init__(self, id=None, processingMethod=None):
super(DataProcessingType, self).__init__(id, processingMethod, )
supermod.DataProcessingType.subclass = DataProcessingType
# end class DataProcessingType
class DataProcessingListType(supermod.DataProcessingListType):
def __init__(self, count=None, dataProcessing=None):
super(DataProcessingListType, self).__init__(count, dataProcessing, )
supermod.DataProcessingListType.subclass = DataProcessingListType
# end class DataProcessingListType
class ProcessingMethodType(supermod.ProcessingMethodType):
def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, order=None, softwareRef=None):
super(ProcessingMethodType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, order, softwareRef, )
supermod.ProcessingMethodType.subclass = ProcessingMethodType
# end class ProcessingMethodType
class BinaryDataArrayType(supermod.BinaryDataArrayType):
def __init__(self, byteFormat=None, encodedLength=None, compressed=None, dataProcessingRef=None, valueOf_=None):
super(BinaryDataArrayType, self).__init__(byteFormat, encodedLength, compressed, dataProcessingRef, valueOf_, )
supermod.BinaryDataArrayType.subclass = BinaryDataArrayType
# end class BinaryDataArrayType
class SoluteType(supermod.SoluteType):
def __init__(self, name=None, concentrationInSample=None):
super(SoluteType, self).__init__(name, concentrationInSample, )
supermod.SoluteType.subclass = SoluteType
# end class SoluteType
class TemperatureType(supermod.TemperatureType):
def __init__(self, temperatureUnitID=None, temperature=None, temperatureUnitName=None):
super(TemperatureType, self).__init__(temperatureUnitID, temperature, temperatureUnitName, )
supermod.TemperatureType.subclass = TemperatureType
# end class TemperatureType
# Auto-generated (generateDS.py-style) nmrML subclass stubs: each class
# forwards its constructor arguments to its ``supermod`` base and registers
# itself through the ``subclass`` hook used by the generated parser.
class AdditionalSoluteListType(supermod.AdditionalSoluteListType):
    def __init__(self, solute=None):
        super(AdditionalSoluteListType, self).__init__(solute, )
supermod.AdditionalSoluteListType.subclass = AdditionalSoluteListType
# end class AdditionalSoluteListType
class AcquisitionDimensionParameterSetType(supermod.AcquisitionDimensionParameterSetType):
    def __init__(self, numberOfDataPoints=None, decoupled=None, acquisitionNucleus=None, gammaB1PulseFieldStrength=None, sweepWidth=None, irradiationFrequency=None, decouplingMethod=None, samplingStrategy=None, samplingTimePoints=None):
        super(AcquisitionDimensionParameterSetType, self).__init__(numberOfDataPoints, decoupled, acquisitionNucleus, gammaB1PulseFieldStrength, sweepWidth, irradiationFrequency, decouplingMethod, samplingStrategy, samplingTimePoints, )
supermod.AcquisitionDimensionParameterSetType.subclass = AcquisitionDimensionParameterSetType
# end class AcquisitionDimensionParameterSetType
class AcquisitionIndirectDimensionParameterSetType(supermod.AcquisitionIndirectDimensionParameterSetType):
    def __init__(self, numberOfDataPoints=None, acquisitionParamsFileRef=None, decoupled=None, acquisitionNucleus=None, gammaB1PulseFieldStrength=None, sweepWidth=None, timeDomain=None, encodingMethod=None, irradiationFrequency=None):
        super(AcquisitionIndirectDimensionParameterSetType, self).__init__(numberOfDataPoints, acquisitionParamsFileRef, decoupled, acquisitionNucleus, gammaB1PulseFieldStrength, sweepWidth, timeDomain, encodingMethod, irradiationFrequency, )
supermod.AcquisitionIndirectDimensionParameterSetType.subclass = AcquisitionIndirectDimensionParameterSetType
# end class AcquisitionIndirectDimensionParameterSetType
class AcquisitionParameterSetType(supermod.AcquisitionParameterSetType):
    def __init__(self, numberOfScans=None, numberOfSteadyStateScans=None, contactRefList=None, acquisitionParameterFileRefList=None, softwareRef=None, sampleContainer=None, sampleAcquisitionTemperature=None, solventSuppressionMethod=None, spinningRate=None, relaxationDelay=None, pulseSequence=None, shapedPulseFile=None, extensiontype_=None):
        super(AcquisitionParameterSetType, self).__init__(numberOfScans, numberOfSteadyStateScans, contactRefList, acquisitionParameterFileRefList, softwareRef, sampleContainer, sampleAcquisitionTemperature, solventSuppressionMethod, spinningRate, relaxationDelay, pulseSequence, shapedPulseFile, extensiontype_, )
supermod.AcquisitionParameterSetType.subclass = AcquisitionParameterSetType
# end class AcquisitionParameterSetType
class AcquisitionParameterSet1DType(supermod.AcquisitionParameterSet1DType):
    def __init__(self, numberOfScans=None, numberOfSteadyStateScans=None, contactRefList=None, acquisitionParameterFileRefList=None, softwareRef=None, sampleContainer=None, sampleAcquisitionTemperature=None, solventSuppressionMethod=None, spinningRate=None, relaxationDelay=None, pulseSequence=None, shapedPulseFile=None, DirectDimensionParameterSet=None):
        super(AcquisitionParameterSet1DType, self).__init__(numberOfScans, numberOfSteadyStateScans, contactRefList, acquisitionParameterFileRefList, softwareRef, sampleContainer, sampleAcquisitionTemperature, solventSuppressionMethod, spinningRate, relaxationDelay, pulseSequence, shapedPulseFile, DirectDimensionParameterSet, )
supermod.AcquisitionParameterSet1DType.subclass = AcquisitionParameterSet1DType
# end class AcquisitionParameterSet1DType
class AcquisitionParameterSetMultiDType(supermod.AcquisitionParameterSetMultiDType):
    def __init__(self, numberOfScans=None, numberOfSteadyStateScans=None, contactRefList=None, acquisitionParameterFileRefList=None, softwareRef=None, sampleContainer=None, sampleAcquisitionTemperature=None, solventSuppressionMethod=None, spinningRate=None, relaxationDelay=None, pulseSequence=None, shapedPulseFile=None, hadamardParameterSet=None, directDimensionParameterSet=None, encodingScheme=None, indirectDimensionParameterSet=None):
        super(AcquisitionParameterSetMultiDType, self).__init__(numberOfScans, numberOfSteadyStateScans, contactRefList, acquisitionParameterFileRefList, softwareRef, sampleContainer, sampleAcquisitionTemperature, solventSuppressionMethod, spinningRate, relaxationDelay, pulseSequence, shapedPulseFile, hadamardParameterSet, directDimensionParameterSet, encodingScheme, indirectDimensionParameterSet, )
supermod.AcquisitionParameterSetMultiDType.subclass = AcquisitionParameterSetMultiDType
# end class AcquisitionParameterSetMultiDType
class PulseSequenceType(supermod.PulseSequenceType):
    def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, pulseSequenceFileRefList=None):
        super(PulseSequenceType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, pulseSequenceFileRefList, )
supermod.PulseSequenceType.subclass = PulseSequenceType
# end class PulseSequenceType
class AcquisitionType(supermod.AcquisitionType):
    def __init__(self, acquisition1D=None, acquisitionMultiD=None):
        super(AcquisitionType, self).__init__(acquisition1D, acquisitionMultiD, )
supermod.AcquisitionType.subclass = AcquisitionType
# end class AcquisitionType
class Acquisition1DType(supermod.Acquisition1DType):
    def __init__(self, acquisitionParameterSet=None, fidData=None):
        super(Acquisition1DType, self).__init__(acquisitionParameterSet, fidData, )
supermod.Acquisition1DType.subclass = Acquisition1DType
# end class Acquisition1DType
class AcquisitionMultiDType(supermod.AcquisitionMultiDType):
    def __init__(self, acquisitionParameterSet=None, fidData=None):
        super(AcquisitionMultiDType, self).__init__(acquisitionParameterSet, fidData, )
supermod.AcquisitionMultiDType.subclass = AcquisitionMultiDType
# end class AcquisitionMultiDType
class SpectrumListType(supermod.SpectrumListType):
    def __init__(self, count=None, defaultDataProcessingRef=None, spectrum1D=None, spectrumMultiD=None):
        super(SpectrumListType, self).__init__(count, defaultDataProcessingRef, spectrum1D, spectrumMultiD, )
supermod.SpectrumListType.subclass = SpectrumListType
# end class SpectrumListType
class SpectrumType(supermod.SpectrumType):
    def __init__(self, numberOfDataPoints=None, processingSoftwareRefList=None, processingContactRefList=None, spectrumDataArray=None, xAxis=None, yAxisType=None, processingParameterSet=None, extensiontype_=None):
        super(SpectrumType, self).__init__(numberOfDataPoints, processingSoftwareRefList, processingContactRefList, spectrumDataArray, xAxis, yAxisType, processingParameterSet, extensiontype_, )
supermod.SpectrumType.subclass = SpectrumType
# end class SpectrumType
class Spectrum1DType(supermod.Spectrum1DType):
    def __init__(self, numberOfDataPoints=None, processingSoftwareRefList=None, processingContactRefList=None, spectrumDataArray=None, xAxis=None, yAxisType=None, processingParameterSet=None, firstDimensionProcessingParameterSet=None):
        super(Spectrum1DType, self).__init__(numberOfDataPoints, processingSoftwareRefList, processingContactRefList, spectrumDataArray, xAxis, yAxisType, processingParameterSet, firstDimensionProcessingParameterSet, )
supermod.Spectrum1DType.subclass = Spectrum1DType
# end class Spectrum1DType
class SpectrumMultiDType(supermod.SpectrumMultiDType):
    def __init__(self, numberOfDataPoints=None, processingSoftwareRefList=None, processingContactRefList=None, spectrumDataArray=None, xAxis=None, yAxisType=None, processingParameterSet=None, firstDimensionProcessingParameterSet=None, higherDimensionProcessingParameterSet=None, projected3DProcessingParamaterSet=None):
        super(SpectrumMultiDType, self).__init__(numberOfDataPoints, processingSoftwareRefList, processingContactRefList, spectrumDataArray, xAxis, yAxisType, processingParameterSet, firstDimensionProcessingParameterSet, higherDimensionProcessingParameterSet, projected3DProcessingParamaterSet, )
supermod.SpectrumMultiDType.subclass = SpectrumMultiDType
# end class SpectrumMultiDType
class SpectralProcessingParameterSetType(supermod.SpectralProcessingParameterSetType):
    def __init__(self, processingSoftwareRefList=None, postAcquisitionSolventSuppressionMethod=None, dataTransformationMethod=None, calibrationCompound=None, extensiontype_=None):
        super(SpectralProcessingParameterSetType, self).__init__(processingSoftwareRefList, postAcquisitionSolventSuppressionMethod, dataTransformationMethod, calibrationCompound, extensiontype_, )
supermod.SpectralProcessingParameterSetType.subclass = SpectralProcessingParameterSetType
# end class SpectralProcessingParameterSetType
class SpectralProjectionParameterSetType(supermod.SpectralProjectionParameterSetType):
    def __init__(self, projectionAxis=None, projectionMethod=None):
        super(SpectralProjectionParameterSetType, self).__init__(projectionAxis, projectionMethod, )
supermod.SpectralProjectionParameterSetType.subclass = SpectralProjectionParameterSetType
# end class SpectralProjectionParameterSetType
# Auto-generated (generateDS.py-style) nmrML subclass stubs: each class
# forwards its constructor arguments to its ``supermod`` base and registers
# itself through the ``subclass`` hook used by the generated parser.
class SpectralProcessingParameterSet2DType(supermod.SpectralProcessingParameterSet2DType):
    def __init__(self, processingSoftwareRefList=None, postAcquisitionSolventSuppressionMethod=None, dataTransformationMethod=None, calibrationCompound=None, directDimensionParameterSet=None, higherDimensionParameterSet=None):
        super(SpectralProcessingParameterSet2DType, self).__init__(processingSoftwareRefList, postAcquisitionSolventSuppressionMethod, dataTransformationMethod, calibrationCompound, directDimensionParameterSet, higherDimensionParameterSet, )
supermod.SpectralProcessingParameterSet2DType.subclass = SpectralProcessingParameterSet2DType
# end class SpectralProcessingParameterSet2DType
class FirstDimensionProcessingParameterSetType(supermod.FirstDimensionProcessingParameterSetType):
    def __init__(self, noOfDataPoints=None, zeroOrderPhaseCorrection=None, firstOrderPhaseCorrection=None, calibrationReferenceShift=None, spectralDenoisingMethod=None, windowFunction=None, baselineCorrectionMethod=None, parameterFileRef=None, extensiontype_=None):
        super(FirstDimensionProcessingParameterSetType, self).__init__(noOfDataPoints, zeroOrderPhaseCorrection, firstOrderPhaseCorrection, calibrationReferenceShift, spectralDenoisingMethod, windowFunction, baselineCorrectionMethod, parameterFileRef, extensiontype_, )
supermod.FirstDimensionProcessingParameterSetType.subclass = FirstDimensionProcessingParameterSetType
# end class FirstDimensionProcessingParameterSetType
class AxisWithUnitType(supermod.AxisWithUnitType):
    def __init__(self, endValue=None, unitName=None, unitCvRef=None, startValue=None, unitAccession=None):
        super(AxisWithUnitType, self).__init__(endValue, unitName, unitCvRef, startValue, unitAccession, )
supermod.AxisWithUnitType.subclass = AxisWithUnitType
# end class AxisWithUnitType
class HigherDimensionProcessingParameterSetType(supermod.HigherDimensionProcessingParameterSetType):
    def __init__(self, noOfDataPoints=None, zeroOrderPhaseCorrection=None, firstOrderPhaseCorrection=None, calibrationReferenceShift=None, spectralDenoisingMethod=None, windowFunction=None, baselineCorrectionMethod=None, parameterFileRef=None):
        super(HigherDimensionProcessingParameterSetType, self).__init__(noOfDataPoints, zeroOrderPhaseCorrection, firstOrderPhaseCorrection, calibrationReferenceShift, spectralDenoisingMethod, windowFunction, baselineCorrectionMethod, parameterFileRef, )
supermod.HigherDimensionProcessingParameterSetType.subclass = HigherDimensionProcessingParameterSetType
# end class HigherDimensionProcessingParameterSetType
# NOTE(review): "Paramater" is misspelled below, but the name comes from the
# schema/generator and is referenced elsewhere, so it must not be "fixed" here.
class Projected3DProcessingParamaterSetType(supermod.Projected3DProcessingParamaterSetType):
    def __init__(self, positiveProjectionMethod=None, projectionAngle=None):
        super(Projected3DProcessingParamaterSetType, self).__init__(positiveProjectionMethod, projectionAngle, )
supermod.Projected3DProcessingParamaterSetType.subclass = Projected3DProcessingParamaterSetType
# end class Projected3DProcessingParamaterSetType
class fieldFrequencyLockType(supermod.fieldFrequencyLockType):
    def __init__(self, fieldFrequencyLockName=None):
        super(fieldFrequencyLockType, self).__init__(fieldFrequencyLockName, )
supermod.fieldFrequencyLockType.subclass = fieldFrequencyLockType
# end class fieldFrequencyLockType
class concentrationStandardType(supermod.concentrationStandardType):
    def __init__(self, type_=None, concentrationInSample=None, name=None):
        super(concentrationStandardType, self).__init__(type_, concentrationInSample, name, )
supermod.concentrationStandardType.subclass = concentrationStandardType
# end class concentrationStandardType
class hadamardParameterSetType(supermod.hadamardParameterSetType):
    def __init__(self, hadamardFrequency=None):
        super(hadamardParameterSetType, self).__init__(hadamardFrequency, )
supermod.hadamardParameterSetType.subclass = hadamardParameterSetType
# end class hadamardParameterSetType
class pulseSequenceFileRefListType(supermod.pulseSequenceFileRefListType):
    def __init__(self, pulseSequenceFileRef=None):
        super(pulseSequenceFileRefListType, self).__init__(pulseSequenceFileRef, )
supermod.pulseSequenceFileRefListType.subclass = pulseSequenceFileRefListType
# end class pulseSequenceFileRefListType
class processingParameterSetType(supermod.processingParameterSetType):
    def __init__(self, postAcquisitionSolventSuppressionMethod=None, calibrationCompound=None, dataTransformationMethod=None):
        super(processingParameterSetType, self).__init__(postAcquisitionSolventSuppressionMethod, calibrationCompound, dataTransformationMethod, )
supermod.processingParameterSetType.subclass = processingParameterSetType
# end class processingParameterSetType
class windowFunctionType(supermod.windowFunctionType):
    def __init__(self, windowFunctionMethod=None, windowFunctionParameter=None):
        super(windowFunctionType, self).__init__(windowFunctionMethod, windowFunctionParameter, )
supermod.windowFunctionType.subclass = windowFunctionType
# end class windowFunctionType
class ContactType(supermod.ContactType):
    def __init__(self, referenceableParamGroupRef=None, cvParam=None, cvParamWithUnit=None, cvTerm=None, userParam=None, url=None, id=None, address=None, organization=None, fullname=None, email=None):
        super(ContactType, self).__init__(referenceableParamGroupRef, cvParam, cvParamWithUnit, cvTerm, userParam, url, id, address, organization, fullname, email, )
supermod.ContactType.subclass = ContactType
# end class ContactType
def get_root_tag(node):
    """Resolve an etree node to its ``(tag, generated class)`` pair.

    The tag name is extracted from ``node.tag`` with the generated
    ``Tag_pattern_`` regex (last group strips any XML namespace prefix).
    The matching generated class is looked up first in
    ``supermod.GDSClassesMapping`` and then as a module attribute of
    ``supermod``; ``(tag, None)`` is returned when neither lookup succeeds.
    """
    tag = supermod.Tag_pattern_.match(node.tag).groups()[-1]
    # Mapping takes precedence; fall back to a same-named module attribute.
    rootClass = supermod.GDSClassesMapping.get(tag)
    if rootClass is None and hasattr(supermod, tag):
        rootClass = getattr(supermod, tag)
    return tag, rootClass
def parse(inFilename, silence=False):
    """Parse an nmrML XML file and return the built root object.

    Unless ``silence`` is truthy, the document is re-exported to stdout as
    pretty-printed XML (with an XML declaration).
    """
    tree = parsexml_(inFilename)
    root_node = tree.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_tag = 'nmrML'
        root_class = supermod.nmrML
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    tree = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        root_obj.export(
            sys.stdout, 0, name_=root_tag,
            namespacedef_='xmlns:dx="http://nmrml.org/schema"',
            pretty_print=True)
    return root_obj
def parseEtree(inFilename, silence=False):
    """Parse an nmrML file and additionally build an lxml etree for it.

    Returns ``(rootObj, rootElement, mapping, reverse_mapping)``, where the
    two mappings translate between generated objects and etree elements.
    Unless ``silence`` is truthy, the serialized XML is written to stdout.
    """
    tree = parsexml_(inFilename)
    root_node = tree.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_tag = 'nmrML'
        root_class = supermod.nmrML
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    tree = None
    mapping = {}
    root_element = root_obj.to_etree(None, name_=root_tag, mapping_=mapping)
    reverse_mapping = root_obj.gds_reverse_node_mapping(mapping)
    if not silence:
        serialized = etree_.tostring(
            root_element, pretty_print=True,
            xml_declaration=True, encoding="utf-8")
        sys.stdout.write(serialized)
        sys.stdout.write('\n')
    return root_obj, root_element, mapping, reverse_mapping
def parseString(inString, silence=False):
    """Parse nmrML XML from an in-memory string and return the root object.

    Unless ``silence`` is truthy, the document is re-exported to stdout.
    NOTE: uses the Python 2 ``StringIO`` module, consistent with the rest of
    this (Python 2) file.
    """
    from StringIO import StringIO
    tree = parsexml_(StringIO(inString))
    root_node = tree.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_tag = 'nmrML'
        root_class = supermod.nmrML
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    tree = None
    if not silence:
        sys.stdout.write('<?xml version="1.0" ?>\n')
        root_obj.export(
            sys.stdout, 0, name_=root_tag,
            namespacedef_='xmlns:dx="http://nmrml.org/schema"')
    return root_obj
def parseLiteral(inFilename, silence=False):
    """Parse an nmrML file and emit Python literal code that rebuilds it.

    Unless ``silence`` is truthy, writes a small ``nmrML_lib``-based script
    to stdout via ``exportLiteral``. Returns the built root object.
    """
    tree = parsexml_(inFilename)
    root_node = tree.getroot()
    _tag, root_class = get_root_tag(root_node)
    if root_class is None:
        root_class = supermod.nmrML
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so Python can reclaim its memory.
    tree = None
    if not silence:
        sys.stdout.write('#from nmrML_lib import *\n\n')
        sys.stdout.write('import nmrML_lib as model_\n\n')
        sys.stdout.write('rootObj = model_.nmrML(\n')
        root_obj.exportLiteral(sys.stdout, 0, name_="nmrML")
        sys.stdout.write(')\n')
    return root_obj
USAGE_TEXT = """
Usage: python ???.py <infilename>
"""
def usage():
    """Print the usage message and exit with status 1 (SystemExit)."""
    # print() with a single argument behaves identically under Python 2 and
    # Python 3, unlike the bare ``print`` statement it replaces.
    print(USAGE_TEXT)
    sys.exit(1)
def main():
    """Command-line entry point: parse the single input file given in argv."""
    argv = sys.argv[1:]
    if len(argv) != 1:
        usage()
    parse(argv[0])
# Run the command-line interface when executed as a script.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
| 50.224359 | 440 | 0.802074 |
7ecd589e27698d2148cf6f79674dfc710bf8077d | 17,481 | py | Python | bt/backtest.py | mirca/bt | 0363e6fa100d9392dd18e32e3d8379d5e83c28fa | [
"MIT"
] | 1 | 2021-05-07T19:40:16.000Z | 2021-05-07T19:40:16.000Z | bt/backtest.py | mirca/bt | 0363e6fa100d9392dd18e32e3d8379d5e83c28fa | [
"MIT"
] | null | null | null | bt/backtest.py | mirca/bt | 0363e6fa100d9392dd18e32e3d8379d5e83c28fa | [
"MIT"
] | 3 | 2021-05-07T19:40:22.000Z | 2022-01-19T19:37:15.000Z | """
Contains backtesting logic and objects.
"""
from __future__ import division
from copy import deepcopy
import bt
import ffn
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import pyprind
def run(*backtests):
    """
    Runs a series of backtests and returns a Result
    object containing the results of the backtests.
    Args:
        * backtest (*list): List of backtests.
    Returns:
        Result
    """
    # Execute every backtest in turn, then bundle them into one Result.
    for backtest in backtests:
        backtest.run()
    return Result(*backtests)
def benchmark_random(backtest, random_strategy, nsim=100):
    """
    Given a backtest and a random strategy, compare backtest to
    a number of random portfolios.
    The idea here is to benchmark your strategy vs a bunch of
    random strategies that have a similar structure but execute
    some part of the logic randomly - basically you are trying to
    determine if your strategy has any merit - does it beat
    randomly picking weight? Or randomly picking the selected
    securities?
    Args:
        * backtest (Backtest): A backtest you want to benchmark
        * random_strategy (Strategy): A strategy you want to benchmark
            against. The strategy should have a random component to
            emulate skilless behavior.
        * nsim (int): number of random strategies to create.
    Returns:
        RandomBenchmarkResult
    """
    # Give the benchmarked backtest a name so it can be told apart later.
    if backtest.name is None:
        backtest.name = 'original'
    # Make sure the benchmarked backtest has actually been executed.
    if not backtest.has_run:
        backtest.run()
    all_bts = [backtest]
    universe = backtest.data
    # Run `nsim` copies of the random strategy over the same universe.
    for sim in range(nsim):
        random_strategy.name = 'random_%s' % sim
        random_bt = bt.Backtest(random_strategy, universe)
        random_bt.run()
        all_bts.append(random_bt)
    return RandomBenchmarkResult(*all_bts)
class Backtest(object):
    """
    A Backtest combines a Strategy with data to
    produce a Result.
    A backtest is basically testing a strategy over a data set.
    Note:
        The Strategy will be deepcopied so it is re-usable in other
        backtests. To access the backtested strategy, simply access
        the strategy attribute.
    Args:
        * strategy (Strategy, Node, StrategyBase): The Strategy to be tested.
        * data (DataFrame): DataFrame containing data used in backtest. This
            will be the Strategy's "universe".
        * name (str): Backtest name - defaults to strategy name
        * initial_capital (float): Initial amount of capital passed to
            Strategy.
        * commissions (fn(quantity, price)): The commission function
            to be used. Ex: commissions=lambda q, p: max(1, abs(q) * 0.01)
        * progress_bar (Bool): Display progress bar while running backtest
    Attributes:
        * strategy (Strategy): The Backtest's Strategy. This will be a deepcopy
            of the Strategy that was passed in.
        * data (DataFrame): Data passed in
        * dates (DateTimeIndex): Data's index
        * initial_capital (float): Initial capital
        * name (str): Backtest name
        * stats (ffn.PerformanceStats): Performance statistics
        * has_run (bool): Run flag
        * weights (DataFrame): Weights of each component over time
        * security_weights (DataFrame): Weights of each security as a
            percentage of the whole portfolio over time
    """
    def __init__(self, strategy, data,
                 name=None,
                 initial_capital=1000000.0,
                 commissions=None,
                 integer_positions=True,
                 progress_bar=True):
        # Duplicate column names would make security lookups ambiguous.
        if data.columns.duplicated().any():
            cols = data.columns[data.columns.duplicated().tolist()].tolist()
            raise Exception(
                'data provided has some duplicate column names: \n%s \n'
                'Please remove duplicates!' % cols)
        # we want to reuse strategy logic - copy it!
        # basically strategy is a template
        self.strategy = deepcopy(strategy)
        self.strategy.use_integer_positions(integer_positions)
        # add virtual row at t0-1day with NaNs
        # this is so that any trading action at t0 can be evaluated relative to
        # a clean starting point. This is related to #83. Basically, if you
        # have a big trade / commision on day 0, then the Strategy.prices will
        # be adjusted at 0, and hide the 'total' return. The series should
        # start at 100, but may start at 90, for example. Here, we add a
        # starting point at t0-1day, and this is the reference starting point
        data = pd.concat([
            pd.DataFrame(np.nan, columns=data.columns,
                         index=[data.index[0] - pd.DateOffset(days=1)]),
            data])
        self.data = data
        self.dates = data.index
        self.initial_capital = initial_capital
        self.name = name if name is not None else strategy.name
        self.progress_bar = progress_bar
        if commissions is not None:
            self.strategy.set_commissions(commissions)
        # stats is replaced by an ffn.PerformanceStats object after run()
        self.stats = {}
        self._original_prices = None
        # lazily-computed caches for the weights / security_weights properties
        self._weights = None
        self._sweights = None
        self.has_run = False
    def run(self):
        """
        Runs the Backtest.
        """
        # run() is idempotent: a second call is a no-op
        if self.has_run:
            return
        # set run flag to avoid running same test more than once
        self.has_run = True
        # setup strategy
        self.strategy.setup(self.data)
        # adjust strategy with initial capital
        self.strategy.adjust(self.initial_capital)
        # loop through dates
        # init progress bar
        if self.progress_bar:
            bar = pyprind.ProgBar(len(self.dates), title=self.name, stream=1)
        # since there is a dummy row at time 0, start backtest at date 1.
        # we must still update for t0
        self.strategy.update(self.dates[0])
        # and for the backtest loop, start at date 1
        for dt in self.dates[1:]:
            # update progress bar
            if self.progress_bar:
                bar.update()
            # update strategy
            self.strategy.update(dt)
            if not self.strategy.bankrupt:
                self.strategy.run()
                # need update after to save weights, values and such
                self.strategy.update(dt)
            else:
                # NOTE(review): once the strategy is bankrupt the loop keeps
                # iterating (and calls bar.stop() every remaining date) rather
                # than breaking - presumably intentional to keep the date
                # index aligned; confirm before changing.
                if self.progress_bar:
                    bar.stop()
        self.stats = self.strategy.prices.calc_perf_stats()
        self._original_prices = self.strategy.prices
    @property
    def weights(self):
        """
        DataFrame of each component's weight over time
        """
        # cached after first computation
        if self._weights is not None:
            return self._weights
        else:
            vals = pd.DataFrame({x.full_name: x.values for x in
                                 self.strategy.members})
            vals = vals.div(self.strategy.values, axis=0)
            self._weights = vals
            return vals
    @property
    def positions(self):
        """
        DataFrame of each component's position over time
        """
        return self.strategy.positions
    @property
    def security_weights(self):
        """
        DataFrame containing weights of each security as a
        percentage of the whole portfolio over time
        """
        # cached after first computation
        if self._sweights is not None:
            return self._sweights
        else:
            # get values for all securities in tree and divide by root values
            # for security weights
            vals = {}
            for m in self.strategy.members:
                if isinstance(m, bt.core.SecurityBase):
                    # the same security can appear in several sub-strategies;
                    # aggregate its values across all occurrences
                    if m.name in vals:
                        vals[m.name] += m.values
                    else:
                        vals[m.name] = m.values
            vals = pd.DataFrame(vals)
            # divide by root strategy values
            vals = vals.div(self.strategy.values, axis=0)
            # save for future use
            self._sweights = vals
            return vals
    @property
    def herfindahl_index(self):
        """
        Calculate Herfindahl-Hirschman Index (HHI) for the portfolio.
        For each given day, HHI is defined as a sum of squared weights of
        securities in a portfolio; and varies from 1/N to 1.
        Value of 1/N would correspond to an equally weighted portfolio and
        value of 1 corresponds to an extreme case when all amount is invested
        in a single asset.
        1 / HHI is often considered as "an effective number of assets" in
        a given portfolio
        """
        w = self.security_weights
        return (w ** 2).sum(axis=1)
    @property
    def turnover(self):
        """
        Calculate the turnover for the backtest.
        This function will calculate the turnover for the strategy. Turnover is
        defined as the lesser of positive or negative outlays divided by NAV
        """
        s = self.strategy
        outlays = s.outlays
        # separate positive and negative outlays, sum them up, and keep min
        outlaysp = outlays[outlays >= 0].fillna(value=0).sum(axis=1)
        outlaysn = np.abs(outlays[outlays < 0].fillna(value=0).sum(axis=1))
        # merge and keep minimum
        min_outlay = pd.DataFrame(
            {'pos': outlaysp, 'neg': outlaysn}).min(axis=1)
        # turnover is defined as min outlay / nav
        mrg = pd.DataFrame({'outlay': min_outlay, 'nav': s.values})
        return mrg['outlay'] / mrg['nav']
class Result(ffn.GroupStats):
    """
    Based on ffn's GroupStats with a few extra helper methods.
    Args:
        * backtests (list): List of backtests
    Attributes:
        * backtest_list (list): List of backtests in the same order as provided
        * backtests (dict): Dict of backtests by name
    """
    def __init__(self, *backtests):
        # GroupStats is fed one single-column price DataFrame per backtest
        tmp = [pd.DataFrame({x.name: x.strategy.prices}) for x in backtests]
        super(Result, self).__init__(*tmp)
        self.backtest_list = backtests
        self.backtests = {x.name: x for x in backtests}
    def display_monthly_returns(self, backtest=0):
        """
        Display monthly returns for a specific backtest.
        Args:
            * backtest (str, int): Backtest. Can be either a index (int) or the
                name (str)
        """
        key = self._get_backtest(backtest)
        self[key].display_monthly_returns()
    def get_weights(self, backtest=0, filter=None):
        """
        :param backtest: (str, int) Backtest can be either a index (int) or the
            name (str)
        :param filter: (list, str) filter columns for specific columns. Filter
            is simply passed as is to DataFrame[filter], so use something
            that makes sense with a DataFrame.
        :return: (pd.DataFrame) DataFrame of weights
        """
        # NOTE(review): ``filter`` shadows the builtin; kept for backward
        # compatibility since callers may pass it by keyword.
        key = self._get_backtest(backtest)
        if filter is not None:
            data = self.backtests[key].weights[filter]
        else:
            data = self.backtests[key].weights
        return data
    def plot_weights(self, backtest=0, filter=None,
                     figsize=(15, 5), **kwds):
        """
        Plots the weights of a given backtest over time.
        Args:
            * backtest (str, int): Backtest can be either a index (int) or the
                name (str)
            * filter (list, str): filter columns for specific columns. Filter
                is simply passed as is to DataFrame[filter], so use something
                that makes sense with a DataFrame.
            * figsize ((width, height)): figure size
            * kwds (dict): Keywords passed to plot
        """
        data = self.get_weights(backtest, filter)
        data.plot(figsize=figsize, **kwds)
    def get_security_weights(self, backtest=0, filter=None):
        """
        :param backtest: (str, int) Backtest can be either a index (int) or the
            name (str)
        :param filter: (list, str) filter columns for specific columns. Filter
            is simply passed as is to DataFrame[filter], so use something
            that makes sense with a DataFrame.
        :return: (pd.DataFrame) DataFrame of security weights
        """
        key = self._get_backtest(backtest)
        if filter is not None:
            data = self.backtests[key].security_weights[filter]
        else:
            data = self.backtests[key].security_weights
        return data
    def plot_security_weights(self, backtest=0, filter=None,
                              figsize=(15, 5), **kwds):
        """
        Plots the security weights of a given backtest over time.
        Args:
            * backtest (str, int): Backtest. Can be either a index (int) or the
                name (str)
            * filter (list, str): filter columns for specific columns. Filter
                is simply passed as is to DataFrame[filter], so use something
                that makes sense with a DataFrame.
            * figsize ((width, height)): figure size
            * kwds (dict): Keywords passed to plot
        """
        data = self.get_security_weights(backtest, filter)
        data.plot(figsize=figsize, **kwds)
    def plot_histogram(self, backtest=0, **kwds):
        """
        Plots the return histogram of a given backtest over time.
        Args:
            * backtest (str, int): Backtest. Can be either a index (int) or the
                name (str)
            * kwds (dict): Keywords passed to plot_histogram
        """
        key = self._get_backtest(backtest)
        self[key].plot_histogram(**kwds)
    def _get_backtest(self, backtest):
        """Translate an int position or str name into a backtest name (str)."""
        # based on input order
        # NOTE(review): ``isinstance(backtest, int)`` would be more idiomatic,
        # but would also accept bools - the exact type check is kept as-is.
        if type(backtest) == int:
            return self.backtest_list[backtest].name
        # default case assume ok
        return backtest
    def get_transactions(self, strategy_name=None):
        """
        Helper function that returns the transactions in the following format:
        dt, security | quantity, price
        The result is a MultiIndex DataFrame.
        Args:
            * strategy_name (str): If none, it will take the first backtest's
                strategy (self.backtest_list[0].name)
        """
        if strategy_name is None:
            strategy_name = self.backtest_list[0].name
        # extract strategy given strategy_name
        s = self.backtests[strategy_name].strategy
        # get prices for each security in the strategy & create unstacked
        # series
        prc = pd.DataFrame({x.name: x.prices for x in s.securities}).unstack()
        # get security positions
        positions = pd.DataFrame({x.name: x.positions for x in s.securities})
        # trades are diff
        trades = positions.diff()
        # must adjust first row
        trades.iloc[0] = positions.iloc[0]
        # now convert to unstacked series, dropping nans along the way
        trades = trades[trades != 0].unstack().dropna()
        res = pd.DataFrame({'price': prc, 'quantity': trades}).dropna(
            subset=['quantity'])
        # set names
        res.index.names = ['Security', 'Date']
        # swap levels so that we have (date, security) as index and sort
        res = res.swaplevel().sort_index()
        return res
class RandomBenchmarkResult(Result):
    """
    RandomBenchmarkResult expands on Result to add methods specific
    to random strategy benchmarking.

    Args:
        * backtests (list): List of backtests. The first one is the
            benchmarked strategy; the rest are the random strategies.

    Attributes:
        * base_name (str): Name of backtest being benchmarked
        * r_stats (Result): Stats for random strategies
        * b_stats (Result): Stats for benchmarked strategy
    """

    def __init__(self, *backtests):
        super(RandomBenchmarkResult, self).__init__(*backtests)
        self.base_name = backtests[0].name
        # separate the benchmarked strategy's stats from the random ones
        self.r_stats = self.stats.drop(self.base_name, axis=1)
        self.b_stats = self.stats[self.base_name]

    def plot_histogram(self, statistic='monthly_sharpe',
                       figsize=(15, 5), title=None,
                       bins=20, **kwargs):
        """
        Plots the distribution of a given statistic. The histogram
        represents the distribution of the random strategies' statistic
        and the vertical line is the value of the benchmarked strategy's
        statistic.

        This helps you determine if your strategy is statistically 'better'
        than the random versions.

        Args:
            * statistic (str): Statistic - any numeric statistic in
                Result is valid.
            * figsize ((x, y)): Figure size
            * title (str): Chart title
            * bins (int): Number of bins
            * kwargs (dict): Passed to pandas hist function.

        Raises:
            ValueError: if ``statistic`` is not present in ``r_stats``.
        """
        if statistic not in self.r_stats.index:
            # note the space between the two string fragments; the original
            # message read "...statisticsare the statistics..."
            raise ValueError("Invalid statistic. Valid statistics "
                             "are the statistics in self.stats")

        if title is None:
            title = '%s histogram' % statistic

        plt.figure(figsize=figsize)
        # .ix was deprecated and removed in pandas 1.0; .loc is the
        # label-based equivalent
        ser = self.r_stats.loc[statistic]
        # matplotlib removed the `normed` kwarg (3.1); `density` replaces it
        ax = ser.hist(bins=bins, figsize=figsize, density=True, **kwargs)
        ax.set_title(title)
        plt.axvline(self.b_stats[statistic], linewidth=4)
        ser.plot(kind='kde')
| 32.674766 | 79 | 0.59968 |
cc98866363e8e35e981b96bac7669d44081138cc | 571 | py | Python | app/models/book.py | hxj951103/fisher | 25208721970e7fd2b80743fce5648683578952ca | [
"MIT"
] | null | null | null | app/models/book.py | hxj951103/fisher | 25208721970e7fd2b80743fce5648683578952ca | [
"MIT"
] | null | null | null | app/models/book.py | hxj951103/fisher | 25208721970e7fd2b80743fce5648683578952ca | [
"MIT"
] | null | null | null | from sqlalchemy import Column, String, Integer
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Book(db.Model):
    """Flask-SQLAlchemy model describing a single book record."""
    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    title = Column(String(50), nullable=False)
    # Default author is a Chinese string, presumably meaning an anonymous /
    # unknown author (possibly intended as 佚名) -- confirm with the data owner.
    author = Column(String(30), default="伏名")
    binding = Column(String(20))
    publisher = Column(String(50))
    # Price and pubdate are stored as strings, not numeric/date columns.
    price = Column(String(20))
    pages = Column(Integer)
    pubdate = Column(String(20))
    # Natural unique key for lookups by ISBN.
    isbn = Column(String(15), nullable=False, unique=True)
    summary = Column(String(1000))
    image = Column(String(50))
| 30.052632 | 62 | 0.69352 |
e0d054c1db72205e5113ae85fd80b75a499b60aa | 533 | py | Python | xanthosvis/tests/test_model.py | JGCRI/xanthosvis | 1a8c3396881f099272c298cfacd0ae95b1cc2195 | [
"BSD-2-Clause"
] | null | null | null | xanthosvis/tests/test_model.py | JGCRI/xanthosvis | 1a8c3396881f099272c298cfacd0ae95b1cc2195 | [
"BSD-2-Clause"
] | 1 | 2020-12-03T15:53:35.000Z | 2020-12-03T15:53:44.000Z | xanthosvis/tests/test_model.py | JGCRI/xanthosvis | 1a8c3396881f099272c298cfacd0ae95b1cc2195 | [
"BSD-2-Clause"
] | null | null | null | """Tests for the main functionality.
:author: Chris R. Vernon
:email: chris.vernon@pnnl.gov
License: BSD 2-Clause, see LICENSE and DISCLAIMER files
"""
import unittest
import xanthosvis.main as tester
class TestModel(unittest.TestCase):
    """Unit tests for the model outputs exposed by ``xanthosvis.main``."""
    def test_model_outputs(self):
        """Ensure model outputs are what is expected."""
        self.assertEqual(tester.dump_this(), 0)
if __name__ == '__main__':
    # Allow running this test module directly (python test_model.py).
    unittest.main()
| 20.5 | 92 | 0.701689 |
635f78da043974a74d583768256154d7fc648719 | 390 | py | Python | bitmovin_api_sdk/models/audio_volume_format.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 11 | 2019-07-03T10:41:16.000Z | 2022-02-25T21:48:06.000Z | bitmovin_api_sdk/models/audio_volume_format.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 8 | 2019-11-23T00:01:25.000Z | 2021-04-29T12:30:31.000Z | bitmovin_api_sdk/models/audio_volume_format.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 13 | 2020-01-02T14:58:18.000Z | 2022-03-26T12:10:30.000Z | # coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
class AudioVolumeFormat(Enum):
    """Allowed audio sample formats for the Bitmovin audio-volume filter.

    Auto-generated from the Bitmovin API specification; the string values are
    sent to the API verbatim. The names appear to follow FFmpeg sample-format
    conventions (integer widths, FLT/DBL floats, trailing ``P`` for planar)
    -- assumption based on the member names; confirm against the API docs.
    """
    U8 = "U8"
    S16 = "S16"
    S32 = "S32"
    U8P = "U8P"
    S16P = "S16P"
    S32P = "S32P"
    S64 = "S64"
    S64P = "S64P"
    FLT = "FLT"
    FLTP = "FLTP"
    NONE = "NONE"
    DBL = "DBL"
    DBLP = "DBLP"
| 17.727273 | 59 | 0.589744 |
ef524cd6c1a54a78b8ab3e73b6bad837db20aba6 | 1,220 | py | Python | adv/dragonyule_cleo.py | pfleg/dl | 0566bd5cac32feea7dfd082b199a44b05e8e1e04 | [
"Apache-2.0"
] | null | null | null | adv/dragonyule_cleo.py | pfleg/dl | 0566bd5cac32feea7dfd082b199a44b05e8e1e04 | [
"Apache-2.0"
] | null | null | null | adv/dragonyule_cleo.py | pfleg/dl | 0566bd5cac32feea7dfd082b199a44b05e8e1e04 | [
"Apache-2.0"
] | null | null | null | from core.advbase import *
from slot.d import *
def module():
    # Hook presumably used by the simulation framework to look up the
    # adventurer class defined in this file -- confirm against core.simulate.
    return Dragonyule_Cleo
class Dragonyule_Cleo(Adv):
    # Ability tuples consumed by the Adv framework; exact semantics are
    # framework-defined ('hp70' looks like an HP>=70% condition -- confirm).
    a1 = ('a',0.13,'hp70')
    a3 = ('ecombo',30)
    conf = {}
    conf['slots.d'] = Gaibhne_and_Creidhne()
    # Action priority list (acl) interpreted by the simulator; keep verbatim.
    conf['acl'] = """
        `dragon.act('c3 s end')
        `s1
        `s4
        `s3, cancel
        `s2, cancel
        `fs, x=5
    """
    coab = ['Blade', 'Xander', 'Summer_Estelle']
    share = ['Gala_Elisanne', 'Eugene']
    def prerun(self):
        # Buffs become team-wide only when the 'buff all team' condition is on.
        self.buff_class = Teambuff if self.condition('buff all team') else Selfbuff
        self.phase['s1'] = 0
    @staticmethod
    def prerun_skillshare(adv, dst):
        # Same setup as prerun, applied to the adventurer borrowing the skill.
        adv.buff_class = Teambuff if adv.condition('buff all team') else Selfbuff
        adv.phase[dst] = 0
    def s1_proc(self, e):
        # Each s1 use grants 1 energy and advances a 3-step phase cycle that
        # layers additional buffs on the 2nd and 3rd uses.
        self.energy.add(1, team=self.condition('buff all team'))
        self.phase[e.name] += 1
        if self.phase[e.name] > 1:
            self.buff_class(e.name,0.1,10).on()
        if self.phase[e.name] > 2:
            self.buff_class(f'{e.name}_crit',0.08,10,'crit','chance').on()
        self.phase[e.name] %= 3
if __name__ == '__main__':
    from core.simulate import test_with_argv
    # Run a standalone simulation of this adventurer when executed directly.
    test_with_argv(None, *sys.argv)
| 27.111111 | 83 | 0.57541 |
48acdc5cefbedacbedcfb15402e9476c7b25226e | 431 | py | Python | examples/test_varargs.py | Naereen/clime | 21507e263870690b42595a0dc2abee7e882079bc | [
"MIT"
] | 61 | 2015-01-06T13:10:11.000Z | 2021-01-21T07:07:23.000Z | examples/test_varargs.py | Naereen/clime | 21507e263870690b42595a0dc2abee7e882079bc | [
"MIT"
] | 11 | 2015-04-08T12:13:20.000Z | 2021-08-08T08:44:59.000Z | examples/test_varargs.py | Naereen/clime | 21507e263870690b42595a0dc2abee7e882079bc | [
"MIT"
] | 5 | 2015-12-09T16:06:59.000Z | 2018-11-10T00:42:06.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def repeat(message, times=2, count=False, *args, **kargs):
    '''It repeats the message.
    options:
        -m=<str>, --message=<str>  The description of this option.
        -t=<int>, --times=<int>
        -c, --count
    '''
    # NOTE: the docstring above is parsed by clime to build the CLI options;
    # keep its wording in sync with the parameters.
    # Python 3 print function -- the original used the Python 2 print
    # statement, which is a SyntaxError on Python 3.
    print(args, kargs)
    s = message * times
    # With -c/--count, report the length of the repeated string instead.
    return len(s) if count else s
if __name__ == '__main__':
    import clime
    # Expose `repeat` as a command-line program via clime's docstring parsing.
    clime.start(debug=True)
| 22.684211 | 66 | 0.573086 |
7719d8da6675149a68666f7a7b27217f088de7da | 2,407 | py | Python | nimbleclient/v1/api/subnets.py | prachiruparelia-hpe/nimble-python-sdk | a3e99d89e647291caf7936300ae853d21d94d6e5 | [
"Apache-2.0"
] | 10 | 2020-03-10T20:06:36.000Z | 2022-02-16T17:55:21.000Z | nimbleclient/v1/api/subnets.py | prachiruparelia-hpe/nimble-python-sdk | a3e99d89e647291caf7936300ae853d21d94d6e5 | [
"Apache-2.0"
] | 21 | 2020-04-07T16:00:11.000Z | 2021-05-07T16:09:53.000Z | nimbleclient/v1/api/subnets.py | prachiruparelia-hpe/nimble-python-sdk | a3e99d89e647291caf7936300ae853d21d94d6e5 | [
"Apache-2.0"
] | 7 | 2020-03-11T03:45:31.000Z | 2020-09-14T18:06:03.000Z | #
# © Copyright 2020 Hewlett Packard Enterprise Development LP
#
# This file was auto-generated by the Python SDK generator; DO NOT EDIT.
#
from ...resource import Resource, Collection
from ...exceptions import NimOSAPIOperationUnsupported
class Subnet(Resource):
    """Search subnets information. Many networking tasks require that objects such as replication partners are either on the same network or have a route to a secondary network.
    Subnets let you create logical addressing for selective routing.
    # Parameters
    id : Identifier for the initiator group.
    name : Name of subnet configuration.
    network : Subnet network address.
    netmask : Subnet netmask address.
    type : Subnet type. Options include 'mgmt', 'data', and 'mgmt,data'.
    allow_iscsi : Subnet type.
    allow_group : Subnet type.
    discovery_ip : Subnet network address.
    mtu : MTU for specified subnet. Valid MTU's are in the 512-16000 range.
    netzone_type : Specify Network Affinity Zone type for iSCSI enabled subnets. Valid types are Single, Bisect, and EvenOdd for iSCSI subnets.
    vlan_id : VLAN ID for specified subnet. Valid ID's are in the 1-4094 range.
    creation_time : Time when this subnet configuration was created.
    last_modified : Time when this subnet configuration was last modified.
    failover : Failover setting of the subnet.
    failover_enable_time : Failover for this subnet will be enabled again at the time specified by failover_enable_time.
    """
    # Subnets are read-only through this SDK: every mutating operation is
    # rejected locally instead of issuing a REST call. (Auto-generated file;
    # see the "DO NOT EDIT" header above.)
    def create(self, **kwargs):
        # Subnet creation is not exposed by the API for this resource.
        raise NimOSAPIOperationUnsupported("create operation not supported")
    def delete(self, **kwargs):
        # Subnet deletion is not exposed by the API for this resource.
        raise NimOSAPIOperationUnsupported("delete operation not supported")
    def update(self, **kwargs):
        # Subnet modification is not exposed by the API for this resource.
        raise NimOSAPIOperationUnsupported("update operation not supported")
class SubnetList(Collection):
    """Collection wrapper for querying ``subnets`` resources (read-only)."""
    resource = Subnet
    resource_type = "subnets"
    # Like the Subnet resource itself, the collection rejects all mutating
    # operations locally.
    def create(self, **kwargs):
        raise NimOSAPIOperationUnsupported("create operation not supported")
    def delete(self, **kwargs):
        raise NimOSAPIOperationUnsupported("delete operation not supported")
    def update(self, **kwargs):
        raise NimOSAPIOperationUnsupported("update operation not supported")
| 42.982143 | 177 | 0.687993 |
3013cc41bfe35741818a47ec5e0f8726016a26ed | 1,951 | py | Python | allauth/socialaccount/providers/openid/provider.py | manuganji/django-allauth | e7c5fe72c9f35190113572fd0e1c163d6d5e85ef | [
"MIT"
] | 2 | 2016-04-29T10:54:39.000Z | 2021-11-17T23:43:29.000Z | allauth/socialaccount/providers/openid/provider.py | manuganji/django-allauth | e7c5fe72c9f35190113572fd0e1c163d6d5e85ef | [
"MIT"
] | null | null | null | allauth/socialaccount/providers/openid/provider.py | manuganji/django-allauth | e7c5fe72c9f35190113572fd0e1c163d6d5e85ef | [
"MIT"
] | null | null | null | try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import Provider, ProviderAccount
class OpenIDAccount(ProviderAccount):
    """Social account whose uid is an OpenID identity URL."""

    def get_brand(self):
        """
        Return branding (id/name) for the account's OpenID endpoint,
        guessed from the domain of the identity URL.
        """
        ret = super(OpenIDAccount, self).get_brand()
        # lower-case once instead of on every loop iteration
        domain = urlparse(self.account.uid).netloc.lower()
        # FIXME: Instead of hardcoding, derive this from the domains
        # listed in the openid endpoints setting.
        provider_map = {'yahoo': dict(id='yahoo',
                                      name='Yahoo'),
                        'hyves': dict(id='hyves',
                                      name='Hyves'),
                        'google': dict(id='google',
                                       name='Google')}
        for d, p in provider_map.items():
            # substring membership is the idiomatic form of `.find(d) >= 0`
            if d in domain:
                ret = p
                break
        return ret

    def to_str(self):
        # The identity URL is the most readable handle available.
        return self.account.uid
class OpenIDProvider(Provider):
    """Provider implementation for generic OpenID authentication."""
    id = 'openid'
    name = 'OpenID'
    package = 'allauth.socialaccount.providers.openid'
    account_class = OpenIDAccount

    def get_login_url(self, request, **kwargs):
        """Return the OpenID login URL, appending kwargs as a query string."""
        login_url = reverse('openid_login')
        if not kwargs:
            return login_url
        return login_url + '?' + urlencode(kwargs)

    def get_brands(self):
        """Return configured OpenID servers, falling back to built-in defaults."""
        # These defaults are a bit too arbitrary...
        default_servers = [dict(id='yahoo',
                                name='Yahoo',
                                openid_url='http://me.yahoo.com'),
                           dict(id='hyves',
                                name='Hyves',
                                openid_url='http://hyves.nl')]
        return self.get_settings().get('SERVERS', default_servers)
# Register the provider with allauth's global registry at import time so it
# becomes available to the rest of the framework.
providers.registry.register(OpenIDProvider)
| 34.22807 | 74 | 0.549974 |
fa1268b0be73386ef19d7e32fb1c43bf437572d9 | 1,212 | py | Python | vehicle_detection/visualize.py | ssarangi/self_driving_cars | 5494ad50ece98399a3f4b718249b34ce3ede864f | [
"Apache-2.0"
] | null | null | null | vehicle_detection/visualize.py | ssarangi/self_driving_cars | 5494ad50ece98399a3f4b718249b34ce3ede864f | [
"Apache-2.0"
] | null | null | null | vehicle_detection/visualize.py | ssarangi/self_driving_cars | 5494ad50ece98399a3f4b718249b34ce3ede864f | [
"Apache-2.0"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
import cv2
import matplotlib.image as mpimg
from sklearn.externals import joblib
import glob
from vehicle_detection import *
def view_results(model):
    """
    Run the classifier over every image in cropped/ and save a grid of the
    images plus their HOG feature visualisations to images.png.

    :param model: fitted classifier exposing .predict()
    """
    files = list(glob.iglob("cropped/*.png"))
    cols = 4
    # NOTE(review): rows are sized for 5 images per row while only 4 subplot
    # columns exist -- confirm the intended grid layout.
    rows = len(files) // 5 + 1
    # the Figure return value was unused; keep only the axes grid
    _, axis = plt.subplots(rows, cols, figsize=(20, 20))
    spatial_size = (16, 16)
    for i, f in enumerate(files):
        img = mpimg.imread(f)
        row = i // 5
        cropped = cv2.resize(img, spatial_size)
        features, hog_images = extract_feature_from_image(cropped, spatial_size)
        features = create_final_feature_vector(features)
        res = model.predict(features)[0]
        if res == 1:
            print(f)
        axis[row, 0].imshow(img)
        txt = "Car" if res == 1 else "Not Car"
        axis[row, 0].set_title(txt)
        axis[row, 0].axis('off')
        # HOG visualisations occupy the columns after the source image; the
        # original reused and clobbered the outer loop index `i` here.
        for col, hog_image in enumerate(hog_images, start=1):
            axis[row, col].imshow(hog_image, cmap='gray')
            axis[row, col].axis('off')
    plt.savefig('images.png', bbox_inches='tight')
def main():
    # NOTE(review): sklearn.externals.joblib was removed in scikit-learn 0.23;
    # newer environments need plain `import joblib` -- confirm version pinning.
    model = joblib.load('model.pkl')
    view_results(model)
if __name__ == "__main__":
main() | 27.545455 | 80 | 0.605611 |
61f2d3528d92cba2d2fd6ab0f455ba4df9e87361 | 11,425 | py | Python | code/python/Publisher/v1/fds/sdk/Publisher/model/pa_calculation_column.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 6 | 2022-02-07T16:34:18.000Z | 2022-03-30T08:04:57.000Z | code/python/Publisher/v1/fds/sdk/Publisher/model/pa_calculation_column.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | 2 | 2022-02-07T05:25:57.000Z | 2022-03-07T14:18:04.000Z | code/python/Publisher/v1/fds/sdk/Publisher/model/pa_calculation_column.py | factset/enterprise-sdk | 3fd4d1360756c515c9737a0c9a992c7451d7de7e | [
"Apache-2.0"
] | null | null | null | """
Publisher API
Allow clients to fetch Publisher Analytics through APIs. # noqa: E501
The version of the OpenAPI document: 2
Contact: analytics.api.support@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.Publisher.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.Publisher.exceptions import ApiAttributeError
class PACalculationColumn(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model has no enum-restricted and no validated attributes.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'id': (str,),  # noqa: E501
            'statistics': ([str],),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator: this model takes no part in oneOf/anyOf polymorphism.
        return None
    attribute_map = {
        'id': 'id',  # noqa: E501
        'statistics': 'statistics',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """PACalculationColumn - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (str): FactSet-defined or User-defined Column identifier.. [optional]  # noqa: E501
            statistics ([str]): Column Statistic identifier. [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """PACalculationColumn - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (str): FactSet-defined or User-defined Column identifier.. [optional]  # noqa: E501
            statistics ([str]): Column Statistic identifier. [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # unlike _from_openapi_data, direct construction rejects writes
            # to server-populated (read-only) attributes
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
| 43.773946 | 121 | 0.571729 |
5a0b3b25545a353aea5a949734d7e680e83ecb16 | 2,080 | py | Python | system_metrics_collector/test/test_topic_statistics_e2e.py | ros-tooling/system_metrics_collector | 59acb990b3007210c11138ca22d34bb4a892cc05 | [
"Apache-2.0"
] | 19 | 2019-10-30T23:57:46.000Z | 2021-06-11T09:24:20.000Z | system_metrics_collector/test/test_topic_statistics_e2e.py | ros-tooling/system_metrics_collector | 59acb990b3007210c11138ca22d34bb4a892cc05 | [
"Apache-2.0"
] | 192 | 2019-11-04T17:32:06.000Z | 2021-06-09T17:08:20.000Z | system_metrics_collector/test/test_topic_statistics_e2e.py | ros-tooling/system_metrics_collector | 59acb990b3007210c11138ca22d34bb4a892cc05 | [
"Apache-2.0"
] | 9 | 2019-12-10T13:02:39.000Z | 2022-01-11T20:20:16.000Z | # Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from launch import LaunchDescription
import launch_testing
import pytest
# Allow relative import even though we are not in a real module
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from base_metrics_test import include_python_launch_file, TestMetricsBase # noqa: E402, I100
EXPECTED_LIFECYCLE_NODES = {'/topic_stats_collector'}
EXPECTED_REGULAR_NODES = {'/dummy_talker'}
@pytest.mark.launch_test
def generate_test_description():
    """Launch the dummy talker plus the topic statistics collector under test."""
    talker = include_python_launch_file(
        'system_metrics_collector',
        'examples/dummy_talker.launch.py')
    collector = include_python_launch_file(
        'system_metrics_collector',
        'examples/topic_statistics_node.launch.py')
    return LaunchDescription([
        talker,
        collector,
        launch_testing.actions.ReadyToTest(),
    ])
class TestTopicStatisticsLaunch(TestMetricsBase):
    """End-to-end checks that the launched nodes exist, reach the active
    lifecycle state, and publish statistics on /system_metrics. All
    assertions are implemented by the ``TestMetricsBase`` helpers."""
    def test_nodes_exist(self):
        # Both lifecycle and regular nodes must be discoverable.
        return self._test_nodes_exist(EXPECTED_LIFECYCLE_NODES.union(EXPECTED_REGULAR_NODES))
    def test_lifecycle_nodes_exist(self):
        return self._test_lifecycle_nodes_exist(EXPECTED_LIFECYCLE_NODES)
    def test_lifecycle_nodes_active(self):
        return self._test_lifecycle_nodes_active(EXPECTED_LIFECYCLE_NODES)
    def test_topics_exist(self):
        return self._test_topic_exists('/system_metrics')
    def test_statistic_publication(self):
        return self._test_statistic_publication('/system_metrics', EXPECTED_LIFECYCLE_NODES)
| 35.254237 | 93 | 0.764904 |
07719e42dc2098d5f03540cfe4df762e06d1bc26 | 11,591 | py | Python | avglyriccounter/musicbrainz.py | anttikyl/avglyriccounter | 18da6d3fac123a1fedb68f4fca9c4482756a5fdd | [
"MIT"
] | null | null | null | avglyriccounter/musicbrainz.py | anttikyl/avglyriccounter | 18da6d3fac123a1fedb68f4fca9c4482756a5fdd | [
"MIT"
] | null | null | null | avglyriccounter/musicbrainz.py | anttikyl/avglyriccounter | 18da6d3fac123a1fedb68f4fca9c4482756a5fdd | [
"MIT"
] | null | null | null | import requests
import threading
from time import sleep
import logging
log = logging.getLogger("avglyriccounter")
class MusicBrainzClient():
    """
    A class used to communicate with the MusicBrainz API.

    Wraps endpoints into easy to use methods.

    Only allows one request per second to honor MusicBrainz's rate limiting rules.
    """

    def __init__(self):
        self.base_url = "https://musicbrainz.org/ws/2/"
        self.headers = {
            'User-Agent': 'AKWordAverageCounter/1.0 ( anttikyl@protonmail.com )'
        }
        # Guards the 1-request-per-second pacing enforced by __make_request().
        self.lock = threading.Lock()

    def __unlock_calls(self):
        """
        Releases this object's lock after one second.
        """
        sleep(1)
        self.lock.release()
        log.debug("Released MusicBrainzClient lock")

    def __make_request(self, url):
        """
        Sends a rate-limited GET request to the given URL.

        This method takes a lock and defers the release of the lock to one (1) second later.
        The MusicBrainz API has a restriction of 1 call per second per client. By using this method
        for every request, we ensure that we do not get blocked by making calls too frequently.

        :param url      URL to send the GET request to
        :returns        requests response object
        """
        self.lock.acquire()
        log.debug("Acquired MusicBrainzClient lock")

        log.debug("Sending GET request to " + url)
        res = requests.get(url, headers=self.headers)

        # Set off a lock release with a 1s sleep timer in another thread
        unlock_thread = threading.Thread(target=self.__unlock_calls)
        unlock_thread.start()

        return res

    def __get_json(self, url):
        """
        Sends a rate-limited GET request and decodes the json response body.

        Shared by all public endpoint wrappers below, which previously each
        duplicated this request/validate/decode sequence.

        :param url     URL to send the GET request to
        :returns       json response body returned from MusicBrainz
        :raises requests.HTTPError      if the returned HTTP status code was 4xx/5xx
        :raises ValueError     if the response is not decodable json
        """
        res = self.__make_request(url)
        res.raise_for_status()

        try:
            return res.json()
        except ValueError:      # includes simplejson.decoder.JSONDecodeError
            raise ValueError    # raise ValueError to abstract away simplejson

    def search_artist(self, artist_name):
        """ /artist?query=artist:<ARTIST_NAME>
        Searches for an artist by their name

        :param artist_name     name of the artist to search for
        :returns               json response body returned from MusicBrainz
        :raises requests.HTTPError      if the returned HTTP status code was 4xx/5xx
        :raises ValueError     if the response is not decodable json
        """
        url = self.base_url + "artist/" + "?query=artist:" + artist_name + "&fmt=json"
        return self.__get_json(url)

    def get_artist_with_releases(self, artist_mbid):
        """ /artist/<MBID>?inc=releases
        Gets the artist entity including the artist's releases

        :param artist_mbid     MBID of the artist to get
        :returns               json response body returned from MusicBrainz
        :raises requests.HTTPError      if the returned HTTP status code was 4xx/5xx
        :raises ValueError     if the response is not decodable json
        """
        url = self.base_url + "artist/" + artist_mbid + "?inc=releases&fmt=json"
        return self.__get_json(url)

    def get_release_with_recordings(self, release_mbid):
        """ /release/<MBID>
        Gets the release entity including its recordings

        :param release_mbid    MBID of the release to get
        :returns               json response body returned from MusicBrainz
        :raises requests.HTTPError      if the returned HTTP status code was 4xx/5xx
        :raises ValueError     if the response is not decodable json
        """
        url = self.base_url + "release/" + release_mbid + "?inc=recordings&fmt=json"
        return self.__get_json(url)

    def search_artist_release_groups(self, artist_name, **kwargs):
        """ /release-group/?query=artist:<ARTIST>
        Searches for an artist's release groups by artist name

        :param artist_name     name of the artist to search for

        Kwargs:
            exclude_live (bool)            whether to exclude live releases from the search
            exclude_compilation (bool)     whether to exclude compilation releases from the search
            exclude_remix (bool)           whether to exclude remix releases from the search
            exclude_demo (bool)            whether to exclude demo releases from the search

        :returns               json response body returned from MusicBrainz
        :raises requests.HTTPError      if the returned HTTP status code was 4xx/5xx
        :raises ValueError     if the response is not decodable json
        """
        url = self.base_url + "release-group?limit=100&fmt=json&query=artist:" + artist_name + " AND primarytype:\"album\""

        # Each flag appends a Lucene NOT clause to the search query.
        if kwargs.get('exclude_live') is True:
            url += " AND NOT secondarytype:\"Live\""

        if kwargs.get('exclude_compilation') is True:
            url += " AND NOT secondarytype:\"Compilation\""

        if kwargs.get('exclude_remix') is True:
            url += " AND NOT secondarytype:\"Remix\""

        if kwargs.get('exclude_demo') is True:
            url += " AND NOT secondarytype:\"Demo\""

        return self.__get_json(url)
class MusicBrainzHandlerError(Exception):
    """Raised by MusicBrainzHandler when an underlying API call or a lookup
    into its response fails for any reason."""
    pass
class MusicBrainzHandler():
"""
Handler for abstracting MusicBrainz endpoint functionality
"""
    def __init__(self, client):
        # `client` is expected to provide the MusicBrainzClient request
        # methods (search_artist, search_artist_release_groups, ...).
        self.client = client
def get_artist_mbid(self, artist_name):
"""
Gets the artist MBID by making a search in the MusicBrainz API
:param artist_name name of the artist to search for
:returns artist MBID if artist was found, otherwise empty string
:raises MusicBrainzHandlerError on any caught exception
:raises TypeError if the arg is not a string
"""
if type(artist_name) != str:
raise TypeError("Unsupported type for arg 'artist_name'")
try:
artist_json = self.client.search_artist(artist_name)
if len(artist_json['artists']) > 0:
# The artists received are in order of "score", with the highest being the best guess of what the search was after.
# In terms of usability, the user could be given a chance to select one of the results to see if that's what they
# meant, otherwise they will always get the most popular artist's results.
artist_mbid = artist_json['artists'][0]['id']
else:
artist_mbid = ""
except:
raise MusicBrainzHandlerError
log.info("Found artist MBID " + artist_mbid + " for artist " + artist_name)
return artist_mbid
def __is_valid_release_group(self, release_group, artist_mbid):
"""
Validates a release group against an artist's mbid
:param release_group json contents of a single entry of 'release-group'
:param artist_mbid MBID of the artist whose releases to filter by
:returns True if the release group is valid for the given artist, otherwise False
:raises MusicBrainzHandlerError on any caught exception
:raises TypeError if the args are not strings
"""
# If the artist_mbid is not in the artist credits for the release group, it's not valid
is_same_artist_id = False
for artist_credit in release_group['artist-credit']:
if artist_mbid == artist_credit['artist']['id']:
is_same_artist_id = True
if is_same_artist_id == False:
return False
return True
def get_release_ids(self, artist_name, artist_mbid):
"""
Gets the artist's release_ids
Using release-groups we get unique releases by picking the first index release in the
'releases' array of the response.
:param artist_name name of the artist to search for
:param artist_mbid MBID of the artist whose releases to filter by
:returns release_ids for the artist
:raises MusicBrainzHandlerError on any caught exception
:raises TypeError if the args are not strings
"""
if type(artist_name) != str and type(artist_mbid) != str:
raise TypeError("Unsupported type for args 'artist_name' and 'artist_mbid'")
releases = {}
try:
# A single search returns 100 results, but with compilation and live albums removed from the equation, it is very
# likely that all of the releases are included in the results.
# TODO: browse results by using an offset until all results have been checked
artist_json = self.client.search_artist_release_groups(artist_name, exclude_compilation=True, exclude_live=True, exclude_remix=True, exclude_demo=True)
for release_group in artist_json['release-groups']:
if self.__is_valid_release_group(release_group, artist_mbid):
title = release_group['title'].lower()
releases[title] = release_group['releases'][0]['id']
except:
raise MusicBrainzHandlerError
log.info("Found releases " + str(list(releases.keys())) + " for artist_name " + artist_name)
return list(releases.values())
def get_tracks(self, release_id, exclusion_filters):
"""
Gets the tracks found on the given release, in lower case characters
:param release_id ID of the release whose tracks to get
:param exclusion_filters list of strings to use to exclude tracks with at least one of them in the title
:returns list of tracks on the given release
:raises MusicBrainzHandlerError on any caught exception
:raises TypeError if the arg is not a string
"""
if type(release_id) != str:
raise TypeError("Unsupported type for arg 'release_id'")
tracks = []
try:
recordings_json = self.client.get_release_with_recordings(release_id)
tracks_on_release = 0
# Traverse through the 'media' array, which contains for example CDs
for media in recordings_json['media']:
tracks_on_release += len(media['tracks'])
# Add all the track on the media to a list
for track in media['tracks']:
track_title = track['title'].lower()
# Don't add tracks with any of the exclusion filters in their titles
if not any(x in track_title for x in exclusion_filters):
tracks.append(track_title)
except:
raise MusicBrainzHandlerError
excluded_track_count = tracks_on_release - len(tracks)
log.info("Found " + str(len(tracks)) + " tracks: " + str(tracks) + " for release_id " + release_id + " (excluded " + str(excluded_track_count) + " tracks)" )
return tracks
| 38.380795 | 165 | 0.632905 |
1d4f61b7a5a15cc09af03bee105371aba575dc67 | 1,926 | py | Python | 1d_cnn-iot/webhook.py | zibuyu1995/DeepLearning | 686c33984d9fb392fdc19f8f9c4a12f6114dc82f | [
"MIT"
] | null | null | null | 1d_cnn-iot/webhook.py | zibuyu1995/DeepLearning | 686c33984d9fb392fdc19f8f9c4a12f6114dc82f | [
"MIT"
] | 5 | 2021-06-08T22:32:23.000Z | 2022-02-10T02:48:44.000Z | 1d_cnn-iot/webhook.py | zibuyu1995/DeepLearning | 686c33984d9fb392fdc19f8f9c4a12f6114dc82f | [
"MIT"
] | null | null | null | import asyncio
import json
import numpy as np
import uvicorn
from keras.models import load_model
from sklearn.preprocessing import StandardScaler
from starlette.applications import Starlette
from starlette.background import BackgroundTask
from starlette.responses import JSONResponse
app = Starlette()
queue = asyncio.Queue()
model = load_model('./1d-cnn.h5')
@app.on_event('startup')
async def on_startup():
print('startup webhook')
@app.route('/webhook', methods=['POST'])
async def webhook(request):
request_dict = await request.json()
print(request_dict)
payload = request_dict['payload']
data = json.loads(payload)
values = list(data.values())
if queue.qsize() == 60:
items = clear_queue(queue)
task = BackgroundTask(predictive, data=items)
else:
task = None
queue.put_nowait(values)
record = {'status': 'success'}
return JSONResponse(record, status_code=201, background=task)
async def predictive(data):
y_label = {
0: 3,
1: 20,
2: 100
}
y_status = {
3: 'close to total failure',
20: 'reduced efficiency',
100: 'full efficiency'
}
x_test = np.array(data)
scaler = StandardScaler()
x_test = scaler.fit_transform(x_test.reshape(-1, x_test.shape[-1])).reshape(x_test.shape)
x_test = x_test.reshape(-1, x_test.shape[0], x_test.shape[1])
results = model.predict(x_test)
msg = "Current cooler state probability: "
for i, probability in enumerate(results[0]):
status = y_status[y_label[i]]
msg += f"{probability * 100:.2f}% {status}({y_label[i]}), "
print(msg)
def clear_queue(q):
items = []
while not q.empty():
items.append(q.get_nowait())
return items
if __name__ == '__main__':
uvicorn.run(
app,
host='127.0.0.1',
port=8080,
loop='uvloop',
log_level='warning'
)
| 24.692308 | 93 | 0.643302 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.